fork(1) download
  1. import time
  2.  
  3.  
  4. def tokenize_pattern(pattern: str, wildcard: str = '@'):
  5. tokens = (token for token in pattern.split(wildcard) if token)
  6. pattern_position = 0
  7. for token in tokens:
  8. token_shift = pattern.index(token, pattern_position)
  9. yield token_shift, token
  10. pattern_position = pattern_position + len(token) + 1
  11.  
  12.  
  13. def ara_ara(input_string: str, pattern: str, wildcard: str = '@'):
  14. tokens = [*tokenize_pattern(pattern)]
  15. if not tokens:
  16. return 0
  17.  
  18. _, start_token = tokens[0]
  19. skip = 0
  20. probe = input_string.find(start_token)
  21. while probe > -1:
  22. skip = skip + probe
  23. input_string = input_string[probe:]
  24. for token_shift, token in tokens[1:]:
  25. if input_string[token_shift:token_shift + len(token)] != token:
  26. skip += 1
  27. input_string = input_string[1:]
  28. probe = input_string.find(start_token)
  29. break
  30. else:
  31. return skip
  32.  
# Micro-benchmark: time a single ara_ara() call and report the duration.
# time.monotonic() is used so the measurement cannot go backwards if the
# system wall clock is adjusted mid-run.
time_start = time.monotonic()
result = ara_ara(input_string='obosralsya v proshlom primere', pattern='sr@l')
time_end = time.monotonic()

# Convert the elapsed seconds to microseconds for a readable figure.
worktime = 1_000_000 * (time_end - time_start)
print(f"result: {result}, worktime: {worktime:.1f}us")
Success #stdin #stdout 0.02s 9080KB
stdin
Standard input is empty
stdout
result: 3, worktime: 9.7us