# %%
from multiprocessing import Pool, Process
import hashlib
from random import random
from time import time
import concurrent.futures


start_time = time()
workers = 1
def compute(z):
    id = z
    print('Start id =', id)
    for cnt in range(10):
        ar = [hashlib.md5(b'%s' % bytes(i)) for i in range(30000)]
        print(cnt, 'id =', id)
    print('Finish id =', id)



ids = [random(), random(), random(),random(), random(), random(),random(), random(), random(),]

# %%

# We can use a with statement to ensure threads are cleaned up promptly
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
    # Start the load operations and mark each future with its URL
    future_to_url = {executor.submit(compute, url): url for url in ids}
    print('after future')
    for future in concurrent.futures.as_completed(future_to_url):
        url = future_to_url[future]
        try:
            data = future.result()
        except Exception as exc:
            print('Exception' % (url, exc))
        else:
            print('Finished', url)


print('All done. Time taken ', round(time() - start_time, 3), ', Workers ', workers)

'''
# Python 3.6 buggy
All done. Time taken  45.307 , Workers  1
All done. Time taken  32.062 , Workers  2
All done. Time taken  27.553 , Workers  3
All done. Time taken  38.718 , Workers  4
All done. Time taken  40.013 , Workers  4
All done. Time taken  47.477 , Workers  5
All done. Time taken  45.283 , Workers  9
All done. Time taken  45.413 , Workers  12
All done. Time taken  43.496 , Workers  12
All done. Time taken  45.294 , Workers  12
All done. Time taken  45.059 , Workers  None

# Python 3.8
All done. Time taken  48.34 , Workers  12
All done. Time taken  42.096 , Workers  4
All done. Time taken  40.636 , Workers  4
All done. Time taken  30.949 , Workers  3
All done. Time taken  31.034 , Workers  2
All done. Time taken  48.932 , Workers  1

'''
# %%
