-
Notifications
You must be signed in to change notification settings - Fork 0
/
concurrent_threading.py
93 lines (77 loc) · 2.2 KB
/
concurrent_threading.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
"""
Threading is creating multiple threads under single process and play around. They share memory, and resources.
Its optimised to not to create more than 10 threads, and to make it thread-safe, write code cautious, & use Queue.
https://realpython.com/python-concurrency/
..date.. Apr 11 2020
"""
# Synchronous Version
import random
import time

import requests
#
#
# def download_site(url, session):
# with session.get(url) as response:
# print(f"Read {len(response.content)} from {url}")
#
#
# def download_all_sites(sites):
# with requests.Session() as session:
# for url in sites:
# download_site(url, session)
#
#
# if __name__ == "__main__":
# sites = [
# "https://www.jython.org",
# "http://olympus.realpython.org/dice",
# ] * 80
# start_time = time.time()
# download_all_sites(sites)
# duration = time.time() - start_time
# print(f"Downloaded {len(sites)} in {duration} seconds")
# Asynchronous Version
import concurrent.futures
import threading
#
#
# thread_local = threading.local()
#
#
# def get_session():
# if not hasattr(thread_local, "session"):
# thread_local.session = requests.Session()
# return thread_local.session
#
#
# def download_site(url):
# session = get_session()
# with session.get(url) as response:
# print(f"Read {len(response.content)} from {url}")
#
#
# def download_all_sites(sites):
# with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
# executor.map(download_site, sites)
#
#
# if __name__ == "__main__":
# sites = [
# "https://www.jython.org",
# "http://olympus.realpython.org/dice",
# ] * 80
# start_time = time.time()
# download_all_sites(sites)
# duration = time.time() - start_time
# print(f"Downloaded {len(sites)} in {duration} seconds")
# 400 dummy work items, plus the shared results list that worker threads
# append to (list.append is atomic under the GIL, so no lock is needed here).
fake = list(range(400))
a = []
def write_to_list(data):
    """Record one finished work item by appending it to the shared list ``a``.

    list.append is atomic under the GIL, so concurrent calls from the
    worker threads are safe without an explicit lock.
    """
    a.append(data)
def do_something(data):
    """Simulate a slow, I/O-bound task, then record *data* in the shared list.

    Sleeps for a random 2-5 seconds (randrange upper bound is exclusive)
    to mimic blocking work; the GIL is released during the sleep, so the
    thread pool overlaps these waits.

    Requires ``import random`` at the top of the file — the original file
    never imported it, so every worker raised NameError at call time.
    """
    time.sleep(random.randrange(2, 6))  # 2..5 seconds, inclusive
    write_to_list(data)
if __name__ == "__main__":
    # Guard the driver so importing this module does not launch the demo
    # workload — consistent with both commented-out versions above.
    start_time = time.time()
    with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
        # Consume the iterator returned by map(); otherwise any exception
        # raised inside do_something is silently discarded and the script
        # appears to "succeed" while appending nothing.
        list(executor.map(do_something, fake))
    # The `with` block joins all workers on exit, so `duration` covers the
    # full fan-out and shutdown.
    duration = time.time() - start_time
    print(f"Appended {len(a)} in {duration} seconds")