streams.py
import logging
import timeit
from time import sleep

from common import MAX_POLL_TIME

logger = logging.getLogger(__name__)
# Publish a single record to stream:topic; returns True on success.
def produce(stream: str, topic: str, record: str):
    from confluent_kafka import Producer

    p = Producer({"streams.producer.default.stream": stream})
    try:
        # logger.debug("pushing message: %s", record)
        p.produce(topic, record.encode("utf-8"))
    except Exception as error:
        logger.warning(error)
        return False
    finally:
        p.flush()
    return True

# Consume records from stream:topic, yielding decoded values until the end
# of the partition is reached or MAX_POLL_TIME elapses.
def consume(stream: str, topic: str):
    from confluent_kafka import Consumer, KafkaError

    consumer = Consumer(
        {"group.id": "ezshow", "default.topic.config": {"auto.offset.reset": "earliest"}}
    )
    consumer.subscribe([f"{stream}:{topic}"])
    # logger.debug("polling %s", topic)
    start_time = timeit.default_timer()
    try:
        while True:
            # enforce timeout so we don't run forever; the TimeoutError is
            # caught below, ending the iteration and closing the consumer
            if timeit.default_timer() - start_time > MAX_POLL_TIME:
                raise TimeoutError
            message = consumer.poll(timeout=MAX_POLL_TIME)
            if message is None:
                continue
            if not message.error():
                yield message.value().decode("utf-8")
            elif message.error().code() == KafkaError._PARTITION_EOF:
                # end of partition reached; stop consuming
                break
            else:
                # log and otherwise ignore other errors
                logger.debug(message.error().str())
            # add delay between polls
            sleep(0.1)
    except Exception as error:
        logger.warning(error)
    finally:
        consumer.close()
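

# A minimal usage sketch. The stream path and topic names below are
# hypothetical examples, not values defined elsewhere in this repo;
# adjust them to match your environment.
if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    stream, topic = "/demo_stream", "demo_topic"  # hypothetical names
    if produce(stream, topic, "hello"):
        for value in consume(stream, topic):
            print(value)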