-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtest_msgspec.py
More file actions
129 lines (107 loc) · 3.55 KB
/
test_msgspec.py
File metadata and controls
129 lines (107 loc) · 3.55 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
'''
test kafka producer with msgspec
'''
import signal
import sys
from multiprocessing import Event, JoinableQueue, Process
from queue import Empty
from typing import Optional

from confluent_kafka import KafkaException
from msgspec import MsgspecError, json

from common import build_argument_parser
from kafka_admin import KafkaAdminService
from kafka_producer import KafkaProducerService
from logger import log
from structs import PreparedData, Request
def build_header(request: Request, extra_headers: Optional[dict] = None) -> dict:
    '''
    Build the Kafka record headers for a published message.

    @param request: the request the published record was derived from
    @param extra_headers: optional extra headers merged over the defaults
        (keys in extra_headers win on collision)
    @return: headers dict for the Kafka record
    @raises IndexError: if request.queries has fewer than two entries
    '''
    # NOTE(review): assumes queries[0] is the outbound leg and queries[1]
    # the return leg — confirm against the Request producer.
    return {
        'supplier': request.supplier,
        'fareClass': request.fareClass,
        'timestamp': str(request.timestamp),
        'depDate': request.queries[0].date,
        'retDate': request.queries[1].date,
        **(extra_headers or {})
    }
def _kafka_producer_worker(
    ready_event: Event,
    terminate_event: Event,
    kafka_producer_queue: JoinableQueue,
    bootstrap_server: str,
    topic: Optional[str] = None
) -> None:
    '''
    Process target: drain PreparedData items from the queue and publish
    them to Kafka until terminate_event is set.

    @param ready_event: set once the topic exists and the worker may be fed
    @param terminate_event: cooperative shutdown flag shared with the parent
    @param kafka_producer_queue: queue of PreparedData items to publish
    @param bootstrap_server: Kafka bootstrap server address
    @param topic: target topic; when None, falls back to the module-global
        CLI args for backward compatibility with existing callers
    '''
    # Translate SIGTERM/SIGINT into a cooperative shutdown of the loop below.
    signal.signal(signal.SIGTERM, lambda signum, frame: terminate_event.set())
    signal.signal(signal.SIGINT, lambda signum, frame: terminate_event.set())
    # BUG FIX: the original read the module-global `args` directly, which
    # only exists in the parent's __main__ block — that breaks under the
    # 'spawn' start method and couples the worker to the CLI. The topic is
    # now a parameter; the global lookup remains only as a fallback.
    kafka_topic = topic if topic is not None else args.kafka_topic
    admin_service = KafkaAdminService(bootstrap_server=bootstrap_server)
    admin_service.create_topic(topic=kafka_topic)
    # Signal the parent that the topic exists and items may be enqueued.
    ready_event.set()
    log.info('kafka_producer_worker is started.')
    producer_service = KafkaProducerService(
        bootstrap_server=bootstrap_server,
        topic=kafka_topic
    )
    try:
        producer_service.prerequisite_check()
        log.info(
            'producer service prerequisite check passed for kafka bootstrap servers %s',
            bootstrap_server
        )
    except (KafkaException, LookupError, RuntimeError) as err1:
        # Abort the whole run: the parent observes terminate_event.
        log.error(err1)
        terminate_event.set()
        return
    # Reuse one msgspec encoder across all messages.
    encoder = json.Encoder()
    while not terminate_event.is_set():
        try:
            # Poll with a timeout so terminate_event is re-checked regularly.
            data: PreparedData = kafka_producer_queue.get(timeout=1)
        except Empty:
            continue
        try:
            producer_service.publish(
                key=data.key,
                value=encoder.encode(data),
                headers=build_header(data.request_rt)
            )
        except RuntimeError as error:
            # Best-effort: log and keep draining the queue.
            log.error(error)
            continue
        finally:
            # Mark the item done even on failure so queue.join() cannot hang.
            kafka_producer_queue.task_done()
    producer_service.close()
    log.info('kafka_producer_worker is terminated.')
if __name__ == '__main__':
    argument_parser = build_argument_parser('msgspec')
    args = argument_parser.parse_args()
    _ready_event = Event()
    _terminate_event = Event()
    # Bounded queue applies back-pressure when the worker falls behind.
    _kafka_producer_queue = JoinableQueue(maxsize=10)
    kafka_producer_worker = Process(
        name='KafkaProducerWorker',
        target=_kafka_producer_worker,
        args=(
            _ready_event,
            _terminate_event,
            _kafka_producer_queue,
            args.kafka_bootstrap_server,
        )
    )
    kafka_producer_worker.start()
    try:
        with open('test_prepared_data.json', 'rb') as f:
            prepared_data = json.decode(f.read(), type=PreparedData)
    except (MsgspecError, FileNotFoundError) as exc:
        # BUG FIX: the original exited silently and never joined the
        # worker, leaving a zombie process behind.
        log.error(exc)
        _terminate_event.set()
        _kafka_producer_queue.close()
        kafka_producer_worker.join()
        sys.exit(1)
    # Wait until the worker has created the topic before enqueueing.
    _ready_event.wait()
    COUNT = 0
    try:
        # BUG FIX: the original condition was
        # `not _terminate_event.is_set or COUNT < args.count` — `is_set`
        # without parentheses is a truthy bound method (the `not` branch was
        # always False), and `or` let the loop overshoot args.count.
        while not _terminate_event.is_set() and COUNT < args.count:
            prepared_data.key = str(COUNT)
            COUNT += 1
            _kafka_producer_queue.put(prepared_data)
        if not _terminate_event.is_set():
            # Block until the worker has task_done()'d every queued item,
            # so nothing is dropped at shutdown. Skipped when the worker
            # already failed (nothing would drain the queue).
            _kafka_producer_queue.join()
    finally:
        _terminate_event.set()
        _kafka_producer_queue.close()
        # BUG FIX: reap the worker process before exiting.
        kafka_producer_worker.join()
        log.info('done.')