11
11
import argparse
12
12
import json
13
13
14
# Command-line interface. Parsed at import time so the module-level
# `args` object is available to main() below.
parser = argparse.ArgumentParser(description="Kafka Event Consumer")
parser.add_argument("-e", "--event-type", required=False,
                    help="Filter by event_type (optional)")
parser.add_argument("-g", "--group-id", required=False,
                    help="Kafka consumer group ID (optional)")
parser.add_argument("-t", "--test-mode", action="store_true")
args = parser.parse_args()
19
+
14
20
"""
Apache Kafka Consumer Demo
@@ -43,39 +49,24 @@ def try_parse_json(value: bytes):
43
49
return value .decode ('utf-8' , errors = 'replace' )
44
50
45
51
46
- parser = argparse .ArgumentParser (description = "Kafka Event Consumer" )
47
- parser .add_argument ("-e" , "--event-type" , help = "Filter by event_type (optional)" , required = False )
48
- parser .add_argument ("-g" , "--group-id" , help = "Kafka consumer group ID (optional)" , required = False )
49
- parser .add_argument ("-t" , "--test-mode" , action = "store_true" )
50
- args = parser .parse_args ()
51
-
52
-
53
- def main ():
54
- bootstrap_servers = os .environ .get ("KAFKA_BOOTSTRAP_SERVERS" , "localhost:9092" )
55
- topic = os .environ .get ("KAFKA_TOPIC" , "test-topic" )
56
-
57
- # Initialize the Kafka consumer with configuration
58
- # - topic: The Kafka topic to subscribe to (from environment variable)
59
- # - bootstrap_servers: Connection string for the Kafka broker
60
- # - auto_offset_reset='earliest': Start reading from the beginning of the topic if no committed offset exists
61
- # - enable_auto_commit=True: Automatically commit offsets
62
- consumer_args = {
63
- 'bootstrap_servers' : bootstrap_servers ,
64
- 'auto_offset_reset' : 'earliest' ,
65
- 'enable_auto_commit' : True
66
- }
67
-
68
- if args .group_id :
69
- consumer_args ['group_id' ] = args .group_id
52
+ def consume_events (topic , consumer_args , event_type = None , group_id = None ):
53
+ """
54
+ Consume messages from a Kafka topic.
70
55
71
- if args .test_mode :
72
- consumer_args ['consumer_timeout_ms' ] = 3000 # pragma: no cover
56
+ This function creates a Kafka consumer with the provided arguments and listens
57
+ to a specified topic for incoming messages. Messages are parsed as JSON where
58
+ possible and logged appropriately. Filtering is available based on an optional
59
+ event type attribute in the message.
73
60
61
+ :param topic: Kafka topic to consume messages from.
62
+ :param consumer_args: Dictionary of arguments to configure the KafkaConsumer.
63
+ :param event_type: Optional. Filters messages by the `event_type` attribute if it's included in the message payload.
64
+ """
74
65
consumer = KafkaConsumer (topic , ** consumer_args )
75
66
76
67
logger = get_logger ("consumer" )
77
68
78
- logger .info (f"Polyglot consumer listening, consumer group: { args . group_id } \n " )
69
+ logger .info (f"Polyglot consumer listening, consumer group: { group_id } \n " )
79
70
80
71
try :
81
72
# Continuously poll for new messages
@@ -90,7 +81,7 @@ def main():
90
81
91
82
# Display the message with an appropriate prefix based on its type
92
83
if isinstance (parsed , dict ):
93
- if args . event_type and parsed .get ("event_type" ) != args . event_type :
84
+ if event_type and parsed .get ("event_type" ) != event_type :
94
85
continue # Skip non-matching event
95
86
logger .info (
96
87
f"✅ JSON ({ parsed ['event_type' ]} ) | key={ key } | partition={ partition } | offset={ offset } → { parsed } " )
@@ -104,5 +95,29 @@ def main():
104
95
consumer .close ()
105
96
106
97
98
def main():
    """Entry point when the script is run directly.

    Reads the topic and broker address from the environment (with local
    defaults), assembles the KafkaConsumer keyword arguments from the
    parsed CLI flags, and hands everything to consume_events().
    """
    topic = os.environ.get("KAFKA_TOPIC", "test-topic")
    servers = os.environ.get("KAFKA_BOOTSTRAP_SERVERS", "localhost:9092")

    # Base consumer configuration:
    # - bootstrap_servers: connection string for the Kafka broker
    # - auto_offset_reset='earliest': read from the beginning of the topic
    #   when the group has no committed offset yet
    # - enable_auto_commit=True: offsets are committed automatically
    config = {
        'bootstrap_servers': servers,
        'auto_offset_reset': 'earliest',
        'enable_auto_commit': True,
    }

    if args.group_id:
        config['group_id'] = args.group_id

    if args.test_mode:
        # Bounded poll so the consumer exits instead of blocking forever.
        config['consumer_timeout_ms'] = 3000  # pragma: no cover

    consume_events(topic, config, args.event_type, args.group_id)
120
+
121
+
107
122
# Run the consumer only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
0 commit comments