@@ -5,6 +5,7 @@
 import io.debezium.testing.testcontainers.ConnectorConfiguration;
 import io.debezium.testing.testcontainers.DebeziumContainer;
 import io.questdb.client.Sender;
+import io.questdb.kafka.domain.SensorReading;
 import io.questdb.kafka.domain.Student;
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericData;
@@ -17,9 +18,11 @@
 import org.jetbrains.annotations.NotNull;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.RegisterExtension;
+import org.slf4j.LoggerFactory;
 import org.testcontainers.containers.GenericContainer;
 import org.testcontainers.containers.KafkaContainer;
 import org.testcontainers.containers.Network;
+import org.testcontainers.containers.output.Slf4jLogConsumer;
 import org.testcontainers.containers.wait.strategy.HttpWaitStrategy;
 import org.testcontainers.containers.wait.strategy.Wait;
 import org.testcontainers.junit.jupiter.Container;
@@ -28,6 +31,7 @@
 import org.testcontainers.utility.MountableFile;
 
 import java.time.Instant;
+import java.util.Arrays;
 import java.util.Properties;
 
 import static java.time.Duration.ofMinutes;
@@ -52,10 +56,10 @@ public class AvroSchemaRegistryIT {
             .withEnv("KAFKA_CONTROLLER_QUORUM_VOTERS", "0@kafka:9094");
 
     @Container
-    private final GenericContainer<?> questDBContainer = new GenericContainer<>("questdb/questdb:7.4.0")
+    private final GenericContainer<?> questDBContainer = new GenericContainer<>("questdb/questdb:9.0.1")
             .withNetwork(network)
             .withExposedPorts(QuestDBUtils.QUESTDB_HTTP_PORT)
-            // .withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("questdb")))
+            .withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("questdb")))
             .withEnv("QDB_CAIRO_COMMIT_LAG", "100")
             .withEnv("JAVA_OPTS", "-Djava.locale.providers=JRE,SPI");
 
@@ -108,7 +112,7 @@ public void testSmoke() throws Exception {
             producer.send(new ProducerRecord<>(topicName, "foo", student)).get();
         }
 
-        startConnector(topicName);
+        startConnector(topicName, "birthday");
         QuestDBUtils.assertSqlEventually("\"firstname\",\"lastname\",\"timestamp\"\r\n"
                         + "\"John\",\"Doe\",\"2000-01-01T00:00:00.000000Z\"\r\n",
                 "select * from " + topicName, questDBContainer.getMappedPort(QuestDBUtils.QUESTDB_HTTP_PORT));
@@ -125,7 +129,7 @@ public void testSchemaEvolution() throws Exception {
                     .build();
             producer.send(new ProducerRecord<>(topicName, "foo", student)).get();
         }
-        startConnector(topicName);
+        startConnector(topicName, "birthday");
 
         QuestDBUtils.assertSqlEventually("\"firstname\",\"lastname\",\"timestamp\"\r\n"
                         + "\"John\",\"Doe\",\"2000-01-01T00:00:00.000000Z\"\r\n",
@@ -146,7 +150,41 @@ public void testSchemaEvolution() throws Exception {
                 "select * from " + topicName, questDBContainer.getMappedPort(QuestDBUtils.QUESTDB_HTTP_PORT));
     }
 
-    private void startConnector(String topicName) {
+    @Test
+    public void testAvroRecordsWithArrays() throws Exception {
+        String topicName = "sensors";
+
+        // sensor readings carrying an array of double values
+        try (Producer<String, SensorReading> producer = new KafkaProducer<>(producerProps())) {
+            SensorReading reading = SensorReading.newBuilder()
+                    .setSensorId("sensor-001")
+                    .setTimestamp(Instant.parse("2024-01-01T10:00:00Z"))
+                    .setValues(Arrays.asList(22.5, 23.1, 22.8, 23.3, 22.9))
+                    .setLocation("Building A")
+                    .build();
+            producer.send(new ProducerRecord<>(topicName, "key1", reading)).get();
+
+            // send a second reading, with a shorter array and a null location
+            SensorReading reading2 = SensorReading.newBuilder()
+                    .setSensorId("sensor-002")
+                    .setTimestamp(Instant.parse("2024-01-01T10:05:00Z"))
+                    .setValues(Arrays.asList(18.2, 18.5, 18.3))
+                    .setLocation(null)
+                    .build();
+            producer.send(new ProducerRecord<>(topicName, "key2", reading2)).get();
+        }
+
+        startConnector(topicName, "timestamp");
+
+        QuestDBUtils.assertSqlEventually(
+                "\"sensorId\",\"values\",\"location\",\"timestamp\"\r\n" +
+                "\"sensor-001\",\"[22.5,23.1,22.8,23.3,22.9]\",\"Building A\",\"2024-01-01T10:00:00.000000Z\"\r\n" +
+                "\"sensor-002\",\"[18.2,18.5,18.3]\",,\"2024-01-01T10:05:00.000000Z\"\r\n",
+                "select sensorId, \"values\", location, timestamp from " + topicName + " order by timestamp",
+                questDBContainer.getMappedPort(QuestDBUtils.QUESTDB_HTTP_PORT));
+    }
+
+    private void startConnector(String topicName, String timestampName) {
         String confString = "http::addr=" + questDBContainer.getNetworkAliases().get(0) + ":" + QuestDBUtils.QUESTDB_HTTP_PORT + ";auto_flush_rows=1;";
         ConnectorConfiguration connector = ConnectorConfiguration.create()
                 .with("connector.class", QuestDBSinkConnector.class.getName())
@@ -155,7 +193,7 @@ private void startConnector(String topicName) {
                 .with("value.converter", "io.confluent.connect.avro.AvroConverter")
                 .with("value.converter.schema.registry.url", "http://" + schemaRegistry.getNetworkAliases().get(0) + ":8081")
                 .with("topics", topicName)
-                .with(QuestDBSinkConnectorConfig.DESIGNATED_TIMESTAMP_COLUMN_NAME_CONFIG, "birthday")
+                .with(QuestDBSinkConnectorConfig.DESIGNATED_TIMESTAMP_COLUMN_NAME_CONFIG, timestampName)
                 .with(QuestDBSinkConnectorConfig.INCLUDE_KEY_CONFIG, "false")
                 .with("client.conf.string", confString);
         connectContainer.registerConnector("my-connector", connector);
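
Note on the new test: SensorReading is an Avro-generated class (hence the new io.questdb.kafka.domain.SensorReading import), but its .avsc definition is not part of this diff. The sketch below reconstructs a schema shape consistent with the builder calls in testAvroRecordsWithArrays(); the timestamp-millis logical type and the nullable location union are assumptions inferred from setTimestamp(Instant) and setLocation(null), not the project's actual schema. The image bump to questdb/questdb:9.0.1 is also what makes the "[22.5,23.1,...]" expectation work, since native double-array columns arrived with the QuestDB 9.x line.

import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

// Sketch only: a record shape consistent with SensorReading.newBuilder()
// as used in testAvroRecordsWithArrays(). Field names come from the test;
// the logical type of "timestamp" (timestamp-millis) is an assumption.
public class SensorReadingSchemaSketch {
    public static void main(String[] args) {
        Schema sensorReading = SchemaBuilder.record("SensorReading")
                .namespace("io.questdb.kafka.domain")
                .fields()
                .requiredString("sensorId")                 // setSensorId(String)
                .name("timestamp")                          // setTimestamp(Instant)
                .type(LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG)))
                .noDefault()
                .name("values")                             // setValues(List<Double>)
                .type().array().items().doubleType()
                .noDefault()
                .optionalString("location")                 // setLocation(null) is allowed
                .endRecord();
        // prints the JSON form the Avro compiler would consume
        System.out.println(sensorReading.toString(true));
    }
}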
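The tests also rely on a producerProps() helper defined elsewhere in this class and not visible in the diff. For context, a typical shape for such a helper in a Confluent Avro + Schema Registry test is sketched below as a member of the test class; the kafkaContainer field name is an assumption, while schemaRegistry does appear in the diff above.

import io.confluent.kafka.serializers.KafkaAvroSerializer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

// Sketch of the producerProps() helper used by the tests above. The real
// method lives outside this diff; "kafkaContainer" is an assumed field name.
private Properties producerProps() {
    Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaContainer.getBootstrapServers());
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
    // the Avro serializer registers schemas against the registry container's mapped port
    props.put("schema.registry.url", "http://" + schemaRegistry.getHost() + ":" + schemaRegistry.getMappedPort(8081));
    return props;
}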