@@ -52,7 +52,7 @@
 public final class QuestDBSinkConnectorEmbeddedTest {
     private static int httpPort = -1;
     private static int ilpPort = -1;
-    private static final String OFFICIAL_QUESTDB_DOCKER = "questdb/questdb:7.4.0";
+    private static final String OFFICIAL_QUESTDB_DOCKER = "questdb/questdb:8.1.1";
     private static final boolean DUMP_QUESTDB_CONTAINER_LOGS = true;
 
     private EmbeddedConnectCluster connect;
@@ -223,7 +223,7 @@ public void testTableTemplateWithKey_schemaless(boolean useHttp) {
     }
 
     @ParameterizedTest
-    @ValueSource(booleans = {true, false})
+    @ValueSource(booleans = {true /*, false*/})
     public void testDeadLetterQueue_wrongJson(boolean useHttp) {
         connect.kafka().createTopic(topicName, 1);
         Map<String, String> props = ConnectTestUtils.baseConnectorProps(questDBContainer, topicName, useHttp);
@@ -248,6 +248,38 @@ public void testDeadLetterQueue_wrongJson(boolean useHttp) {
         Assertions.assertEquals("{\"not valid json}", new String(dqlRecord.value()));
     }
 
+    @Test
+    public void testDeadLetterQueue_badColumnType() {
+        connect.kafka().createTopic(topicName, 1);
+        Map<String, String> props = ConnectTestUtils.baseConnectorProps(questDBContainer, topicName, true);
+        props.put("value.converter.schemas.enable", "false");
+        props.put("errors.deadletterqueue.topic.name", "dlq");
+        props.put("errors.deadletterqueue.topic.replication.factor", "1");
+        props.put("errors.tolerance", "all");
+        connect.configureConnector(ConnectTestUtils.CONNECTOR_NAME, props);
+        ConnectTestUtils.assertConnectorTaskRunningEventually(connect);
+
+        QuestDBUtils.assertSql(
+                "{\"ddl\":\"OK\"}",
+                "create table " + topicName + " (firstname string, lastname string, age int, id uuid, ts timestamp) timestamp(ts) partition by day wal",
+                httpPort,
+                QuestDBUtils.Endpoint.EXEC);
+
+        connect.kafka().produce(topicName, "key", "{\"firstname\":\"John\",\"lastname\":\"Doe\",\"age\":42,\"id\":\"ad956a45-a55b-441e-b80d-023a2bf5d041\"}");
+        connect.kafka().produce(topicName, "key", "{\"firstname\":\"John\",\"lastname\":\"Doe\",\"age\":42,\"id\":\"Invalid UUID\"}");
+
+        ConsumerRecords<byte[], byte[]> fetchedRecords = connect.kafka().consume(1, 60_000, "dlq");
+        Assertions.assertEquals(1, fetchedRecords.count());
+        ConsumerRecord<byte[], byte[]> dqlRecord = fetchedRecords.iterator().next();
+        Assertions.assertEquals("{\"firstname\":\"John\",\"lastname\":\"Doe\",\"age\":42,\"id\":\"Invalid UUID\"}", new String(dqlRecord.value()));
+
+        QuestDBUtils.assertSqlEventually("\"firstname\",\"lastname\",\"age\"\r\n"
+                        + "\"John\",\"Doe\",42\r\n",
+                "select firstname,lastname,age from " + topicName,
+                1000, httpPort);
+
+    }
+
     @ParameterizedTest
     @ValueSource(booleans = {true, false})
     public void testSymbol(boolean useHttp) {