
Commit 1950c00

compatibility with Kafka older than 2.6
1 parent f40da2d commit 1950c00

File tree

2 files changed: +39 -1 lines changed


connector/src/main/java/io/questdb/kafka/QuestDBSinkTask.java

Lines changed: 9 additions & 1 deletion
@@ -16,6 +16,7 @@
 import org.apache.kafka.connect.data.*;
 import org.apache.kafka.connect.errors.ConnectException;
 import org.apache.kafka.connect.errors.RetriableException;
+import org.apache.kafka.connect.sink.ErrantRecordReporter;
 import org.apache.kafka.connect.sink.SinkRecord;
 import org.apache.kafka.connect.sink.SinkTask;
 import org.slf4j.Logger;
@@ -50,6 +51,7 @@ public final class QuestDBSinkTask extends SinkTask {
     private int pendingRows;
     private final FlushConfig flushConfig = new FlushConfig();
     private final ObjList<SinkRecord> inflightSinkRecords = new ObjList<>();
+    private ErrantRecordReporter reporter;
 
     @Override
     public String version() {
@@ -88,6 +90,12 @@ public void start(Map<String, String> map) {
         this.allowedLag = config.getAllowedLag();
         this.nextFlushNanos = System.nanoTime() + flushConfig.autoFlushNanos;
         this.recordToTable = Templating.newTableTableFn(config.getTable());
+        try {
+            reporter = context.errantRecordReporter();
+        } catch (NoSuchMethodError | NoClassDefFoundError e) {
+            // Kafka older than 2.6
+            reporter = null;
+        }
     }
 
     private Sender createRawSender() {
@@ -247,7 +255,7 @@ private void onTcpSenderException(Exception e) {
 
     private void onHttpSenderException(Exception e) {
         closeSenderSilently();
-        if (e.getMessage() != null && e.getMessage().contains("error in line")) { // hack to detect data parsing errors
+        if (reporter != null && e.getMessage() != null && e.getMessage().contains("error in line")) { // hack to detect data parsing errors
             // ok, we have a parsing error, let's try to send records one by one to find the problematic record
             // and we will report it to the error handler. the rest of the records will make it to QuestDB
             sender = createSender();
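
Note on the compatibility pattern above: SinkTaskContext.errantRecordReporter() only exists on Kafka Connect 2.6+ (KIP-610), so on an older runtime the call fails with NoSuchMethodError, or with NoClassDefFoundError when the ErrantRecordReporter interface itself is absent; the commit catches both and falls back to a null reporter. Below is a minimal standalone sketch of the same probe-and-report idea. The ReporterProbe class and its method names are illustrative, not part of the connector; the Kafka Connect types and calls (SinkTaskContext.errantRecordReporter(), ErrantRecordReporter.report()) are the real API.

import org.apache.kafka.connect.sink.ErrantRecordReporter;
import org.apache.kafka.connect.sink.SinkRecord;
import org.apache.kafka.connect.sink.SinkTaskContext;

// Hypothetical helper, for illustration only.
final class ReporterProbe {

    // Returns the task's ErrantRecordReporter, or null when the Connect runtime is
    // older than 2.6 (method/class missing) or when no error reporter is configured.
    static ErrantRecordReporter probe(SinkTaskContext context) {
        try {
            return context.errantRecordReporter();
        } catch (NoSuchMethodError | NoClassDefFoundError e) {
            // Runtime predates KIP-610 (Kafka 2.6): degrade gracefully instead of crashing.
            return null;
        }
    }

    // Reports a bad record when a reporter is available; otherwise rethrows so the
    // task fails, which is the only option on pre-2.6 runtimes.
    static void reportOrFail(ErrantRecordReporter reporter, SinkRecord record, Exception error) {
        if (reporter != null) {
            reporter.report(record, error); // routes the record to the configured DLQ/error handler
        } else {
            throw new RuntimeException(error);
        }
    }
}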

connector/src/test/java/io/questdb/kafka/QuestDBSinkConnectorEmbeddedTest.java

Lines changed: 30 additions & 0 deletions
@@ -293,6 +293,36 @@ public void testDeadLetterQueue_badColumnType() {
 
     }
 
+    @Test
+    public void testbadColumnType_noDLQ() {
+        connect.kafka().createTopic(topicName, 1);
+        Map<String, String> props = ConnectTestUtils.baseConnectorProps(questDBContainer, topicName, true);
+        props.put("value.converter.schemas.enable", "false");
+        connect.configureConnector(ConnectTestUtils.CONNECTOR_NAME, props);
+        ConnectTestUtils.assertConnectorTaskRunningEventually(connect);
+
+        QuestDBUtils.assertSql(
+                "{\"ddl\":\"OK\"}",
+                "create table " + topicName + " (firstname string, lastname string, age int, id uuid, ts timestamp) timestamp(ts) partition by day wal",
+                httpPort,
+                QuestDBUtils.Endpoint.EXEC);
+
+        String goodRecordA = "{\"firstname\":\"John\",\"lastname\":\"Doe\",\"age\":42,\"id\":\"ad956a45-a55b-441e-b80d-023a2bf5d041\"}";
+        String goodRecordB = "{\"firstname\":\"John\",\"lastname\":\"Doe\",\"age\":42,\"id\":\"ad956a45-a55b-441e-b80d-023a2bf5d042\"}";
+        String goodRecordC = "{\"firstname\":\"John\",\"lastname\":\"Doe\",\"age\":42,\"id\":\"ad956a45-a55b-441e-b80d-023a2bf5d043\"}";
+        String badRecordA = "{\"firstname\":\"John\",\"lastname\":\"Doe\",\"age\":42,\"id\":\"Invalid UUID\"}";
+        String badRecordB = "{\"firstname\":\"John\",\"lastname\":\"Doe\",\"age\":\"not a number\",\"id\":\"ad956a45-a55b-441e-b80d-023a2bf5d041\"}";
+
+        // interleave good and bad records
+        connect.kafka().produce(topicName, "key", goodRecordA);
+        connect.kafka().produce(topicName, "key", badRecordA);
+        connect.kafka().produce(topicName, "key", goodRecordB);
+        connect.kafka().produce(topicName, "key", badRecordB);
+        connect.kafka().produce(topicName, "key", goodRecordC);
+
+        ConnectTestUtils.assertConnectorTaskStateEventually(connect, AbstractStatus.State.FAILED);
+    }
+
     @ParameterizedTest
     @ValueSource(booleans = {true, false})
     public void testSymbol(boolean useHttp) {
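
The new test pins down the expected behavior when no dead letter queue is configured: with no ErrantRecordReporter available, a single bad record drives the task to FAILED instead of being skipped. For contrast, here is a hedged sketch of a DLQ-enabled configuration; the error-handling keys are standard Kafka Connect sink settings, while the connector.class value and topic names are assumptions, not taken from this diff.

import java.util.HashMap;
import java.util.Map;

// Illustrative connector configuration with a dead letter queue enabled.
public final class DlqConfigSketch {
    public static Map<String, String> dlqEnabledProps() {
        Map<String, String> props = new HashMap<>();
        props.put("connector.class", "io.questdb.kafka.QuestDBSinkConnector"); // assumed class name
        props.put("topics", "example-topic");                                  // illustrative topic
        props.put("value.converter", "org.apache.kafka.connect.json.JsonConverter");
        props.put("value.converter.schemas.enable", "false");
        // Standard Kafka Connect error handling: tolerate bad records and route them to a DLQ.
        props.put("errors.tolerance", "all");
        props.put("errors.deadletterqueue.topic.name", "dlq-example-topic");
        props.put("errors.deadletterqueue.topic.replication.factor", "1");
        return props;
    }
}

With a configuration like this on Kafka 2.6 or newer, context.errantRecordReporter() returns a non-null reporter, so the connector can report the two bad records and still deliver the three good ones. On runtimes older than 2.6 the reporter is unavailable regardless of configuration, which is exactly the case this commit guards against.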
