From aeb8b2063fb08fa558851d89263307ab7572498e Mon Sep 17 00:00:00 2001
From: Joseph Grogan
Date: Fri, 7 Feb 2025 12:45:21 -0500
Subject: [PATCH] Fix case statement exception (#100)

* Fix case statement

* Various checkstyle complaints
---
 deploy/samples/kafkadb.yaml                           |  2 ++
 deploy/samples/venicedb.yaml                          |  2 +-
 .../hoptimator/jdbc/BuiltinCatalogProvider.java       |  1 -
 .../hoptimator/jdbc/HoptimatorConnection.java         |  7 +++----
 .../hoptimator/jdbc/HoptimatorDdlExecutor.java        |  2 +-
 .../java/com/linkedin/hoptimator/k8s/K8sCatalog.java  |  2 +-
 .../java/com/linkedin/hoptimator/k8s/K8sContext.java  | 11 +++--------
 hoptimator-kafka/src/test/resources/kafka-ddl.id      |  4 ++--
 .../hoptimator/operator/HoptimatorOperatorApp.java    |  1 -
 .../com/linkedin/hoptimator/util/ConfigService.java   |  1 -
 .../hoptimator/util/planner/ScriptImplementor.java    |  2 +-
 11 files changed, 14 insertions(+), 21 deletions(-)

diff --git a/deploy/samples/kafkadb.yaml b/deploy/samples/kafkadb.yaml
index 6b4ff9a7..8e923d34 100644
--- a/deploy/samples/kafkadb.yaml
+++ b/deploy/samples/kafkadb.yaml
@@ -37,6 +37,8 @@ spec:
     properties.bootstrap.servers = one-kafka-bootstrap.kafka.svc.cluster.local:9094
     value.format = json
     scan.startup.mode = earliest-offset
+    key.fields = KEY
+    value.fields-include = EXCEPT_KEY
 
 ---
 
diff --git a/deploy/samples/venicedb.yaml b/deploy/samples/venicedb.yaml
index c3a46849..f5be95d6 100644
--- a/deploy/samples/venicedb.yaml
+++ b/deploy/samples/venicedb.yaml
@@ -23,4 +23,4 @@ spec:
     key.fields-prefix = {{keyPrefix:}}
     key.fields = {{keys:KEY}}
     key.type = {{keyType:PRIMITIVE}}
-    value.fields-include: EXCEPT_KEY
+    value.fields-include = EXCEPT_KEY
diff --git a/hoptimator-jdbc/src/main/java/com/linkedin/hoptimator/jdbc/BuiltinCatalogProvider.java b/hoptimator-jdbc/src/main/java/com/linkedin/hoptimator/jdbc/BuiltinCatalogProvider.java
index acc6a2e5..54dfefe7 100644
--- a/hoptimator-jdbc/src/main/java/com/linkedin/hoptimator/jdbc/BuiltinCatalogProvider.java
+++ b/hoptimator-jdbc/src/main/java/com/linkedin/hoptimator/jdbc/BuiltinCatalogProvider.java
@@ -2,7 +2,6 @@
 
 import java.util.Collection;
 import java.util.Collections;
-import java.util.Properties;
 
 import com.linkedin.hoptimator.Catalog;
 import com.linkedin.hoptimator.CatalogProvider;
diff --git a/hoptimator-jdbc/src/main/java/com/linkedin/hoptimator/jdbc/HoptimatorConnection.java b/hoptimator-jdbc/src/main/java/com/linkedin/hoptimator/jdbc/HoptimatorConnection.java
index b45e8d93..7f020be8 100644
--- a/hoptimator-jdbc/src/main/java/com/linkedin/hoptimator/jdbc/HoptimatorConnection.java
+++ b/hoptimator-jdbc/src/main/java/com/linkedin/hoptimator/jdbc/HoptimatorConnection.java
@@ -1,8 +1,7 @@
 package com.linkedin.hoptimator.jdbc;
 
-import java.sql.Connection;
-import java.sql.Statement;
 import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.Properties;
 
 import org.apache.calcite.jdbc.CalciteConnection;
@@ -24,7 +23,7 @@ public HoptimatorConnection(CalciteConnection connection, Properties connectionP
 
   @Override
   public Statement createStatement() throws SQLException {
-    return connection.createStatement();
+    return connection.createStatement();
   }
 
   public Properties connectionProperties() {
@@ -32,7 +31,7 @@ public Properties connectionProperties() {
   }
 
   public CalcitePrepare.Context createPrepareContext() {
-    return connection.createPrepareContext();
+    return connection.createPrepareContext();
   }
 
   public CalciteConnection calciteConnection() {
diff --git a/hoptimator-jdbc/src/main/java/com/linkedin/hoptimator/jdbc/HoptimatorDdlExecutor.java b/hoptimator-jdbc/src/main/java/com/linkedin/hoptimator/jdbc/HoptimatorDdlExecutor.java
index 4292c29a..bb4132e1 100644
--- a/hoptimator-jdbc/src/main/java/com/linkedin/hoptimator/jdbc/HoptimatorDdlExecutor.java
+++ b/hoptimator-jdbc/src/main/java/com/linkedin/hoptimator/jdbc/HoptimatorDdlExecutor.java
@@ -173,7 +173,7 @@ public void execute(SqlCreateMaterializedView create, CalcitePrepare.Context con
     MaterializedViewTable materializedViewTable = new MaterializedViewTable(viewTableMacro);
     RelDataType viewRowType = materializedViewTable.getRowType(typeFactory);
 
-    // Suport "partial views", i.e. CREATE VIEW FOO$BAR, where the view name
+    // Support "partial views", i.e. CREATE VIEW FOO$BAR, where the view name
     // is "foo-bar" and the sink is just FOO.
     String sinkName = viewName.split("\\$", 2)[0];
     List<String> sinkPath = new ArrayList<>();
diff --git a/hoptimator-k8s/src/main/java/com/linkedin/hoptimator/k8s/K8sCatalog.java b/hoptimator-k8s/src/main/java/com/linkedin/hoptimator/k8s/K8sCatalog.java
index ccd2079a..c234738e 100644
--- a/hoptimator-k8s/src/main/java/com/linkedin/hoptimator/k8s/K8sCatalog.java
+++ b/hoptimator-k8s/src/main/java/com/linkedin/hoptimator/k8s/K8sCatalog.java
@@ -1,8 +1,8 @@
 package com.linkedin.hoptimator.k8s;
 
-import java.util.Properties;
 import java.sql.SQLException;
 import java.sql.Wrapper;
+import java.util.Properties;
 
 import org.apache.calcite.schema.SchemaPlus;
 import org.slf4j.Logger;
diff --git a/hoptimator-k8s/src/main/java/com/linkedin/hoptimator/k8s/K8sContext.java b/hoptimator-k8s/src/main/java/com/linkedin/hoptimator/k8s/K8sContext.java
index fde42842..a660a0e0 100644
--- a/hoptimator-k8s/src/main/java/com/linkedin/hoptimator/k8s/K8sContext.java
+++ b/hoptimator-k8s/src/main/java/com/linkedin/hoptimator/k8s/K8sContext.java
@@ -1,19 +1,14 @@
 package com.linkedin.hoptimator.k8s;
 
 import java.io.File;
-import java.io.InputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.Reader;
 import java.nio.file.Files;
-import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.time.Duration;
-import java.util.Optional;
 import java.util.Properties;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import io.kubernetes.client.apimachinery.GroupVersion;
 import io.kubernetes.client.common.KubernetesListObject;
 import io.kubernetes.client.common.KubernetesObject;
@@ -86,12 +81,12 @@ public K8sContext(Properties connectionProperties) {
         throw new RuntimeException(e);
       }
     }
-    
+
     if (server != null) {
       info += " Accessing " + server + ".";
       this.apiClient.setBasePath(server);
     }
-    
+
    if (truststore != null) {
       try {
         InputStream in = Files.newInputStream(Paths.get(truststore));
diff --git a/hoptimator-kafka/src/test/resources/kafka-ddl.id b/hoptimator-kafka/src/test/resources/kafka-ddl.id
index 84993b19..cd7802ab 100644
--- a/hoptimator-kafka/src/test/resources/kafka-ddl.id
+++ b/hoptimator-kafka/src/test/resources/kafka-ddl.id
@@ -13,9 +13,9 @@ spec:
   entryClass: com.linkedin.hoptimator.flink.runner.FlinkRunner
   args:
   - CREATE DATABASE IF NOT EXISTS `KAFKA` WITH ()
-  - CREATE TABLE IF NOT EXISTS `KAFKA`.`existing-topic-2` (`KEY` VARCHAR, `VALUE` BINARY) WITH ('connector'='kafka', 'properties.bootstrap.servers'='one-kafka-bootstrap.kafka.svc.cluster.local:9094', 'scan.startup.mode'='earliest-offset', 'topic'='existing-topic-2', 'value.format'='json')
+  - CREATE TABLE IF NOT EXISTS `KAFKA`.`existing-topic-2` (`KEY` VARCHAR, `VALUE` BINARY) WITH ('connector'='kafka', 'key.fields'='KEY', 'properties.bootstrap.servers'='one-kafka-bootstrap.kafka.svc.cluster.local:9094', 'scan.startup.mode'='earliest-offset', 'topic'='existing-topic-2', 'value.fields-include'='EXCEPT_KEY', 'value.format'='json')
   - CREATE DATABASE IF NOT EXISTS `KAFKA` WITH ()
-  - CREATE TABLE IF NOT EXISTS `KAFKA`.`existing-topic-1` (`KEY` VARCHAR, `VALUE` BINARY) WITH ('connector'='kafka', 'properties.bootstrap.servers'='one-kafka-bootstrap.kafka.svc.cluster.local:9094', 'scan.startup.mode'='earliest-offset', 'topic'='existing-topic-1', 'value.format'='json')
+  - CREATE TABLE IF NOT EXISTS `KAFKA`.`existing-topic-1` (`KEY` VARCHAR, `VALUE` BINARY) WITH ('connector'='kafka', 'key.fields'='KEY', 'properties.bootstrap.servers'='one-kafka-bootstrap.kafka.svc.cluster.local:9094', 'scan.startup.mode'='earliest-offset', 'topic'='existing-topic-1', 'value.fields-include'='EXCEPT_KEY', 'value.format'='json')
   - INSERT INTO `KAFKA`.`existing-topic-1` (`KEY`, `VALUE`) SELECT * FROM `KAFKA`.`existing-topic-2`
   jarURI: file:///opt/hoptimator-flink-runner.jar
   parallelism: 1
diff --git a/hoptimator-operator/src/main/java/com/linkedin/hoptimator/operator/HoptimatorOperatorApp.java b/hoptimator-operator/src/main/java/com/linkedin/hoptimator/operator/HoptimatorOperatorApp.java
index 8ff0532b..48773b57 100644
--- a/hoptimator-operator/src/main/java/com/linkedin/hoptimator/operator/HoptimatorOperatorApp.java
+++ b/hoptimator-operator/src/main/java/com/linkedin/hoptimator/operator/HoptimatorOperatorApp.java
@@ -21,7 +21,6 @@
 import io.kubernetes.client.extended.controller.ControllerManager;
 import io.kubernetes.client.informer.SharedInformerFactory;
 import io.kubernetes.client.openapi.ApiClient;
-import io.kubernetes.client.util.Config;
 
 import com.linkedin.hoptimator.catalog.Resource;
 import com.linkedin.hoptimator.k8s.K8sApiEndpoints;
diff --git a/hoptimator-util/src/main/java/com/linkedin/hoptimator/util/ConfigService.java b/hoptimator-util/src/main/java/com/linkedin/hoptimator/util/ConfigService.java
index 4f49cd1e..04943c68 100644
--- a/hoptimator-util/src/main/java/com/linkedin/hoptimator/util/ConfigService.java
+++ b/hoptimator-util/src/main/java/com/linkedin/hoptimator/util/ConfigService.java
@@ -3,7 +3,6 @@
 import java.io.StringReader;
 import java.util.Properties;
 import java.util.ServiceLoader;
-import javax.annotation.Nullable;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/hoptimator-util/src/main/java/com/linkedin/hoptimator/util/planner/ScriptImplementor.java b/hoptimator-util/src/main/java/com/linkedin/hoptimator/util/planner/ScriptImplementor.java
index e5ff6bc7..4bc40c68 100644
--- a/hoptimator-util/src/main/java/com/linkedin/hoptimator/util/planner/ScriptImplementor.java
+++ b/hoptimator-util/src/main/java/com/linkedin/hoptimator/util/planner/ScriptImplementor.java
@@ -179,7 +179,7 @@ public void implement(SqlWriter w) {
   private static final SqlShuttle REMOVE_ROW_CONSTRUCTOR = new SqlShuttle() {
     @Override
     public SqlNode visit(SqlCall call) {
-      List<SqlNode> operands = call.getOperandList().stream().map(x -> x.accept(this)).collect(Collectors.toList());
+      List<SqlNode> operands = call.getOperandList().stream().map(x -> x == null ? x : x.accept(this)).collect(Collectors.toList());
       if ((call.getKind() == SqlKind.ROW || call.getKind() == SqlKind.COLUMN_LIST
           || call.getOperator() instanceof SqlRowOperator) && operands.size() > 1) {
         return IMPLIED_ROW_OPERATOR.createCall(call.getParserPosition(), operands);
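
Not part of the patch above: a minimal, illustrative Java sketch of the failure mode the ScriptImplementor change guards against. A parsed CASE expression with no ELSE branch is a SqlCall whose operand list contains nulls, so a SqlShuttle that calls accept() on every operand throws a NullPointerException; skipping null operands, as the patched line does, avoids it. The Calcite APIs used (SqlParser, SqlShuttle, SqlCall) are real, but the class name, sample expression, and simplified visit bodies are hypothetical and assume Calcite is on the classpath.

import java.util.List;
import java.util.stream.Collectors;

import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.util.SqlShuttle;

// Hypothetical demo class, not part of Hoptimator.
public class CaseNullOperandDemo {

  public static void main(String[] args) throws Exception {
    // A searched CASE with no ELSE: Calcite's SqlCase exposes null operands
    // (the missing value expression and the missing ELSE expression).
    SqlNode caseExpr = SqlParser.create("CASE WHEN x > 0 THEN 'pos' END").parseExpression();

    // Naive shuttle: visits every operand unconditionally, like the code before this patch.
    SqlShuttle naive = new SqlShuttle() {
      @Override
      public SqlNode visit(SqlCall call) {
        call.getOperandList().forEach(op -> op.accept(this));  // NullPointerException on a null operand
        return call;
      }
    };

    // Null-safe shuttle: skips null operands before rebuilding the call,
    // mirroring the null check added in ScriptImplementor above.
    SqlShuttle nullSafe = new SqlShuttle() {
      @Override
      public SqlNode visit(SqlCall call) {
        List<SqlNode> operands = call.getOperandList().stream()
            .map(x -> x == null ? x : x.accept(this))           // leave null operands alone
            .collect(Collectors.toList());
        return call.getOperator().createCall(call.getParserPosition(), operands);
      }
    };

    try {
      caseExpr.accept(naive);
    } catch (NullPointerException e) {
      System.out.println("naive shuttle failed: " + e);
    }
    SqlNode rewritten = caseExpr.accept(nullSafe);
    System.out.println("null-safe shuttle returned a " + rewritten.getClass().getSimpleName());
  }
}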