Fix case statement exception (#100)
* Fix case statement

* Various checkstyle complaints
jogrogan authored Feb 7, 2025
1 parent bada97a commit aeb8b20
Showing 11 changed files with 14 additions and 21 deletions.
2 changes: 2 additions & 0 deletions deploy/samples/kafkadb.yaml
@@ -37,6 +37,8 @@ spec:
properties.bootstrap.servers = one-kafka-bootstrap.kafka.svc.cluster.local:9094
value.format = json
scan.startup.mode = earliest-offset
+ key.fields = KEY
+ value.fields-include = EXCEPT_KEY
---

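For context, key.fields and value.fields-include are standard options of the Flink Kafka SQL connector: key.fields names the columns that map to the Kafka record key, and EXCEPT_KEY keeps those key columns out of the value format. With the two added template lines, the generated connector DDL should take roughly the following shape, mirroring the expected output in hoptimator-kafka/src/test/resources/kafka-ddl.id further down (reformatted here for readability only):

  CREATE TABLE IF NOT EXISTS `KAFKA`.`existing-topic-1` (`KEY` VARCHAR, `VALUE` BINARY) WITH (
    'connector' = 'kafka',
    'key.fields' = 'KEY',
    'properties.bootstrap.servers' = 'one-kafka-bootstrap.kafka.svc.cluster.local:9094',
    'scan.startup.mode' = 'earliest-offset',
    'topic' = 'existing-topic-1',
    'value.fields-include' = 'EXCEPT_KEY',
    'value.format' = 'json'
  )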
2 changes: 1 addition & 1 deletion deploy/samples/venicedb.yaml
@@ -23,4 +23,4 @@ spec:
key.fields-prefix = {{keyPrefix:}}
key.fields = {{keys:KEY}}
key.type = {{keyType:PRIMITIVE}}
- value.fields-include: EXCEPT_KEY
+ value.fields-include = EXCEPT_KEY
Expand Up @@ -2,7 +2,6 @@

import java.util.Collection;
import java.util.Collections;
import java.util.Properties;

import com.linkedin.hoptimator.Catalog;
import com.linkedin.hoptimator.CatalogProvider;
@@ -1,8 +1,7 @@
package com.linkedin.hoptimator.jdbc;

import java.sql.Connection;
- import java.sql.Statement;
import java.sql.SQLException;
+ import java.sql.Statement;
import java.util.Properties;

import org.apache.calcite.jdbc.CalciteConnection;
@@ -24,15 +23,15 @@ public HoptimatorConnection(CalciteConnection connection, Properties connectionP

@Override
public Statement createStatement() throws SQLException {
- return connection.createStatement();
+ return connection.createStatement();
}

public Properties connectionProperties() {
return connectionProperties;
}

public CalcitePrepare.Context createPrepareContext() {
- return connection.createPrepareContext();
+ return connection.createPrepareContext();
}

public CalciteConnection calciteConnection() {
@@ -173,7 +173,7 @@ public void execute(SqlCreateMaterializedView create, CalcitePrepare.Context con
MaterializedViewTable materializedViewTable = new MaterializedViewTable(viewTableMacro);
RelDataType viewRowType = materializedViewTable.getRowType(typeFactory);

- // Suport "partial views", i.e. CREATE VIEW FOO$BAR, where the view name
+ // Support "partial views", i.e. CREATE VIEW FOO$BAR, where the view name
// is "foo-bar" and the sink is just FOO.
String sinkName = viewName.split("\\$", 2)[0];
List<String> sinkPath = new ArrayList<>();
@@ -1,8 +1,8 @@
package com.linkedin.hoptimator.k8s;

- import java.util.Properties;
import java.sql.SQLException;
import java.sql.Wrapper;
+ import java.util.Properties;

import org.apache.calcite.schema.SchemaPlus;
import org.slf4j.Logger;
@@ -1,19 +1,14 @@
package com.linkedin.hoptimator.k8s;

import java.io.File;
- import java.io.InputStream;
import java.io.IOException;
+ import java.io.InputStream;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import java.util.Optional;
import java.util.Properties;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.kubernetes.client.apimachinery.GroupVersion;
import io.kubernetes.client.common.KubernetesListObject;
import io.kubernetes.client.common.KubernetesObject;
@@ -86,12 +81,12 @@ public K8sContext(Properties connectionProperties) {
throw new RuntimeException(e);
}
}

if (server != null) {
info += " Accessing " + server + ".";
this.apiClient.setBasePath(server);
}

if (truststore != null) {
try {
InputStream in = Files.newInputStream(Paths.get(truststore));
4 changes: 2 additions & 2 deletions hoptimator-kafka/src/test/resources/kafka-ddl.id
@@ -13,9 +13,9 @@ spec:
entryClass: com.linkedin.hoptimator.flink.runner.FlinkRunner
args:
- CREATE DATABASE IF NOT EXISTS `KAFKA` WITH ()
- - CREATE TABLE IF NOT EXISTS `KAFKA`.`existing-topic-2` (`KEY` VARCHAR, `VALUE` BINARY) WITH ('connector'='kafka', 'properties.bootstrap.servers'='one-kafka-bootstrap.kafka.svc.cluster.local:9094', 'scan.startup.mode'='earliest-offset', 'topic'='existing-topic-2', 'value.format'='json')
+ - CREATE TABLE IF NOT EXISTS `KAFKA`.`existing-topic-2` (`KEY` VARCHAR, `VALUE` BINARY) WITH ('connector'='kafka', 'key.fields'='KEY', 'properties.bootstrap.servers'='one-kafka-bootstrap.kafka.svc.cluster.local:9094', 'scan.startup.mode'='earliest-offset', 'topic'='existing-topic-2', 'value.fields-include'='EXCEPT_KEY', 'value.format'='json')
- CREATE DATABASE IF NOT EXISTS `KAFKA` WITH ()
- - CREATE TABLE IF NOT EXISTS `KAFKA`.`existing-topic-1` (`KEY` VARCHAR, `VALUE` BINARY) WITH ('connector'='kafka', 'properties.bootstrap.servers'='one-kafka-bootstrap.kafka.svc.cluster.local:9094', 'scan.startup.mode'='earliest-offset', 'topic'='existing-topic-1', 'value.format'='json')
+ - CREATE TABLE IF NOT EXISTS `KAFKA`.`existing-topic-1` (`KEY` VARCHAR, `VALUE` BINARY) WITH ('connector'='kafka', 'key.fields'='KEY', 'properties.bootstrap.servers'='one-kafka-bootstrap.kafka.svc.cluster.local:9094', 'scan.startup.mode'='earliest-offset', 'topic'='existing-topic-1', 'value.fields-include'='EXCEPT_KEY', 'value.format'='json')
- INSERT INTO `KAFKA`.`existing-topic-1` (`KEY`, `VALUE`) SELECT * FROM `KAFKA`.`existing-topic-2`
jarURI: file:///opt/hoptimator-flink-runner.jar
parallelism: 1
@@ -21,7 +21,6 @@
import io.kubernetes.client.extended.controller.ControllerManager;
import io.kubernetes.client.informer.SharedInformerFactory;
import io.kubernetes.client.openapi.ApiClient;
import io.kubernetes.client.util.Config;

import com.linkedin.hoptimator.catalog.Resource;
import com.linkedin.hoptimator.k8s.K8sApiEndpoints;
@@ -3,7 +3,6 @@
import java.io.StringReader;
import java.util.Properties;
import java.util.ServiceLoader;
import javax.annotation.Nullable;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -179,7 +179,7 @@ public void implement(SqlWriter w) {
private static final SqlShuttle REMOVE_ROW_CONSTRUCTOR = new SqlShuttle() {
@Override
public SqlNode visit(SqlCall call) {
- List<SqlNode> operands = call.getOperandList().stream().map(x -> x.accept(this)).collect(Collectors.toList());
+ List<SqlNode> operands = call.getOperandList().stream().map(x -> x == null ? x : x.accept(this)).collect(Collectors.toList());
if ((call.getKind() == SqlKind.ROW || call.getKind() == SqlKind.COLUMN_LIST
|| call.getOperator() instanceof SqlRowOperator) && operands.size() > 1) {
return IMPLIED_ROW_OPERATOR.createCall(call.getParserPosition(), operands);
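This null check appears to be the functional fix behind the commit title. In Calcite, a CASE call's operand list can contain null entries (for example, the switched value of a searched CASE, or an omitted ELSE branch), so mapping accept over every operand could throw a NullPointerException; the change now passes null operands through unchanged. As a rough illustration only (the CASE expression below is made up, not taken from this repository), a pipeline of this shape against the sample Kafka tables would exercise that path:

  INSERT INTO `KAFKA`.`existing-topic-1` (`KEY`, `VALUE`)
  SELECT CASE WHEN `KEY` IS NULL THEN 'n/a' ELSE `KEY` END AS `KEY`, `VALUE`
  FROM `KAFKA`.`existing-topic-2`;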
