[feature][datasource] Main add datasource-jdbc-snowflake #160

Open · wants to merge 3 commits into base: main
@@ -102,6 +102,11 @@
<artifactId>datasource-jdbc-db2</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.seatunnel</groupId>
<artifactId>datasource-jdbc-snowflake</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>

</project>
@@ -14,7 +14,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.seatunnel.datasource.plugin.db2.jdbc;

import org.apache.seatunnel.api.configuration.util.OptionRule;
pom.xml for the new datasource-jdbc-snowflake module
@@ -0,0 +1,81 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.seatunnel</groupId>
<artifactId>seatunnel-datasource-plugins</artifactId>
<version>${revision}</version>
</parent>

<artifactId>datasource-jdbc-snowflake</artifactId>

<properties>
<snowflake-jdbc.version>3.14.4</snowflake-jdbc.version>
</properties>

<dependencies>
<dependency>
<groupId>org.apache.seatunnel</groupId>
<artifactId>datasource-plugins-api</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/com.google.auto.service/auto-service -->
<dependency>
<groupId>com.google.auto.service</groupId>
<artifactId>auto-service</artifactId>
</dependency>
<dependency>
<groupId>org.apache.seatunnel</groupId>
<artifactId>seatunnel-api</artifactId>
<scope>provided</scope>
</dependency>

<!-- driver -->
<!-- https://mvnrepository.com/artifact/net.snowflake/snowflake-jdbc -->
<dependency>
<groupId>net.snowflake</groupId>
<artifactId>snowflake-jdbc</artifactId>
<version>${snowflake-jdbc.version}</version>
</dependency>

</dependencies>

<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<configuration>
<skip>${e2e.dependency.skip}</skip>
<appendOutput>true</appendOutput>
</configuration>
</plugin>

<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
</plugin>
</plugins>
</build>

</project>
SnowFlakeDataSourceConfig.java (new file)
@@ -0,0 +1,54 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.seatunnel.datasource.plugin.snowflake.jdbc;

import org.apache.seatunnel.api.configuration.util.OptionRule;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginInfo;
import org.apache.seatunnel.datasource.plugin.api.DatasourcePluginTypeEnum;

import com.google.common.collect.Sets;

import java.util.Set;

public class SnowFlakeDataSourceConfig {

public static final String PLUGIN_NAME = "JDBC-SnowFlake";

public static final DataSourcePluginInfo SNOWFLAKE_DATASOURCE_PLUGIN_INFO =
DataSourcePluginInfo.builder()
.name(PLUGIN_NAME)
.icon(PLUGIN_NAME)
.version("1.0.0")
.type(DatasourcePluginTypeEnum.DATABASE.getCode())
.build();

public static final Set<String> SNOWFLAKE_SYSTEM_DATABASES =
Sets.newHashSet(
"information_schema", "mysql", "performance_schema", "sys", "SNOWFLAKE");

public static final OptionRule OPTION_RULE =
OptionRule.builder()
.required(SnowFlakeOptionRule.URL, SnowFlakeOptionRule.DRIVER)
.optional(SnowFlakeOptionRule.USER, SnowFlakeOptionRule.PASSWORD)
.build();

public static final OptionRule METADATA_RULE =
OptionRule.builder()
.required(SnowFlakeOptionRule.DATABASE, SnowFlakeOptionRule.TABLE)
.build();
}
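For orientation, the rule above requires a JDBC URL and driver class and treats the credentials as optional. Below is a minimal sketch of the request parameters the channel methods consume, assuming it sits in the same package as SnowFlakeOptionRule (or imports it); the account, warehouse and credentials are hypothetical placeholders, and net.snowflake.client.jdbc.SnowflakeDriver is the stock Snowflake JDBC driver class.

import java.util.HashMap;
import java.util.Map;

// Illustrative only; not part of this PR.
public class SnowFlakeRequestParamsExample {
    static Map<String, String> exampleParams() {
        Map<String, String> requestParams = new HashMap<>();
        // Keys come from SnowFlakeOptionRule; values are placeholders.
        requestParams.put(SnowFlakeOptionRule.URL.key(),
                "jdbc:snowflake://myaccount.snowflakecomputing.com/?warehouse=MY_WH");
        requestParams.put(SnowFlakeOptionRule.DRIVER.key(),
                "net.snowflake.client.jdbc.SnowflakeDriver");
        requestParams.put(SnowFlakeOptionRule.USER.key(), "my_user");         // optional
        requestParams.put(SnowFlakeOptionRule.PASSWORD.key(), "my_password"); // optional
        return requestParams;
    }
}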
SnowFlakeJdbcDataSourceChannel.java (new file)
@@ -0,0 +1,200 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.seatunnel.datasource.plugin.snowflake.jdbc;

import org.apache.seatunnel.api.configuration.util.OptionRule;
import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
import org.apache.seatunnel.datasource.plugin.api.model.TableField;

import org.apache.commons.lang3.StringUtils;

import lombok.NonNull;

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

import static com.google.common.base.Preconditions.checkNotNull;

public class SnowFlakeJdbcDataSourceChannel implements DataSourceChannel {

@Override
public OptionRule getDataSourceOptions(@NonNull String pluginName) {
return SnowFlakeDataSourceConfig.OPTION_RULE;
}

@Override
public OptionRule getDatasourceMetadataFieldsByDataSourceName(@NonNull String pluginName) {
return SnowFlakeDataSourceConfig.METADATA_RULE;
}

public List<String> getTables(
String pluginName,
Map<String, String> requestParams,
String database,
Map<String, String> options) {
List<String> schemaTableNames = new ArrayList<>();
try {
Class.forName(requestParams.get(SnowFlakeOptionRule.DRIVER.key()));
try (Connection connection = getConnection(requestParams)) {
DatabaseMetaData metaData = connection.getMetaData();
try (ResultSet resultSet =
metaData.getTables(database, null, null, new String[] {"TABLE"})) {
while (resultSet.next()) {
String schemaName = resultSet.getString("TABLE_SCHEM");
String tableName = resultSet.getString("TABLE_NAME");
if (StringUtils.isNotBlank(tableName)) {
schemaTableNames.add(schemaName + "." + tableName);
}
}
}
}
} catch (ClassNotFoundException | SQLException e) {
// rethrow as the plugin's own exception type, consistent with the other methods
throw new DataSourcePluginException("Get tables failed", e);
}
return schemaTableNames;
}

@Override
public List<String> getDatabases(
@NonNull String pluginName, @NonNull Map<String, String> requestParams) {
List<String> dbNames = new ArrayList<>();
// try-with-resources so the connection, statement and result set are always closed
try (Connection connection = getConnection(requestParams);
PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
ResultSet re = statement.executeQuery()) {
// filter out system databases
while (re.next()) {
String dbName = re.getString("name");
if (StringUtils.isNotBlank(dbName)
&& !SnowFlakeDataSourceConfig.SNOWFLAKE_SYSTEM_DATABASES.contains(dbName)) {
dbNames.add(dbName);
}
}
return dbNames;
} catch (SQLException | ClassNotFoundException e) {
throw new DataSourcePluginException("Get databases failed", e);
}
}

@Override
public boolean checkDataSourceConnectivity(
@NonNull String pluginName, @NonNull Map<String, String> requestParams) {
try (Connection ignored = getConnection(requestParams)) {
return true;
} catch (Exception e) {
throw new DataSourcePluginException("check jdbc connectivity failed", e);
}
}

@Override
public List<TableField> getTableFields(
@NonNull String pluginName,
@NonNull Map<String, String> requestParams,
@NonNull String database,
@NonNull String table) {
List<TableField> tableFields = new ArrayList<>();
try (Connection connection = getConnection(requestParams, database)) {
DatabaseMetaData metaData = connection.getMetaData();
String primaryKey = getPrimaryKey(metaData, database, table);
String[] split = table.split("\\.");
if (split.length != 2) {
throw new DataSourcePluginException(
"Snowflake table name should be composed of schemaName.tableName");
}
try (ResultSet resultSet = metaData.getColumns(database, split[0], split[1], null)) {
while (resultSet.next()) {
TableField tableField = new TableField();
String columnName = resultSet.getString("COLUMN_NAME");
tableField.setPrimaryKey(false);
if (StringUtils.isNotBlank(primaryKey) && primaryKey.equals(columnName)) {
tableField.setPrimaryKey(true);
}
tableField.setName(columnName);
tableField.setType(resultSet.getString("TYPE_NAME"));
tableField.setComment(resultSet.getString("REMARKS"));
// DatabaseMetaData.getColumns reports IS_NULLABLE as "YES"/"NO", not "true"/"false"
String nullable = resultSet.getString("IS_NULLABLE");
tableField.setNullable("YES".equalsIgnoreCase(nullable));
tableFields.add(tableField);
}
}
} catch (SQLException | ClassNotFoundException e) {
throw new DataSourcePluginException("get table fields failed", e);
}
return tableFields;
}

@Override
public Map<String, List<TableField>> getTableFields(
@NonNull String pluginName,
@NonNull Map<String, String> requestParams,
@NonNull String database,
@NonNull List<String> tables) {
return tables.parallelStream()
.collect(
Collectors.toMap(
Function.identity(),
table ->
getTableFields(
pluginName, requestParams, database, table)));
}

private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
throws SQLException {
// close the result set even when a primary key is found early
try (ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName)) {
while (primaryKeysInfo.next()) {
return primaryKeysInfo.getString("COLUMN_NAME");
}
}
return null;
}

private Connection getConnection(Map<String, String> requestParams)
throws SQLException, ClassNotFoundException {
return getConnection(requestParams, null);
}

private static Connection getConnection(Map<String, String> requestParams, String databaseName)
throws SQLException, ClassNotFoundException {
checkNotNull(requestParams.get(SnowFlakeOptionRule.DRIVER.key()));
checkNotNull(requestParams.get(SnowFlakeOptionRule.URL.key()), "Jdbc url cannot be null");
String url =
replaceDatabase(requestParams.get(SnowFlakeOptionRule.URL.key()), databaseName);
if (requestParams.containsKey(SnowFlakeOptionRule.USER.key())) {
String username = requestParams.get(SnowFlakeOptionRule.USER.key());
String password = requestParams.get(SnowFlakeOptionRule.PASSWORD.key());
return DriverManager.getConnection(url, username, password);
}
return DriverManager.getConnection(url);
}
// Placeholder for the JdbcUtils.replaceDatabase method
private static String replaceDatabase(String url, String databaseName) {
// Implement database name replacement logic
return url; // Return the original URL or modified URL
}
}
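The replaceDatabase placeholder above returns the URL unchanged, so the databaseName argument is currently ignored when a connection is scoped to a database. One possible way to fill it in is sketched below, under the assumption that the target database is carried in the db=... query parameter of the Snowflake JDBC URL; this is an illustration, not part of the PR, and SeaTunnel's shared JdbcUtils may already provide an equivalent helper.

// Sketch: rewrite or append the db=<name> query parameter of a Snowflake JDBC URL.
private static String replaceDatabase(String url, String databaseName) {
    if (databaseName == null || databaseName.isEmpty()) {
        return url;
    }
    if (url.matches("(?i).*[?&]db=[^&]*.*")) {
        // A db parameter is already present: swap in the requested database.
        return url.replaceAll("(?i)([?&]db=)[^&]*", "$1" + databaseName);
    }
    // No db parameter yet: append one with the correct separator.
    return url + (url.contains("?") ? "&" : "?") + "db=" + databaseName;
}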