diff --git a/docs/content/docs/connectors/table/jdbc.md b/docs/content/docs/connectors/table/jdbc.md
index 056f2ab5..4785c97e 100644
--- a/docs/content/docs/connectors/table/jdbc.md
+++ b/docs/content/docs/connectors/table/jdbc.md
@@ -55,8 +55,8 @@ A driver dependency is also required to connect to a specified database. Here ar
| CrateDB | `io.crate` | `crate-jdbc` | [Download](https://repo1.maven.org/maven2/io/crate/crate-jdbc/) |
| Db2 | `com.ibm.db2.jcc` | `db2jcc` | [Download](https://www.ibm.com/support/pages/download-db2-fix-packs-version-db2-linux-unix-and-windows) |
| Trino | `io.trino` | `trino-jdbc` | [Download](https://repo1.maven.org/maven2/io/trino/trino-jdbc/) |
-| OceanBase | `com.oceanbase` | `oceanbase-client` | [Download](https://repo1.maven.org/maven2/com/oceanbase/oceanbase-client/) |
-
+| OceanBase | `com.oceanbase` | `oceanbase-client` | [Download](https://repo1.maven.org/maven2/com/oceanbase/oceanbase-client/) |
+| Snowflake | `net.snowflake` | `snowflake-jdbc` | [Download](https://repo1.maven.org/maven2/net/snowflake/snowflake-jdbc/) |

JDBC connector and drivers are not part of Flink's binary distribution. See how to link with them for cluster execution [here]({{< ref "docs/dev/configuration/overview" >}}).
@@ -433,6 +432,13 @@ As there is no standard syntax for upsert, the following table describes the dat
WHEN NOT MATCHED THEN INSERT (..)
VALUES (..)
+
+ Snowflake
+ MERGE INTO .. USING (..) ON (..)
+ WHEN MATCHED THEN UPDATE SET (..)
+ WHEN NOT MATCHED THEN INSERT (..)
+ VALUES (..)
+
+
@@ -670,6 +676,7 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
Trino type
OceanBase MySQL mode type
OceanBase Oracle mode type
+ Snowflake
Flink SQL type
@@ -684,6 +691,7 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
TINYINT
TINYINT
+ TINYINT
@@ -706,6 +714,7 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
SMALLINT
TINYINT UNSIGNED
+ SMALLINT
SMALLINT
@@ -728,6 +737,7 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
MEDIUMINT
SMALLINT UNSIGNED
+ INT
INT
@@ -748,6 +758,7 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
BIGINT
INT UNSIGNED
+ BIGINT
BIGINT
@@ -760,6 +771,7 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
BIGINT UNSIGNED
+ DECIMAL(20, 0)
@@ -778,6 +790,9 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
FLOAT
BINARY_FLOAT
+
+ REAL
+ FLOAT
FLOAT
@@ -796,6 +811,9 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
DOUBLE
DOUBLE
BINARY_DOUBLE
+
+ DOUBLE
+ DOUBLE PRECISION
DOUBLE
@@ -824,6 +842,7 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
FLOAT(s)
NUMBER(p, s)
+ NUMERIC(p, s)
DECIMAL(p, s)
@@ -841,6 +860,7 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
TINYINT(1)
BOOLEAN
+ BOOLEAN
DATE
@@ -853,6 +873,7 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
DATE
DATE
DATE
+ DATE
TIME [(p)]
@@ -864,6 +885,7 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
TIME_WITHOUT_TIME_ZONE
TIME [(p)]
DATE
+ DATE
TIME [(p)] [WITHOUT TIMEZONE]
@@ -880,6 +902,7 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
DATETIME [(p)]
TIMESTAMP [(p)] [WITHOUT TIMEZONE]
TIMESTAMP [(p)] [WITHOUT TIMEZONE]
+ TIMESTAMP [(p)] [WITHOUT TIMEZONE]
@@ -927,6 +950,14 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
NCHAR(n)
VARCHAR2(n)
CLOB
+
+ CHAR(n)
+ CHARACTER(n)
+ VARCHAR(n)
+ CHARACTER VARYING(n)
+ TEXT
+ STRING
+
STRING
@@ -951,7 +982,12 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl
BLOB
RAW(s)
- BLOB
+ BLOB
+
+
+ BINARY(n)
+ VARBINARY(n)
+ BYTES @@ -965,6 +1001,7 @@ Flink supports connect to several databases which uses dialect like MySQL, Oracl ARRAY + ARRAY diff --git a/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/snowflake/dialect/SnowflakeDialectTypeTest.java b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/snowflake/dialect/SnowflakeDialectTypeTest.java new file mode 100644 index 00000000..0adbbab3 --- /dev/null +++ b/flink-connector-jdbc/src/test/java/org/apache/flink/connector/jdbc/databases/snowflake/dialect/SnowflakeDialectTypeTest.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.jdbc.databases.snowflake.dialect; + +import org.apache.flink.connector.jdbc.dialect.JdbcDialectTypeTest; + +import java.util.Arrays; +import java.util.List; + +/** Tests for all DataTypes and Dialects of JDBC Snowflake connector. */ +public class SnowflakeDialectTypeTest extends JdbcDialectTypeTest { + @Override + protected String testDialect() { + return "snowflake"; + } + + @Override + protected List testData() { + return Arrays.asList( + createTestItem("BOOLEAN"), + + createTestItem("TINYINT"), + createTestItem("SMALLINT"), + createTestItem("BIGINT"), + createTestItem("INT"), + createTestItem("INTEGER"), + + createTestItem("DECIMAL"), + createTestItem("NUMERIC"), + + createTestItem("DOUBLE"), + createTestItem("FLOAT"), + + createTestItem("DECIMAL(10, 4)"), + createTestItem("DECIMAL(38, 18)"), + createTestItem("VARCHAR"), + createTestItem("CHAR"), + createTestItem("VARBINARY"), + createTestItem("DATE"), + createTestItem("TIME"), + createTestItem("TIMESTAMP(3)"), + createTestItem("TIMESTAMP WITHOUT TIME ZONE"), + createTestItem("TIMESTAMP(1) WITHOUT TIME ZONE"), + // Not valid data + createTestItem("TIMESTAMP_LTZ(3)", "Unsupported type:TIMESTAMP_LTZ(3)")); + } +}
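
For anyone trying the patch out locally, below is a minimal, illustrative job that registers a JDBC table backed by Snowflake with the standard connector options documented on this page ('connector', 'url', 'table-name', 'username', 'password'). The account locator, credentials, database, and table layout are placeholders, and the URL follows Snowflake's usual `jdbc:snowflake://` form; the connector selects the dialect from the JDBC URL prefix.

```java
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class SnowflakeJdbcTableExample {
    public static void main(String[] args) throws Exception {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());

        // Register a table over an existing Snowflake table. Account, credentials,
        // database, and schema below are placeholders.
        tEnv.executeSql(
                "CREATE TABLE orders (\n"
                        + "  order_id BIGINT,\n"
                        + "  customer STRING,\n"
                        + "  amount DECIMAL(10, 2),\n"
                        + "  PRIMARY KEY (order_id) NOT ENFORCED\n"
                        + ") WITH (\n"
                        + "  'connector' = 'jdbc',\n"
                        + "  'url' = 'jdbc:snowflake://<account>.snowflakecomputing.com/?db=MY_DB&schema=PUBLIC',\n"
                        + "  'table-name' = 'ORDERS',\n"
                        + "  'username' = '<user>',\n"
                        + "  'password' = '<password>'\n"
                        + ")");

        // Because a primary key is declared, writes are applied in upsert fashion
        // using the MERGE-based statement documented in the upsert table above.
        tEnv.executeSql("INSERT INTO orders VALUES (1, 'alice', 19.99)").await();
    }
}
```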
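The upsert table above documents the MERGE-based statement used for Snowflake. As a rough, standalone sketch of how such a statement can be assembled from field names and unique-key fields (illustrative class and method names only, not the connector's actual dialect code; the `:field` placeholders stand for named parameters):

```java
import java.util.Arrays;
import java.util.stream.Collectors;

public class SnowflakeMergeSketch {

    /** Assembles a MERGE statement following the documented shape; purely illustrative. */
    public static String buildMergeStatement(
            String tableName, String[] fieldNames, String[] uniqueKeyFields) {
        String sourceSelect =
                Arrays.stream(fieldNames)
                        .map(f -> ":" + f + " AS " + f)
                        .collect(Collectors.joining(", "));
        String onClause =
                Arrays.stream(uniqueKeyFields)
                        .map(f -> "t." + f + " = s." + f)
                        .collect(Collectors.joining(" AND "));
        String updateClause =
                Arrays.stream(fieldNames)
                        .map(f -> f + " = s." + f)
                        .collect(Collectors.joining(", "));
        String insertColumns = String.join(", ", fieldNames);
        String insertValues =
                Arrays.stream(fieldNames).map(f -> "s." + f).collect(Collectors.joining(", "));
        return "MERGE INTO " + tableName + " t"
                + " USING (SELECT " + sourceSelect + ") s"
                + " ON (" + onClause + ")"
                + " WHEN MATCHED THEN UPDATE SET " + updateClause
                + " WHEN NOT MATCHED THEN INSERT (" + insertColumns + ")"
                + " VALUES (" + insertValues + ")";
    }

    public static void main(String[] args) {
        // Prints a MERGE statement for a three-column table keyed on "id".
        System.out.println(
                buildMergeStatement(
                        "orders", new String[] {"id", "name", "score"}, new String[] {"id"}));
    }
}
```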
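For the new data type mapping column, a compact, hand-written restatement of most of the documented Snowflake-to-Flink pairs as Table API `DataTypes` (a reference sketch only, not the connector's converter; the DECIMAL precision/scale default here is an assumption, since a real mapping carries precision and scale over from the column metadata):

```java
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

public class SnowflakeTypeMappingSketch {

    /** Maps a Snowflake type name to a Flink SQL type, mirroring the table above. */
    public static DataType toFlinkType(String snowflakeType) {
        switch (snowflakeType.toUpperCase()) {
            case "TINYINT":
                return DataTypes.TINYINT();
            case "SMALLINT":
                return DataTypes.SMALLINT();
            case "INT":
            case "INTEGER":
                return DataTypes.INT();
            case "BIGINT":
                return DataTypes.BIGINT();
            case "REAL":
            case "FLOAT":
                return DataTypes.FLOAT();
            case "DOUBLE":
            case "DOUBLE PRECISION":
                return DataTypes.DOUBLE();
            case "NUMERIC":
            case "DECIMAL":
                // Assumed default; precision and scale normally come from column metadata.
                return DataTypes.DECIMAL(38, 18);
            case "BOOLEAN":
                return DataTypes.BOOLEAN();
            case "DATE":
                return DataTypes.DATE();
            case "TIMESTAMP":
                return DataTypes.TIMESTAMP();
            case "CHAR":
            case "CHARACTER":
            case "VARCHAR":
            case "CHARACTER VARYING":
            case "TEXT":
            case "STRING":
                return DataTypes.STRING();
            case "BINARY":
            case "VARBINARY":
                return DataTypes.BYTES();
            default:
                throw new UnsupportedOperationException(
                        "Unsupported Snowflake type: " + snowflakeType);
        }
    }
}
```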