diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 20a840fcc289f..1e90ce1e32eb3 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -340,6 +340,7 @@ jobs:
!:trino-delta-lake,
!:trino-docs,
!:trino-druid,
+ !:trino-duckdb,
!:trino-elasticsearch,
!:trino-exasol,
!:trino-faulttolerant-tests,
@@ -450,6 +451,7 @@ jobs:
- { modules: plugin/trino-delta-lake, profile: cloud-tests }
- { modules: plugin/trino-delta-lake, profile: fte-tests }
- { modules: plugin/trino-druid }
+ - { modules: plugin/trino-duckdb }
- { modules: plugin/trino-elasticsearch }
- { modules: plugin/trino-exasol }
- { modules: plugin/trino-google-sheets }
diff --git a/core/trino-server/src/main/provisio/trino.xml b/core/trino-server/src/main/provisio/trino.xml
index 9b96d3d97f2e1..789f056473b10 100644
--- a/core/trino-server/src/main/provisio/trino.xml
+++ b/core/trino-server/src/main/provisio/trino.xml
@@ -72,6 +72,12 @@
+
+    <artifactSet to="plugin/duckdb">
+        <artifact id="${project.groupId}:trino-duckdb:zip:${project.version}">
+            <unpack />
+        </artifact>
+    </artifactSet>
diff --git a/docs/src/main/sphinx/connector.md b/docs/src/main/sphinx/connector.md
index e3f53cc935a6d..bfd7d7a27525c 100644
--- a/docs/src/main/sphinx/connector.md
+++ b/docs/src/main/sphinx/connector.md
@@ -14,6 +14,7 @@ Cassandra
ClickHouse
Delta Lake
Druid
+DuckDB
Elasticsearch
Exasol
Faker
diff --git a/docs/src/main/sphinx/connector/duckdb.md b/docs/src/main/sphinx/connector/duckdb.md
new file mode 100644
index 0000000000000..69af660f1b28e
--- /dev/null
+++ b/docs/src/main/sphinx/connector/duckdb.md
@@ -0,0 +1,207 @@
+# DuckDB connector
+
+```{raw} html
+<img src="../_static/img/duckdb.png" class="connector-logo">
+```
+
+The DuckDB connector allows querying and creating tables in an external
+[DuckDB](https://duckdb.org/) instance. This can be used to join data between
+different systems like DuckDB and Hive, or between two different
+DuckDB instances.
+
+## Configuration
+
+To configure the DuckDB connector, create a catalog properties file
+in `etc/catalog` named, for example, `example.properties`, to
+mount the DuckDB connector as the `duckdb` catalog.
+Create the file with the following contents, replacing the
+connection properties as appropriate for your setup:
+
+```none
+connector.name=duckdb
+connection-url=jdbc:duckdb://
+connection-user=root
+connection-password=secret
+```
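+
+The `connection-url` parameter uses the JDBC URL syntax of the [DuckDB JDBC
+driver](https://duckdb.org/docs/api/java). For example, the following value
+connects to a persistent database file instead of an in-memory database; the
+path is only an illustration:
+
+```none
+connection-url=jdbc:duckdb:/opt/duckdb/example.db
+```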
+
+### Multiple DuckDB servers
+
+The DuckDB connector can only access a single database within
+a DuckDB instance. If you have multiple DuckDB databases, or want
+to connect to multiple DuckDB instances, you must configure a
+separate instance of the DuckDB connector for each one.
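+
+For example, to expose two DuckDB databases as the catalogs `sales` and
+`analytics`, create two catalog files in `etc/catalog`; the file names and
+database paths are only illustrations:
+
+```none
+# etc/catalog/sales.properties
+connector.name=duckdb
+connection-url=jdbc:duckdb:/opt/duckdb/sales.db
+```
+
+```none
+# etc/catalog/analytics.properties
+connector.name=duckdb
+connection-url=jdbc:duckdb:/opt/duckdb/analytics.db
+```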
+
+(duckdb-type-mapping)=
+## Type mapping
+
+Because Trino and DuckDB each support types that the other does not, this
+connector {ref}`modifies some types <duckdb-type-mapping>` when reading or
+writing data. Data types may not map the same way in both directions between
+Trino and the data source. Refer to the following sections for type mapping in
+each direction.
+
+List of [DuckDB data types](https://duckdb.org/docs/sql/data_types/overview.html).
+
+### DuckDB type to Trino type mapping
+
+The connector maps DuckDB types to the corresponding Trino types following
+this table:
+
+:::{list-table} DuckDB type to Trino type mapping
+:widths: 30, 30, 40
+:header-rows: 1
+
+* - DuckDB type
+ - Trino type
+ - Notes
+* - `BOOLEAN`
+ - `BOOLEAN`
+ -
+* - `TINYINT`
+ - `TINYINT`
+ -
+* - `SMALLINT`
+ - `SMALLINT`
+ -
+* - `INTEGER`
+ - `INTEGER`
+ -
+* - `BIGINT`
+ - `BIGINT`
+ -
+* - `FLOAT`
+ - `REAL`
+ -
+* - `DOUBLE`
+ - `DOUBLE`
+ -
+* - `DECIMAL`
+ - `DECIMAL`
+ - Default precision and scale are (18,3).
+* - `VARCHAR`
+ - `VARCHAR`
+ -
+* - `DATE`
+ - `DATE`
+ -
+:::
+
+No other types are supported.
+
+### Trino type to DuckDB type mapping
+
+The connector maps Trino types to the corresponding DuckDB types following
+this table:
+
+:::{list-table} Trino type to DuckDB type mapping
+:widths: 30, 30, 40
+:header-rows: 1
+
+* - Trino type
+ - DuckDB type
+ - Notes
+* - `BOOLEAN`
+ - `BOOLEAN`
+ -
+* - `TINYINT`
+ - `TINYINT`
+ -
+* - `SMALLINT`
+ - `SMALLINT`
+ -
+* - `INTEGER`
+ - `INTEGER`
+ -
+* - `BIGINT`
+ - `BIGINT`
+ -
+* - `REAL`
+ - `REAL`
+ -
+* - `DOUBLE`
+ - `DOUBLE`
+ -
+* - `DECIMAL`
+ - `DECIMAL`
+ -
+* - `CHAR`
+ - `VARCHAR`
+ -
+* - `VARCHAR`
+ - `VARCHAR`
+ -
+* - `DATE`
+ - `DATE`
+ -
+:::
+
+No other types are supported.
+
+```{include} jdbc-type-mapping.fragment
+```
+
+(duckdb-sql-support)=
+## SQL support
+
+The connector provides read access and write access to data and metadata in
+a DuckDB database. In addition to the {ref}`globally available
+<sql-globally-available>` and {ref}`read operation <sql-read-operations>`
+statements, the connector supports the following features, with a short
+example after the list:
+
+- {doc}`/sql/insert`
+- {doc}`/sql/delete`
+- {doc}`/sql/truncate`
+- {doc}`/sql/create-table`
+- {doc}`/sql/create-table-as`
+- {doc}`/sql/drop-table`
+- {doc}`/sql/alter-table`
+- {doc}`/sql/create-schema`
+- {doc}`/sql/drop-schema`
+
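+As a short example, the following statements create a table in the `example`
+catalog, add rows with `INSERT`, and drop the table again. The schema and
+table names are only illustrations; `main` is the default schema of a DuckDB
+database:
+
+```
+CREATE TABLE example.main.points (x bigint, y bigint);
+INSERT INTO example.main.points VALUES (1, 2), (3, 4);
+DROP TABLE example.main.points;
+```
+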
+### Procedures
+
+```{include} jdbc-procedures-flush.fragment
+```
+```{include} procedures-execute.fragment
+```
+
+### Table functions
+
+The connector provides specific [table functions](/functions/table) to
+access DuckDB.
+
+(duckdb-query-function)=
+#### `query(varchar) -> table`
+
+The `query` function allows you to query the underlying database directly. It
+requires syntax native to DuckDB, because the full query is pushed down and
+processed in DuckDB. This can be useful for accessing native features which
+are not available in Trino or for improving query performance in situations
+where running a query natively may be faster.
+
+Find details about the SQL support of DuckDB, including the supported
+statements and functions that you can use in the query, in the [DuckDB SQL
+Command Reference](https://duckdb.org/docs/sql/query_syntax/select).
+
+```{include} query-passthrough-warning.fragment
+```
+
+As a simple example, query the `example` catalog and select an entire table:
+
+```
+SELECT
+ *
+FROM
+ TABLE(
+ example.system.query(
+ query => 'SELECT
+ *
+ FROM
+ tpch.nation'
+ )
+ );
+```
+
+```{include} query-table-function-ordering.fragment
+```
diff --git a/docs/src/main/sphinx/static/img/duckdb.png b/docs/src/main/sphinx/static/img/duckdb.png
new file mode 100644
index 0000000000000..88ea6a209750b
Binary files /dev/null and b/docs/src/main/sphinx/static/img/duckdb.png differ
diff --git a/plugin/trino-duckdb/pom.xml b/plugin/trino-duckdb/pom.xml
new file mode 100644
index 0000000000000..81b5b33f1b046
--- /dev/null
+++ b/plugin/trino-duckdb/pom.xml
@@ -0,0 +1,206 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>io.trino</groupId>
+        <artifactId>trino-root</artifactId>
+        <version>466-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>trino-duckdb</artifactId>
+    <packaging>trino-plugin</packaging>
+    <description>Trino - DuckDB Connector</description>
+
+    <properties>
+        <air.compiler.fail-warnings>true</air.compiler.fail-warnings>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.inject</groupId>
+            <artifactId>guice</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>configuration</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-base-jdbc</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-plugin-toolkit</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.duckdb</groupId>
+            <artifactId>duckdb_jdbc</artifactId>
+            <version>1.1.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-annotations</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>slice</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.opentelemetry</groupId>
+            <artifactId>opentelemetry-api</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.opentelemetry</groupId>
+            <artifactId>opentelemetry-context</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-spi</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.openjdk.jol</groupId>
+            <artifactId>jol-core</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>log</artifactId>
+            <scope>runtime</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>log-manager</artifactId>
+            <scope>runtime</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>units</artifactId>
+            <scope>runtime</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>junit-extensions</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>testing</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-base-jdbc</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-main</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-main</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-plugin-toolkit</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-testing</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-testing-containers</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-testing-services</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-tpch</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino.tpch</groupId>
+            <artifactId>tpch</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.assertj</groupId>
+            <artifactId>assertj-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.jetbrains</groupId>
+            <artifactId>annotations</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.junit.jupiter</groupId>
+            <artifactId>junit-jupiter-api</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.junit.jupiter</groupId>
+            <artifactId>junit-jupiter-params</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>testcontainers</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+</project>
diff --git a/plugin/trino-duckdb/src/main/java/io/trino/plugin/duckdb/DuckDbClient.java b/plugin/trino-duckdb/src/main/java/io/trino/plugin/duckdb/DuckDbClient.java
new file mode 100644
index 0000000000000..ba5ee3ebe3e29
--- /dev/null
+++ b/plugin/trino-duckdb/src/main/java/io/trino/plugin/duckdb/DuckDbClient.java
@@ -0,0 +1,300 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.duckdb;
+
+import com.google.common.collect.ImmutableList;
+import com.google.inject.Inject;
+import io.trino.plugin.base.mapping.IdentifierMapping;
+import io.trino.plugin.jdbc.BaseJdbcClient;
+import io.trino.plugin.jdbc.BaseJdbcConfig;
+import io.trino.plugin.jdbc.ColumnMapping;
+import io.trino.plugin.jdbc.ConnectionFactory;
+import io.trino.plugin.jdbc.JdbcOutputTableHandle;
+import io.trino.plugin.jdbc.JdbcTableHandle;
+import io.trino.plugin.jdbc.JdbcTypeHandle;
+import io.trino.plugin.jdbc.LongWriteFunction;
+import io.trino.plugin.jdbc.QueryBuilder;
+import io.trino.plugin.jdbc.RemoteTableName;
+import io.trino.plugin.jdbc.WriteMapping;
+import io.trino.plugin.jdbc.logging.RemoteQueryModifier;
+import io.trino.spi.TrinoException;
+import io.trino.spi.connector.ColumnMetadata;
+import io.trino.spi.connector.ConnectorSession;
+import io.trino.spi.connector.SchemaTableName;
+import io.trino.spi.type.CharType;
+import io.trino.spi.type.DecimalType;
+import io.trino.spi.type.Type;
+import io.trino.spi.type.VarcharType;
+
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.time.LocalDate;
+import java.time.format.DateTimeFormatter;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static io.trino.plugin.jdbc.StandardColumnMappings.bigintColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.bigintWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.booleanColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.booleanWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.charWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.decimalColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.doubleColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.doubleWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.integerColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.integerWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.longDecimalWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.realColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.realWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.shortDecimalWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.smallintColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.smallintWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintWriteFunction;
+import static io.trino.plugin.jdbc.StandardColumnMappings.varcharColumnMapping;
+import static io.trino.plugin.jdbc.StandardColumnMappings.varcharWriteFunction;
+import static io.trino.plugin.jdbc.TypeHandlingJdbcSessionProperties.getUnsupportedTypeHandling;
+import static io.trino.plugin.jdbc.UnsupportedTypeHandling.CONVERT_TO_VARCHAR;
+import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
+import static io.trino.spi.type.BigintType.BIGINT;
+import static io.trino.spi.type.BooleanType.BOOLEAN;
+import static io.trino.spi.type.DateType.DATE;
+import static io.trino.spi.type.DecimalType.createDecimalType;
+import static io.trino.spi.type.DoubleType.DOUBLE;
+import static io.trino.spi.type.IntegerType.INTEGER;
+import static io.trino.spi.type.RealType.REAL;
+import static io.trino.spi.type.SmallintType.SMALLINT;
+import static io.trino.spi.type.TinyintType.TINYINT;
+import static java.lang.String.format;
+import static java.time.temporal.ChronoField.EPOCH_DAY;
+
+public final class DuckDbClient
+ extends BaseJdbcClient
+{
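+ // Matches DuckDB type names such as DECIMAL(18,3); the named groups provide precision and scale for the decimal column mapping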
+ private static final Pattern DECIMAL_PATTERN = Pattern.compile("DECIMAL\\((?<precision>[0-9]+),(?<scale>[0-9]+)\\)");
+ private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("uuuu-MM-dd");
+
+ @Inject
+ public DuckDbClient(
+ BaseJdbcConfig config,
+ ConnectionFactory connectionFactory,
+ QueryBuilder queryBuilder,
+ IdentifierMapping identifierMapping,
+ RemoteQueryModifier queryModifier)
+ {
+ super("\"", connectionFactory, queryBuilder, config.getJdbcTypesMappedToVarchar(), identifierMapping, queryModifier, false);
+ }
+
+ @Override
+ public Connection getConnection(ConnectorSession session)
+ throws SQLException
+ {
+ // The base implementation calls Connection.setReadOnly, but DuckDB does not support changing the read-only status at the connection level
+ return connectionFactory.openConnection(session);
+ }
+
+ @Override
+ public void renameSchema(ConnectorSession session, String schemaName, String newSchemaName)
+ {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming schemas");
+ }
+
+ @Override
+ protected String escapeObjectNameForMetadataQuery(String name, String escape)
+ {
+ // org.duckdb.DuckDBDatabaseMetaData.getSearchStringEscape returns null
+ checkArgument(escape == null, "escape must be null");
+ return name;
+ }
+
+ @Override
+ protected Optional<List<String>> getTableTypes()
+ {
+ return Optional.of(ImmutableList.of("BASE TABLE", "LOCAL TEMPORARY", "VIEW"));
+ }
+
+ @Override
+ public ResultSet getTables(Connection connection, Optional<String> schemaName, Optional<String> tableName)
+ throws SQLException
+ {
+ DatabaseMetaData metadata = connection.getMetaData();
+ return metadata.getTables(
+ null,
+ schemaName.orElse(null),
+ escapeObjectNameForMetadataQuery(tableName, metadata.getSearchStringEscape()).orElse(null),
+ getTableTypes().map(types -> types.toArray(String[]::new)).orElse(null));
+ }
+
+ @Override
+ public void renameTable(ConnectorSession session, JdbcTableHandle handle, SchemaTableName newTableName)
+ {
+ RemoteTableName remoteTableName = handle.asPlainTable().getRemoteTableName();
+ if (!remoteTableName.getSchemaName().orElseThrow().equals(newTableName.getSchemaName())) {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming tables across schemas");
+ }
+ renameTable(session, null, remoteTableName.getSchemaName().orElseThrow(), remoteTableName.getTableName(), newTableName);
+ }
+
+ @Override
+ protected void renameTable(ConnectorSession session, Connection connection, String catalogName, String remoteSchemaName, String remoteTableName, String newRemoteSchemaName, String newRemoteTableName)
+ throws SQLException
+ {
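+ // DuckDB accepts only an unqualified name in ALTER TABLE ... RENAME TO, so a rename never moves the table to another schema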
+ execute(session, connection, format(
+ "ALTER TABLE %s RENAME TO %s",
+ quoted(catalogName, remoteSchemaName, remoteTableName),
+ quoted(catalogName, null, newRemoteTableName)));
+ }
+
+ @Override
+ public void commitCreateTable(ConnectorSession session, JdbcOutputTableHandle handle, Set<Long> pageSinkIds)
+ {
+ if (handle.getPageSinkIdColumnName().isPresent()) {
+ finishInsertTable(session, handle, pageSinkIds);
+ }
+ else {
+ renameTable(
+ session,
+ null,
+ handle.getRemoteTableName().getSchemaName().orElse(null),
+ handle.getTemporaryTableName().orElseThrow(() -> new IllegalStateException("Temporary table name missing")),
+ handle.getRemoteTableName().getSchemaTableName());
+ }
+ }
+
+ @Override
+ public void addColumn(ConnectorSession session, JdbcTableHandle handle, ColumnMetadata column)
+ {
+ if (!column.isNullable()) {
+ throw new TrinoException(NOT_SUPPORTED, "This connector does not support adding not null columns");
+ }
+ super.addColumn(session, handle, column);
+ }
+
+ @Override
+ public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle)
+ {
+ Optional mapping = getForcedMappingToVarchar(typeHandle);
+ if (mapping.isPresent()) {
+ return mapping;
+ }
+ switch (typeHandle.jdbcType()) {
+ case Types.BOOLEAN:
+ return Optional.of(booleanColumnMapping());
+ case Types.TINYINT:
+ return Optional.of(tinyintColumnMapping());
+ case Types.SMALLINT:
+ return Optional.of(smallintColumnMapping());
+ case Types.INTEGER:
+ return Optional.of(integerColumnMapping());
+ case Types.BIGINT:
+ return Optional.of(bigintColumnMapping());
+ case Types.FLOAT:
+ return Optional.of(realColumnMapping());
+ case Types.DOUBLE:
+ return Optional.of(doubleColumnMapping());
+ case Types.DECIMAL:
+ String decimalTypeName = typeHandle.jdbcTypeName().orElseThrow();
+ Matcher matcher = DECIMAL_PATTERN.matcher(decimalTypeName);
+ checkArgument(matcher.matches(), "Decimal type name does not match pattern: %s", decimalTypeName);
+ int precision = Integer.parseInt(matcher.group("precision"));
+ int scale = Integer.parseInt(matcher.group("scale"));
+ return Optional.of(decimalColumnMapping(createDecimalType(precision, scale)));
+ case Types.VARCHAR:
+ // CHAR is an alias of VARCHAR in DuckDB https://duckdb.org/docs/sql/data_types/text
+ return Optional.of(varcharColumnMapping(VarcharType.VARCHAR, true));
+ case Types.DATE:
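+ // Read DATE values as text and convert them to an epoch day, avoiding java.sql.Date and its legacy calendar semantics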
+ return Optional.of(ColumnMapping.longMapping(
+ DATE,
+ (resultSet, columnIndex) -> DATE_FORMATTER.parse(resultSet.getString(columnIndex)).getLong(EPOCH_DAY),
+ dateWriteFunction()));
+ }
+
+ if (getUnsupportedTypeHandling(session) == CONVERT_TO_VARCHAR) {
+ return mapToUnboundedVarchar(typeHandle);
+ }
+ return Optional.empty();
+ }
+
+ @Override
+ public WriteMapping toWriteMapping(ConnectorSession session, Type type)
+ {
+ if (type == BOOLEAN) {
+ return WriteMapping.booleanMapping("boolean", booleanWriteFunction());
+ }
+ if (type == TINYINT) {
+ return WriteMapping.longMapping("tinyint", tinyintWriteFunction());
+ }
+ if (type == SMALLINT) {
+ return WriteMapping.longMapping("smallint", smallintWriteFunction());
+ }
+ if (type == INTEGER) {
+ return WriteMapping.longMapping("integer", integerWriteFunction());
+ }
+ if (type == BIGINT) {
+ return WriteMapping.longMapping("bigint", bigintWriteFunction());
+ }
+ if (type == REAL) {
+ return WriteMapping.longMapping("float", realWriteFunction());
+ }
+ if (type == DOUBLE) {
+ return WriteMapping.doubleMapping("double precision", doubleWriteFunction());
+ }
+ if (type instanceof DecimalType decimalType) {
+ String dataType = "decimal(%d, %d)".formatted(decimalType.getPrecision(), decimalType.getScale());
+ if (decimalType.isShort()) {
+ return WriteMapping.longMapping(dataType, shortDecimalWriteFunction(decimalType));
+ }
+ return WriteMapping.objectMapping(dataType, longDecimalWriteFunction(decimalType));
+ }
+ if (type instanceof CharType) {
+ // CHAR is an alias of VARCHAR in DuckDB https://duckdb.org/docs/sql/data_types/text
+ return WriteMapping.sliceMapping("varchar", charWriteFunction());
+ }
+ if (type instanceof VarcharType) {
+ // CHAR is an alias of VARCHAR in DuckDB https://duckdb.org/docs/sql/data_types/text
+ return WriteMapping.sliceMapping("varchar", varcharWriteFunction());
+ }
+ if (type == DATE) {
+ return WriteMapping.longMapping("date", dateWriteFunction());
+ }
+ throw new TrinoException(NOT_SUPPORTED, "Unsupported column type: " + type.getDisplayName());
+ }
+
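+ // Writes DATE values by binding an ISO-8601 string and letting DuckDB CAST it, mirroring the textual read path in toColumnMapping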
+ private static LongWriteFunction dateWriteFunction()
+ {
+ return new LongWriteFunction()
+ {
+ @Override
+ public String getBindExpression()
+ {
+ return "CAST(? AS DATE)";
+ }
+
+ @Override
+ public void set(PreparedStatement statement, int index, long day)
+ throws SQLException
+ {
+ statement.setString(index, DATE_FORMATTER.format(LocalDate.ofEpochDay(day)));
+ }
+ };
+ }
+}
diff --git a/plugin/trino-duckdb/src/main/java/io/trino/plugin/duckdb/DuckDbPlugin.java b/plugin/trino-duckdb/src/main/java/io/trino/plugin/duckdb/DuckDbPlugin.java
new file mode 100644
index 0000000000000..fa52a1365933c
--- /dev/null
+++ b/plugin/trino-duckdb/src/main/java/io/trino/plugin/duckdb/DuckDbPlugin.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.duckdb;
+
+import io.trino.plugin.jdbc.JdbcPlugin;
+
+public final class DuckDbPlugin
+ extends JdbcPlugin
+{
+ public DuckDbPlugin()
+ {
+ super("duckdb", new DuckDblClientModule());
+ }
+}
diff --git a/plugin/trino-duckdb/src/main/java/io/trino/plugin/duckdb/DuckDbClientModule.java b/plugin/trino-duckdb/src/main/java/io/trino/plugin/duckdb/DuckDbClientModule.java
new file mode 100644
index 0000000000000..f243135c9f4e3
--- /dev/null
+++ b/plugin/trino-duckdb/src/main/java/io/trino/plugin/duckdb/DuckDbClientModule.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.duckdb;
+
+import com.google.inject.Binder;
+import com.google.inject.Provides;
+import com.google.inject.Scopes;
+import com.google.inject.Singleton;
+import io.airlift.configuration.AbstractConfigurationAwareModule;
+import io.opentelemetry.api.OpenTelemetry;
+import io.trino.plugin.jdbc.BaseJdbcConfig;
+import io.trino.plugin.jdbc.ConnectionFactory;
+import io.trino.plugin.jdbc.DriverConnectionFactory;
+import io.trino.plugin.jdbc.ForBaseJdbc;
+import io.trino.plugin.jdbc.JdbcClient;
+import io.trino.plugin.jdbc.JdbcStatisticsConfig;
+import io.trino.plugin.jdbc.credential.CredentialProvider;
+import io.trino.plugin.jdbc.ptf.Query;
+import io.trino.spi.function.table.ConnectorTableFunction;
+import org.duckdb.DuckDBDriver;
+
+import java.util.Properties;
+
+import static com.google.inject.multibindings.Multibinder.newSetBinder;
+import static io.airlift.configuration.ConfigBinder.configBinder;
+
+public final class DuckDbClientModule
+ extends AbstractConfigurationAwareModule
+{
+ @Override
+ protected void setup(Binder binder)
+ {
+ binder.bind(JdbcClient.class).annotatedWith(ForBaseJdbc.class).to(DuckDbClient.class).in(Scopes.SINGLETON);
+ configBinder(binder).bindConfig(JdbcStatisticsConfig.class);
+ newSetBinder(binder, ConnectorTableFunction.class).addBinding().toProvider(Query.class).in(Scopes.SINGLETON);
+ }
+
+ @Provides
+ @Singleton
+ @ForBaseJdbc
+ public static ConnectionFactory connectionFactory(BaseJdbcConfig config, CredentialProvider credentialProvider, OpenTelemetry openTelemetry)
+ {
+ Properties connectionProperties = new Properties();
+ return DriverConnectionFactory.builder(
+ new DuckDBDriver(),
+ config.getConnectionUrl(),
+ credentialProvider)
+ .setConnectionProperties(connectionProperties)
+ .setOpenTelemetry(openTelemetry)
+ .build();
+ }
+}
diff --git a/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/DuckDbQueryRunner.java b/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/DuckDbQueryRunner.java
new file mode 100644
index 0000000000000..25a72ae284b5c
--- /dev/null
+++ b/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/DuckDbQueryRunner.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.duckdb;
+
+import com.google.common.collect.ImmutableList;
+import io.airlift.log.Logger;
+import io.airlift.log.Logging;
+import io.trino.plugin.tpch.TpchPlugin;
+import io.trino.testing.DistributedQueryRunner;
+import io.trino.tpch.TpchTable;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static io.airlift.testing.Closeables.closeAllSuppress;
+import static io.trino.plugin.duckdb.TestingDuckDb.TPCH_SCHEMA;
+import static io.trino.plugin.tpch.TpchMetadata.TINY_SCHEMA_NAME;
+import static io.trino.testing.QueryAssertions.copyTpchTables;
+import static io.trino.testing.TestingSession.testSessionBuilder;
+
+public final class DuckDbQueryRunner
+{
+ private DuckDbQueryRunner() {}
+
+ public static Builder builder(TestingDuckDb server)
+ {
+ return new Builder()
+ .addConnectorProperty("connection-url", server.getJdbcUrl());
+ }
+
+ public static final class Builder
+ extends DistributedQueryRunner.Builder<Builder>
+ {
+ private final Map<String, String> connectorProperties = new HashMap<>();
+ private List<TpchTable<?>> initialTables = ImmutableList.of();
+
+ private Builder()
+ {
+ super(testSessionBuilder()
+ .setCatalog("duckdb")
+ .setSchema(TPCH_SCHEMA)
+ .build());
+ }
+
+ public Builder addConnectorProperty(String key, String value)
+ {
+ this.connectorProperties.put(key, value);
+ return this;
+ }
+
+ public Builder setInitialTables(Iterable<TpchTable<?>> initialTables)
+ {
+ this.initialTables = ImmutableList.copyOf(initialTables);
+ return this;
+ }
+
+ @Override
+ public DistributedQueryRunner build()
+ throws Exception
+ {
+ DistributedQueryRunner queryRunner = super.build();
+ try {
+ queryRunner.installPlugin(new TpchPlugin());
+ queryRunner.createCatalog("tpch", "tpch");
+
+ queryRunner.installPlugin(new DuckDbPlugin());
+ queryRunner.createCatalog("duckdb", "duckdb", connectorProperties);
+
+ queryRunner.execute("CREATE SCHEMA duckdb.tpch");
+ copyTpchTables(queryRunner, "tpch", TINY_SCHEMA_NAME, initialTables);
+
+ return queryRunner;
+ }
+ catch (Throwable e) {
+ closeAllSuppress(e, queryRunner);
+ throw e;
+ }
+ }
+ }
+
+ public static void main(String[] args)
+ throws Exception
+ {
+ Logging.initialize();
+
+ //noinspection resource
+ DistributedQueryRunner queryRunner = DuckDbQueryRunner.builder(new TestingDuckDb())
+ .addCoordinatorProperty("http-server.http.port", "8080")
+ .setInitialTables(TpchTable.getTables())
+ .build();
+
+ Logger log = Logger.get(DuckDbQueryRunner.class);
+ log.info("======== SERVER STARTED ========");
+ log.info("\n====\n%s\n====", queryRunner.getCoordinator().getBaseUrl());
+ }
+}
diff --git a/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestDuckDbCaseInsensitiveMapping.java b/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestDuckDbCaseInsensitiveMapping.java
new file mode 100644
index 0000000000000..137ea2579eeb3
--- /dev/null
+++ b/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestDuckDbCaseInsensitiveMapping.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.duckdb;
+
+import io.trino.plugin.jdbc.BaseCaseInsensitiveMappingTest;
+import io.trino.testing.QueryRunner;
+import io.trino.testing.sql.SqlExecutor;
+import org.junit.jupiter.api.Test;
+
+import java.nio.file.Path;
+
+import static io.trino.plugin.base.mapping.RuleBasedIdentifierMappingUtils.REFRESH_PERIOD_DURATION;
+import static io.trino.plugin.base.mapping.RuleBasedIdentifierMappingUtils.createRuleBasedIdentifierMappingFile;
+import static java.util.Objects.requireNonNull;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+final class TestDuckDbCaseInsensitiveMapping
+ extends BaseCaseInsensitiveMappingTest
+{
+ private Path mappingFile;
+ private TestingDuckDb duckDb;
+
+ @Override
+ protected QueryRunner createQueryRunner()
+ throws Exception
+ {
+ mappingFile = createRuleBasedIdentifierMappingFile();
+ duckDb = closeAfterClass(new TestingDuckDb());
+ return DuckDbQueryRunner.builder(duckDb)
+ .addConnectorProperty("case-insensitive-name-matching", "true")
+ .addConnectorProperty("case-insensitive-name-matching.config-file", mappingFile.toFile().getAbsolutePath())
+ .addConnectorProperty("case-insensitive-name-matching.config-file.refresh-period", REFRESH_PERIOD_DURATION.toString())
+ .build();
+ }
+
+ @Override
+ protected Path getMappingFile()
+ {
+ return requireNonNull(mappingFile, "mappingFile is null");
+ }
+
+ @Override
+ protected SqlExecutor onRemoteDatabase()
+ {
+ return duckDb::execute;
+ }
+
+ @Test
+ @Override
+ public void testSchemaNameClash()
+ {
+ assertThatThrownBy(super::testSchemaNameClash)
+ .hasMessageContaining("Failed to execute statement 'CREATE SCHEMA \"CaseSensitiveName\"'")
+ .hasStackTraceContaining("Schema with name \"CaseSensitiveName\" already exists");
+ }
+
+ @Test
+ @Override
+ public void testSchemaNameClashWithRuleMapping()
+ {
+ assertThatThrownBy(super::testSchemaNameClashWithRuleMapping)
+ .hasMessageContaining("Failed to execute statement 'CREATE SCHEMA \"CaseSensitiveName\"'")
+ .hasStackTraceContaining("Schema with name \"CaseSensitiveName\" already exists");
+ }
+
+ @Test
+ @Override
+ public void testTableNameClash()
+ {
+ assertThatThrownBy(super::testTableNameClash)
+ .hasMessageContaining("Failed to execute statement 'CREATE TABLE \"tpch\".\"CaseSensitiveName\"")
+ .hasStackTraceContaining("Table with name \"CaseSensitiveName\" already exists");
+ }
+
+ @Test
+ @Override
+ public void testTableNameClashWithRuleMapping()
+ {
+ assertThatThrownBy(super::testTableNameClashWithRuleMapping)
+ .hasMessageContaining("Failed to execute statement 'CREATE TABLE \"remote_schema\".\"CaseSensitiveName\" (d varchar(5))'")
+ .hasStackTraceContaining("Table with name \"CaseSensitiveName\" already exists");
+ }
+}
diff --git a/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestDuckDbConnectorTest.java b/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestDuckDbConnectorTest.java
new file mode 100644
index 0000000000000..e27b37c283283
--- /dev/null
+++ b/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestDuckDbConnectorTest.java
@@ -0,0 +1,294 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.duckdb;
+
+import io.trino.plugin.jdbc.BaseJdbcConnectorTest;
+import io.trino.testing.MaterializedResult;
+import io.trino.testing.QueryRunner;
+import io.trino.testing.TestingConnectorBehavior;
+import io.trino.testing.sql.SqlExecutor;
+import io.trino.testing.sql.TestTable;
+import org.intellij.lang.annotations.Language;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.parallel.Isolated;
+
+import java.util.List;
+import java.util.Optional;
+
+import static io.trino.plugin.duckdb.TestingDuckDb.TPCH_SCHEMA;
+import static io.trino.spi.connector.ConnectorMetadata.MODIFYING_ROWS_MESSAGE;
+import static io.trino.spi.type.VarcharType.VARCHAR;
+import static io.trino.testing.MaterializedResult.resultBuilder;
+import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_NATIVE_QUERY;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+@Isolated
+final class TestDuckDbConnectorTest
+ extends BaseJdbcConnectorTest
+{
+ private TestingDuckDb duckDb;
+
+ @Override
+ protected QueryRunner createQueryRunner()
+ throws Exception
+ {
+ duckDb = closeAfterClass(new TestingDuckDb());
+ return DuckDbQueryRunner.builder(duckDb)
+ .setInitialTables(REQUIRED_TPCH_TABLES)
+ .build();
+ }
+
+ @Override
+ protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
+ {
+ return switch (connectorBehavior) {
+ case SUPPORTS_ADD_COLUMN_NOT_NULL_CONSTRAINT,
+ SUPPORTS_ADD_COLUMN_WITH_COMMENT,
+ SUPPORTS_AGGREGATION_PUSHDOWN,
+ SUPPORTS_ARRAY,
+ SUPPORTS_COMMENT_ON_COLUMN,
+ SUPPORTS_COMMENT_ON_TABLE,
+ SUPPORTS_CREATE_TABLE_WITH_COLUMN_COMMENT,
+ SUPPORTS_CREATE_TABLE_WITH_TABLE_COMMENT,
+ SUPPORTS_JOIN_PUSHDOWN,
+ SUPPORTS_LIMIT_PUSHDOWN,
+ SUPPORTS_RENAME_SCHEMA,
+ SUPPORTS_RENAME_TABLE_ACROSS_SCHEMAS,
+ SUPPORTS_TOPN_PUSHDOWN,
+ SUPPORTS_ROW_TYPE -> false;
+
+ default -> super.hasBehavior(connectorBehavior);
+ };
+ }
+
+ @Override
+ protected Optional<DataMappingTestSetup> filterDataMappingSmokeTestData(DataMappingTestSetup setup)
+ {
+ String type = setup.getTrinoTypeName();
+
+ if (type.equals("time") ||
+ type.startsWith("time(") ||
+ type.startsWith("timestamp") ||
+ type.equals("varbinary") ||
+ type.startsWith("array") ||
+ type.startsWith("row")) {
+ return Optional.of(setup.asUnsupported());
+ }
+
+ if (setup.getTrinoTypeName().equals("char(3)") && setup.getSampleValueLiteral().equals("'ab'")) {
+ return Optional.of(new DataMappingTestSetup("char(3)", "'abc'", "'zzz'"));
+ }
+ return Optional.of(setup);
+ }
+
+ @Override
+ protected TestTable createTableWithDefaultColumns()
+ {
+ return new TestTable(
+ onRemoteDatabase(),
+ TPCH_SCHEMA + ".test_default_cols",
+ "(col_required decimal(20,0) NOT NULL," +
+ "col_nullable decimal(20,0)," +
+ "col_default decimal(20,0) DEFAULT 43," +
+ "col_nonnull_default decimal(20,0) DEFAULT 42 NOT NULL ," +
+ "col_required2 decimal(20,0) NOT NULL)");
+ }
+
+ @Override
+ protected TestTable createTableWithUnsupportedColumn()
+ {
+ return new TestTable(
+ onRemoteDatabase(),
+ TPCH_SCHEMA + ".test_unsupported_col",
+ "(one bigint, two union(num integer, str varchar), three varchar)");
+ }
+
+ @Override // Override because DuckDB ignores column size of varchar type
+ protected MaterializedResult getDescribeOrdersResult()
+ {
+ return resultBuilder(getSession(), VARCHAR, VARCHAR, VARCHAR, VARCHAR)
+ .row("orderkey", "bigint", "", "")
+ .row("custkey", "bigint", "", "")
+ .row("orderstatus", "varchar", "", "")
+ .row("totalprice", "double", "", "")
+ .row("orderdate", "date", "", "")
+ .row("orderpriority", "varchar", "", "")
+ .row("clerk", "varchar", "", "")
+ .row("shippriority", "integer", "", "")
+ .row("comment", "varchar", "", "")
+ .build();
+ }
+
+ @Test
+ @Override // Override because DuckDB ignores column size of varchar type
+ public void testShowCreateTable()
+ {
+ assertThat((String) computeScalar("SHOW CREATE TABLE orders"))
+ .isEqualTo("""
+ CREATE TABLE duckdb.tpch.orders (
+ orderkey bigint,
+ custkey bigint,
+ orderstatus varchar,
+ totalprice double,
+ orderdate date,
+ orderpriority varchar,
+ clerk varchar,
+ shippriority integer,
+ comment varchar
+ )""");
+ }
+
+ @Test
+ @Override // Override because the connector doesn't support row level delete
+ public void testDeleteWithLike()
+ {
+ assertThatThrownBy(super::testDeleteWithLike)
+ .hasStackTraceContaining("TrinoException: " + MODIFYING_ROWS_MESSAGE);
+ }
+
+ @Override
+ @Language("RegExp")
+ protected String errorMessageForInsertIntoNotNullColumn(String columnName)
+ {
+ return "Constraint Error: NOT NULL constraint failed: .*." + columnName;
+ }
+
+ @Override
+ @Language("RegExp")
+ protected String errorMessageForCreateTableAsSelectNegativeDate(String date)
+ {
+ return "negative date %s as select blubb".formatted(date);
+ }
+
+ @Override
+ @Language("RegExp")
+ protected String errorMessageForInsertNegativeDate(String date)
+ {
+ return "negative date %s insert blubb".formatted(date);
+ }
+
+ @Override
+ protected void verifyAddNotNullColumnToNonEmptyTableFailurePermissible(Throwable e)
+ {
+ assertThat(e).hasMessageContaining("Adding columns with constraints not yet supported");
+ }
+
+ @Override
+ protected void verifyConcurrentAddColumnFailurePermissible(Exception e)
+ {
+ assertThat(e).hasMessageContaining("Catalog write-write conflict");
+ }
+
+ @Override
+ protected void verifySetColumnTypeFailurePermissible(Throwable e)
+ {
+ assertThat(e).hasMessageContaining("Conversion Error");
+ }
+
+ @Override
+ protected Optional<SetColumnTypeSetup> filterSetColumnTypesDataProvider(SetColumnTypeSetup setup)
+ {
+ return switch ("%s -> %s".formatted(setup.sourceColumnType(), setup.newColumnType())) {
+ case "varchar(100) -> varchar(50)" -> Optional.of(setup.withNewColumnType("varchar"));
+ case "char(25) -> char(20)",
+ "char(20) -> varchar",
+ "varchar -> char(20)" -> Optional.of(setup.withNewColumnType("varchar").withNewValueLiteral("rtrim(%s)".formatted(setup.newValueLiteral())));
+ default -> Optional.of(setup);
+ };
+ }
+
+ @Override
+ protected void verifySchemaNameLengthFailurePermissible(Throwable e)
+ {
+ assertThat(e).hasMessageContaining("maximum length of identifier exceeded");
+ }
+
+ @Override
+ protected void verifyTableNameLengthFailurePermissible(Throwable e)
+ {
+ assertThat(e).hasMessageStartingWith("maximum length of identifier exceeded");
+ }
+
+ @Override
+ protected void verifyColumnNameLengthFailurePermissible(Throwable e)
+ {
+ assertThat(e).hasMessageContaining("maximum length of identifier exceeded");
+ }
+
+ @Test
+ @Override // Override because the expected error message is different
+ public void testNativeQueryCreateStatement()
+ {
+ assertThat(getQueryRunner().tableExists(getSession(), "numbers")).isFalse();
+ assertThat(query("SELECT * FROM TABLE(system.query(query => 'CREATE TABLE " + TPCH_SCHEMA + ".numbers(n INTEGER)'))"))
+ .failure().hasMessageContaining("java.sql.SQLException: Parser Error: syntax error at or near \"CREATE\"");
+ assertThat(getQueryRunner().tableExists(getSession(), "numbers")).isFalse();
+ }
+
+ @Test
+ @Override // Override because the expected error message is different
+ public void testNativeQueryInsertStatementTableExists()
+ {
+ skipTestUnless(hasBehavior(SUPPORTS_NATIVE_QUERY));
+ try (TestTable testTable = simpleTable()) {
+ assertThat(query("SELECT * FROM TABLE(system.query(query => 'INSERT INTO %s VALUES (3)'))".formatted(testTable.getName())))
+ .failure().hasMessageContaining("java.sql.SQLException: Parser Error: syntax error at or near \"INTO\"");
+ assertQuery("SELECT * FROM " + testTable.getName(), "VALUES 1, 2");
+ }
+ }
+
+ @Test
+ @Override
+ public void testCharTrailingSpace()
+ {
+ String schema = getSession().getSchema().orElseThrow();
+ try (TestTable table = new TestTable(onRemoteDatabase(), schema + ".char_trailing_space", "(x char(10))", List.of("'test'"))) {
+ String tableName = table.getName();
+ assertQuery("SELECT * FROM " + tableName + " WHERE x = char 'test'", "VALUES 'test'");
+ assertQuery("SELECT * FROM " + tableName + " WHERE x = char 'test '", "VALUES 'test'");
+ assertQuery("SELECT * FROM " + tableName + " WHERE x = char 'test '", "VALUES 'test'");
+ assertQueryReturnsEmptyResult("SELECT * FROM " + tableName + " WHERE x = char ' test'");
+ }
+ }
+
+ @Test
+ @Override // Override because char type is an alias of varchar in DuckDB
+ public void testCharVarcharComparison()
+ {
+ // with char->varchar coercion on table creation, this is essentially varchar/varchar comparison
+ try (TestTable table = new TestTable(
+ getQueryRunner()::execute,
+ "test_char_varchar",
+ "(k, v) AS VALUES" +
+ " (-1, CAST(NULL AS CHAR(3))), " +
+ " (3, CAST(' ' AS CHAR(3)))," +
+ " (6, CAST('x ' AS CHAR(3)))")) {
+ // varchar of length shorter than column's length
+ assertThat(query("SELECT k, v FROM " + table.getName() + " WHERE v = CAST(' ' AS varchar(2))")).returnsEmptyResult();
+ // varchar of length longer than column's length
+ assertThat(query("SELECT k, v FROM " + table.getName() + " WHERE v = CAST(' ' AS varchar(4))")).returnsEmptyResult();
+ // value that's not all-spaces
+ assertThat(query("SELECT k, v FROM " + table.getName() + " WHERE v = CAST('x ' AS varchar(2))")).returnsEmptyResult();
+ // exact match
+ assertQuery("SELECT k, v FROM " + table.getName() + " WHERE v = CAST('' AS varchar(3))", "VALUES (3, '')");
+ }
+ }
+
+ @Override
+ protected SqlExecutor onRemoteDatabase()
+ {
+ return duckDb::execute;
+ }
+}
diff --git a/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestDuckDbPlugin.java b/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestDuckDbPlugin.java
new file mode 100644
index 0000000000000..fac46bf004385
--- /dev/null
+++ b/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestDuckDbPlugin.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.duckdb;
+
+import com.google.common.collect.ImmutableMap;
+import io.trino.spi.Plugin;
+import io.trino.spi.connector.ConnectorFactory;
+import io.trino.testing.TestingConnectorContext;
+import org.junit.jupiter.api.Test;
+
+import static com.google.common.collect.Iterables.getOnlyElement;
+
+final class TestDuckDbPlugin
+{
+ @Test
+ void testCreateConnector()
+ {
+ Plugin plugin = new DuckDbPlugin();
+ ConnectorFactory factory = getOnlyElement(plugin.getConnectorFactories());
+ factory.create(
+ "test",
+ ImmutableMap.of("connection-url", "jdbc:duckdb:"),
+ new TestingConnectorContext())
+ .shutdown();
+ }
+}
diff --git a/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestDuckDbTypeMapping.java b/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestDuckDbTypeMapping.java
new file mode 100644
index 0000000000000..29fe88a0169ef
--- /dev/null
+++ b/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestDuckDbTypeMapping.java
@@ -0,0 +1,331 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.duckdb;
+
+import io.trino.Session;
+import io.trino.spi.type.TimeZoneKey;
+import io.trino.testing.AbstractTestQueryFramework;
+import io.trino.testing.QueryRunner;
+import io.trino.testing.TestingSession;
+import io.trino.testing.datatype.CreateAndInsertDataSetup;
+import io.trino.testing.datatype.CreateAsSelectDataSetup;
+import io.trino.testing.datatype.DataSetup;
+import io.trino.testing.datatype.SqlDataTypeTest;
+import io.trino.testing.sql.TrinoSqlExecutor;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.parallel.Execution;
+
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+
+import static com.google.common.base.Preconditions.checkState;
+import static com.google.common.base.Verify.verify;
+import static io.trino.plugin.duckdb.TestingDuckDb.TPCH_SCHEMA;
+import static io.trino.spi.type.BigintType.BIGINT;
+import static io.trino.spi.type.BooleanType.BOOLEAN;
+import static io.trino.spi.type.DateType.DATE;
+import static io.trino.spi.type.DecimalType.createDecimalType;
+import static io.trino.spi.type.DoubleType.DOUBLE;
+import static io.trino.spi.type.IntegerType.INTEGER;
+import static io.trino.spi.type.RealType.REAL;
+import static io.trino.spi.type.SmallintType.SMALLINT;
+import static io.trino.spi.type.TinyintType.TINYINT;
+import static io.trino.spi.type.VarcharType.VARCHAR;
+import static java.time.ZoneOffset.UTC;
+import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
+import static org.junit.jupiter.api.parallel.ExecutionMode.SAME_THREAD;
+
+@TestInstance(PER_CLASS)
+@Execution(SAME_THREAD)
+final class TestDuckDbTypeMapping
+ extends AbstractTestQueryFramework
+{
+ private TestingDuckDb duckDb;
+
+ private static final ZoneId jvmZone = ZoneId.systemDefault();
+ private static final LocalDateTime timeGapInJvmZone1 = LocalDateTime.of(1970, 1, 1, 0, 13, 42);
+ private static final LocalDateTime timeGapInJvmZone2 = LocalDateTime.of(2018, 4, 1, 2, 13, 55, 123_000_000);
+ private static final LocalDateTime timeDoubledInJvmZone = LocalDateTime.of(2018, 10, 28, 1, 33, 17, 456_000_000);
+
+ // no DST in 1970, but has DST in later years (e.g. 2018)
+ private static final ZoneId vilnius = ZoneId.of("Europe/Vilnius");
+ private static final LocalDateTime timeGapInVilnius = LocalDateTime.of(2018, 3, 25, 3, 17, 17);
+ private static final LocalDateTime timeDoubledInVilnius = LocalDateTime.of(2018, 10, 28, 3, 33, 33, 333_000_000);
+
+ // minutes offset change since 1970-01-01, no DST
+ private static final ZoneId kathmandu = ZoneId.of("Asia/Kathmandu");
+ private static final LocalDateTime timeGapInKathmandu = LocalDateTime.of(1986, 1, 1, 0, 13, 7);
+
+ public TestDuckDbTypeMapping()
+ {
+ checkState(jvmZone.getId().equals("America/Bahia_Banderas"), "This test assumes certain JVM time zone");
+ checkIsGap(jvmZone, timeGapInJvmZone1);
+ checkIsGap(jvmZone, timeGapInJvmZone2);
+ checkIsDoubled(jvmZone, timeDoubledInJvmZone);
+
+ LocalDate dateOfLocalTimeChangeForwardAtMidnightInSomeZone = LocalDate.of(1983, 4, 1);
+ checkIsGap(vilnius, dateOfLocalTimeChangeForwardAtMidnightInSomeZone.atStartOfDay());
+ LocalDate dateOfLocalTimeChangeBackwardAtMidnightInSomeZone = LocalDate.of(1983, 10, 1);
+ checkIsDoubled(vilnius, dateOfLocalTimeChangeBackwardAtMidnightInSomeZone.atStartOfDay().minusMinutes(1));
+ checkIsGap(vilnius, timeGapInVilnius);
+ checkIsDoubled(vilnius, timeDoubledInVilnius);
+
+ checkIsGap(kathmandu, timeGapInKathmandu);
+ }
+
+ @Override
+ protected QueryRunner createQueryRunner()
+ throws Exception
+ {
+ duckDb = closeAfterClass(new TestingDuckDb());
+ return DuckDbQueryRunner.builder(duckDb).build();
+ }
+
+ @Test
+ void testBoolean()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("boolean", "true", BOOLEAN)
+ .addRoundTrip("boolean", "false", BOOLEAN)
+ .addRoundTrip("boolean", "NULL", BOOLEAN, "CAST(NULL AS BOOLEAN)")
+ .execute(getQueryRunner(), duckDbCreateAndInsert(TPCH_SCHEMA + ".test_boolean"))
+ .execute(getQueryRunner(), trinoCreateAsSelect(TPCH_SCHEMA + ".test_boolean"))
+ .execute(getQueryRunner(), trinoCreateAndInsert(TPCH_SCHEMA + ".test_boolean"));
+ }
+
+ @Test
+ void testTinyInt()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("tinyint", "tinyint '-128'", TINYINT)
+ .addRoundTrip("tinyint", "tinyint '127'", TINYINT)
+ .addRoundTrip("tinyint", "NULL", TINYINT, "CAST(NULL AS TINYINT)")
+ .execute(getQueryRunner(), duckDbCreateAndInsert(TPCH_SCHEMA + ".test_tinyint"))
+ .execute(getQueryRunner(), trinoCreateAsSelect(TPCH_SCHEMA + ".test_tinyint"))
+ .execute(getQueryRunner(), trinoCreateAndInsert(TPCH_SCHEMA + ".test_tinyint"));
+ }
+
+ @Test
+ void testSmallInt()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("smallint", "smallint '-32768'", SMALLINT)
+ .addRoundTrip("smallint", "smallint '32767'", SMALLINT)
+ .addRoundTrip("smallint", "NULL", SMALLINT, "CAST(NULL AS SMALLINT)")
+ .execute(getQueryRunner(), duckDbCreateAndInsert(TPCH_SCHEMA + ".test_smallint"))
+ .execute(getQueryRunner(), trinoCreateAsSelect(TPCH_SCHEMA + ".test_smallint"))
+ .execute(getQueryRunner(), trinoCreateAndInsert(TPCH_SCHEMA + ".test_smallint"));
+ }
+
+ @Test
+ void testInteger()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("integer", "-2147483648", INTEGER)
+ .addRoundTrip("integer", "2147483647", INTEGER)
+ .addRoundTrip("integer", "NULL", INTEGER, "CAST(NULL AS INTEGER)")
+ .execute(getQueryRunner(), duckDbCreateAndInsert(TPCH_SCHEMA + ".test_integer"))
+ .execute(getQueryRunner(), trinoCreateAsSelect(TPCH_SCHEMA + ".test_integer"))
+ .execute(getQueryRunner(), trinoCreateAndInsert(TPCH_SCHEMA + ".test_integer"));
+ }
+
+ @Test
+ void testBigInt()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("bigint", "-9223372036854775808", BIGINT)
+ .addRoundTrip("bigint", "9223372036854775807", BIGINT)
+ .addRoundTrip("bigint", "NULL", BIGINT, "CAST(NULL AS BIGINT)")
+ .execute(getQueryRunner(), duckDbCreateAndInsert(TPCH_SCHEMA + ".test_bigint"))
+ .execute(getQueryRunner(), trinoCreateAsSelect(TPCH_SCHEMA + ".test_bigint"))
+ .execute(getQueryRunner(), trinoCreateAndInsert(TPCH_SCHEMA + ".test_bigint"));
+ }
+
+ @Test
+ void testReal()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("real", "123.456E10", REAL, "REAL '123.456E10'")
+ .addRoundTrip("real", "NULL", REAL, "CAST(NULL AS real)")
+ .execute(getQueryRunner(), duckDbCreateAndInsert(TPCH_SCHEMA + ".test_real"))
+ .execute(getQueryRunner(), trinoCreateAsSelect(TPCH_SCHEMA + ".test_real"))
+ .execute(getQueryRunner(), trinoCreateAndInsert(TPCH_SCHEMA + ".test_real"));
+ }
+
+ @Test
+ void testDouble()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("double", "1.0E100", DOUBLE, "1.0E100")
+ .addRoundTrip("double", "123.456E10", DOUBLE, "123.456E10")
+ .addRoundTrip("double", "123.456E10", DOUBLE, "123.456E10")
+ .addRoundTrip("double", "NULL", DOUBLE, "CAST(NULL AS double)")
+ .execute(getQueryRunner(), duckDbCreateAndInsert(TPCH_SCHEMA + ".test_double"))
+ .execute(getQueryRunner(), trinoCreateAsSelect(TPCH_SCHEMA + ".test_double"))
+ .execute(getQueryRunner(), trinoCreateAndInsert(TPCH_SCHEMA + ".test_double"));
+ }
+
+ @Test
+ void testDecimal()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("decimal(3, 0)", "CAST(NULL AS decimal(3, 0))", createDecimalType(3, 0), "CAST(NULL AS decimal(3, 0))")
+ .addRoundTrip("decimal(3, 0)", "CAST('193' AS decimal(3, 0))", createDecimalType(3, 0), "CAST('193' AS decimal(3, 0))")
+ .addRoundTrip("decimal(3, 0)", "CAST('19' AS decimal(3, 0))", createDecimalType(3, 0), "CAST('19' AS decimal(3, 0))")
+ .addRoundTrip("decimal(3, 0)", "CAST('-193' AS decimal(3, 0))", createDecimalType(3, 0), "CAST('-193' AS decimal(3, 0))")
+ .addRoundTrip("decimal(4, 0)", "CAST('19' AS decimal(4, 0))", createDecimalType(4, 0), "CAST('19' AS decimal(4, 0))") // JDBC Type SMALLINT
+ .addRoundTrip("decimal(5, 0)", "CAST('19' AS decimal(5, 0))", createDecimalType(5, 0), "CAST('19' AS decimal(5, 0))") // JDBC Type INTEGER
+ .addRoundTrip("decimal(10, 0)", "CAST('19' AS decimal(10, 0))", createDecimalType(10, 0), "CAST('19' AS decimal(10, 0))") // JDBC Type BIGINT
+ .addRoundTrip("decimal(3, 1)", "CAST('10.0' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('10.0' AS decimal(3, 1))")
+ .addRoundTrip("decimal(3, 1)", "CAST('10.1' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('10.1' AS decimal(3, 1))")
+ .addRoundTrip("decimal(3, 1)", "CAST('-10.1' AS decimal(3, 1))", createDecimalType(3, 1), "CAST('-10.1' AS decimal(3, 1))")
+ .addRoundTrip("decimal(4, 2)", "CAST('2' AS decimal(4, 2))", createDecimalType(4, 2), "CAST('2' AS decimal(4, 2))")
+ .addRoundTrip("decimal(4, 2)", "CAST('2.3' AS decimal(4, 2))", createDecimalType(4, 2), "CAST('2.3' AS decimal(4, 2))")
+ .addRoundTrip("decimal(24, 2)", "CAST('2' AS decimal(24, 2))", createDecimalType(24, 2), "CAST('2' AS decimal(24, 2))")
+ .addRoundTrip("decimal(24, 2)", "CAST('2.3' AS decimal(24, 2))", createDecimalType(24, 2), "CAST('2.3' AS decimal(24, 2))")
+ .addRoundTrip("decimal(24, 2)", "CAST('123456789.3' AS decimal(24, 2))", createDecimalType(24, 2), "CAST('123456789.3' AS decimal(24, 2))")
+ .addRoundTrip("decimal(24, 4)", "CAST('12345678901234567890.31' AS decimal(24, 4))", createDecimalType(24, 4), "CAST('12345678901234567890.31' AS decimal(24, 4))")
+ .addRoundTrip("decimal(30, 5)", "CAST('3141592653589793238462643.38327' AS decimal(30, 5))", createDecimalType(30, 5), "CAST('3141592653589793238462643.38327' AS decimal(30, 5))")
+ .addRoundTrip("decimal(30, 5)", "CAST('-3141592653589793238462643.38327' AS decimal(30, 5))", createDecimalType(30, 5), "CAST('-3141592653589793238462643.38327' AS decimal(30, 5))")
+ .addRoundTrip("decimal(36, 0)", "CAST(NULL AS decimal(36, 0))", createDecimalType(36, 0), "CAST(NULL AS decimal(36, 0))")
+ .addRoundTrip("decimal(36, 0)", "CAST('999999999999999999999999999999999999' AS decimal(36, 0))", createDecimalType(36, 0), "CAST('999999999999999999999999999999999999' AS decimal(36, 0))")
+ .addRoundTrip("decimal(36, 0)", "CAST('-999999999999999999999999999999999999' AS decimal(36, 0))", createDecimalType(36, 0), "CAST('-999999999999999999999999999999999999' AS decimal(36, 0))")
+ .addRoundTrip("decimal(36, 36)", "CAST('0.27182818284590452353602874713526624977' AS decimal(36, 36))", createDecimalType(36, 36), "CAST('0.27182818284590452353602874713526624977' AS decimal(36, 36))")
+ .addRoundTrip("decimal(36, 36)", "CAST('-0.27182818284590452353602874713526624977' AS decimal(36, 36))", createDecimalType(36, 36), "CAST('-0.27182818284590452353602874713526624977' AS decimal(36, 36))")
+ .execute(getQueryRunner(), duckDbCreateAndInsert(TPCH_SCHEMA + ".test_decimal"))
+ .execute(getQueryRunner(), trinoCreateAsSelect(TPCH_SCHEMA + ".test_decimal"))
+ .execute(getQueryRunner(), trinoCreateAndInsert(TPCH_SCHEMA + ".test_decimal"));
+ }
+
+ @Test
+ void testDecimalDefault()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("decimal", "CAST('123456789012345.123' AS decimal(18, 3))", createDecimalType(18, 3), "CAST('123456789012345.123' AS decimal(18, 3))")
+ .execute(getQueryRunner(), duckDbCreateAndInsert(TPCH_SCHEMA + ".test_decimal"));
+ }
+
+ @Test
+ void testChar()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("char(10)", "NULL", VARCHAR, "CAST(NULL AS varchar)")
+ .addRoundTrip("char(10)", "''", VARCHAR, "CAST('' AS varchar)")
+ .addRoundTrip("char(6)", "'text_a'", VARCHAR, "CAST('text_a' AS varchar)")
+ .addRoundTrip("char(5)", "'攻殻機動隊'", VARCHAR, "CAST('攻殻機動隊' AS varchar)")
+ .addRoundTrip("char(1)", "'😂'", VARCHAR, "CAST('😂' AS varchar)")
+ .execute(getQueryRunner(), duckDbCreateAndInsert(TPCH_SCHEMA + ".test_char"))
+ .execute(getQueryRunner(), trinoCreateAsSelect(TPCH_SCHEMA + ".test_char"))
+ .execute(getQueryRunner(), trinoCreateAndInsert(TPCH_SCHEMA + ".test_char"));
+ }
+
+ @Test
+ void testVarchar()
+ {
+ SqlDataTypeTest.create()
+ .addRoundTrip("varchar(10)", "NULL", VARCHAR, "CAST(NULL AS varchar)")
+ .addRoundTrip("varchar(10)", "''", VARCHAR, "CAST('' AS varchar)")
+ .addRoundTrip("varchar(10)", "'text_a'", VARCHAR, "CAST('text_a' AS varchar)")
+ .addRoundTrip("varchar(255)", "'text_b'", VARCHAR, "CAST('text_b' AS varchar)")
+ .addRoundTrip("varchar(65535)", "'text_d'", VARCHAR, "CAST('text_d' AS varchar)")
+ .addRoundTrip("varchar(5)", "'攻殻機動隊'", VARCHAR, "CAST('攻殻機動隊' AS varchar)")
+ .addRoundTrip("varchar(32)", "'攻殻機動隊'", VARCHAR, "CAST('攻殻機動隊' AS varchar)")
+ .addRoundTrip("varchar(20000)", "'攻殻機動隊'", VARCHAR, "CAST('攻殻機動隊' AS varchar)")
+ .addRoundTrip("varchar(1)", "'😂'", VARCHAR, "CAST('😂' AS varchar)")
+ .addRoundTrip("varchar(77)", "'Ну, погоди!'", VARCHAR, "CAST('Ну, погоди!' AS varchar)")
+ .addRoundTrip("varchar(2000000)", "'text_f'", VARCHAR, "CAST('text_f' AS varchar)") // too long for a char in Trino
+ .execute(getQueryRunner(), duckDbCreateAndInsert(TPCH_SCHEMA + ".test_varchar"))
+ .execute(getQueryRunner(), trinoCreateAsSelect(TPCH_SCHEMA + ".test_varchar"))
+ .execute(getQueryRunner(), trinoCreateAndInsert(TPCH_SCHEMA + ".test_varchar"));
+ }
+
+ @Test
+ void testDate()
+ {
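+        // date results must not depend on the session zone; exercise zones with
+        // different DST rules to catch zone-sensitive conversions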
+ testDate(UTC);
+ testDate(jvmZone);
+        // using two non-JVM zones so that we don't need to worry about what the DuckDB system zone is
+ testDate(vilnius);
+ testDate(kathmandu);
+ testDate(TestingSession.DEFAULT_TIME_ZONE_KEY.getZoneId());
+ }
+
+ private void testDate(ZoneId sessionZone)
+ {
+ Session session = Session.builder(getSession())
+ .setTimeZoneKey(TimeZoneKey.getTimeZoneKey(sessionZone.getId()))
+ .build();
+
+ SqlDataTypeTest.create()
+ .addRoundTrip("date", "NULL", DATE, "CAST(NULL AS DATE)")
+ .addRoundTrip("date", "DATE '0001-01-01'", DATE, "DATE '0001-01-01'")
+ .addRoundTrip("date", "DATE '1582-09-30'", DATE, "DATE '1582-09-30'")
+ .addRoundTrip("date", "DATE '1582-10-01'", DATE, "DATE '1582-10-01'")
+ .addRoundTrip("date", "DATE '1582-10-02'", DATE, "DATE '1582-10-02'")
+ .addRoundTrip("date", "DATE '1582-10-03'", DATE, "DATE '1582-10-03'")
+ .addRoundTrip("date", "DATE '1582-10-04'", DATE, "DATE '1582-10-04'")
+ .addRoundTrip("date", "DATE '1582-10-05'", DATE, "DATE '1582-10-05'") // Julian-Gregorian calendar cut-over
+ .addRoundTrip("date", "DATE '1582-10-13'", DATE, "DATE '1582-10-13'") // Julian-Gregorian calendar cut-over
+ .addRoundTrip("date", "DATE '1582-10-14'", DATE, "DATE '1582-10-14'")
+ .addRoundTrip("date", "DATE '1582-10-15'", DATE, "DATE '1582-10-15'")
+ .addRoundTrip("date", "DATE '1970-01-01'", DATE, "DATE '1970-01-01'")
+ .addRoundTrip("date", "DATE '1970-02-03'", DATE, "DATE '1970-02-03'")
+ .addRoundTrip("date", "DATE '2017-07-01'", DATE, "DATE '2017-07-01'") // summer on northern hemisphere (possible DST)
+ .addRoundTrip("date", "DATE '2017-01-01'", DATE, "DATE '2017-01-01'") // winter on northern hemisphere (possible DST on southern hemisphere)
+ .addRoundTrip("date", "DATE '1970-01-01'", DATE, "DATE '1970-01-01'") // change forward at midnight in JVM
+ .addRoundTrip("date", "DATE '1983-04-01'", DATE, "DATE '1983-04-01'") // change forward at midnight in Vilnius
+ .addRoundTrip("date", "DATE '1983-10-01'", DATE, "DATE '1983-10-01'") // change backward at midnight in Vilnius
+ .addRoundTrip("date", "DATE '9999-12-31'", DATE, "DATE '9999-12-31'")
+ .execute(getQueryRunner(), session, duckDbCreateAndInsert(TPCH_SCHEMA + ".test_date"))
+ .execute(getQueryRunner(), session, trinoCreateAsSelect(TPCH_SCHEMA + ".test_date"))
+ .execute(getQueryRunner(), session, trinoCreateAndInsert(TPCH_SCHEMA + ".test_date"));
+ }
+
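+    // The setups below push the same values through three write paths: native DuckDB
+    // CREATE TABLE + INSERT, Trino CREATE TABLE AS SELECT, and Trino CREATE TABLE
+    // followed by INSERT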
+ private DataSetup duckDbCreateAndInsert(String tableNamePrefix)
+ {
+ return new CreateAndInsertDataSetup(duckDb.getSqlExecutor(), tableNamePrefix);
+ }
+
+ private DataSetup trinoCreateAsSelect(String tableNamePrefix)
+ {
+ return trinoCreateAsSelect(getSession(), tableNamePrefix);
+ }
+
+ private DataSetup trinoCreateAsSelect(Session session, String tableNamePrefix)
+ {
+ return new CreateAsSelectDataSetup(new TrinoSqlExecutor(getQueryRunner(), session), tableNamePrefix);
+ }
+
+ private DataSetup trinoCreateAndInsert(String tableNamePrefix)
+ {
+ return new CreateAndInsertDataSetup(new TrinoSqlExecutor(getQueryRunner()), tableNamePrefix);
+ }
+
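+    // a "gap" is a local time skipped by a forward (spring) DST transition, so it
+    // has no valid offset in the zone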
+ private static void checkIsGap(ZoneId zone, LocalDateTime dateTime)
+ {
+ verify(isGap(zone, dateTime), "Expected %s to be a gap in %s", dateTime, zone);
+ }
+
+ private static boolean isGap(ZoneId zone, LocalDateTime dateTime)
+ {
+ return zone.getRules().getValidOffsets(dateTime).isEmpty();
+ }
+
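+    // a "doubled" local time occurs twice during a backward (fall) DST transition,
+    // so the zone rules return two valid offsets for it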
+ private static void checkIsDoubled(ZoneId zone, LocalDateTime dateTime)
+ {
+ verify(zone.getRules().getValidOffsets(dateTime).size() == 2, "Expected %s to be doubled in %s", dateTime, zone);
+ }
+}
diff --git a/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestingDuckDb.java b/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestingDuckDb.java
new file mode 100644
index 0000000000000..7f208a985e6ae
--- /dev/null
+++ b/plugin/trino-duckdb/src/test/java/io/trino/plugin/duckdb/TestingDuckDb.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.duckdb;
+
+import io.trino.testing.sql.JdbcSqlExecutor;
+import org.intellij.lang.annotations.Language;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Properties;
+
+public class TestingDuckDb
+ implements Closeable
+{
+ public static final String TPCH_SCHEMA = "tpch";
+
+ private final Path path;
+
+ public TestingDuckDb()
+ throws IOException
+ {
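+        // reserve a unique path, then delete the file so DuckDB can create its own
+        // database file at that location on first connection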
+ path = Files.createTempFile(null, null);
+ Files.delete(path);
+ }
+
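+    // embedded DuckDB JDBC URLs have the form jdbc:duckdb:<database-file-path>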
+ public String getJdbcUrl()
+ {
+ return "jdbc:duckdb:" + path.toString();
+ }
+
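+    // opens a fresh connection per statement; DuckDB is embedded, so the database
+    // lives entirely in the file created above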
+ public void execute(@Language("SQL") String sql)
+ {
+ try (Connection connection = DriverManager.getConnection(getJdbcUrl(), new Properties());
+ Statement statement = connection.createStatement()) {
+ //noinspection SqlSourceToSinkFlow
+ statement.execute(sql);
+ }
+ catch (SQLException e) {
+ throw new RuntimeException("Failed to execute statement '" + sql + "'", e);
+ }
+ }
+
+ public JdbcSqlExecutor getSqlExecutor()
+ {
+ return new JdbcSqlExecutor(getJdbcUrl(), new Properties());
+ }
+
+ @Override
+ public void close()
+ throws IOException
+ {
+ Files.delete(path);
+ }
+}
diff --git a/pom.xml b/pom.xml
index bd282730530fa..ba5fa62807822 100644
--- a/pom.xml
+++ b/pom.xml
@@ -66,6 +66,7 @@
plugin/trino-clickhouse
plugin/trino-delta-lake
plugin/trino-druid
+ plugin/trino-duckdb
plugin/trino-elasticsearch
plugin/trino-example-http
plugin/trino-example-jdbc
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java
index 3c83c5f41cca3..2f564b65737a9 100644
--- a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvMultinodeAllConnectors.java
@@ -51,6 +51,7 @@ public void extendEnvironment(Environment.Builder builder)
"clickhouse",
"delta_lake",
"druid",
+ "duckdb",
"elasticsearch",
"faker",
"gsheets",
diff --git a/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/multinode-all/duckdb.properties b/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/multinode-all/duckdb.properties
new file mode 100644
index 0000000000000..8e23c0b5e7d70
--- /dev/null
+++ b/testing/trino-product-tests-launcher/src/main/resources/docker/trino-product-tests/conf/environment/multinode-all/duckdb.properties
@@ -0,0 +1,2 @@
+connector.name=duckdb
+connection-url=jdbc:duckdb://