diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 2b3e0fa992cc..c44b1210698e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -898,6 +898,7 @@ jobs:
           - suite-delta-lake-databricks104
           - suite-delta-lake-databricks113
           - suite-delta-lake-databricks122
+          - suite-delta-lake-databricks133
           - suite-gcs
           - suite-clients
           - suite-functions
@@ -947,6 +948,11 @@ jobs:
           - suite: suite-delta-lake-databricks122
             ignore exclusion if: >-
               ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
+          - suite: suite-delta-lake-databricks133
+            config: hdp3
+          - suite: suite-delta-lake-databricks133
+            ignore exclusion if: >-
+              ${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
 
         ignore exclusion if:
           # Do not use this property outside of the matrix configuration.
@@ -1043,6 +1049,7 @@ jobs:
           DATABRICKS_104_JDBC_URL:
           DATABRICKS_113_JDBC_URL:
           DATABRICKS_122_JDBC_URL:
+          DATABRICKS_133_JDBC_URL:
           DATABRICKS_LOGIN:
           DATABRICKS_TOKEN:
           GCP_CREDENTIALS_KEY:
@@ -1109,6 +1116,7 @@ jobs:
           DATABRICKS_104_JDBC_URL: ${{ secrets.DATABRICKS_104_JDBC_URL }}
           DATABRICKS_113_JDBC_URL: ${{ secrets.DATABRICKS_113_JDBC_URL }}
           DATABRICKS_122_JDBC_URL: ${{ secrets.DATABRICKS_122_JDBC_URL }}
+          DATABRICKS_133_JDBC_URL: ${{ secrets.DATABRICKS_133_JDBC_URL }}
           DATABRICKS_LOGIN: token
           DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
           GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }}
diff --git a/docs/src/main/sphinx/connector/delta-lake.md b/docs/src/main/sphinx/connector/delta-lake.md
index eb05d4291d73..efc835617d12 100644
--- a/docs/src/main/sphinx/connector/delta-lake.md
+++ b/docs/src/main/sphinx/connector/delta-lake.md
@@ -12,8 +12,8 @@ data.
 
 To connect to Databricks Delta Lake, you need:
 
-- Tables written by Databricks Runtime 7.3 LTS, 9.1 LTS, 10.4 LTS, 11.3 LTS, and
-  12.2 LTS are supported.
+- Tables written by Databricks Runtime 7.3 LTS, 9.1 LTS, 10.4 LTS, 11.3 LTS,
+  12.2 LTS, and 13.3 LTS are supported.
 - Deployments using AWS, HDFS, Azure Storage, and Google Cloud Storage (GCS) are
   fully supported.
 - Network access from the coordinator and workers to the Delta Lake storage.
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvSinglenodeDeltaLakeDatabricks133.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvSinglenodeDeltaLakeDatabricks133.java
new file mode 100644
index 000000000000..e054cec92822
--- /dev/null
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvSinglenodeDeltaLakeDatabricks133.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.tests.product.launcher.env.environment;
+
+import com.google.inject.Inject;
+import io.trino.tests.product.launcher.docker.DockerFiles;
+import io.trino.tests.product.launcher.env.common.Standard;
+import io.trino.tests.product.launcher.env.common.TestsEnvironment;
+
+import static java.util.Objects.requireNonNull;
+
+@TestsEnvironment
+public class EnvSinglenodeDeltaLakeDatabricks133
+        extends AbstractSinglenodeDeltaLakeDatabricks
+{
+    @Inject
+    public EnvSinglenodeDeltaLakeDatabricks133(Standard standard, DockerFiles dockerFiles)
+    {
+        super(standard, dockerFiles);
+    }
+
+    @Override
+    String databricksTestJdbcUrl()
+    {
+        return requireNonNull(System.getenv("DATABRICKS_133_JDBC_URL"), "Environment DATABRICKS_133_JDBC_URL was not set");
+    }
+}
diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteDeltaLakeDatabricks133.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteDeltaLakeDatabricks133.java
new file mode 100644
index 000000000000..dff965e3fe22
--- /dev/null
+++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/suite/suites/SuiteDeltaLakeDatabricks133.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.tests.product.launcher.suite.suites;
+
+import com.google.common.collect.ImmutableList;
+import io.trino.tests.product.launcher.env.EnvironmentConfig;
+import io.trino.tests.product.launcher.env.environment.EnvSinglenodeDeltaLakeDatabricks133;
+import io.trino.tests.product.launcher.suite.SuiteDeltaLakeDatabricks;
+import io.trino.tests.product.launcher.suite.SuiteTestRun;
+
+import java.util.List;
+
+import static io.trino.tests.product.launcher.suite.SuiteTestRun.testOnEnvironment;
+
+public class SuiteDeltaLakeDatabricks133
+        extends SuiteDeltaLakeDatabricks
+{
+    @Override
+    public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
+    {
+        return ImmutableList.of(
+                testOnEnvironment(EnvSinglenodeDeltaLakeDatabricks133.class)
+                        .withGroups("configured_features", "delta-lake-databricks")
+                        .withExcludedGroups("delta-lake-exclude-133")
+                        .withExcludedTests(getExcludedTests())
+                        .build());
+    }
+}
diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/TestGroups.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/TestGroups.java
index b50bcc6e436f..73b87bd093c7 100644
--- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/TestGroups.java
+++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/TestGroups.java
@@ -91,6 +91,7 @@ public final class TestGroups
     public static final String DELTA_LAKE_EXCLUDE_104 = "delta-lake-exclude-104";
     public static final String DELTA_LAKE_EXCLUDE_113 = "delta-lake-exclude-113";
     public static final String DELTA_LAKE_EXCLUDE_122 = "delta-lake-exclude-122";
+    public static final String DELTA_LAKE_EXCLUDE_133 = "delta-lake-exclude-133";
     public static final String HUDI = "hudi";
     public static final String PARQUET = "parquet";
     public static final String IGNITE = "ignite";
diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeDatabricksCreateTableAsSelectCompatibility.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeDatabricksCreateTableAsSelectCompatibility.java
index c545b166bf96..2a93460e5b89 100644
--- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeDatabricksCreateTableAsSelectCompatibility.java
+++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeDatabricksCreateTableAsSelectCompatibility.java
@@ -31,6 +31,7 @@
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_113;
 import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_122;
+import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_133;
 import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
 import static io.trino.tests.product.deltalake.TransactionLogAssertions.assertLastEntryIsCheckpointed;
 import static io.trino.tests.product.deltalake.TransactionLogAssertions.assertTransactionLogVersion;
@@ -221,8 +222,8 @@ public void testReplaceTableWithSchemaChange()
         }
     }
 
-    // Databricks 11.3 and 12.2 don't create a checkpoint file at 'CREATE OR REPLACE TABLE' statement
-    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_113, DELTA_LAKE_EXCLUDE_122, PROFILE_SPECIFIC_TESTS})
+    // Databricks 11.3, 12.2, and 13.3 don't create a checkpoint file for a 'CREATE OR REPLACE TABLE' statement
+    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_113, DELTA_LAKE_EXCLUDE_122, DELTA_LAKE_EXCLUDE_133, PROFILE_SPECIFIC_TESTS})
     @Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
     public void testReplaceTableWithSchemaChangeOnCheckpoint()
     {
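The new `DELTA_LAKE_EXCLUDE_133` group is consumed the same way as the existing per-runtime exclude groups: because `SuiteDeltaLakeDatabricks133` is built with `.withExcludedGroups("delta-lake-exclude-133")`, any product test tagged with the constant is skipped on the Databricks 13.3 environment while still running against the older runtimes. A minimal sketch of how a test would opt out; the class name and test method below are hypothetical and not part of this change:

```java
// Hypothetical illustration only; not part of the patch above.
package io.trino.tests.product.deltalake;

import org.testng.annotations.Test;

import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_133;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;

public class TestSomeDeltaLakeCompatibility
{
    // Tagging the test with DELTA_LAKE_EXCLUDE_133 keeps it out of
    // SuiteDeltaLakeDatabricks133, which excludes the "delta-lake-exclude-133"
    // group, while the test still runs in the other Databricks suites.
    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_133, PROFILE_SPECIFIC_TESTS})
    public void testBehaviorNotSupportedOnDatabricks133()
    {
        // test body omitted
    }
}
```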