6 changes: 0 additions & 6 deletions .github/workflows/ci.yml
@@ -878,7 +878,6 @@ jobs:
- suite-7-non-generic
- suite-hive-transactional
- suite-azure
- suite-delta-lake-databricks91
- suite-delta-lake-databricks104
- suite-delta-lake-databricks113
- suite-delta-lake-databricks122
@@ -919,9 +918,6 @@ jobs:
ignore exclusion if: >-
${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.GCP_CREDENTIALS_KEY != '' }}

- suite: suite-delta-lake-databricks91
ignore exclusion if: >-
${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
- suite: suite-delta-lake-databricks104
ignore exclusion if: >-
${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
@@ -989,7 +985,6 @@ jobs:
AWS_REGION: ""
TRINO_AWS_ACCESS_KEY_ID: ""
TRINO_AWS_SECRET_ACCESS_KEY: ""
DATABRICKS_91_JDBC_URL: ""
DATABRICKS_104_JDBC_URL: ""
DATABRICKS_113_JDBC_URL: ""
DATABRICKS_122_JDBC_URL: ""
@@ -1067,7 +1062,6 @@ jobs:
AWS_REGION: ${{ vars.TRINO_AWS_REGION }}
TRINO_AWS_ACCESS_KEY_ID: ${{ vars.TRINO_AWS_ACCESS_KEY_ID }}
TRINO_AWS_SECRET_ACCESS_KEY: ${{ secrets.TRINO_AWS_SECRET_ACCESS_KEY }}
DATABRICKS_91_JDBC_URL: ${{ vars.DATABRICKS_91_JDBC_URL }}
DATABRICKS_104_JDBC_URL: ${{ vars.DATABRICKS_104_JDBC_URL }}
DATABRICKS_113_JDBC_URL: ${{ vars.DATABRICKS_113_JDBC_URL }}
DATABRICKS_122_JDBC_URL: ${{ vars.DATABRICKS_122_JDBC_URL }}
4 changes: 2 additions & 2 deletions plugin/trino-delta-lake/README.md
@@ -163,15 +163,15 @@ for setting up your Databricks personal access token.
### Test the functionality of the Databricks Delta Lake product test environment


Run the following command for spinning up the Databricks 9.1 Delta Lake product test
Run the following command for spinning up the Databricks 15.4 Delta Lake product test
environment for Trino:

```
env S3_BUCKET=my-s3-bucket \
AWS_REGION=us-east-2 \
TRINO_AWS_SECRET_ACCESS_KEY=xxx \
TRINO_AWS_ACCESS_KEY_ID=xxx \
DATABRICKS_91_JDBC_URL='xxx' \
DATABRICKS_154_JDBC_URL='xxx' \
DATABRICKS_LOGIN=token \
DATABRICKS_TOKEN=xxx \
testing/bin/ptl env up --environment singlenode-delta-lake-databricks154
```
@@ -123,6 +123,7 @@ public class TestDeltaLakeBasic
new ResourceTable("stats_with_minmax_nulls", "deltalake/stats_with_minmax_nulls"),
new ResourceTable("no_column_stats", "databricks73/no_column_stats"),
new ResourceTable("liquid_clustering", "deltalake/liquid_clustering"),
new ResourceTable("region_91_lts", "databricks91/region"),
new ResourceTable("timestamp_ntz", "databricks131/timestamp_ntz"),
new ResourceTable("timestamp_ntz_partition", "databricks131/timestamp_ntz_partition"),
new ResourceTable("uniform_hudi", "deltalake/uniform_hudi"),
@@ -206,6 +207,14 @@ public void testSimpleQueries()
}
}

@Test
void testDatabricks91()
{
assertThat(query("SELECT * FROM region_91_lts"))
.skippingTypesCheck() // name and comment columns are unbounded varchar in Delta Lake and bounded varchar in TPCH
.matches("SELECT * FROM tpch.tiny.region");
}

@Test
public void testNoColumnStats()
{
@@ -0,0 +1,13 @@
Data generated using Databricks 9.1:

```sql
CREATE TABLE default.region (regionkey bigint, name string, comment string)
USING DELTA LOCATION 's3://trino-ci-test/default/region';

INSERT INTO default.region VALUES
(0, 'AFRICA', 'lar deposits. blithely final packages cajole. regular waters are final requests. regular accounts are according to '),
(1, 'AMERICA', 'hs use ironic, even requests. s'),
(2, 'ASIA', 'ges. thinly even pinto beans ca'),
(3, 'EUROPE', 'ly final courts cajole furiously final excuse'),
(4, 'MIDDLE EAST', 'uickly special accounts cajole carefully blithely close requests. carefully final asymptotes haggle furiousl');
```
@@ -0,0 +1,3 @@
{"protocol":{"minReaderVersion":1,"minWriterVersion":2}}
{"metaData":{"id":"2e681e6c-a691-4e01-b6f5-7be057fba57e","format":{"provider":"parquet","options":{}},"schemaString":"{\"type\":\"struct\",\"fields\":[{\"name\":\"regionkey\",\"type\":\"long\",\"nullable\":true,\"metadata\":{}},{\"name\":\"name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"comment\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}}]}","partitionColumns":[],"configuration":{},"createdTime":1734911306140}}
{"commitInfo":{"timestamp":1734911306279,"userId":"7853186923043731","userName":"[email protected]","operation":"CREATE TABLE","operationParameters":{"isManaged":"false","description":null,"partitionBy":"[]","properties":"{}"},"notebook":{"notebookId":"1841155838656679"},"clusterId":"0705-100855-ka5jat1a","isolationLevel":"SnapshotIsolation","isBlindAppend":true,"operationMetrics":{}}}
@@ -0,0 +1,3 @@
{"add":{"path":"part-00000-3a487bbf-c1d3-404f-b99f-d749d1c7419e-c000.snappy.parquet","partitionValues":{},"size":1408,"modificationTime":1734911311000,"dataChange":true,"stats":"{\"numRecords\":2,\"minValues\":{\"regionkey\":0,\"name\":\"AFRICA\",\"comment\":\"hs use ironic, even requests. s\"},\"maxValues\":{\"regionkey\":1,\"name\":\"AMERICA\",\"comment\":\"lar deposits. blithely final pac�\"},\"nullCount\":{\"regionkey\":0,\"name\":0,\"comment\":0}}","tags":{"INSERTION_TIME":"1734911311000000","OPTIMIZE_TARGET_SIZE":"268435456"}}}
{"add":{"path":"part-00001-87059848-17f8-4dc0-a2d6-4f2a6760e3b9-c000.snappy.parquet","partitionValues":{},"size":1446,"modificationTime":1734911311000,"dataChange":true,"stats":"{\"numRecords\":3,\"minValues\":{\"regionkey\":2,\"name\":\"ASIA\",\"comment\":\"ges. thinly even pinto beans ca\"},\"maxValues\":{\"regionkey\":4,\"name\":\"MIDDLE EAST\",\"comment\":\"uickly special accounts cajole c�\"},\"nullCount\":{\"regionkey\":0,\"name\":0,\"comment\":0}}","tags":{"INSERTION_TIME":"1734911311000001","OPTIMIZE_TARGET_SIZE":"268435456"}}}
{"commitInfo":{"timestamp":1734911310660,"userId":"7853186923043731","userName":"[email protected]","operation":"WRITE","operationParameters":{"mode":"Append","partitionBy":"[]"},"notebook":{"notebookId":"1841155838656679"},"clusterId":"0705-100855-ka5jat1a","readVersion":0,"isolationLevel":"WriteSerializable","isBlindAppend":true,"operationMetrics":{"numFiles":"2","numOutputRows":"5","numOutputBytes":"2854"}}}
Binary file not shown.
Binary file not shown.
@@ -102,7 +102,7 @@ public final class TestGroups
public static final String DELTA_LAKE_DATABRICKS_133 = "delta-lake-databricks-133";
public static final String DELTA_LAKE_DATABRICKS_143 = "delta-lake-databricks-143";
public static final String DATABRICKS_UNITY_HTTP_HMS = "databricks-unity-http-hms";
public static final String DELTA_LAKE_EXCLUDE_91 = "delta-lake-exclude-91";
public static final String DELTA_LAKE_EXCLUDE_104 = "delta-lake-exclude-104";
public static final String DELTA_LAKE_ALLUXIO_CACHING = "delta-lake-alluxio-caching";
public static final String HUDI = "hudi";
public static final String PARQUET = "parquet";

This file was deleted.

@@ -22,7 +22,8 @@
import java.util.List;

import static io.trino.tests.product.TestGroups.CONFIGURED_FEATURES;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_104;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_104;
import static io.trino.tests.product.launcher.suite.SuiteTestRun.testOnEnvironment;

public class SuiteDeltaLakeDatabricks104
@@ -33,7 +34,8 @@ public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
{
return ImmutableList.of(
testOnEnvironment(EnvSinglenodeDeltaLakeDatabricks104.class)
.withGroups(CONFIGURED_FEATURES, DELTA_LAKE_DATABRICKS_104)
.withGroups(CONFIGURED_FEATURES, DELTA_LAKE_DATABRICKS)
.withExcludedGroups(DELTA_LAKE_EXCLUDE_104)
.withExcludedTests(getExcludedTests())
.build());
}

This file was deleted.

@@ -25,7 +25,7 @@
import static io.trino.tempto.assertions.QueryAssert.assertQueryFailure;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_91;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_104;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.deltalake.util.DatabricksVersion.DATABRICKS_143_RUNTIME_VERSION;
@@ -244,7 +244,7 @@ public void testTrinoPreservesReaderAndWriterVersions()
}
}

@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_91, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_104, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testTrinoPreservesTableFeature()
{
@@ -29,7 +29,7 @@
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_122;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_91;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_104;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.deltalake.util.DatabricksVersion.DATABRICKS_143_RUNTIME_VERSION;
@@ -192,7 +192,7 @@ public void testGeneratedColumnWithNonLowerCaseColumnName()
}

// Exclude 10.4 because it throws MISSING_COLUMN when executing INSERT statement
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_91, PROFILE_SPECIFIC_TESTS})
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_104, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testIdentityColumnWithNonLowerCaseColumnName()
{
@@ -36,7 +36,7 @@
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_122;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_91;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_104;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.DATABRICKS_COMMUNICATION_FAILURE_ISSUE;
@@ -64,7 +64,7 @@ public void setup()
s3Client = new S3ClientFactory().createS3Client(s3ServerType);
}

@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_91, PROFILE_SPECIFIC_TESTS}, dataProvider = "columnMappingModeDataProvider")
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS}, dataProvider = "columnMappingModeDataProvider")
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testUpdateTableWithCdf(String columnMappingMode)
{
@@ -100,7 +100,7 @@ public void testUpdateTableWithCdf(String columnMappingMode)
}
}

@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_91, PROFILE_SPECIFIC_TESTS})
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_104, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testUpdateTableWithChangeDataFeedWriterFeature()
{
@@ -201,7 +201,7 @@ public void testUpdateCdfTableWithNonLowercaseColumn(String columnMappingMode)
}
}

@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_91, PROFILE_SPECIFIC_TESTS}, dataProvider = "columnMappingModeDataProvider")
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS}, dataProvider = "columnMappingModeDataProvider")
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testUpdatePartitionedTableWithCdf(String columnMappingMode)
{
@@ -387,7 +387,7 @@ public void testUpdateTableWithCdfEnabledAfterTableIsAlreadyCreated()
}
}

@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_91, PROFILE_SPECIFIC_TESTS}, dataProvider = "columnMappingModeDataProvider")
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS}, dataProvider = "columnMappingModeDataProvider")
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testDeleteFromTableWithCdf(String columnMappingMode)
{
@@ -533,7 +533,7 @@ public void testMergeDeleteIntoTableWithCdfEnabled(String columnMappingMode)
}
}

@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_104, DELTA_LAKE_DATABRICKS_113, DELTA_LAKE_DATABRICKS_122, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_OSS, DELTA_LAKE_EXCLUDE_91, PROFILE_SPECIFIC_TESTS})
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_104, DELTA_LAKE_DATABRICKS_113, DELTA_LAKE_DATABRICKS_122, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testMergeMixedDeleteAndUpdateIntoTableWithCdfEnabled()
{
@@ -28,7 +28,7 @@
import static io.trino.tempto.assertions.QueryAssert.Row.row;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_91;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_104;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.DATABRICKS_COMMUNICATION_FAILURE_ISSUE;
@@ -241,7 +241,7 @@ public void testReadFromSchemaChangedShallowCloneTable()
testReadSchemaChangedCloneTable("SHALLOW", false);
}

@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_91, PROFILE_SPECIFIC_TESTS})
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_104, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testReadFromSchemaChangedDeepCloneTable()
{