Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
74 changes: 42 additions & 32 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -386,37 +386,21 @@ jobs:
touch gib-impacted.log
cat <<EOF > .github/test-matrix.yaml
include:
- { modules: core/trino-main }
- { modules: testing/trino-tests }
- { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-hive-1 }
- { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-hive-2 }
- { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-delta }
- { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-iceberg }
- { modules: plugin/trino-raptor-legacy }
- { modules: plugin/trino-accumulo }
- { modules: plugin/trino-cassandra }
- { modules: plugin/trino-clickhouse }
- { modules: plugin/trino-delta-lake }
- { modules: plugin/trino-hive }
- { modules: plugin/trino-hive, profile: test-parquet }
- { modules: plugin/trino-elasticsearch }
- { modules: plugin/trino-elasticsearch, profile: test-stats }
- { modules: plugin/trino-mongodb }
- { modules: plugin/trino-kafka }
- { modules: plugin/trino-pinot }
- { modules: plugin/trino-redis }
- { modules: plugin/trino-mysql }
- { modules: plugin/trino-postgresql }
- { modules: plugin/trino-sqlserver }
- { modules: plugin/trino-singlestore }
- { modules: plugin/trino-mariadb }
- { modules: plugin/trino-oracle }
- { modules: plugin/trino-kudu }
- { modules: plugin/trino-druid }
- { modules: plugin/trino-iceberg }
- { modules: plugin/trino-phoenix5 }
- { modules: [ client/trino-jdbc, plugin/trino-base-jdbc, plugin/trino-thrift, plugin/trino-memory ] }
- { modules: plugin/trino-bigquery }
- { modules: plugin/trino-delta-lake }
- { modules: plugin/trino-delta-lake }
- { modules: plugin/trino-delta-lake }
- { modules: plugin/trino-delta-lake }
- { modules: plugin/trino-delta-lake }
- { modules: plugin/trino-delta-lake }
- { modules: plugin/trino-delta-lake }
- { modules: plugin/trino-delta-lake }
- { modules: plugin/trino-delta-lake }
- { modules: plugin/trino-delta-lake }
- { modules: plugin/trino-delta-lake }
- { modules: plugin/trino-delta-lake }
- { modules: plugin/trino-delta-lake }
- { modules: plugin/trino-delta-lake }
EOF
./.github/bin/build-matrix-from-impacted.py -v -i gib-impacted.log -m .github/test-matrix.yaml -o matrix.json
echo "Matrix: $(jq '.' matrix.json)"
Expand Down Expand Up @@ -463,15 +447,18 @@ jobs:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESSKEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRETKEY }}
AWS_REGION: us-east-2
GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }}
# Run tests if any of the secrets is present. Do not skip tests when one secret renamed, or secret name has a typo.
if: >-
contains(matrix.modules, 'trino-delta-lake') &&
(env.ABFS_ACCOUNT != '' || env.ABFS_CONTAINER != '' || env.ABFS_ACCESSKEY != '' || env.AWS_ACCESS_KEY_ID != '' || env.AWS_SECRET_ACCESS_KEY != '' || env.GCP_CREDENTIALS_KEY != '')
run: |
$MAVEN test ${MAVEN_TEST} -P cloud-tests -pl :trino-delta-lake \
-Dhive.hadoop2.azure-abfs-container="${ABFS_CONTAINER}" \
-Dhive.hadoop2.azure-abfs-account="${ABFS_ACCOUNT}" \
-Dhive.hadoop2.azure-abfs-access-key="${ABFS_ACCESSKEY}" \
-Dtesting.gcp-storage-bucket="trino-ci-test" \
-Dtesting.gcp-credentials-key="${GCP_CREDENTIALS_KEY}"
- name: Memsql Tests
env:
MEMSQL_LICENSE: ${{ secrets.MEMSQL_LICENSE }}
Expand Down Expand Up @@ -533,6 +520,7 @@ jobs:
outputs:
have_azure_secrets: ${{ steps.check-secrets.outputs.have_azure_secrets }}
have_databricks_secrets: ${{ steps.check-databricks-secrets.outputs.have_databricks_secrets }}
have_gcp_secrets: ${{ steps.check-gcp-secrets.outputs.have_gcp_secrets }}
steps:
- uses: actions/checkout@v2
with:
Expand Down Expand Up @@ -569,6 +557,17 @@ jobs:
echo "Secrets to run Delta Databricks product tests were not configured in the repo"
echo "::set-output name=have_databricks_secrets::false"
fi
- name: Check GCP secrets
id: check-gcp-secrets
run: |
if [[ "${{ secrets.GCP_CREDENTIALS_KEY }}" != "" ]]; \
then
echo "Secrets to run GCP product tests were configured in the repo"
echo "::set-output name=have_gcp_secrets::true"
else
echo "Secrets to run GCP product tests were not configured in the repo"
echo "::set-output name=have_gcp_secrets::false"
fi
- name: Maven Install
run: |
export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}"
Expand Down Expand Up @@ -614,6 +613,7 @@ jobs:
- suite-5
- suite-azure
- suite-delta-lake-databricks
- suite-gcs
exclude:
- config: cdh5
ignore exclusion if: >-
Expand All @@ -638,6 +638,14 @@ jobs:
ignore exclusion if: >-
${{ needs.build-pt.outputs.have_azure_secrets == 'true' }}

- suite: suite-gcs
config: default
- suite: suite-gcs
config: cdh5
- suite: suite-gcs
ignore exclusion if: >-
${{ needs.build-pt.outputs.have_gcp_secrets == 'true' }}

- suite: suite-delta-lake-databricks
config: cdh5
- suite: suite-delta-lake-databricks
Expand Down Expand Up @@ -741,6 +749,8 @@ jobs:
DATABRICKS_104_JDBC_URL: ${{ secrets.DATABRICKS_104_JDBC_URL }}
DATABRICKS_LOGIN: token
DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }}
GCP_STORAGE_BUCKET: trino-ci-test
run: |
testing/bin/ptl suite run \
--suite ${{ matrix.suite }} \
Expand Down
2 changes: 2 additions & 0 deletions plugin/trino-delta-lake/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -388,6 +388,7 @@
<exclude>**/TestDeltaLakeCleanUpGlueMetastore.java</exclude>
<exclude>**/TestDeltaLakeSharedGlueMetastoreWithTableRedirections.java</exclude>
<exclude>**/TestDeltaLakeTableWithCustomLocationUsingGlueMetastore.java</exclude>
<exclude>**/TestDeltaLakeGcsConnectorSmokeTest.java</exclude>
</excludes>
</configuration>
</plugin>
Expand Down Expand Up @@ -434,6 +435,7 @@
<include>**/TestDeltaLakeCleanUpGlueMetastore.java</include>
<include>**/TestDeltaLakeSharedGlueMetastoreWithTableRedirections.java</include>
<include>**/TestDeltaLakeTableWithCustomLocationUsingGlueMetastore.java</include>
<include>**/TestDeltaLakeGcsConnectorSmokeTest.java</include>
</includes>
</configuration>
</plugin>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1651,10 +1651,18 @@ private void finishOptimize(ConnectorSession session, DeltaLakeTableExecuteHandl

/**
 * Determines whether writes to the given Delta Lake table should be allowed.
 * <p>
 * A write is permitted when the table's transaction-log writer is safe, or when it is
 * unsafe but the user explicitly opted in via {@code unsafeWritesEnabled}. If no
 * transaction-log synchronizer exists for the table location's filesystem scheme
 * (signalled by a {@link TrinoException} with {@code NOT_SUPPORTED}), writes are
 * refused up front instead of failing later during commit.
 *
 * @param session the connector session used for metastore access
 * @param tableHandle handle of the table being written to
 * @return {@code true} if writing to the table is allowed
 */
private boolean allowWrite(ConnectorSession session, DeltaLakeTableHandle tableHandle)
{
    try {
        String tableLocation = metastore.getTableLocation(tableHandle.getSchemaTableName(), session);
        // The transaction log lives under <parent-of-table-location>/<table-name>
        Path tableMetadataDirectory = new Path(new Path(tableLocation).getParent().toString(), tableHandle.getTableName());
        boolean requiresOptIn = transactionLogWriterFactory.newWriter(session, tableMetadataDirectory.toString()).isUnsafe();
        return !requiresOptIn || unsafeWritesEnabled;
    }
    catch (TrinoException e) {
        // NOT_SUPPORTED means the writer factory has no synchronizer for this
        // filesystem scheme — treat the table as read-only rather than propagate.
        if (e.getErrorCode() == NOT_SUPPORTED.toErrorCode()) {
            return false;
        }
        throw e;
    }
}

private void checkSupportedWriterVersion(ConnectorSession session, SchemaTableName schemaTableName)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,15 @@
package io.trino.plugin.deltalake.transactionlog.writer;

import com.google.common.collect.ImmutableMap;
import io.trino.spi.TrinoException;
import org.apache.hadoop.fs.Path;

import javax.inject.Inject;

import java.util.Map;

import static com.google.common.base.Preconditions.checkArgument;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static java.lang.String.format;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
Expand All @@ -45,7 +47,7 @@ public TransactionLogSynchronizer getSynchronizer(Path tableLocation)
checkArgument(uriScheme != null, "URI scheme undefined for " + tableLocation);
TransactionLogSynchronizer synchronizer = synchronizers.get(uriScheme.toLowerCase(ENGLISH));
if (synchronizer == null) {
throw new IllegalArgumentException(format("Cannot write to table in %s; %s not supported", tableLocation, uriScheme));
throw new TrinoException(NOT_SUPPORTED, format("Cannot write to table in %s; %s not supported", tableLocation, uriScheme));
}
return synchronizer;
}
Expand Down
Loading