Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -715,7 +715,6 @@ jobs:
(env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || env.AWS_ACCESS_KEY_ID != '' || env.AWS_SECRET_ACCESS_KEY != '' || env.GCP_CREDENTIALS_KEY != '')
run: |
$MAVEN test ${MAVEN_TEST} -pl :trino-iceberg ${{ format('-P {0}', matrix.profile) }} \
-Ds3.bucket=${S3_BUCKET} \
-Dtesting.gcp-storage-bucket="trino-ci-test-us-east" \
-Dtesting.gcp-credentials-key="${GCP_CREDENTIALS_KEY}" \
-Dhive.hadoop2.azure-abfs-container="${ABFS_CONTAINER}" \
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,6 @@
import static com.google.common.collect.Sets.difference;
import static com.google.common.primitives.Ints.max;
import static io.trino.filesystem.Locations.appendPath;
import static io.trino.filesystem.Locations.getParent;
import static io.trino.plugin.deltalake.DataFileInfo.DataFileType.DATA;
import static io.trino.plugin.deltalake.DeltaLakeAnalyzeProperties.AnalyzeMode.FULL_REFRESH;
import static io.trino.plugin.deltalake.DeltaLakeAnalyzeProperties.AnalyzeMode.INCREMENTAL;
Expand Down Expand Up @@ -1903,7 +1902,7 @@ private void checkWriteAllowed(ConnectorSession session, DeltaLakeTableHandle ta
private boolean allowWrite(ConnectorSession session, DeltaLakeTableHandle tableHandle)
{
try {
String tableMetadataDirectory = appendPath(getParent(tableHandle.getLocation()), tableHandle.getTableName());
String tableMetadataDirectory = getTransactionLogDir(tableHandle.getLocation());
boolean requiresOptIn = transactionLogWriterFactory.newWriter(session, tableMetadataDirectory).isUnsafe();
return !requiresOptIn || unsafeWritesEnabled;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
package io.trino.plugin.deltalake.metastore.glue;

import com.google.common.collect.ImmutableMap;
import io.trino.hdfs.TrinoFileSystemCache;
import io.trino.plugin.deltalake.DeltaLakeQueryRunner;
import io.trino.plugin.hive.BaseS3AndGlueMetastoreTest;
import io.trino.testing.DistributedQueryRunner;
Expand Down Expand Up @@ -42,8 +41,6 @@ public TestDeltaS3AndGlueMetastoreTest()
protected QueryRunner createQueryRunner()
throws Exception
{
closeAfterClass(TrinoFileSystemCache.INSTANCE::closeAll);

metastore = createTestingGlueHiveMetastore(Path.of(schemaPath()));
DistributedQueryRunner queryRunner = DeltaLakeQueryRunner.builder()
.setCatalogName(DELTA_CATALOG)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1275,12 +1275,24 @@ private static String validateAvroSchemaLiteral(String avroSchemaLiteral)

/**
 * Parses and validates a user-supplied external table location.
 * <p>
 * Rejects strings that are not valid file system URIs, and rejects paths
 * containing double slashes, which Hadoop Path silently "normalizes" away
 * and therefore cannot be represented faithfully by all connector components.
 *
 * @param location external location string from the table property
 * @return the parsed {@link Location}
 * @throws TrinoException with {@code INVALID_TABLE_PROPERTY} if the location
 *         is not a valid URI or cannot be represented internally
 */
private static Location getValidatedExternalLocation(String location)
{
    Location validated;
    try {
        // Merged-diff artifact fixed: assign (do not return early) so the
        // double-slash validation below is actually reached.
        validated = Location.of(location);
    }
    catch (IllegalArgumentException e) {
        throw new TrinoException(INVALID_TABLE_PROPERTY, "External location is not a valid file system URI: " + location, e);
    }

    // TODO (https://github.com/trinodb/trino/issues/17803) We cannot accept locations with double slash until all relevant Hive connector components are migrated off Hadoop Path.
    // Hadoop Path "normalizes location", e.g.:
    // - removes double slashes (such locations are rejected),
    // - removes trailing slash (such locations are accepted; foo/bar and foo/bar/ are treated as equivalent, and rejecting locations with trailing slash could pose UX issues)
    // - replaces file:///<local-path> with file:/<local-path> (such locations are accepted).
    if (validated.path().contains("//")) {
        throw new TrinoException(INVALID_TABLE_PROPERTY, "Unsupported location that cannot be internally represented: " + location);
    }

    return validated;
}

private void checkExternalPath(HdfsContext context, Path path)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -453,7 +453,10 @@ public static Location getTableDefaultLocation(Database database, HdfsContext co
}
}

return Location.of(location).appendPath(escapeTableName(tableName));
// Note: this results in `databaseLocation` being a "normalized location", e.g. not containing double slashes.
// TODO (https://github.com/trinodb/trino/issues/17803): We need to use normalized location until all relevant Hive connector components are migrated off Hadoop Path.
Location databaseLocation = Location.of(databasePath.toString());
return databaseLocation.appendPath(escapeTableName(tableName));
}

public static boolean pathExists(HdfsContext context, HdfsEnvironment hdfsEnvironment, Path path)
Expand Down
Loading