Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
3bc72ba
Use multi-line format for CI multi-module list
electrum Aug 11, 2023
bbed9b5
Move ORC and Parquet modules to separate CI job
electrum Aug 11, 2023
dbcc4b8
Move file system modules to separate CI job
electrum Aug 11, 2023
2828c70
Include exception cause for invalid aggregation class
electrum Aug 10, 2023
cd5b988
Add missing parenthesis in ORC error message
electrum Aug 9, 2023
61c2a98
Fix tail reads for small ORC and Parquet files
electrum Aug 9, 2023
dc80d73
Use assertThat for testShowSchemas
electrum Aug 9, 2023
3771f68
Convert RoleGrantee to a record
electrum Aug 9, 2023
fd37bde
Convert OperationContext to a record
electrum Aug 7, 2023
00a82df
Skip recording expected exception for listFiles
electrum Aug 1, 2023
3771b34
Make stats field final
electrum Aug 2, 2023
b0cfaea
Update to S3Mock 3.0.1
electrum Aug 3, 2023
579197e
Move PrintingLogConsumer to testing-containers
electrum Aug 2, 2023
1bee281
Cleanup test dependencies for Hudi
electrum Jul 31, 2023
61f9a76
Remove unnecessary TestS3WrongRegionPicked
electrum Aug 10, 2023
dab612c
Handle concurrency failures for Hadoop file listings
electrum Aug 10, 2023
bbe054e
Add createDirectory method to TrinoFileSystem
electrum Aug 1, 2023
a7e56bd
Add renameDirectory method to TrinoFileSystem
electrum Aug 8, 2023
595925b
Run file system tests against Hadoop implementations
electrum Aug 3, 2023
17bc868
Preserve cause for all file system exceptions
electrum Aug 10, 2023
f81ed43
Remove usages of Hadoop Path for RegisterPartitionProcedure
electrum Aug 1, 2023
703a7fa
Remove usages of Hadoop Path for ThriftHiveMetastore
electrum Aug 1, 2023
83f5aff
Remove usages of Hadoop Path for GlueHiveMetastore
electrum Aug 6, 2023
304b5fa
Remove most usages of Hadoop from HiveMetadata
electrum Aug 6, 2023
79c0236
Remove usages of Hadoop from FileHiveMetastore
electrum Aug 10, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
41 changes: 33 additions & 8 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -444,40 +444,48 @@ jobs:
run: |
$MAVEN test ${MAVEN_TEST} -pl '
!:trino-accumulo,
!:trino-base-jdbc,
!:trino-bigquery,
!:trino-cassandra,
!:trino-clickhouse,
!:trino-delta-lake,
!:trino-docs,!:trino-server,!:trino-server-rpm,
!:trino-docs,
!:trino-druid,
!:trino-elasticsearch,
!:trino-faulttolerant-tests,
!:trino-filesystem,
!:trino-filesystem-azure,
!:trino-filesystem-manager,
!:trino-filesystem-s3,
!:trino-hdfs,
!:trino-hive,
!:trino-hudi,
!:trino-iceberg,
!:trino-ignite,
!:trino-jdbc,!:trino-base-jdbc,!:trino-thrift,!:trino-memory,
!:trino-jdbc,
!:trino-kafka,
!:trino-kudu,
!:trino-main,
!:trino-mariadb,
!:trino-memory,
!:trino-mongodb,
!:trino-mysql,
!:trino-oracle,
!:trino-orc,
!:trino-parquet,
!:trino-phoenix5,
!:trino-pinot,
!:trino-postgresql,
!:trino-raptor-legacy,
!:trino-redis,
!:trino-redshift,
!:trino-server,
!:trino-server-rpm,
!:trino-singlestore,
!:trino-sqlserver,
!:trino-test-jdbc-compatibility-old-server,
!:trino-tests'
- name: trino-hdfs isolated JVM tests
# Isolate HDFS file system cache concurrency test to avoid flakiness
run: |
$MAVEN test ${MAVEN_TEST} -pl :trino-hdfs -P test-isolated-jvm-suites
!:trino-tests,
!:trino-thrift'
- name: Upload test results
uses: actions/upload-artifact@v3
# Upload all test reports only on failure, because the artifacts are large
Expand Down Expand Up @@ -543,7 +551,20 @@ jobs:
touch gib-impacted.log
cat <<EOF > .github/test-matrix.yaml
include:
- { modules: [ client/trino-jdbc, plugin/trino-base-jdbc, plugin/trino-thrift, plugin/trino-memory ] }
- modules:
- client/trino-jdbc
- plugin/trino-base-jdbc
- plugin/trino-memory
- plugin/trino-thrift
- modules:
- lib/trino-orc
- lib/trino-parquet
- modules:
- lib/trino-filesystem
- lib/trino-filesystem-azure
- lib/trino-filesystem-manager
- lib/trino-filesystem-s3
- lib/trino-hdfs
- { modules: core/trino-main }
- { modules: core/trino-main, jdk: 19 }
- { modules: lib/trino-filesystem-s3, profile: cloud-tests }
Expand Down Expand Up @@ -633,6 +654,10 @@ jobs:
&& ! (contains(matrix.modules, 'trino-filesystem-s3') && contains(matrix.profile, 'cloud-tests'))
run: $MAVEN test ${MAVEN_TEST} -pl ${{ matrix.modules }} ${{ matrix.profile != '' && format('-P {0}', matrix.profile) || '' }}
# Additional tests for selected modules
- name: HDFS file system cache isolated JVM tests
if: contains(matrix.modules, 'trino-hdfs')
run: |
$MAVEN test ${MAVEN_TEST} -pl :trino-hdfs -P test-isolated-jvm-suites
- name: S3 FileSystem Cloud Tests
env:
AWS_ACCESS_KEY_ID: ${{ secrets.TRINO_AWS_ACCESS_KEY_ID }}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ public static List<SqlAggregationFunction> createFunctionsByAnnotations(Class<?>
return ImmutableList.copyOf(AggregationFromAnnotationsParser.parseFunctionDefinitions(aggregationDefinition));
}
catch (RuntimeException e) {
throw new IllegalArgumentException("Invalid aggregation class " + aggregationDefinition.getSimpleName());
throw new IllegalArgumentException("Invalid aggregation class " + aggregationDefinition.getSimpleName(), e);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -299,6 +299,61 @@ public Optional<Boolean> directoryExists(Location location)
}
}

/**
 * Creates the directory when the storage account uses a hierarchical
 * namespace (ADLS Gen2); a no-op for flat blob storage accounts, which
 * have no directory concept.
 */
@Override
public void createDirectory(Location location)
        throws IOException
{
    AzureLocation azure = new AzureLocation(location);
    if (!isHierarchicalNamespaceEnabled(azure)) {
        // Flat namespace: directories exist only implicitly, nothing to create
        return;
    }
    try {
        // Idempotent on the Azure side: an existing path is returned as-is
        DataLakeDirectoryClient directory = createFileSystemClient(azure)
                .createDirectoryIfNotExists(azure.path());
        // The path may already exist as a regular file, which is an error
        if (!directory.getProperties().isDirectory()) {
            throw new IOException("Location is not a directory: " + azure);
        }
    }
    catch (RuntimeException e) {
        // Translate Azure SDK exceptions into IOException/FileNotFoundException
        throw handleAzureException(e, "creating directory", azure);
    }
}

/**
 * Renames a directory using the Data Lake server-side rename. Both locations
 * must be in the same storage account and container, and the account must
 * have a hierarchical namespace (ADLS Gen2).
 */
@Override
public void renameDirectory(Location source, Location target)
        throws IOException
{
    AzureLocation from = new AzureLocation(source);
    AzureLocation to = new AzureLocation(target);

    // Server-side rename only works within a single account and container
    if (!from.account().equals(to.account())) {
        throw new IOException("Cannot rename across storage accounts");
    }
    if (!Objects.equals(from.container(), to.container())) {
        throw new IOException("Cannot rename across storage account containers");
    }
    // Flat blob storage has no real directories to rename
    if (!isHierarchicalNamespaceEnabled(from)) {
        throw new IOException("Azure non-hierarchical does not support directory renames");
    }
    // Renaming to or from a container root is not supported
    if (from.path().isEmpty() || to.path().isEmpty()) {
        throw new IOException("Cannot rename %s to %s".formatted(source, target));
    }

    try {
        DataLakeDirectoryClient directory = createFileSystemClient(from).getDirectoryClient(from.path());
        if (!directory.exists()) {
            throw new IOException("Source directory does not exist: " + source);
        }
        if (!directory.getProperties().isDirectory()) {
            throw new IOException("Source is not a directory: " + source);
        }
        // A null destination file system keeps the rename within the same container
        directory.rename(null, to.path());
    }
    catch (RuntimeException e) {
        throw new IOException("Rename directory from %s to %s failed".formatted(source, target), e);
    }
}

private boolean isHierarchicalNamespaceEnabled(AzureLocation location)
throws IOException
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,17 +30,23 @@ public static IOException handleAzureException(RuntimeException exception, Strin
{
if (exception instanceof BlobStorageException blobStorageException) {
if (BlobErrorCode.BLOB_NOT_FOUND.equals(blobStorageException.getErrorCode())) {
throw new FileNotFoundException(location.toString());
throw withCause(new FileNotFoundException(location.toString()), exception);
}
}
if (exception instanceof DataLakeStorageException dataLakeStorageException) {
if ("PathNotFound".equals(dataLakeStorageException.getErrorCode())) {
throw new FileNotFoundException(location.toString());
throw withCause(new FileNotFoundException(location.toString()), exception);
}
}
if (exception instanceof AzureException) {
throw new IOException("Azure service error %s file: %s".formatted(action, location), exception);
}
throw new IOException("Error %s file: %s".formatted(action, location), exception);
}

/**
 * Attaches {@code cause} to {@code throwable} and returns the same instance,
 * preserving the caller's static exception type so a cause can be attached
 * within a single throw expression.
 */
private static <T extends Throwable> T withCause(T throwable, Throwable cause)
{
    throwable.initCause(cause);
    return throwable;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,20 @@ public Optional<Boolean> directoryExists(Location location)
return fileSystem(location).directoryExists(location);
}

// Routes the call to the underlying file system selected for this location.
@Override
public void createDirectory(Location location)
        throws IOException
{
    fileSystem(location).createDirectory(location);
}

// Routes the call to the underlying file system selected for the source location.
// NOTE(review): only the source picks the delegate — assumes cross-file-system
// renames are rejected by the delegate implementation; confirm.
@Override
public void renameDirectory(Location source, Location target)
        throws IOException
{
    fileSystem(source).renameDirectory(source, target);
}

private TrinoFileSystem fileSystem(Location location)
{
return createFileSystem(determineFactory(location));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -197,6 +197,20 @@ public Optional<Boolean> directoryExists(Location location)
return Optional.empty();
}

// Validation-only no-op: S3 is a flat object store, so directories exist
// only implicitly as key prefixes and never need to be created.
@Override
public void createDirectory(Location location)
{
    validateS3Location(location);
    // S3 does not have directories
}

// Always fails: S3 has no directory concept and no server-side rename
// primitive for key prefixes.
@Override
public void renameDirectory(Location source, Location target)
        throws IOException
{
    throw new IOException("S3 does not support directory renames");
}

@SuppressWarnings("ResultOfObjectAllocationIgnored")
private static void validateS3Location(Location location)
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,9 @@ private void seekStream()
streamPosition = nextReadPosition;
}
catch (NoSuchKeyException e) {
throw new FileNotFoundException(location.toString());
var ex = new FileNotFoundException(location.toString());
ex.initCause(e);
throw ex;
}
catch (SdkException e) {
throw new IOException("Failed to open S3 file: " + location, e);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@

import com.adobe.testing.s3mock.testcontainers.S3MockContainer;
import io.airlift.units.DataSize;
import org.junit.jupiter.api.Test;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
Expand All @@ -25,16 +24,14 @@

import java.net.URI;

import static org.assertj.core.api.Assertions.assertThatThrownBy;

@Testcontainers
public class TestS3FileSystemS3Mock
extends AbstractTestS3FileSystem
{
private static final String BUCKET = "test-bucket";

@Container
private static final S3MockContainer S3_MOCK = new S3MockContainer("2.11.0")
private static final S3MockContainer S3_MOCK = new S3MockContainer("3.0.1")
.withInitialBuckets(BUCKET);

@Override
Expand Down Expand Up @@ -66,12 +63,4 @@ protected S3FileSystemFactory createS3FileSystemFactory()
.setPathStyleAccess(true)
.setStreamingPartSize(DataSize.valueOf("5.5MB")));
}

// TODO: remove when fixed in S3Mock: https://github.com/adobe/S3Mock/pull/1131
@Override
@Test
public void testInputFile()
{
assertThatThrownBy(super::testInputFile).isInstanceOf(AssertionError.class);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -169,4 +169,25 @@ FileIterator listFiles(Location location)
*/
Optional<Boolean> directoryExists(Location location)
throws IOException;

/**
 * Creates the specified directory and any parent directories that do not exist.
 * For hierarchical file systems, if the location already exists but is not a
 * directory, or if the directory cannot be created, an exception is raised.
 * This method does nothing for non-hierarchical file systems or if the directory
 * already exists.
 *
 * @throws IllegalArgumentException if location is not valid for this file system
 * @throws IOException if the directory could not be created
 */
void createDirectory(Location location)
        throws IOException;

/**
 * Renames source to target. An exception is raised if the target already exists,
 * or on non-hierarchical file systems.
 *
 * @throws IllegalArgumentException if source or target is not valid for this file system
 * @throws IOException if the rename fails, the target already exists, or the
 *         file system does not support directory renames
 */
void renameDirectory(Location source, Location target)
        throws IOException;
}
Original file line number Diff line number Diff line change
Expand Up @@ -157,6 +157,43 @@ public Optional<Boolean> directoryExists(Location location)
return Optional.of(Files.isDirectory(toDirectoryPath(location)));
}

// Creates the directory and any missing parent directories on the local file system.
@Override
public void createDirectory(Location location)
        throws IOException
{
    validateLocalLocation(location);
    try {
        // No-op when the directory already exists; fails if a non-directory is in the way
        Files.createDirectories(toDirectoryPath(location));
    }
    catch (IOException e) {
        // Wrap so the error message carries the original Location, not just the Path
        throw new IOException("Failed to create directory: " + location, e);
    }
}

// Renames a local directory, creating the target's parent directories as needed.
@Override
public void renameDirectory(Location source, Location target)
        throws IOException
{
    Path sourcePath = toDirectoryPath(source);
    Path targetPath = toDirectoryPath(target);

    // Validate preconditions outside the try block so their messages are not
    // double-wrapped by the generic failure handler below
    if (!Files.exists(sourcePath)) {
        throw new IOException("Source does not exist: " + source);
    }
    if (!Files.isDirectory(sourcePath)) {
        throw new IOException("Source is not a directory: " + source);
    }

    try {
        // getParent() is null when the target is a file system root, which
        // always exists and must not be passed to createDirectories
        Path targetParent = targetPath.getParent();
        if (targetParent != null) {
            Files.createDirectories(targetParent);
        }

        // Do not specify atomic move, as unix overwrites when atomic is enabled
        Files.move(sourcePath, targetPath);
    }
    catch (IOException e) {
        throw new IOException("Directory rename from %s to %s failed: %s".formatted(source, target, e.getMessage()), e);
    }
}

private Path toFilePath(Location location)
{
validateLocalLocation(location);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
import io.trino.filesystem.TrinoInputStream;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
Expand Down Expand Up @@ -66,7 +65,7 @@ public TrinoInput newInput()
return new LocalInput(location, path.toFile());
}
catch (IOException e) {
throw new FileNotFoundException(location.toString());
throw handleException(location, e);
}
}

Expand All @@ -77,8 +76,8 @@ public TrinoInputStream newStream()
try {
return new LocalInputStream(location, path.toFile());
}
catch (FileNotFoundException e) {
throw new FileNotFoundException(location.toString());
catch (IOException e) {
throw handleException(location, e);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,18 @@ private LocalUtils() {}
static IOException handleException(Location location, IOException exception)
throws IOException
{
if (exception instanceof NoSuchFileException) {
throw new FileNotFoundException(location.toString());
if (exception instanceof FileNotFoundException || exception instanceof NoSuchFileException) {
throw withCause(new FileNotFoundException(location.toString()), exception);
}
if (exception instanceof FileAlreadyExistsException) {
throw new FileAlreadyExistsException(location.toString());
throw withCause(new FileAlreadyExistsException(location.toString()), exception);
}
throw new IOException(exception.getMessage() + ": " + location, exception);
}

/**
 * Sets {@code cause} as the cause of {@code throwable} and returns the same
 * instance, so a throw site can attach a cause in one expression.
 */
private static <T extends Throwable> T withCause(T throwable, Throwable cause)
{
    throwable.initCause(cause);
    return throwable;
}
}
Loading