diff --git a/presto-iceberg/src/test/java/com/facebook/presto/iceberg/IcebergQueryRunner.java b/presto-iceberg/src/test/java/com/facebook/presto/iceberg/IcebergQueryRunner.java
index 217522ea721a6..c27ed00acbc81 100644
--- a/presto-iceberg/src/test/java/com/facebook/presto/iceberg/IcebergQueryRunner.java
+++ b/presto-iceberg/src/test/java/com/facebook/presto/iceberg/IcebergQueryRunner.java
@@ -255,8 +255,8 @@ public IcebergQueryRunner build()
             }
         }
         else {
-            queryRunner.execute("CREATE SCHEMA tpch");
-            queryRunner.execute("CREATE SCHEMA tpcds");
+            queryRunner.execute("CREATE SCHEMA IF NOT EXISTS tpch");
+            queryRunner.execute("CREATE SCHEMA IF NOT EXISTS tpcds");
         }
 
         if (createTpchTables) {
diff --git a/presto-native-execution/README.md b/presto-native-execution/README.md
index 5617ab2510676..a27c657a2c11e 100644
--- a/presto-native-execution/README.md
+++ b/presto-native-execution/README.md
@@ -205,15 +205,14 @@ Run IcebergExternalWorkerQueryRunner,
 * Main class: `com.facebook.presto.nativeworker.IcebergExternalWorkerQueryRunner`.
 * VM options: `-ea -Xmx5G -XX:+ExitOnOutOfMemoryError -Duser.timezone=America/Bahia_Banderas -Dhive.security=legacy`.
 * Working directory: `$MODULE_DIR$`
-* Environment variables: `PRESTO_SERVER=/Users/<user>/git/presto/presto-native-execution/cmake-build-debug/presto_cpp/main/presto_server;DATA_DIR=/Users/<user>/Desktop/data;WORKER_COUNT=0`
-  * When `addStorageFormatToPath = false` **(Default)**,
-    - `$DATA_DIR/iceberg_data/<catalog_type>`. Here `catalog_type` could be `HIVE | HADOOP | NESSIE | REST`.
-    - `addStorageFormatToPath` is `false` by default because Java `HiveQueryRunner` and `IcebergQueryRunner` do not add the file format to the path.
-  * When `addStorageFormatToPath = true`,
-    - `$DATA_DIR/iceberg_data/<catalog_type>/<file_format>`. Here `file_format` could be `PARQUET | ORC | AVRO` and `catalog_type` could be `HIVE | HADOOP | NESSIE | REST`.
+* Environment variables:
+  - PRESTO_SERVER: Absolute path to the native worker binary. For example: `/Users/<user>/git/presto/presto-native-execution/cmake-build-debug/presto_cpp/main/presto_server`
+  - DATA_DIR: Base data directory for test data and catalog warehouses. For example: `/Users/<user>/Desktop/data`
+  - WORKER_COUNT: Number of native workers to launch (default: 4)
+  - CATALOG_TYPE: Iceberg catalog type to use. One of `HADOOP | HIVE` (default: `HIVE`)
+
+  Example:
+  `PRESTO_SERVER=/Users/<user>/git/presto/presto-native-execution/cmake-build-debug/presto_cpp/main/presto_server;DATA_DIR=/Users/<user>/Desktop/data;WORKER_COUNT=1;CATALOG_TYPE=HIVE`
 * Use classpath of module: choose `presto-native-execution` module.
 
 Run NativeSidecarPluginQueryRunner:
diff --git a/presto-native-execution/src/test/java/com/facebook/presto/nativeworker/PrestoNativeQueryRunnerUtils.java b/presto-native-execution/src/test/java/com/facebook/presto/nativeworker/PrestoNativeQueryRunnerUtils.java
index e287ef7e0cb20..6ba589ad85c62 100644
--- a/presto-native-execution/src/test/java/com/facebook/presto/nativeworker/PrestoNativeQueryRunnerUtils.java
+++ b/presto-native-execution/src/test/java/com/facebook/presto/nativeworker/PrestoNativeQueryRunnerUtils.java
@@ -24,6 +24,7 @@
 import com.facebook.presto.hive.metastore.Storage;
 import com.facebook.presto.hive.metastore.StorageFormat;
 import com.facebook.presto.hive.metastore.Table;
+import com.facebook.presto.iceberg.CatalogType;
 import com.facebook.presto.iceberg.FileFormat;
 import com.facebook.presto.iceberg.IcebergQueryRunner;
 import com.facebook.presto.spi.PrestoException;
@@ -44,6 +45,7 @@
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -309,6 +311,10 @@ public static class IcebergQueryRunnerBuilder
         private Path dataDirectory = nativeQueryRunnerParameters.dataDirectory;
         private String serverBinary = nativeQueryRunnerParameters.serverBinary.toString();
         private Integer workerCount = nativeQueryRunnerParameters.workerCount.orElse(4);
+        private CatalogType catalogType = Optional
+                .ofNullable(nativeQueryRunnerParameters.runnerParameters.get("iceberg.catalog.type"))
+                .map(v -> CatalogType.valueOf(v.toUpperCase()))
+                .orElse(CatalogType.HIVE);
         private Integer cacheMaxSize = 0;
         private String storageFormat = ICEBERG_DEFAULT_STORAGE_FORMAT;
         private Map<String, String> extraProperties = new HashMap<>();
@@ -379,6 +385,7 @@ public QueryRunner build()
                     .setAddStorageFormatToPath(addStorageFormatToPath)
                     .setDataDirectory(Optional.of(dataDirectory))
                     .setTpcdsProperties(getNativeWorkerTpcdsProperties())
+                    .setCatalogType(catalogType)
                     .build().getQueryRunner();
         }
     }
@@ -452,7 +459,13 @@ public static NativeQueryRunnerParameters getNativeQueryRunnerParameters()
         assertTrue(Files.exists(dataDirectory), format("Data directory at %s is missing. Add -DDATA_DIR=<path> to your JVM arguments to specify the path", dataDirectory));
         log.info("using DATA_DIR at %s", dataDirectory);
 
-        return new NativeQueryRunnerParameters(prestoServerPath, dataDirectory, workerCount);
+        Map<String, String> runnerParams = new HashMap<>();
+        getProperty("CATALOG_TYPE").ifPresent(v -> {
+            runnerParams.put("iceberg.catalog.type", v.toUpperCase());
+            log.info("using CATALOG_TYPE %s", v.toUpperCase());
+        });
+
+        return new NativeQueryRunnerParameters(prestoServerPath, dataDirectory, workerCount, runnerParams);
     }
 
     public static Optional<BiFunction<Integer, URI, Process>> getExternalWorkerLauncher(
@@ -569,12 +582,14 @@ public static class NativeQueryRunnerParameters
         public final Path serverBinary;
         public final Path dataDirectory;
         public final Optional<Integer> workerCount;
+        public final Map<String, String> runnerParameters;
 
-        public NativeQueryRunnerParameters(Path serverBinary, Path dataDirectory, Optional<Integer> workerCount)
+        public NativeQueryRunnerParameters(Path serverBinary, Path dataDirectory, Optional<Integer> workerCount, Map<String, String> runnerParameters)
         {
             this.serverBinary = requireNonNull(serverBinary, "serverBinary is null");
             this.dataDirectory = requireNonNull(dataDirectory, "dataDirectory is null");
             this.workerCount = requireNonNull(workerCount, "workerCount is null");
+            this.runnerParameters = Collections.unmodifiableMap(new HashMap<>(requireNonNull(runnerParameters, "runnerParameters is null")));
         }
     }
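
Note on the new `CATALOG_TYPE` plumbing: `getNativeQueryRunnerParameters()` reads the `CATALOG_TYPE` environment variable, upper-cases it, and stores it under the `iceberg.catalog.type` key of `runnerParameters`; `IcebergQueryRunnerBuilder` then maps that value onto `CatalogType` and falls back to `HIVE` when the variable is unset. The sketch below is a minimal, standalone illustration of that resolution order, not code from this patch: the `CatalogType` enum is a local stand-in for `com.facebook.presto.iceberg.CatalogType`, and `resolveCatalogType` is a hypothetical helper.

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

public class CatalogTypeResolutionSketch
{
    // Local stand-in for com.facebook.presto.iceberg.CatalogType.
    enum CatalogType { HIVE, HADOOP, NESSIE, REST }

    // Mirrors how the builder in this patch derives its catalog type:
    // read the "iceberg.catalog.type" runner parameter, upper-case it,
    // map it onto the enum, and default to HIVE when the key is absent.
    static CatalogType resolveCatalogType(Map<String, String> runnerParameters)
    {
        return Optional
                .ofNullable(runnerParameters.get("iceberg.catalog.type"))
                .map(v -> CatalogType.valueOf(v.toUpperCase()))
                .orElse(CatalogType.HIVE);
    }

    public static void main(String[] args)
    {
        // Mirrors getNativeQueryRunnerParameters(): the CATALOG_TYPE environment
        // variable, when set, is stored upper-cased under "iceberg.catalog.type".
        Map<String, String> runnerParameters = new HashMap<>();
        Optional.ofNullable(System.getenv("CATALOG_TYPE"))
                .ifPresent(v -> runnerParameters.put("iceberg.catalog.type", v.toUpperCase()));

        // With CATALOG_TYPE=HADOOP this prints HADOOP; with the variable unset, HIVE.
        System.out.println(resolveCatalogType(runnerParameters));
    }
}
```

Because the lookup ends in `CatalogType.valueOf`, an unrecognized `CATALOG_TYPE` value fails fast with an `IllegalArgumentException` rather than silently defaulting to `HIVE`.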