From 99c5951a9764c74f7e6d1399cbac0841691ff509 Mon Sep 17 00:00:00 2001 From: Piotr Findeisen Date: Tue, 20 Jun 2023 11:33:24 +0200 Subject: [PATCH] Fix env not to use secret unnecessarily `EnvSinglenodeDeltaLakeOss` was not working when a PR's CI build is triggered from a fork. The secrets and variables are not present, and this resulted in the `S3_BUCKET` env variable being defined but empty. However, the environment doesn't need to be parameterized with the bucket name at all. --- .../environment/EnvSinglenodeDeltaLakeOss.java | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvSinglenodeDeltaLakeOss.java b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvSinglenodeDeltaLakeOss.java index 899dbcc8629c..607b4db670ef 100644 --- a/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvSinglenodeDeltaLakeOss.java +++ b/testing/trino-product-tests-launcher/src/main/java/io/trino/tests/product/launcher/env/environment/EnvSinglenodeDeltaLakeOss.java @@ -60,7 +60,7 @@ public class EnvSinglenodeDeltaLakeOss private static final String SPARK_CONTAINER_NAME = "spark"; - private static final String DEFAULT_S3_BUCKET_NAME = "trino-ci-test"; + private static final String S3_BUCKET_NAME = "test-bucket"; private final DockerFiles dockerFiles; private final PortBinder portBinder; @@ -86,8 +86,6 @@ public EnvSinglenodeDeltaLakeOss( @Override public void extendEnvironment(Environment.Builder builder) { - String s3Bucket = getS3Bucket(); - // Using hdp3.1 so we are using Hive metastore with version close to versions of hive-*.jars Spark uses builder.configureContainer(HADOOP, container -> { container.setDockerImageName("ghcr.io/trinodb/testing/hdp3.1-hive:" + hadoopImagesVersion); @@ -100,7 +98,7 @@ public void extendEnvironment(Environment.Builder
builder) CONTAINER_TRINO_ETC + "/catalog/delta.properties"); builder.configureContainer(TESTS, dockerContainer -> { - dockerContainer.withEnv("S3_BUCKET", s3Bucket) + dockerContainer.withEnv("S3_BUCKET", S3_BUCKET_NAME) .withCopyFileToContainer( forHostPath(dockerFiles.getDockerFilesHostPath("conf/tempto/tempto-configuration-for-hive3.yaml")), CONTAINER_TEMPTO_PROFILE_CONFIG); @@ -121,7 +119,7 @@ public void extendEnvironment(Environment.Builder builder) throw new UncheckedIOException(e); } builder.configureContainer(MINIO_CONTAINER_NAME, container -> - container.withCopyFileToContainer(forHostPath(minioBucketDirectory), "/data/" + s3Bucket)); + container.withCopyFileToContainer(forHostPath(minioBucketDirectory), "/data/" + S3_BUCKET_NAME)); configureTempto(builder, configDir); } @@ -139,13 +137,4 @@ private DockerContainer createSparkContainer() return container; } - - private String getS3Bucket() - { - String s3Bucket = System.getenv("S3_BUCKET"); - if (s3Bucket == null) { - s3Bucket = DEFAULT_S3_BUCKET_NAME; - } - return s3Bucket; - } }