diff --git a/integration-tests/build.gradle.kts b/integration-tests/build.gradle.kts
index fca66bc18b..7bd8f84590 100644
--- a/integration-tests/build.gradle.kts
+++ b/integration-tests/build.gradle.kts
@@ -48,10 +48,6 @@ dependencies {
 
   implementation(libs.auth0.jwt)
 
-  implementation(platform(libs.testcontainers.bom))
-  implementation("org.testcontainers:testcontainers")
-  implementation(libs.s3mock.testcontainers)
-
   implementation("org.apache.iceberg:iceberg-spark-3.5_2.12")
   implementation("org.apache.iceberg:iceberg-spark-extensions-3.5_2.12")
   implementation("org.apache.spark:spark-sql_2.12:3.5.6") {
@@ -69,6 +65,8 @@ dependencies {
   implementation(libs.assertj.core)
   implementation(libs.mockito.core)
   implementation(libs.awaitility)
+  implementation(libs.s3mock.testcontainers)
+  implementation(project(":polaris-runtime-test-common"))
 }
 
 copiedCodeChecks {
diff --git a/integration-tests/src/main/java/org/apache/polaris/service/it/ext/PolarisSparkIntegrationTestBase.java b/integration-tests/src/main/java/org/apache/polaris/service/it/ext/PolarisSparkIntegrationTestBase.java
index 922278f3ce..15e10aa658 100644
--- a/integration-tests/src/main/java/org/apache/polaris/service/it/ext/PolarisSparkIntegrationTestBase.java
+++ b/integration-tests/src/main/java/org/apache/polaris/service/it/ext/PolarisSparkIntegrationTestBase.java
@@ -20,12 +20,10 @@
 
 import static org.apache.polaris.service.it.env.PolarisClient.polarisClient;
 
-import com.adobe.testing.s3mock.testcontainers.S3MockContainer;
 import java.io.IOException;
 import java.net.URI;
 import java.nio.file.Path;
 import java.util.List;
-import java.util.Map;
 import org.apache.polaris.core.admin.model.AwsStorageConfigInfo;
 import org.apache.polaris.core.admin.model.Catalog;
 import org.apache.polaris.core.admin.model.CatalogProperties;
@@ -38,6 +36,7 @@
 import org.apache.polaris.service.it.env.ManagementApi;
 import org.apache.polaris.service.it.env.PolarisApiEndpoints;
 import org.apache.polaris.service.it.env.PolarisClient;
+import org.apache.polaris.test.commons.s3mock.S3Mock;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SparkSession;
@@ -52,8 +51,7 @@
 @ExtendWith(PolarisIntegrationTestExtension.class)
 public abstract class PolarisSparkIntegrationTestBase {
-  protected static final S3MockContainer s3Container =
-      new S3MockContainer("3.11.0").withInitialBuckets("my-bucket,my-old-bucket");
+  protected static final S3Mock s3Container = new S3Mock();
   protected static SparkSession spark;
   protected PolarisApiEndpoints endpoints;
   protected PolarisClient client;
@@ -98,26 +96,8 @@ public void before(
             .setAllowedLocations(List.of("s3://my-old-bucket/path/to/data"))
             .build();
     CatalogProperties props = new CatalogProperties("s3://my-bucket/path/to/data");
-    props.putAll(
-        Map.of(
-            "table-default.s3.endpoint",
-            s3Container.getHttpEndpoint(),
-            "table-default.s3.path-style-access",
-            "true",
-            "table-default.s3.access-key-id",
-            "foo",
-            "table-default.s3.secret-access-key",
-            "bar",
-            "s3.endpoint",
-            s3Container.getHttpEndpoint(),
-            "s3.path-style-access",
-            "true",
-            "s3.access-key-id",
-            "foo",
-            "s3.secret-access-key",
-            "bar",
-            "polaris.config.drop-with-purge.enabled",
-            "true"));
+    props.putAll(s3Container.getS3ConfigProperties());
+    props.put("polaris.config.drop-with-purge.enabled", "true");
     Catalog catalog =
         PolarisCatalog.builder()
             .setType(Catalog.TypeEnum.INTERNAL)
@@ -129,26 +109,8 @@ public void before(
     managementApi.createCatalog(catalog);
 
     CatalogProperties externalProps = new CatalogProperties("s3://my-bucket/path/to/data");
-    externalProps.putAll(
-        Map.of(
-            "table-default.s3.endpoint",
-            s3Container.getHttpEndpoint(),
-            "table-default.s3.path-style-access",
-            "true",
-            "table-default.s3.access-key-id",
-            "foo",
-            "table-default.s3.secret-access-key",
-            "bar",
-            "s3.endpoint",
-            s3Container.getHttpEndpoint(),
-            "s3.path-style-access",
-            "true",
-            "s3.access-key-id",
-            "foo",
-            "s3.secret-access-key",
-            "bar",
-            "polaris.config.drop-with-purge.enabled",
-            "true"));
+    externalProps.putAll(s3Container.getS3ConfigProperties());
+    externalProps.put("polaris.config.drop-with-purge.enabled", "true");
     Catalog externalCatalog =
         ExternalCatalog.builder()
             .setType(Catalog.TypeEnum.EXTERNAL)
diff --git a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java
index bd30997d94..d2fa1b4a69 100644
--- a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java
+++ b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java
@@ -18,7 +18,6 @@
  */
 package org.apache.polaris.spark.quarkus.it;
 
-import com.adobe.testing.s3mock.testcontainers.S3MockContainer;
 import com.google.common.collect.ImmutableList;
 import com.google.errorprone.annotations.FormatMethod;
 import java.io.File;
@@ -26,7 +25,6 @@
 import java.net.URI;
 import java.nio.file.Path;
 import java.util.List;
-import java.util.Map;
 import java.util.UUID;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
@@ -44,6 +42,7 @@
 import org.apache.polaris.service.it.env.PolarisApiEndpoints;
 import org.apache.polaris.service.it.ext.PolarisIntegrationTestExtension;
 import org.apache.polaris.service.it.ext.SparkSessionBuilder;
+import org.apache.polaris.test.commons.s3mock.S3Mock;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SparkSession;
@@ -58,8 +57,7 @@
 @ExtendWith(PolarisIntegrationTestExtension.class)
 public abstract class SparkIntegrationBase {
-  protected static final S3MockContainer s3Container =
-      new S3MockContainer("3.11.0").withInitialBuckets("my-bucket,my-old-bucket");
+  protected static final S3Mock s3Container = new S3Mock();
   protected static SparkSession spark;
   protected PolarisApiEndpoints endpoints;
   protected PolarisManagementClient client;
@@ -100,26 +98,8 @@ public void before(
             .setAllowedLocations(List.of("s3://my-old-bucket/path/to/data"))
             .build();
     CatalogProperties props = new CatalogProperties("s3://my-bucket/path/to/data");
-    props.putAll(
-        Map.of(
-            "table-default.s3.endpoint",
-            s3Container.getHttpEndpoint(),
-            "table-default.s3.path-style-access",
-            "true",
-            "table-default.s3.access-key-id",
-            "foo",
-            "table-default.s3.secret-access-key",
-            "bar",
-            "s3.endpoint",
-            s3Container.getHttpEndpoint(),
-            "s3.path-style-access",
-            "true",
-            "s3.access-key-id",
-            "foo",
-            "s3.secret-access-key",
-            "bar",
-            "polaris.config.drop-with-purge.enabled",
-            "true"));
+    props.putAll(s3Container.getS3ConfigProperties());
+    props.put("polaris.config.drop-with-purge.enabled", "true");
     Catalog catalog =
         PolarisCatalog.builder()
             .setType(Catalog.TypeEnum.INTERNAL)
diff --git a/runtime/test-common/build.gradle.kts b/runtime/test-common/build.gradle.kts
index 51433665aa..564ad5aaf0 100644
--- a/runtime/test-common/build.gradle.kts
+++ b/runtime/test-common/build.gradle.kts
@@ -32,6 +32,7 @@ configurations.all {
 }
 
 dependencies {
+  implementation(libs.s3mock.testcontainers)
   implementation(project(":polaris-core"))
   implementation(libs.jakarta.ws.rs.api)
   implementation(enforcedPlatform(libs.quarkus.bom))
diff --git a/runtime/test-common/src/main/java/org/apache/polaris/test/commons/s3mock/S3Mock.java b/runtime/test-common/src/main/java/org/apache/polaris/test/commons/s3mock/S3Mock.java
new file mode 100644
index 0000000000..c0a10ab2e2
--- /dev/null
+++ b/runtime/test-common/src/main/java/org/apache/polaris/test/commons/s3mock/S3Mock.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.polaris.test.commons.s3mock;
+
+import com.adobe.testing.s3mock.testcontainers.S3MockContainer;
+import java.util.Map;
+import org.apache.polaris.containerspec.ContainerSpecHelper;
+
+public class S3Mock extends S3MockContainer {
+
+  private static final String DEFAULT_BUCKETS = "my-bucket,my-old-bucket";
+  private static final String DEFAULT_ACCESS_KEY = "ap1";
+  private static final String DEFAULT_SECRET_KEY = "s3cr3t";
+
+  public S3Mock() {
+    this(DEFAULT_BUCKETS);
+  }
+
+  public S3Mock(String initialBuckets) {
+    super(
+        ContainerSpecHelper.containerSpecHelper("s3mock", S3Mock.class)
+            .dockerImageName(null)
+            .asCompatibleSubstituteFor("adobe/s3mock"));
+    this.withInitialBuckets(initialBuckets);
+  }
+
+  public Map<String, String> getS3ConfigProperties() {
+    String endpoint = this.getHttpEndpoint();
+    return Map.of(
+        "table-default.s3.endpoint", endpoint,
+        "table-default.s3.path-style-access", "true",
+        "table-default.s3.access-key-id", DEFAULT_ACCESS_KEY,
+        "table-default.s3.secret-access-key", DEFAULT_SECRET_KEY,
+        "s3.endpoint", endpoint,
+        "s3.path-style-access", "true",
+        "s3.access-key-id", DEFAULT_ACCESS_KEY,
+        "s3.secret-access-key", DEFAULT_SECRET_KEY);
+  }
+}
diff --git a/runtime/test-common/src/main/resources/org/apache/polaris/test/commons/s3mock/Dockerfile-s3mock-version b/runtime/test-common/src/main/resources/org/apache/polaris/test/commons/s3mock/Dockerfile-s3mock-version
new file mode 100644
index 0000000000..e6af6c12e8
--- /dev/null
+++ b/runtime/test-common/src/main/resources/org/apache/polaris/test/commons/s3mock/Dockerfile-s3mock-version
@@ -0,0 +1,22 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Dockerfile to provide the image name and tag to a test.
+# Version is managed by Renovate - do not edit.
+FROM docker.io/adobe/s3mock:4.7.0
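Usage note (illustrative sketch, not part of the patch): the shared S3Mock container is meant to be consumed roughly the way the updated test bases above consume it — construct it once per test class, start it, and merge getS3ConfigProperties() into the catalog properties. The sketch below assumes a hypothetical test class name and the JUnit 5 / Testcontainers setup these modules already use; only S3Mock, its no-arg constructor, getS3ConfigProperties(), and the start()/stop() lifecycle inherited from S3MockContainer come from this patch.

// Hypothetical example class; the package, class, and method names are illustrative only.
package org.apache.polaris.test.example;

import java.util.HashMap;
import java.util.Map;
import org.apache.polaris.test.commons.s3mock.S3Mock;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

class S3MockUsageExample {

  // The no-arg constructor pre-creates the "my-bucket" and "my-old-bucket" buckets.
  private static final S3Mock s3 = new S3Mock();

  @BeforeAll
  static void startContainer() {
    // start()/stop() are the Testcontainers lifecycle methods inherited via S3MockContainer.
    s3.start();
  }

  @AfterAll
  static void stopContainer() {
    s3.stop();
  }

  @Test
  void buildCatalogProperties() {
    // getS3ConfigProperties() supplies the container endpoint, the path-style-access flag,
    // and the default test credentials under both the "s3." and "table-default.s3." prefixes.
    Map<String, String> props = new HashMap<>(s3.getS3ConfigProperties());
    props.put("polaris.config.drop-with-purge.enabled", "true");
    // props would then be passed to CatalogProperties, as the updated test bases above do.
  }
}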