diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 1e364553..0b2305a5 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -19,7 +19,6 @@ on:
 
 env:
   SPARK_LOCAL_IP: 127.0.0.1
-  AWS_REGION: us-east-1
 
 jobs:
   build:
diff --git a/debezium-server-iceberg-sink/pom.xml b/debezium-server-iceberg-sink/pom.xml
index f9079e91..c037b882 100644
--- a/debezium-server-iceberg-sink/pom.xml
+++ b/debezium-server-iceberg-sink/pom.xml
@@ -109,6 +109,18 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-aws</artifactId>
       <version>${version.hadoop}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>com.amazonaws</groupId>
+          <artifactId>aws-java-sdk-bundle</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-bundle</artifactId>
+      <version>1.11.1026</version>
+      <scope>test</scope>
     </dependency>
diff --git a/debezium-server-iceberg-sink/src/test/java/io/debezium/server/iceberg/testresources/BaseSparkTest.java b/debezium-server-iceberg-sink/src/test/java/io/debezium/server/iceberg/testresources/BaseSparkTest.java
index 1608d3db..f53131eb 100644
--- a/debezium-server-iceberg-sink/src/test/java/io/debezium/server/iceberg/testresources/BaseSparkTest.java
+++ b/debezium-server-iceberg-sink/src/test/java/io/debezium/server/iceberg/testresources/BaseSparkTest.java
@@ -9,6 +9,7 @@
 package io.debezium.server.iceberg.testresources;
 
 import io.debezium.server.iceberg.IcebergUtil;
+import io.debezium.server.iceberg.TestConfigSource;
 
 import java.util.HashMap;
 import java.util.Map;
@@ -51,7 +52,15 @@ static void setup() {
         .set("spark.sql.catalog.spark_catalog.type", "hadoop")
         .set("spark.sql.catalog.spark_catalog.warehouse", S3_BUCKET)
         .set("spark.sql.catalog.spark_catalog.default-namespaces", CATALOG_TABLE_NAMESPACE)
+        .set("spark.sql.catalog.spark_catalog.io-impl", "org.apache.iceberg.aws.s3.S3FileIO")
         .set("spark.sql.warehouse.dir", S3_BUCKET)
+        .set("spark.sql.catalog.spark_catalog.s3.endpoint", "http://localhost:" + S3Minio.getMappedPort().toString())
+        .set("spark.sql.catalog.spark_catalog.s3.path-style-access", "true")
+        .set("spark.sql.catalog.spark_catalog.s3.access-key-id", S3Minio.MINIO_ACCESS_KEY)
+        .set("spark.sql.catalog.spark_catalog.s3.secret-access-key", S3Minio.MINIO_SECRET_KEY)
+        .set("spark.sql.catalog.spark_catalog.client.region", TestConfigSource.S3_REGION)
+        .set("spark.sql.catalog.spark_catalog.io-impl", "org.apache.iceberg.aws.s3.S3FileIO")
+        .set("spark.sql.catalog.spark_catalog.warehouse", S3_BUCKET)
 //        // JdbcCatalog catalog, add additional catalog
 //        .set("spark.sql.defaultCatalog", ICEBERG_CATALOG_NAME)
 //        .set("spark.sql.catalog." + ICEBERG_CATALOG_NAME, "org.apache.iceberg.spark.SparkCatalog")
diff --git a/debezium-server-iceberg-sink/src/test/java/io/debezium/server/iceberg/testresources/S3Minio.java b/debezium-server-iceberg-sink/src/test/java/io/debezium/server/iceberg/testresources/S3Minio.java
index af330270..7923ce6c 100644
--- a/debezium-server-iceberg-sink/src/test/java/io/debezium/server/iceberg/testresources/S3Minio.java
+++ b/debezium-server-iceberg-sink/src/test/java/io/debezium/server/iceberg/testresources/S3Minio.java
@@ -135,7 +135,7 @@ public Map<String, String> start() {
     config.put("debezium.sink.iceberg.s3.path-style-access", "true");
     config.put("debezium.sink.iceberg.s3.access-key-id", S3Minio.MINIO_ACCESS_KEY);
     config.put("debezium.sink.iceberg.s3.secret-access-key", S3Minio.MINIO_SECRET_KEY);
-    config.put("debezium.sink.iceberg.s3.region", TestConfigSource.S3_REGION);
+    config.put("debezium.sink.iceberg.client.region", TestConfigSource.S3_REGION);
     config.put("debezium.sink.iceberg.io-impl", "org.apache.iceberg.aws.s3.S3FileIO");
     config.put("debezium.sink.iceberg.warehouse", S3_BUCKET); // FOR HADOOP CATALOG
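The catalog keys added above ("s3.endpoint", "s3.path-style-access", "s3.access-key-id", "s3.secret-access-key", "client.region", "io-impl") are plain Iceberg FileIO/catalog properties, so the same MinIO wiring can be exercised directly against S3FileIO outside of Spark or Debezium. Below is a minimal sketch, not part of the patch: the endpoint, credentials, bucket and class name are placeholders, the property keys mirror the ones used in the diff, and it assumes iceberg-aws plus the AWS SDK v2 on the classpath and an already-created bucket on a local MinIO.

import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.iceberg.aws.s3.S3FileIO;
import org.apache.iceberg.io.OutputFile;
import org.apache.iceberg.io.PositionOutputStream;

public class S3FileIOSmokeCheck {
  public static void main(String[] args) throws Exception {
    S3FileIO io = new S3FileIO();
    // Same property keys the diff sets on the Spark catalog and the Debezium sink;
    // the values here are placeholders for a local MinIO instance.
    io.initialize(Map.of(
        "s3.endpoint", "http://localhost:9000",
        "s3.path-style-access", "true",
        "s3.access-key-id", "minioadmin",
        "s3.secret-access-key", "minioadmin",
        "client.region", "us-east-1"));
    try {
      // Round-trip a small object to confirm the FileIO reaches the endpoint.
      OutputFile out = io.newOutputFile("s3://test-bucket/smoke/hello.txt");
      try (PositionOutputStream stream = out.createOrOverwrite()) {
        stream.write("hello".getBytes(StandardCharsets.UTF_8));
      }
      System.out.println(io.newInputFile("s3://test-bucket/smoke/hello.txt").exists());
    } finally {
      io.close();
    }
  }
}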