diff --git a/core/src/main/java/org/apache/iceberg/hadoop/HadoopFileIO.java b/core/src/main/java/org/apache/iceberg/hadoop/HadoopFileIO.java
index fbaa75d8cb85..34d66bfae402 100644
--- a/core/src/main/java/org/apache/iceberg/hadoop/HadoopFileIO.java
+++ b/core/src/main/java/org/apache/iceberg/hadoop/HadoopFileIO.java
@@ -20,6 +20,7 @@
 package org.apache.iceberg.hadoop;
 
 import java.io.IOException;
+import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -29,9 +30,17 @@
 import org.apache.iceberg.io.OutputFile;
 import org.apache.iceberg.util.SerializableSupplier;
 
-public class HadoopFileIO implements FileIO {
+public class HadoopFileIO implements FileIO, Configurable {
 
-  private final SerializableSupplier<Configuration> hadoopConf;
+  private SerializableSupplier<Configuration> hadoopConf;
+
+  /**
+   * Constructor used for dynamic FileIO loading.
+   * <p>
+   * {@link Configuration Hadoop configuration} must be set through {@link HadoopFileIO#setConf(Configuration)}
+   */
+  public HadoopFileIO() {
+  }
 
   public HadoopFileIO(Configuration hadoopConf) {
     this(new SerializableConfiguration(hadoopConf)::get);
@@ -65,4 +74,14 @@ public void deleteFile(String path) {
       throw new RuntimeIOException(e, "Failed to delete file: %s", path);
     }
   }
+
+  @Override
+  public void setConf(Configuration conf) {
+    this.hadoopConf = new SerializableConfiguration(conf)::get;
+  }
+
+  @Override
+  public Configuration getConf() {
+    return hadoopConf.get();
+  }
 }
diff --git a/core/src/test/java/org/apache/iceberg/TestCatalogUtil.java b/core/src/test/java/org/apache/iceberg/TestCatalogUtil.java
index 37d66c5db9d4..c2487eb1effd 100644
--- a/core/src/test/java/org/apache/iceberg/TestCatalogUtil.java
+++ b/core/src/test/java/org/apache/iceberg/TestCatalogUtil.java
@@ -27,6 +27,7 @@
 import org.apache.iceberg.catalog.Catalog;
 import org.apache.iceberg.catalog.Namespace;
 import org.apache.iceberg.catalog.TableIdentifier;
+import org.apache.iceberg.hadoop.HadoopFileIO;
 import org.apache.iceberg.io.FileIO;
 import org.apache.iceberg.io.InputFile;
 import org.apache.iceberg.io.OutputFile;
@@ -123,6 +124,15 @@ public void loadCustomFileIO_noArg() {
     Assert.assertEquals(properties, ((TestFileIONoArg) fileIO).map);
   }
 
+  @Test
+  public void loadCustomFileIO_hadoopConfigConstructor() {
+    Configuration configuration = new Configuration();
+    configuration.set("key", "val");
+    FileIO fileIO = CatalogUtil.loadFileIO(HadoopFileIO.class.getName(), Maps.newHashMap(), configuration);
+    Assert.assertTrue(fileIO instanceof HadoopFileIO);
+    Assert.assertEquals("val", ((HadoopFileIO) fileIO).conf().get("key"));
+  }
+
   @Test
   public void loadCustomFileIO_configurable() {
     Configuration configuration = new Configuration();
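
For context, a minimal usage sketch of what this patch enables: CatalogUtil.loadFileIO can instantiate HadoopFileIO reflectively through the new no-arg constructor and then inject the Hadoop configuration via the new setConf(Configuration) hook, mirroring the loadCustomFileIO_hadoopConfigConstructor test above. The class name, filesystem URI, and metadata path below are illustrative assumptions, not part of the patch.

import java.util.HashMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.hadoop.HadoopFileIO;
import org.apache.iceberg.io.FileIO;
import org.apache.iceberg.io.InputFile;

public class HadoopFileIOLoadingSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://namenode:8020"); // illustrative setting

    // Reflective path exercised by the new test: the no-arg constructor is used,
    // then the Configurable hook applies the Hadoop configuration.
    FileIO loaded = CatalogUtil.loadFileIO(
        HadoopFileIO.class.getName(), new HashMap<>(), conf);

    // Equivalent manual wiring with the new constructor and setter.
    HadoopFileIO manual = new HadoopFileIO();
    manual.setConf(conf);

    // Both instances now resolve paths against the injected configuration.
    InputFile metadata =
        manual.newInputFile("hdfs://namenode:8020/warehouse/db/tbl/metadata/v1.metadata.json");
    System.out.println(metadata.location());
    System.out.println(loaded.getClass().getName());
  }
}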