diff --git a/lib/trino-filesystem/pom.xml b/lib/trino-filesystem/pom.xml
new file mode 100644
index 000000000000..8fdb3ea90aec
--- /dev/null
+++ b/lib/trino-filesystem/pom.xml
@@ -0,0 +1,74 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>io.trino</groupId>
+        <artifactId>trino-root</artifactId>
+        <version>393-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>trino-filesystem</artifactId>
+    <description>trino-filesystem</description>
+
+    <properties>
+        <air.main.basedir>${project.parent.basedir}</air.main.basedir>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-hdfs</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-spi</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino.hadoop</groupId>
+            <artifactId>hadoop-apache</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>slice</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>javax.inject</groupId>
+            <artifactId>javax.inject</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.iceberg</groupId>
+            <artifactId>iceberg-api</artifactId>
+        </dependency>
+
+        <!-- for testing -->
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-testing-services</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.assertj</groupId>
+            <artifactId>assertj-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+</project>
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/FileEntry.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/FileEntry.java
similarity index 96%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/FileEntry.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/FileEntry.java
index 4dba8d5aa783..0b320986274e 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/FileEntry.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/FileEntry.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io;
+package io.trino.filesystem;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/FileIterator.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/FileIterator.java
similarity index 96%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/FileIterator.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/FileIterator.java
index b2525fe6243e..63ed8e319074 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/FileIterator.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/FileIterator.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io;
+package io.trino.filesystem;
import java.io.IOException;
import java.util.NoSuchElementException;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoFileSystem.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoFileSystem.java
similarity index 96%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoFileSystem.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoFileSystem.java
index a1141b068ccb..1cd461ff7161 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoFileSystem.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoFileSystem.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io;
+package io.trino.filesystem;
import org.apache.iceberg.io.FileIO;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoFileSystemFactory.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoFileSystemFactory.java
similarity index 96%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoFileSystemFactory.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoFileSystemFactory.java
index 43b742112da0..7653960cc3ec 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoFileSystemFactory.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoFileSystemFactory.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io;
+package io.trino.filesystem;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.security.ConnectorIdentity;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoInput.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoInput.java
similarity index 97%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoInput.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoInput.java
index c74ea749dcfc..5247e1a5207e 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoInput.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoInput.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io;
+package io.trino.filesystem;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoInputFile.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoInputFile.java
similarity index 96%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoInputFile.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoInputFile.java
index e8af7d0f3853..4cfa88415e2b 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoInputFile.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoInputFile.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io;
+package io.trino.filesystem;
import java.io.IOException;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoOutputFile.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoOutputFile.java
similarity index 95%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoOutputFile.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoOutputFile.java
index b9dc3900c64e..8c5cdaed3aec 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoOutputFile.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/TrinoOutputFile.java
@@ -12,7 +12,7 @@
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io;
+package io.trino.filesystem;
import java.io.IOException;
import java.io.OutputStream;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/fileio/ForwardingFileIo.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/fileio/ForwardingFileIo.java
similarity index 94%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/fileio/ForwardingFileIo.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/fileio/ForwardingFileIo.java
index 320b6875a481..b2bb59ee8e7c 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/fileio/ForwardingFileIo.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/fileio/ForwardingFileIo.java
@@ -11,9 +11,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io.fileio;
+package io.trino.filesystem.fileio;
-import io.trino.plugin.iceberg.io.TrinoFileSystem;
+import io.trino.filesystem.TrinoFileSystem;
import org.apache.iceberg.io.FileIO;
import org.apache.iceberg.io.InputFile;
import org.apache.iceberg.io.OutputFile;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/fileio/ForwardingInputFile.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/fileio/ForwardingInputFile.java
similarity index 95%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/fileio/ForwardingInputFile.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/fileio/ForwardingInputFile.java
index 6ffdb10f3d5f..488e73afbc53 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/fileio/ForwardingInputFile.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/fileio/ForwardingInputFile.java
@@ -11,9 +11,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io.fileio;
+package io.trino.filesystem.fileio;
-import io.trino.plugin.iceberg.io.TrinoInputFile;
+import io.trino.filesystem.TrinoInputFile;
import org.apache.iceberg.exceptions.NotFoundException;
import org.apache.iceberg.io.InputFile;
import org.apache.iceberg.io.SeekableInputStream;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/fileio/ForwardingOutputFile.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/fileio/ForwardingOutputFile.java
similarity index 95%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/fileio/ForwardingOutputFile.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/fileio/ForwardingOutputFile.java
index 47bc824cd251..31c21e93edd4 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/fileio/ForwardingOutputFile.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/fileio/ForwardingOutputFile.java
@@ -11,11 +11,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io.fileio;
+package io.trino.filesystem.fileio;
import com.google.common.io.CountingOutputStream;
-import io.trino.plugin.iceberg.io.TrinoFileSystem;
-import io.trino.plugin.iceberg.io.TrinoOutputFile;
+import io.trino.filesystem.TrinoFileSystem;
+import io.trino.filesystem.TrinoOutputFile;
import org.apache.iceberg.io.InputFile;
import org.apache.iceberg.io.OutputFile;
import org.apache.iceberg.io.PositionOutputStream;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HadoopPaths.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HadoopPaths.java
similarity index 82%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HadoopPaths.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HadoopPaths.java
index 16ded7e9565b..46a5bddb7eb4 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HadoopPaths.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HadoopPaths.java
@@ -11,15 +11,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io.hdfs;
+package io.trino.filesystem.hdfs;
-import io.trino.spi.TrinoException;
import org.apache.hadoop.fs.Path;
import java.net.URI;
import java.net.URLEncoder;
-import static io.trino.plugin.iceberg.IcebergErrorCode.ICEBERG_INVALID_METADATA;
import static java.nio.charset.StandardCharsets.UTF_8;
public final class HadoopPaths
@@ -32,7 +30,7 @@ public static Path hadoopPath(String path)
Path hadoopPath = new Path(path);
if ("s3".equals(hadoopPath.toUri().getScheme()) && !path.equals(hadoopPath.toString())) {
if (hadoopPath.toUri().getFragment() != null) {
- throw new TrinoException(ICEBERG_INVALID_METADATA, "Unexpected URI fragment in path: " + path);
+ throw new IllegalArgumentException("Unexpected URI fragment in path: " + path);
}
URI uri = URI.create(path);
return new Path(uri + "#" + URLEncoder.encode(uri.getPath(), UTF_8));
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsFileIterator.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsFileIterator.java
similarity index 94%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsFileIterator.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsFileIterator.java
index c3d6b5079da2..40bfe733c15a 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsFileIterator.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsFileIterator.java
@@ -11,10 +11,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io.hdfs;
+package io.trino.filesystem.hdfs;
-import io.trino.plugin.iceberg.io.FileEntry;
-import io.trino.plugin.iceberg.io.FileIterator;
+import io.trino.filesystem.FileEntry;
+import io.trino.filesystem.FileIterator;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsFileSystem.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsFileSystem.java
similarity index 87%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsFileSystem.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsFileSystem.java
index f9b38a54a653..bd51f84f7329 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsFileSystem.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsFileSystem.java
@@ -11,15 +11,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io.hdfs;
+package io.trino.filesystem.hdfs;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
-import io.trino.plugin.iceberg.io.FileIterator;
-import io.trino.plugin.iceberg.io.TrinoFileSystem;
-import io.trino.plugin.iceberg.io.TrinoInputFile;
-import io.trino.plugin.iceberg.io.TrinoOutputFile;
-import io.trino.plugin.iceberg.io.fileio.ForwardingFileIo;
+import io.trino.filesystem.FileIterator;
+import io.trino.filesystem.TrinoFileSystem;
+import io.trino.filesystem.TrinoInputFile;
+import io.trino.filesystem.TrinoOutputFile;
+import io.trino.filesystem.fileio.ForwardingFileIo;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.iceberg.io.FileIO;
@@ -27,7 +27,7 @@
import java.io.FileNotFoundException;
import java.io.IOException;
-import static io.trino.plugin.iceberg.io.hdfs.HadoopPaths.hadoopPath;
+import static io.trino.filesystem.hdfs.HadoopPaths.hadoopPath;
import static java.util.Objects.requireNonNull;
class HdfsFileSystem
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsFileSystemFactory.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsFileSystemFactory.java
similarity index 82%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsFileSystemFactory.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsFileSystemFactory.java
index 2f0ed341327f..ab29a43d8618 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsFileSystemFactory.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsFileSystemFactory.java
@@ -11,12 +11,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io.hdfs;
+package io.trino.filesystem.hdfs;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
-import io.trino.plugin.iceberg.io.TrinoFileSystem;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
+import io.trino.filesystem.TrinoFileSystem;
+import io.trino.filesystem.TrinoFileSystemFactory;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.spi.security.ConnectorIdentity;
import javax.inject.Inject;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsInput.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsInput.java
similarity index 94%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsInput.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsInput.java
index 1f1945c95c36..eefbf0dea9f1 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsInput.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsInput.java
@@ -11,12 +11,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io.hdfs;
+package io.trino.filesystem.hdfs;
import io.airlift.slice.Slice;
-import io.trino.plugin.hive.util.FSDataInputStreamTail;
-import io.trino.plugin.iceberg.io.TrinoInput;
-import io.trino.plugin.iceberg.io.TrinoInputFile;
+import io.trino.filesystem.TrinoInput;
+import io.trino.filesystem.TrinoInputFile;
+import io.trino.hdfs.FSDataInputStreamTail;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.iceberg.io.SeekableInputStream;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsInputFile.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsInputFile.java
similarity index 90%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsInputFile.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsInputFile.java
index 1915b208d659..5c412b2fdaf4 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsInputFile.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsInputFile.java
@@ -11,12 +11,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io.hdfs;
+package io.trino.filesystem.hdfs;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
-import io.trino.plugin.iceberg.io.TrinoInput;
-import io.trino.plugin.iceberg.io.TrinoInputFile;
+import io.trino.filesystem.TrinoInput;
+import io.trino.filesystem.TrinoInputFile;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -25,7 +25,7 @@
import java.io.IOException;
import static com.google.common.base.Preconditions.checkArgument;
-import static io.trino.plugin.iceberg.io.hdfs.HadoopPaths.hadoopPath;
+import static io.trino.filesystem.hdfs.HadoopPaths.hadoopPath;
import static java.util.Objects.requireNonNull;
class HdfsInputFile
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsOutputFile.java b/lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsOutputFile.java
similarity index 85%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsOutputFile.java
rename to lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsOutputFile.java
index ded7f564c063..b86ae9c040fc 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/hdfs/HdfsOutputFile.java
+++ b/lib/trino-filesystem/src/main/java/io/trino/filesystem/hdfs/HdfsOutputFile.java
@@ -11,18 +11,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io.hdfs;
+package io.trino.filesystem.hdfs;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
-import io.trino.plugin.iceberg.io.TrinoOutputFile;
+import io.trino.filesystem.TrinoOutputFile;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.IOException;
import java.io.OutputStream;
-import static io.trino.plugin.iceberg.io.hdfs.HadoopPaths.hadoopPath;
+import static io.trino.filesystem.hdfs.HadoopPaths.hadoopPath;
import static java.util.Objects.requireNonNull;
class HdfsOutputFile
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/io/hdfs/TestHdfsFileSystem.java b/lib/trino-filesystem/src/test/java/io/trino/filesystem/hdfs/TestHdfsFileSystem.java
similarity index 80%
rename from plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/io/hdfs/TestHdfsFileSystem.java
rename to lib/trino-filesystem/src/test/java/io/trino/filesystem/hdfs/TestHdfsFileSystem.java
index 043b26f9ecb1..d3491cec7398 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/io/hdfs/TestHdfsFileSystem.java
+++ b/lib/trino-filesystem/src/test/java/io/trino/filesystem/hdfs/TestHdfsFileSystem.java
@@ -11,12 +11,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io.hdfs;
+package io.trino.filesystem.hdfs;
import com.google.common.collect.ImmutableList;
-import io.trino.plugin.iceberg.io.FileIterator;
-import io.trino.plugin.iceberg.io.TrinoFileSystem;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
+import com.google.common.collect.ImmutableSet;
+import io.trino.filesystem.FileIterator;
+import io.trino.filesystem.TrinoFileSystem;
+import io.trino.filesystem.TrinoFileSystemFactory;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.spi.security.ConnectorIdentity;
import org.testng.annotations.Test;
@@ -26,7 +32,6 @@
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
-import static io.trino.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT;
import static java.nio.file.Files.createDirectory;
import static java.nio.file.Files.createFile;
import static java.nio.file.Files.createTempDirectory;
@@ -38,7 +43,11 @@ public class TestHdfsFileSystem
public void testListing()
throws IOException
{
- TrinoFileSystemFactory factory = new HdfsFileSystemFactory(HDFS_ENVIRONMENT);
+ HdfsConfig hdfsConfig = new HdfsConfig();
+ DynamicHdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
+ HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
+
+ TrinoFileSystemFactory factory = new HdfsFileSystemFactory(hdfsEnvironment);
TrinoFileSystem fileSystem = factory.create(ConnectorIdentity.ofUser("test"));
Path tempDir = createTempDirectory("testListing");
diff --git a/lib/trino-hdfs/pom.xml b/lib/trino-hdfs/pom.xml
new file mode 100644
index 000000000000..ce090fe0be09
--- /dev/null
+++ b/lib/trino-hdfs/pom.xml
@@ -0,0 +1,115 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>io.trino</groupId>
+        <artifactId>trino-root</artifactId>
+        <version>393-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>trino-hdfs</artifactId>
+    <description>trino-hdfs</description>
+
+    <properties>
+        <air.main.basedir>${project.parent.basedir}</air.main.basedir>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-hadoop-toolkit</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-plugin-toolkit</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino</groupId>
+            <artifactId>trino-spi</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.trino.hadoop</groupId>
+            <artifactId>hadoop-apache</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>configuration</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>log</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>slice</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>stats</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>units</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.code.findbugs</groupId>
+            <artifactId>jsr305</artifactId>
+            <optional>true</optional>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.inject</groupId>
+            <artifactId>guice</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>javax.inject</groupId>
+            <artifactId>javax.inject</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>javax.validation</groupId>
+            <artifactId>validation-api</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.weakref</groupId>
+            <artifactId>jmxutils</artifactId>
+        </dependency>
+
+        <!-- for testing -->
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>testing</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.assertj</groupId>
+            <artifactId>assertj-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+</project>
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/ConfigurationInitializer.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/ConfigurationInitializer.java
similarity index 95%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/ConfigurationInitializer.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/ConfigurationInitializer.java
index 55d5565ab58b..63476fd435cf 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/ConfigurationInitializer.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/ConfigurationInitializer.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive;
+package io.trino.hdfs;
import org.apache.hadoop.conf.Configuration;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/ConfigurationUtils.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/ConfigurationUtils.java
similarity index 98%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/ConfigurationUtils.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/ConfigurationUtils.java
index 7b62155f2140..6e28ad11053b 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/ConfigurationUtils.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/ConfigurationUtils.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.util;
+package io.trino.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/DynamicConfigurationProvider.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/DynamicConfigurationProvider.java
similarity index 86%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/DynamicConfigurationProvider.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/DynamicConfigurationProvider.java
index a47ac59eff4b..bfe124a521c4 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/DynamicConfigurationProvider.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/DynamicConfigurationProvider.java
@@ -11,14 +11,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive;
+package io.trino.hdfs;
import org.apache.hadoop.conf.Configuration;
import java.net.URI;
-import static io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
-import static io.trino.plugin.hive.fs.TrinoFileSystemCache.CACHE_KEY;
+import static io.trino.hdfs.TrinoFileSystemCache.CACHE_KEY;
public interface DynamicConfigurationProvider
{
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveHdfsConfiguration.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/DynamicHdfsConfiguration.java
similarity index 85%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveHdfsConfiguration.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/DynamicHdfsConfiguration.java
index e64daeb85f0f..b4b31d39cc0c 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveHdfsConfiguration.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/DynamicHdfsConfiguration.java
@@ -11,10 +11,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive;
+package io.trino.hdfs;
import com.google.common.collect.ImmutableSet;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import org.apache.hadoop.conf.Configuration;
import javax.inject.Inject;
@@ -23,11 +22,11 @@
import java.util.Set;
import static io.trino.hadoop.ConfigurationInstantiator.newEmptyConfiguration;
-import static io.trino.plugin.hive.util.ConfigurationUtils.copy;
-import static io.trino.plugin.hive.util.ConfigurationUtils.getInitialConfiguration;
+import static io.trino.hdfs.ConfigurationUtils.copy;
+import static io.trino.hdfs.ConfigurationUtils.getInitialConfiguration;
import static java.util.Objects.requireNonNull;
-public class HiveHdfsConfiguration
+public class DynamicHdfsConfiguration
implements HdfsConfiguration
{
private static final Configuration INITIAL_CONFIGURATION = getInitialConfiguration();
@@ -49,7 +48,7 @@ protected Configuration initialValue()
     private final Set<DynamicConfigurationProvider> dynamicProviders;
 
     @Inject
-    public HiveHdfsConfiguration(HdfsConfigurationInitializer initializer, Set<DynamicConfigurationProvider> dynamicProviders)
+    public DynamicHdfsConfiguration(HdfsConfigurationInitializer initializer, Set<DynamicConfigurationProvider> dynamicProviders)
{
this.initializer = requireNonNull(initializer, "initializer is null");
this.dynamicProviders = ImmutableSet.copyOf(requireNonNull(dynamicProviders, "dynamicProviders is null"));
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/FSDataInputStreamTail.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/FSDataInputStreamTail.java
similarity index 99%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/FSDataInputStreamTail.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/FSDataInputStreamTail.java
index 4d193adbce31..77a346027417 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/FSDataInputStreamTail.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/FSDataInputStreamTail.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.util;
+package io.trino.hdfs;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/fs/FileSystemFinalizerService.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/FileSystemFinalizerService.java
similarity index 99%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/fs/FileSystemFinalizerService.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/FileSystemFinalizerService.java
index 28101e1f4292..0cca43bca722 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/fs/FileSystemFinalizerService.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/FileSystemFinalizerService.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.fs;
+package io.trino.hdfs;
import io.airlift.log.Logger;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HdfsConfig.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsConfig.java
similarity index 99%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/HdfsConfig.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsConfig.java
index 44b5a3501bdb..a7582bb4f104 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HdfsConfig.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsConfig.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive;
+package io.trino.hdfs;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HdfsConfiguration.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsConfiguration.java
similarity index 89%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/HdfsConfiguration.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsConfiguration.java
index d783883dcd63..5774bf05ac13 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HdfsConfiguration.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsConfiguration.java
@@ -11,9 +11,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive;
+package io.trino.hdfs;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import org.apache.hadoop.conf.Configuration;
import java.net.URI;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HdfsConfigurationInitializer.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsConfigurationInitializer.java
similarity index 97%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/HdfsConfigurationInitializer.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsConfigurationInitializer.java
index 9b08408b3e4f..32612aa4fd39 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HdfsConfigurationInitializer.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsConfigurationInitializer.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive;
+package io.trino.hdfs;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
@@ -30,8 +30,8 @@
import java.util.Set;
import static com.google.common.base.Preconditions.checkArgument;
-import static io.trino.plugin.hive.util.ConfigurationUtils.copy;
-import static io.trino.plugin.hive.util.ConfigurationUtils.readConfiguration;
+import static io.trino.hdfs.ConfigurationUtils.copy;
+import static io.trino.hdfs.ConfigurationUtils.readConfiguration;
import static java.lang.Math.toIntExact;
import static java.util.Objects.requireNonNull;
import static org.apache.hadoop.fs.CommonConfigurationKeys.IPC_PING_INTERVAL_KEY;
diff --git a/lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsContext.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsContext.java
new file mode 100644
index 000000000000..74a220a23472
--- /dev/null
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsContext.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.hdfs;
+
+import io.trino.spi.connector.ConnectorSession;
+import io.trino.spi.security.ConnectorIdentity;
+
+import static com.google.common.base.MoreObjects.toStringHelper;
+import static java.util.Objects.requireNonNull;
+
+public class HdfsContext
+{
+ private final ConnectorIdentity identity;
+
+ public HdfsContext(ConnectorIdentity identity)
+ {
+ this.identity = requireNonNull(identity, "identity is null");
+ }
+
+ public HdfsContext(ConnectorSession session)
+ {
+ requireNonNull(session, "session is null");
+ this.identity = requireNonNull(session.getIdentity(), "session.getIdentity() is null");
+ }
+
+ public ConnectorIdentity getIdentity()
+ {
+ return identity;
+ }
+
+ @Override
+ public String toString()
+ {
+ return toStringHelper(this)
+ .omitNullValues()
+ .add("user", identity)
+ .toString();
+ }
+}
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HdfsEnvironment.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsEnvironment.java
similarity index 74%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/HdfsEnvironment.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsEnvironment.java
index 2099a062adfc..f85b83acc47e 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HdfsEnvironment.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsEnvironment.java
@@ -11,13 +11,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive;
+package io.trino.hdfs;
import io.trino.hadoop.HadoopNative;
-import io.trino.plugin.hive.authentication.GenericExceptionAction;
-import io.trino.plugin.hive.authentication.HdfsAuthentication;
-import io.trino.plugin.hive.fs.TrinoFileSystemCache;
-import io.trino.spi.connector.ConnectorSession;
+import io.trino.hdfs.authentication.GenericExceptionAction;
+import io.trino.hdfs.authentication.HdfsAuthentication;
import io.trino.spi.security.ConnectorIdentity;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -30,7 +28,6 @@
import java.io.IOException;
import java.util.Optional;
-import static com.google.common.base.MoreObjects.toStringHelper;
import static java.util.Objects.requireNonNull;
public class HdfsEnvironment
@@ -101,34 +98,4 @@ public void doAs(ConnectorIdentity identity, Runnable action)
{
hdfsAuthentication.doAs(identity, action);
}
-
- public static class HdfsContext
- {
- private final ConnectorIdentity identity;
-
- public HdfsContext(ConnectorIdentity identity)
- {
- this.identity = requireNonNull(identity, "identity is null");
- }
-
- public HdfsContext(ConnectorSession session)
- {
- requireNonNull(session, "session is null");
- this.identity = requireNonNull(session.getIdentity(), "session.getIdentity() is null");
- }
-
- public ConnectorIdentity getIdentity()
- {
- return identity;
- }
-
- @Override
- public String toString()
- {
- return toStringHelper(this)
- .omitNullValues()
- .add("user", identity)
- .toString();
- }
- }
}
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveHdfsModule.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsModule.java
similarity index 77%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveHdfsModule.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsModule.java
index ffaf1d645cac..a8c036295b22 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveHdfsModule.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/HdfsModule.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive;
+package io.trino.hdfs;
import com.google.inject.Binder;
import com.google.inject.Module;
@@ -19,9 +19,8 @@
import static com.google.inject.multibindings.Multibinder.newSetBinder;
import static io.airlift.configuration.ConfigBinder.configBinder;
-import static org.weakref.jmx.guice.ExportBinder.newExporter;
-public class HiveHdfsModule
+public class HdfsModule
implements Module
{
@Override
@@ -29,14 +28,11 @@ public void configure(Binder binder)
{
configBinder(binder).bindConfig(HdfsConfig.class);
- binder.bind(HdfsConfiguration.class).to(HiveHdfsConfiguration.class).in(Scopes.SINGLETON);
+ binder.bind(HdfsConfiguration.class).to(DynamicHdfsConfiguration.class).in(Scopes.SINGLETON);
binder.bind(HdfsEnvironment.class).in(Scopes.SINGLETON);
binder.bind(HdfsConfigurationInitializer.class).in(Scopes.SINGLETON);
newSetBinder(binder, ConfigurationInitializer.class);
newSetBinder(binder, DynamicConfigurationProvider.class);
-
- binder.bind(NamenodeStats.class).in(Scopes.SINGLETON);
- newExporter(binder).export(NamenodeStats.class).withGeneratedName();
}
}
diff --git a/lib/trino-hdfs/src/main/java/io/trino/hdfs/TrinoFileSystemCache.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/TrinoFileSystemCache.java
new file mode 100644
index 000000000000..5fceb8cd5c86
--- /dev/null
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/TrinoFileSystemCache.java
@@ -0,0 +1,432 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.hdfs;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import io.airlift.log.Logger;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.CreateFlag;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileSystemCache;
+import org.apache.hadoop.fs.FilterFileSystem;
+import org.apache.hadoop.fs.LocatedFileStatus;
+import org.apache.hadoop.fs.Options;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.ReflectionUtils;
+
+import javax.annotation.concurrent.GuardedBy;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicLong;
+
+import static com.google.common.base.MoreObjects.toStringHelper;
+import static com.google.common.base.Strings.nullToEmpty;
+import static java.lang.String.format;
+import static java.util.Locale.ENGLISH;
+import static java.util.Objects.requireNonNull;
+import static org.apache.hadoop.fs.FileSystem.getFileSystemClass;
+import static org.apache.hadoop.security.UserGroupInformationShim.getSubject;
+
+public class TrinoFileSystemCache
+ implements FileSystemCache
+{
+ private static final Logger log = Logger.get(TrinoFileSystemCache.class);
+
+ public static final String CACHE_KEY = "fs.cache.credentials";
+
+ public static final TrinoFileSystemCache INSTANCE = new TrinoFileSystemCache();
+
+ private final AtomicLong unique = new AtomicLong();
+
+ private final TrinoFileSystemCacheStats stats;
+
+ @GuardedBy("this")
+ private final Map<FileSystemKey, FileSystemHolder> map = new HashMap<>();
+
+ @VisibleForTesting
+ TrinoFileSystemCache()
+ {
+ this.stats = new TrinoFileSystemCacheStats(() -> {
+ synchronized (this) {
+ return map.size();
+ }
+ });
+ }
+
+ @Override
+ public FileSystem get(URI uri, Configuration conf)
+ throws IOException
+ {
+ stats.newGetCall();
+ return getInternal(uri, conf, 0);
+ }
+
+ @Override
+ public FileSystem getUnique(URI uri, Configuration conf)
+ throws IOException
+ {
+ stats.newGetUniqueCall();
+ return getInternal(uri, conf, unique.incrementAndGet());
+ }
+
+ @VisibleForTesting
+ int getCacheSize()
+ {
+ return map.size();
+ }
+
+ private synchronized FileSystem getInternal(URI uri, Configuration conf, long unique)
+ throws IOException
+ {
+ UserGroupInformation userGroupInformation = UserGroupInformation.getCurrentUser();
+ FileSystemKey key = createFileSystemKey(uri, userGroupInformation, unique);
+ Set<?> privateCredentials = getPrivateCredentials(userGroupInformation);
+
+ FileSystemHolder fileSystemHolder = map.get(key);
+ if (fileSystemHolder == null) {
+ int maxSize = conf.getInt("fs.cache.max-size", 1000);
+ if (map.size() >= maxSize) {
+ stats.newGetCallFailed();
+ throw new IOException(format("FileSystem max cache size has been reached: %s", maxSize));
+ }
+ try {
+ FileSystem fileSystem = createFileSystem(uri, conf);
+ fileSystemHolder = new FileSystemHolder(fileSystem, privateCredentials);
+ map.put(key, fileSystemHolder);
+ }
+ catch (IOException e) {
+ stats.newGetCallFailed();
+ throw e;
+ }
+ }
+
+ // Update file system instance when credentials change.
+ // - Private credentials are only set when using Kerberos authentication.
+ // When the user is the same, but the private credentials are different,
+ // that means that Kerberos ticket has expired and re-login happened.
+ // To prevent cache leak in such situation, the privateCredentials are not
+ // a part of the FileSystemKey, but part of the FileSystemHolder. When a
+ // Kerberos re-login occurs, re-create the file system and cache it using
+ // the same key.
+ // - Extra credentials are used to authenticate with certain file systems.
+ if ((isHdfs(uri) && !fileSystemHolder.getPrivateCredentials().equals(privateCredentials)) ||
+ extraCredentialsChanged(fileSystemHolder.getFileSystem(), conf)) {
+ map.remove(key);
+ try {
+ FileSystem fileSystem = createFileSystem(uri, conf);
+ fileSystemHolder = new FileSystemHolder(fileSystem, privateCredentials);
+ map.put(key, fileSystemHolder);
+ }
+ catch (IOException e) {
+ stats.newGetCallFailed();
+ throw e;
+ }
+ }
+
+ return fileSystemHolder.getFileSystem();
+ }
+
+ private static FileSystem createFileSystem(URI uri, Configuration conf)
+ throws IOException
+ {
+ Class<? extends FileSystem> clazz = getFileSystemClass(uri.getScheme(), conf);
+ if (clazz == null) {
+ throw new IOException("No FileSystem for scheme: " + uri.getScheme());
+ }
+ FileSystem original = (FileSystem) ReflectionUtils.newInstance(clazz, conf);
+ original.initialize(uri, conf);
+ FilterFileSystem wrapper = new FileSystemWrapper(original);
+ FileSystemFinalizerService.getInstance().addFinalizer(wrapper, () -> {
+ try {
+ original.close();
+ }
+ catch (IOException e) {
+ log.error(e, "Error occurred when finalizing file system");
+ }
+ });
+ return wrapper;
+ }
+
+ @Override
+ public synchronized void remove(FileSystem fileSystem)
+ {
+ stats.newRemoveCall();
+ map.values().removeIf(holder -> holder.getFileSystem().equals(fileSystem));
+ }
+
+ @Override
+ public synchronized void closeAll()
+ throws IOException
+ {
+ for (FileSystemHolder fileSystemHolder : ImmutableList.copyOf(map.values())) {
+ fileSystemHolder.getFileSystem().close();
+ }
+ map.clear();
+ }
+
+ private static FileSystemKey createFileSystemKey(URI uri, UserGroupInformation userGroupInformation, long unique)
+ {
+ String scheme = nullToEmpty(uri.getScheme()).toLowerCase(ENGLISH);
+ String authority = nullToEmpty(uri.getAuthority()).toLowerCase(ENGLISH);
+ String realUser;
+ String proxyUser;
+ AuthenticationMethod authenticationMethod = userGroupInformation.getAuthenticationMethod();
+ switch (authenticationMethod) {
+ case SIMPLE:
+ case KERBEROS:
+ realUser = userGroupInformation.getUserName();
+ proxyUser = null;
+ break;
+ case PROXY:
+ realUser = userGroupInformation.getRealUser().getUserName();
+ proxyUser = userGroupInformation.getUserName();
+ break;
+ default:
+ throw new IllegalArgumentException("Unsupported authentication method: " + authenticationMethod);
+ }
+ return new FileSystemKey(scheme, authority, unique, realUser, proxyUser);
+ }
+
+ private static Set<?> getPrivateCredentials(UserGroupInformation userGroupInformation)
+ {
+ AuthenticationMethod authenticationMethod = userGroupInformation.getAuthenticationMethod();
+ switch (authenticationMethod) {
+ case SIMPLE:
+ return ImmutableSet.of();
+ case KERBEROS:
+ return ImmutableSet.copyOf(getSubject(userGroupInformation).getPrivateCredentials());
+ case PROXY:
+ return getPrivateCredentials(userGroupInformation.getRealUser());
+ default:
+ throw new IllegalArgumentException("Unsupported authentication method: " + authenticationMethod);
+ }
+ }
+
+ private static boolean isHdfs(URI uri)
+ {
+ String scheme = uri.getScheme();
+ return "hdfs".equals(scheme) || "viewfs".equals(scheme);
+ }
+
+ private static boolean extraCredentialsChanged(FileSystem fileSystem, Configuration configuration)
+ {
+ return !configuration.get(CACHE_KEY, "").equals(
+ fileSystem.getConf().get(CACHE_KEY, ""));
+ }
+
+ private static class FileSystemKey
+ {
+ private final String scheme;
+ private final String authority;
+ private final long unique;
+ private final String realUser;
+ private final String proxyUser;
+
+ public FileSystemKey(String scheme, String authority, long unique, String realUser, String proxyUser)
+ {
+ this.scheme = requireNonNull(scheme, "scheme is null");
+ this.authority = requireNonNull(authority, "authority is null");
+ this.unique = unique;
+ this.realUser = requireNonNull(realUser, "realUser");
+ this.proxyUser = proxyUser;
+ }
+
+ @Override
+ public boolean equals(Object o)
+ {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ FileSystemKey that = (FileSystemKey) o;
+ return Objects.equals(scheme, that.scheme) &&
+ Objects.equals(authority, that.authority) &&
+ Objects.equals(unique, that.unique) &&
+ Objects.equals(realUser, that.realUser) &&
+ Objects.equals(proxyUser, that.proxyUser);
+ }
+
+ @Override
+ public int hashCode()
+ {
+ return Objects.hash(scheme, authority, unique, realUser, proxyUser);
+ }
+
+ @Override
+ public String toString()
+ {
+ return toStringHelper(this)
+ .add("scheme", scheme)
+ .add("authority", authority)
+ .add("unique", unique)
+ .add("realUser", realUser)
+ .add("proxyUser", proxyUser)
+ .toString();
+ }
+ }
+
+ private static class FileSystemHolder
+ {
+ private final FileSystem fileSystem;
+ private final Set<?> privateCredentials;
+
+ public FileSystemHolder(FileSystem fileSystem, Set<?> privateCredentials)
+ {
+ this.fileSystem = requireNonNull(fileSystem, "fileSystem is null");
+ this.privateCredentials = ImmutableSet.copyOf(requireNonNull(privateCredentials, "privateCredentials is null"));
+ }
+
+ public FileSystem getFileSystem()
+ {
+ return fileSystem;
+ }
+
+ public Set<?> getPrivateCredentials()
+ {
+ return privateCredentials;
+ }
+
+ @Override
+ public String toString()
+ {
+ return toStringHelper(this)
+ .add("fileSystem", fileSystem)
+ .add("privateCredentials", privateCredentials)
+ .toString();
+ }
+ }
+
+ private static class FileSystemWrapper
+ extends FilterFileSystem
+ {
+ public FileSystemWrapper(FileSystem fs)
+ {
+ super(fs);
+ }
+
+ @Override
+ public FSDataInputStream open(Path f, int bufferSize)
+ throws IOException
+ {
+ return new InputStreamWrapper(getRawFileSystem().open(f, bufferSize), this);
+ }
+
+ @Override
+ public FSDataOutputStream append(Path f, int bufferSize, Progressable progress)
+ throws IOException
+ {
+ return new OutputStreamWrapper(getRawFileSystem().append(f, bufferSize, progress), this);
+ }
+
+ @Override
+ public FSDataOutputStream create(Path f, FsPermission permission, boolean overwrite, int bufferSize, short replication, long blockSize, Progressable progress)
+ throws IOException
+ {
+ return new OutputStreamWrapper(getRawFileSystem().create(f, permission, overwrite, bufferSize, replication, blockSize, progress), this);
+ }
+
+ @Override
+ public FSDataOutputStream create(Path f, FsPermission permission, EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize, Progressable progress, Options.ChecksumOpt checksumOpt)
+ throws IOException
+ {
+ return new OutputStreamWrapper(getRawFileSystem().create(f, permission, flags, bufferSize, replication, blockSize, progress, checksumOpt), this);
+ }
+
+ @Override
+ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize, Progressable progress)
+ throws IOException
+ {
+ return new OutputStreamWrapper(getRawFileSystem().createNonRecursive(f, permission, flags, bufferSize, replication, blockSize, progress), this);
+ }
+
+ // missing in FilterFileSystem (HADOOP-16399)
+ @Override
+ public BlockLocation[] getFileBlockLocations(Path p, long start, long len)
+ throws IOException
+ {
+ return fs.getFileBlockLocations(p, start, len);
+ }
+
+ // missing in FilterFileSystem
+ @Override
+ public RemoteIterator<LocatedFileStatus> listFiles(Path path, boolean recursive)
+ throws IOException
+ {
+ return fs.listFiles(path, recursive);
+ }
+ }
+
+ private static class OutputStreamWrapper
+ extends FSDataOutputStream
+ {
+ @SuppressWarnings({"FieldCanBeLocal", "unused"})
+ private final FileSystem fileSystem;
+
+ public OutputStreamWrapper(FSDataOutputStream delegate, FileSystem fileSystem)
+ {
+ super(delegate, null, delegate.getPos());
+ this.fileSystem = fileSystem;
+ }
+
+ @Override
+ public OutputStream getWrappedStream()
+ {
+ return ((FSDataOutputStream) super.getWrappedStream()).getWrappedStream();
+ }
+ }
+
+ private static class InputStreamWrapper
+ extends FSDataInputStream
+ {
+ @SuppressWarnings({"FieldCanBeLocal", "unused"})
+ private final FileSystem fileSystem;
+
+ public InputStreamWrapper(FSDataInputStream inputStream, FileSystem fileSystem)
+ {
+ super(inputStream);
+ this.fileSystem = fileSystem;
+ }
+
+ @Override
+ public InputStream getWrappedStream()
+ {
+ return ((FSDataInputStream) super.getWrappedStream()).getWrappedStream();
+ }
+ }
+
+ public TrinoFileSystemCacheStats getFileSystemCacheStats()
+ {
+ return stats;
+ }
+}
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/fs/TrinoFileSystemCacheStats.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/TrinoFileSystemCacheStats.java
similarity index 98%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/fs/TrinoFileSystemCacheStats.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/TrinoFileSystemCacheStats.java
index 88db2588f092..d4c276fa2ab4 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/fs/TrinoFileSystemCacheStats.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/TrinoFileSystemCacheStats.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.fs;
+package io.trino.hdfs;
import io.airlift.stats.CounterStat;
import org.weakref.jmx.Managed;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/AuthenticationModules.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/AuthenticationModules.java
similarity index 94%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/AuthenticationModules.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/AuthenticationModules.java
index ef7e352d29e2..6a2f14314f90 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/AuthenticationModules.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/AuthenticationModules.java
@@ -11,23 +11,22 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import com.google.inject.Binder;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.Provides;
import com.google.inject.Singleton;
+import io.trino.hdfs.HdfsConfigurationInitializer;
import io.trino.plugin.base.authentication.KerberosAuthentication;
-import io.trino.plugin.hive.ForHdfs;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
import javax.inject.Inject;
import static com.google.inject.Scopes.SINGLETON;
import static com.google.inject.multibindings.OptionalBinder.newOptionalBinder;
import static io.airlift.configuration.ConfigBinder.configBinder;
-import static io.trino.plugin.hive.authentication.KerberosHadoopAuthentication.createKerberosHadoopAuthentication;
+import static io.trino.hdfs.authentication.KerberosHadoopAuthentication.createKerberosHadoopAuthentication;
public final class AuthenticationModules
{
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/CachingKerberosHadoopAuthentication.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/CachingKerberosHadoopAuthentication.java
similarity index 98%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/CachingKerberosHadoopAuthentication.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/CachingKerberosHadoopAuthentication.java
index 02c15564c68b..889e0791967d 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/CachingKerberosHadoopAuthentication.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/CachingKerberosHadoopAuthentication.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import org.apache.hadoop.security.UserGroupInformation;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/DirectHdfsAuthentication.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/DirectHdfsAuthentication.java
similarity index 87%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/DirectHdfsAuthentication.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/DirectHdfsAuthentication.java
index 34df0f7abfac..70bc26fd5578 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/DirectHdfsAuthentication.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/DirectHdfsAuthentication.java
@@ -11,14 +11,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
-import io.trino.plugin.hive.ForHdfs;
import io.trino.spi.security.ConnectorIdentity;
import javax.inject.Inject;
-import static io.trino.plugin.hive.authentication.UserGroupInformationUtils.executeActionInDoAs;
+import static io.trino.hdfs.authentication.UserGroupInformationUtils.executeActionInDoAs;
import static java.util.Objects.requireNonNull;
public class DirectHdfsAuthentication
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/ForHdfs.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/ForHdfs.java
similarity index 96%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/ForHdfs.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/ForHdfs.java
index 6d1ad2c30c7e..3395f8bd36dd 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/ForHdfs.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/ForHdfs.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive;
+package io.trino.hdfs.authentication;
import javax.inject.Qualifier;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/GenericExceptionAction.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/GenericExceptionAction.java
similarity index 93%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/GenericExceptionAction.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/GenericExceptionAction.java
index aaab304645ba..943a60b6e227 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/GenericExceptionAction.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/GenericExceptionAction.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
public interface GenericExceptionAction<R, E extends Exception>
{
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HadoopAuthentication.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HadoopAuthentication.java
similarity index 94%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HadoopAuthentication.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HadoopAuthentication.java
index ed515b39caf9..40b0dd99e915 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HadoopAuthentication.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HadoopAuthentication.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import org.apache.hadoop.security.UserGroupInformation;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HdfsAuthentication.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HdfsAuthentication.java
similarity index 95%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HdfsAuthentication.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HdfsAuthentication.java
index e3c02f0cd2ad..ee98bda27d83 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HdfsAuthentication.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HdfsAuthentication.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import io.trino.spi.security.ConnectorIdentity;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HdfsAuthenticationConfig.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HdfsAuthenticationConfig.java
similarity index 97%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HdfsAuthenticationConfig.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HdfsAuthenticationConfig.java
index b7be07d55f54..1955c29927d5 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HdfsAuthenticationConfig.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HdfsAuthenticationConfig.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HdfsAuthenticationModule.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HdfsAuthenticationModule.java
similarity index 79%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HdfsAuthenticationModule.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HdfsAuthenticationModule.java
index 8718a3aba0f7..5279990e5cdf 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HdfsAuthenticationModule.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HdfsAuthenticationModule.java
@@ -11,20 +11,20 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import com.google.inject.Binder;
import com.google.inject.Module;
import io.airlift.configuration.AbstractConfigurationAwareModule;
-import io.trino.plugin.hive.authentication.HdfsAuthenticationConfig.HdfsAuthenticationType;
+import io.trino.hdfs.authentication.HdfsAuthenticationConfig.HdfsAuthenticationType;
import java.util.function.Predicate;
import static io.airlift.configuration.ConditionalModule.conditionalModule;
-import static io.trino.plugin.hive.authentication.AuthenticationModules.kerberosHdfsAuthenticationModule;
-import static io.trino.plugin.hive.authentication.AuthenticationModules.kerberosImpersonatingHdfsAuthenticationModule;
-import static io.trino.plugin.hive.authentication.AuthenticationModules.noHdfsAuthenticationModule;
-import static io.trino.plugin.hive.authentication.AuthenticationModules.simpleImpersonatingHdfsAuthenticationModule;
+import static io.trino.hdfs.authentication.AuthenticationModules.kerberosHdfsAuthenticationModule;
+import static io.trino.hdfs.authentication.AuthenticationModules.kerberosImpersonatingHdfsAuthenticationModule;
+import static io.trino.hdfs.authentication.AuthenticationModules.noHdfsAuthenticationModule;
+import static io.trino.hdfs.authentication.AuthenticationModules.simpleImpersonatingHdfsAuthenticationModule;
public class HdfsAuthenticationModule
extends AbstractConfigurationAwareModule
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HdfsKerberosConfig.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HdfsKerberosConfig.java
similarity index 97%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HdfsKerberosConfig.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HdfsKerberosConfig.java
index 47a89603f623..652559fd4a17 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HdfsKerberosConfig.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/HdfsKerberosConfig.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/ImpersonatingHdfsAuthentication.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/ImpersonatingHdfsAuthentication.java
similarity index 90%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/ImpersonatingHdfsAuthentication.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/ImpersonatingHdfsAuthentication.java
index 22565dcdf3d3..9549a65c894a 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/ImpersonatingHdfsAuthentication.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/ImpersonatingHdfsAuthentication.java
@@ -11,15 +11,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
-import io.trino.plugin.hive.ForHdfs;
import io.trino.spi.security.ConnectorIdentity;
import org.apache.hadoop.security.UserGroupInformation;
import javax.inject.Inject;
-import static io.trino.plugin.hive.authentication.UserGroupInformationUtils.executeActionInDoAs;
+import static io.trino.hdfs.authentication.UserGroupInformationUtils.executeActionInDoAs;
import static java.util.Objects.requireNonNull;
public class ImpersonatingHdfsAuthentication
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/KerberosHadoopAuthentication.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/KerberosHadoopAuthentication.java
similarity index 92%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/KerberosHadoopAuthentication.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/KerberosHadoopAuthentication.java
index 13e03c794cd2..7898b8b2b6ef 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/KerberosHadoopAuthentication.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/KerberosHadoopAuthentication.java
@@ -11,17 +11,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import io.trino.hadoop.HadoopNative;
+import io.trino.hdfs.HdfsConfigurationInitializer;
import io.trino.plugin.base.authentication.KerberosAuthentication;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import javax.security.auth.Subject;
-import static io.trino.plugin.hive.util.ConfigurationUtils.getInitialConfiguration;
+import static io.trino.hdfs.ConfigurationUtils.getInitialConfiguration;
import static java.util.Objects.requireNonNull;
import static org.apache.hadoop.security.UserGroupInformationShim.createUserGroupInformationForSubject;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/NoHdfsAuthentication.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/NoHdfsAuthentication.java
similarity index 95%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/NoHdfsAuthentication.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/NoHdfsAuthentication.java
index ce341a4fd913..eb517098176a 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/NoHdfsAuthentication.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/NoHdfsAuthentication.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import io.trino.spi.security.ConnectorIdentity;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/SimpleHadoopAuthentication.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/SimpleHadoopAuthentication.java
similarity index 95%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/SimpleHadoopAuthentication.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/SimpleHadoopAuthentication.java
index a9bf316875ed..2e5c1b6204ef 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/SimpleHadoopAuthentication.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/SimpleHadoopAuthentication.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import org.apache.hadoop.security.UserGroupInformation;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/SimpleUserNameProvider.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/SimpleUserNameProvider.java
similarity index 94%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/SimpleUserNameProvider.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/SimpleUserNameProvider.java
index 0da5e1efbeae..421dfd21a099 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/SimpleUserNameProvider.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/SimpleUserNameProvider.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import io.trino.spi.security.ConnectorIdentity;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/UserGroupInformationUtils.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/UserGroupInformationUtils.java
similarity index 97%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/UserGroupInformationUtils.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/UserGroupInformationUtils.java
index a57f135e08b1..5e81bce4c0d8 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/UserGroupInformationUtils.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/UserGroupInformationUtils.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import org.apache.hadoop.security.UserGroupInformation;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/UserNameProvider.java b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/UserNameProvider.java
similarity index 93%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/UserNameProvider.java
rename to lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/UserNameProvider.java
index 6e14ecf20d5b..c5c09c90ba28 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/UserNameProvider.java
+++ b/lib/trino-hdfs/src/main/java/io/trino/hdfs/authentication/UserNameProvider.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import io.trino.spi.security.ConnectorIdentity;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestFSDataInputStreamTail.java b/lib/trino-hdfs/src/test/java/io/trino/hdfs/TestFSDataInputStreamTail.java
similarity index 96%
rename from plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestFSDataInputStreamTail.java
rename to lib/trino-hdfs/src/test/java/io/trino/hdfs/TestFSDataInputStreamTail.java
index 120d6fcfe1c2..0e1f7a054daa 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestFSDataInputStreamTail.java
+++ b/lib/trino-hdfs/src/test/java/io/trino/hdfs/TestFSDataInputStreamTail.java
@@ -12,11 +12,10 @@
* limitations under the License.
*/
-package io.trino.plugin.hive.util;
+package io.trino.hdfs;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
-import io.trino.spi.TrinoException;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
@@ -34,7 +33,6 @@
import static io.airlift.testing.Closeables.closeAll;
import static io.trino.hadoop.ConfigurationInstantiator.newEmptyConfiguration;
-import static io.trino.plugin.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertSame;
import static org.testng.Assert.assertTrue;
@@ -164,10 +162,6 @@ public void testReadTailNoEndOfFileFound()
FSDataInputStreamTail.readTail(tempFile.toString(), 128, is, 16);
fail("Expected failure to find end of stream");
}
- catch (TrinoException e) {
- assertEquals(e.getErrorCode(), HIVE_FILESYSTEM_ERROR.toErrorCode());
- throw e;
- }
}
@Test(expectedExceptions = IOException.class, expectedExceptionsMessageRegExp = "Incorrect file size \\(.*\\) for file \\(end of stream not reached\\): file:.*")
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestFileSystemCache.java b/lib/trino-hdfs/src/test/java/io/trino/hdfs/TestFileSystemCache.java
similarity index 85%
rename from plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestFileSystemCache.java
rename to lib/trino-hdfs/src/test/java/io/trino/hdfs/TestFileSystemCache.java
index a3b9f0ce86e6..e8c003da8666 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestFileSystemCache.java
+++ b/lib/trino-hdfs/src/test/java/io/trino/hdfs/TestFileSystemCache.java
@@ -11,12 +11,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive;
+package io.trino.hdfs;
import com.google.common.collect.ImmutableSet;
-import io.trino.plugin.hive.authentication.ImpersonatingHdfsAuthentication;
-import io.trino.plugin.hive.authentication.SimpleHadoopAuthentication;
-import io.trino.plugin.hive.authentication.SimpleUserNameProvider;
+import io.trino.hdfs.authentication.ImpersonatingHdfsAuthentication;
+import io.trino.hdfs.authentication.SimpleHadoopAuthentication;
+import io.trino.hdfs.authentication.SimpleUserNameProvider;
import io.trino.spi.security.ConnectorIdentity;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -35,7 +35,7 @@ public void testFileSystemCache()
throws IOException
{
HdfsEnvironment environment = new HdfsEnvironment(
- new HiveHdfsConfiguration(new HdfsConfigurationInitializer(new HdfsConfig()), ImmutableSet.of()),
+ new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(new HdfsConfig()), ImmutableSet.of()),
new HdfsConfig(),
new ImpersonatingHdfsAuthentication(new SimpleHadoopAuthentication(), new SimpleUserNameProvider()));
ConnectorIdentity userId = ConnectorIdentity.ofUser("user");
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHdfsConfig.java b/lib/trino-hdfs/src/test/java/io/trino/hdfs/TestHdfsConfig.java
similarity index 99%
rename from plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHdfsConfig.java
rename to lib/trino-hdfs/src/test/java/io/trino/hdfs/TestHdfsConfig.java
index b1a739c39092..7345da03dbbe 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHdfsConfig.java
+++ b/lib/trino-hdfs/src/test/java/io/trino/hdfs/TestHdfsConfig.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive;
+package io.trino.hdfs;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/fs/TestTrinoFileSystemCacheStats.java b/lib/trino-hdfs/src/test/java/io/trino/hdfs/TestTrinoFileSystemCacheStats.java
similarity index 90%
rename from plugin/trino-hive/src/test/java/io/trino/plugin/hive/fs/TestTrinoFileSystemCacheStats.java
rename to lib/trino-hdfs/src/test/java/io/trino/hdfs/TestTrinoFileSystemCacheStats.java
index cc4986dd54ac..cc9adbc7d80a 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/fs/TestTrinoFileSystemCacheStats.java
+++ b/lib/trino-hdfs/src/test/java/io/trino/hdfs/TestTrinoFileSystemCacheStats.java
@@ -11,9 +11,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.fs;
+package io.trino.hdfs;
-import io.trino.plugin.hive.s3.TrinoS3FileSystem;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.testng.annotations.Test;
@@ -36,19 +35,18 @@ public void testCacheSizeIsCorrect()
assertEquals(trinoFileSystemCache.getCacheSize(), 0);
Configuration configuration = newEmptyConfiguration();
- configuration.set("fs.s3.impl", TrinoS3FileSystem.class.getName());
- trinoFileSystemCache.get(new URI("s3://bucket/path/"), configuration);
+ trinoFileSystemCache.get(new URI("file:///tmp/path/"), configuration);
assertEquals(trinoFileSystemCacheStats.getGetCalls().getTotalCount(), 1);
assertEquals(trinoFileSystemCacheStats.getCacheSize(), 1);
assertEquals(trinoFileSystemCache.getCacheSize(), 1);
- trinoFileSystemCache.get(new URI("s3://bucket/path1/"), configuration);
+ trinoFileSystemCache.get(new URI("file:///tmp/path1/"), configuration);
assertEquals(trinoFileSystemCacheStats.getGetCalls().getTotalCount(), 2);
assertEquals(trinoFileSystemCacheStats.getCacheSize(), 1);
assertEquals(trinoFileSystemCache.getCacheSize(), 1);
// use getUnique to ensure cache size is increased
- FileSystem fileSystem = trinoFileSystemCache.getUnique(new URI("s3://bucket/path2/"), configuration);
+ FileSystem fileSystem = trinoFileSystemCache.getUnique(new URI("file:///tmp/path2/"), configuration);
assertEquals(trinoFileSystemCacheStats.getGetCalls().getTotalCount(), 2);
assertEquals(trinoFileSystemCacheStats.getGetUniqueCalls().getTotalCount(), 1);
assertEquals(trinoFileSystemCacheStats.getCacheSize(), 2);
@@ -71,7 +69,7 @@ public void testFailedCallsCountIsCorrect()
TrinoFileSystemCacheStats trinoFileSystemCacheStats = trinoFileSystemCache.getFileSystemCacheStats();
Configuration configuration = newEmptyConfiguration();
configuration.setInt("fs.cache.max-size", 0);
- assertThatThrownBy(() -> trinoFileSystemCache.get(new URI("s3://bucket/path/"), configuration))
+ assertThatThrownBy(() -> trinoFileSystemCache.get(new URI("file:///tmp/path/"), configuration))
.hasMessageMatching("FileSystem max cache size has been reached: 0");
assertEquals(trinoFileSystemCacheStats.getGetCallsFailed().getTotalCount(), 1);
assertEquals(trinoFileSystemCacheStats.getGetCalls().getTotalCount(), 1);
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/authentication/TestHdfsAuthenticationConfig.java b/lib/trino-hdfs/src/test/java/io/trino/hdfs/authentication/TestHdfsAuthenticationConfig.java
similarity index 92%
rename from plugin/trino-hive/src/test/java/io/trino/plugin/hive/authentication/TestHdfsAuthenticationConfig.java
rename to lib/trino-hdfs/src/test/java/io/trino/hdfs/authentication/TestHdfsAuthenticationConfig.java
index 5a5ae1699241..1a4589727af1 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/authentication/TestHdfsAuthenticationConfig.java
+++ b/lib/trino-hdfs/src/test/java/io/trino/hdfs/authentication/TestHdfsAuthenticationConfig.java
@@ -11,10 +11,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import com.google.common.collect.ImmutableMap;
-import io.trino.plugin.hive.authentication.HdfsAuthenticationConfig.HdfsAuthenticationType;
+import io.trino.hdfs.authentication.HdfsAuthenticationConfig.HdfsAuthenticationType;
import org.testng.annotations.Test;
import java.util.Map;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/authentication/TestHdfsKerberosConfig.java b/lib/trino-hdfs/src/test/java/io/trino/hdfs/authentication/TestHdfsKerberosConfig.java
similarity index 97%
rename from plugin/trino-hive/src/test/java/io/trino/plugin/hive/authentication/TestHdfsKerberosConfig.java
rename to lib/trino-hdfs/src/test/java/io/trino/hdfs/authentication/TestHdfsKerberosConfig.java
index 1669d843db37..efd9aa2e335c 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/authentication/TestHdfsKerberosConfig.java
+++ b/lib/trino-hdfs/src/test/java/io/trino/hdfs/authentication/TestHdfsKerberosConfig.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.hdfs.authentication;
import com.google.common.collect.ImmutableMap;
import org.testng.annotations.Test;
diff --git a/plugin/trino-delta-lake/pom.xml b/plugin/trino-delta-lake/pom.xml
index cd3626911eb5..e85e46992de5 100644
--- a/plugin/trino-delta-lake/pom.xml
+++ b/plugin/trino-delta-lake/pom.xml
@@ -32,6 +32,11 @@
trino-collect
+
+ io.trino
+ trino-hdfs
+
+
io.trino
trino-hive
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMergeSink.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMergeSink.java
index f2f2cb7e7fb0..cda32e5a9429 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMergeSink.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMergeSink.java
@@ -17,13 +17,13 @@
import io.airlift.json.JsonCodec;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.parquet.ParquetReaderOptions;
import io.trino.parquet.writer.ParquetSchemaConverter;
import io.trino.parquet.writer.ParquetWriterOptions;
import io.trino.plugin.hive.FileFormatDataSourceStats;
import io.trino.plugin.hive.FileWriter;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import io.trino.plugin.hive.ReaderPageSource;
import io.trino.plugin.hive.parquet.ParquetFileWriter;
import io.trino.plugin.hive.parquet.ParquetPageSourceFactory;
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadata.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadata.java
index c9cfd141c3b0..10dda841b51f 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadata.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadata.java
@@ -26,6 +26,8 @@
import io.airlift.slice.Slice;
import io.airlift.stats.cardinality.HyperLogLog;
import io.airlift.units.DataSize;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.deltalake.metastore.DeltaLakeMetastore;
import io.trino.plugin.deltalake.metastore.NotADeltaLakeTableException;
import io.trino.plugin.deltalake.procedure.DeltaLakeTableExecuteHandle;
@@ -47,8 +49,6 @@
import io.trino.plugin.deltalake.transactionlog.writer.TransactionConflictException;
import io.trino.plugin.deltalake.transactionlog.writer.TransactionLogWriter;
import io.trino.plugin.deltalake.transactionlog.writer.TransactionLogWriterFactory;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import io.trino.plugin.hive.HiveType;
import io.trino.plugin.hive.SchemaAlreadyExistsException;
import io.trino.plugin.hive.TableAlreadyExistsException;
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadataFactory.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadataFactory.java
index 4989f023c7b0..caa4d08fb95b 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadataFactory.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeMetadataFactory.java
@@ -14,12 +14,12 @@
package io.trino.plugin.deltalake;
import io.airlift.json.JsonCodec;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.deltalake.metastore.HiveMetastoreBackedDeltaLakeMetastore;
import io.trino.plugin.deltalake.statistics.CachingExtendedStatisticsAccess;
import io.trino.plugin.deltalake.transactionlog.TransactionLogAccess;
import io.trino.plugin.deltalake.transactionlog.checkpoint.CheckpointWriterManager;
import io.trino.plugin.deltalake.transactionlog.writer.TransactionLogWriterFactory;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.plugin.hive.metastore.cache.CachingHiveMetastore;
import io.trino.spi.NodeManager;
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSink.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSink.java
index f67609f68d2b..85dc41403550 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSink.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSink.java
@@ -22,10 +22,11 @@
import io.airlift.json.JsonCodec;
import io.airlift.log.Logger;
import io.airlift.slice.Slice;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.parquet.writer.ParquetSchemaConverter;
import io.trino.parquet.writer.ParquetWriterOptions;
import io.trino.plugin.hive.FileWriter;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.HivePartitionKey;
import io.trino.plugin.hive.RecordFileWriter;
import io.trino.plugin.hive.parquet.ParquetFileWriter;
@@ -62,6 +63,7 @@
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.airlift.slice.Slices.wrappedBuffer;
+import static io.trino.hdfs.ConfigurationUtils.toJobConf;
import static io.trino.plugin.deltalake.DeltaLakeErrorCode.DELTA_LAKE_BAD_WRITE;
import static io.trino.plugin.deltalake.DeltaLakeSchemaProperties.buildHiveSchema;
import static io.trino.plugin.deltalake.DeltaLakeSessionProperties.getCompressionCodec;
@@ -72,7 +74,6 @@
import static io.trino.plugin.hive.HiveStorageFormat.PARQUET;
import static io.trino.plugin.hive.metastore.StorageFormat.fromHiveStorageFormat;
import static io.trino.plugin.hive.util.CompressionConfigUtil.configureCompression;
-import static io.trino.plugin.hive.util.ConfigurationUtils.toJobConf;
import static io.trino.spi.type.TimestampType.TIMESTAMP_MILLIS;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
@@ -193,7 +194,7 @@ public DeltaLakePageSink(
this.session = requireNonNull(session, "session is null");
this.stats = stats;
- Configuration conf = hdfsEnvironment.getConfiguration(new HdfsEnvironment.HdfsContext(session), new Path(outputPath));
+ Configuration conf = hdfsEnvironment.getConfiguration(new HdfsContext(session), new Path(outputPath));
configureCompression(conf, getCompressionCodec(session));
this.conf = toJobConf(conf);
this.typeManager = requireNonNull(typeManager, "typeManager is null");
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSinkProvider.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSinkProvider.java
index 8ae41b409018..b02f0793acd1 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSinkProvider.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSinkProvider.java
@@ -14,9 +14,9 @@
package io.trino.plugin.deltalake;
import io.airlift.json.JsonCodec;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.deltalake.procedure.DeltaLakeTableExecuteHandle;
import io.trino.plugin.deltalake.procedure.DeltaTableOptimizeHandle;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.NodeVersion;
import io.trino.spi.PageIndexerFactory;
import io.trino.spi.connector.ConnectorInsertTableHandle;
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSourceProvider.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSourceProvider.java
index 32375a19d2e5..9344876ee60e 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSourceProvider.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakePageSourceProvider.java
@@ -16,10 +16,10 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.json.JsonCodec;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.parquet.ParquetReaderOptions;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import io.trino.plugin.hive.HiveColumnHandle;
import io.trino.plugin.hive.ReaderPageSource;
import io.trino.plugin.hive.parquet.ParquetPageSourceFactory;
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeUpdatablePageSource.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeUpdatablePageSource.java
index 042bb1961d37..7a04b76ecbab 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeUpdatablePageSource.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/DeltaLakeUpdatablePageSource.java
@@ -17,9 +17,10 @@
import com.google.common.collect.ImmutableSet;
import io.airlift.json.JsonCodec;
import io.airlift.slice.Slice;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.parquet.ParquetReaderOptions;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.HiveColumnHandle;
import io.trino.plugin.hive.ReaderPageSource;
import io.trino.plugin.hive.RecordFileWriter;
@@ -58,6 +59,7 @@
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.airlift.slice.Slices.utf8Slice;
+import static io.trino.hdfs.ConfigurationUtils.toJobConf;
import static io.trino.plugin.deltalake.DeltaLakeColumnHandle.ROW_ID_COLUMN_NAME;
import static io.trino.plugin.deltalake.DeltaLakeColumnType.PARTITION_KEY;
import static io.trino.plugin.deltalake.DeltaLakeColumnType.REGULAR;
@@ -73,7 +75,6 @@
import static io.trino.plugin.hive.HiveStorageFormat.PARQUET;
import static io.trino.plugin.hive.metastore.StorageFormat.fromHiveStorageFormat;
import static io.trino.plugin.hive.util.CompressionConfigUtil.configureCompression;
-import static io.trino.plugin.hive.util.ConfigurationUtils.toJobConf;
import static io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.spi.block.ColumnarRow.toColumnarRow;
@@ -100,7 +101,7 @@ public class DeltaLakeUpdatablePageSource
private final ConnectorSession session;
private final ExecutorService executorService;
private final HdfsEnvironment hdfsEnvironment;
- private final HdfsEnvironment.HdfsContext hdfsContext;
+ private final HdfsContext hdfsContext;
private final DateTimeZone parquetDateTimeZone;
private final ParquetReaderOptions parquetReaderOptions;
private final TypeManager typeManager;
@@ -130,7 +131,7 @@ public DeltaLakeUpdatablePageSource(
ConnectorSession session,
ExecutorService executorService,
HdfsEnvironment hdfsEnvironment,
- HdfsEnvironment.HdfsContext hdfsContext,
+ HdfsContext hdfsContext,
DateTimeZone parquetDateTimeZone,
ParquetReaderOptions parquetReaderOptions,
TupleDomain parquetPredicate,
@@ -583,7 +584,7 @@ private ReaderPageSource createParquetPageSource(TupleDomain p
private DeltaLakeWriter createWriter(Path targetFile, List allColumns, List dataColumns)
throws IOException
{
- Configuration conf = hdfsEnvironment.getConfiguration(new HdfsEnvironment.HdfsContext(session), targetFile);
+ Configuration conf = hdfsEnvironment.getConfiguration(new HdfsContext(session), targetFile);
configureCompression(conf, SNAPPY);
Properties schema = buildHiveSchema(
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/InternalDeltaLakeConnectorFactory.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/InternalDeltaLakeConnectorFactory.java
index 11f529963bd2..4dd604dfdaa4 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/InternalDeltaLakeConnectorFactory.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/InternalDeltaLakeConnectorFactory.java
@@ -22,6 +22,8 @@
import io.airlift.bootstrap.LifeCycleManager;
import io.airlift.event.client.EventModule;
import io.airlift.json.JsonModule;
+import io.trino.hdfs.HdfsModule;
+import io.trino.hdfs.authentication.HdfsAuthenticationModule;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.base.CatalogNameModule;
import io.trino.plugin.base.classloader.ClassLoaderSafeConnectorAccessControl;
@@ -34,9 +36,7 @@
import io.trino.plugin.base.jmx.MBeanServerModule;
import io.trino.plugin.base.session.SessionPropertiesProvider;
import io.trino.plugin.deltalake.metastore.DeltaLakeMetastoreModule;
-import io.trino.plugin.hive.HiveHdfsModule;
import io.trino.plugin.hive.NodeVersion;
-import io.trino.plugin.hive.authentication.HdfsAuthenticationModule;
import io.trino.plugin.hive.azure.HiveAzureModule;
import io.trino.plugin.hive.gcs.HiveGcsModule;
import io.trino.plugin.hive.s3.HiveS3Module;
@@ -81,7 +81,7 @@ public static Connector createConnector(
new ConnectorObjectNameGeneratorModule(catalogName, "io.trino.plugin.deltalake", "trino.plugin.deltalake"),
new JsonModule(),
new MBeanServerModule(),
- new HiveHdfsModule(),
+ new HdfsModule(),
new HiveS3Module(),
new HiveAzureModule(),
new HiveGcsModule(),
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/metastore/HiveMetastoreBackedDeltaLakeMetastore.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/metastore/HiveMetastoreBackedDeltaLakeMetastore.java
index 3a5c0c8b843b..e0ac60f22fa3 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/metastore/HiveMetastoreBackedDeltaLakeMetastore.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/metastore/HiveMetastoreBackedDeltaLakeMetastore.java
@@ -13,6 +13,8 @@
*/
package io.trino.plugin.deltalake.metastore;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.deltalake.DeltaLakeColumnHandle;
import io.trino.plugin.deltalake.DeltaLakeColumnMetadata;
import io.trino.plugin.deltalake.DeltaLakeTableHandle;
@@ -26,7 +28,6 @@
import io.trino.plugin.deltalake.transactionlog.TableSnapshot;
import io.trino.plugin.deltalake.transactionlog.TransactionLogAccess;
import io.trino.plugin.deltalake.transactionlog.statistics.DeltaLakeFileStatistics;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.metastore.Database;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.PrincipalPrivileges;
@@ -176,7 +177,7 @@ public void dropTable(ConnectorSession session, String databaseName, String tabl
if (!externalTable) {
try {
Path path = new Path(tableLocation);
- FileSystem fileSystem = hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session), path);
+ FileSystem fileSystem = hdfsEnvironment.getFileSystem(new HdfsContext(session), path);
fileSystem.delete(path, true);
}
catch (IOException e) {
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/procedure/VacuumProcedure.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/procedure/VacuumProcedure.java
index 6d3606ff4c9f..b442ef7e7932 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/procedure/VacuumProcedure.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/procedure/VacuumProcedure.java
@@ -16,6 +16,8 @@
import com.google.common.collect.ImmutableList;
import io.airlift.log.Logger;
import io.airlift.units.Duration;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.deltalake.DeltaLakeConfig;
import io.trino.plugin.deltalake.DeltaLakeMetadata;
@@ -27,7 +29,6 @@
import io.trino.plugin.deltalake.transactionlog.RemoveFileEntry;
import io.trino.plugin.deltalake.transactionlog.TableSnapshot;
import io.trino.plugin.deltalake.transactionlog.TransactionLogAccess;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.spi.TrinoException;
import io.trino.spi.classloader.ThreadContextClassLoader;
import io.trino.spi.connector.ConnectorAccessControl;
@@ -171,7 +172,7 @@ private void doVacuum(
TableSnapshot tableSnapshot = transactionLogAccess.loadSnapshot(tableName, new Path(handle.getLocation()), session);
Path tableLocation = tableSnapshot.getTableLocation();
Path transactionLogDir = getTransactionLogDir(tableLocation);
- FileSystem fileSystem = hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session), tableLocation);
+ FileSystem fileSystem = hdfsEnvironment.getFileSystem(new HdfsContext(session), tableLocation);
String commonPathPrefix = tableLocation + "/";
String queryId = session.getQueryId();
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/statistics/MetaDirStatisticsAccess.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/statistics/MetaDirStatisticsAccess.java
index 3e1b1e726be3..0fcdbba95079 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/statistics/MetaDirStatisticsAccess.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/statistics/MetaDirStatisticsAccess.java
@@ -15,7 +15,8 @@
import com.google.inject.Inject;
import io.airlift.json.JsonCodec;
-import io.trino.plugin.hive.HdfsEnvironment;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ConnectorSession;
import org.apache.hadoop.fs.FileSystem;
@@ -67,7 +68,7 @@ private Optional readExtendedStatistics(ConnectorSession ses
{
try {
Path statisticsPath = new Path(new Path(tableLocation, statisticsDirectory), statisticsFile);
- FileSystem fileSystem = hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session), statisticsPath);
+ FileSystem fileSystem = hdfsEnvironment.getFileSystem(new HdfsContext(session), statisticsPath);
if (!fileSystem.exists(statisticsPath)) {
return Optional.empty();
}
@@ -92,7 +93,7 @@ public void updateExtendedStatistics(
try {
Path statisticsPath = new Path(metaPath, STATISTICS_FILE);
- FileSystem fileSystem = hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session), metaPath);
+ FileSystem fileSystem = hdfsEnvironment.getFileSystem(new HdfsContext(session), metaPath);
try (OutputStream outputStream = fileSystem.create(statisticsPath, true)) {
outputStream.write(statisticsCodec.toJsonBytes(statistics));
}
@@ -110,7 +111,7 @@ public void deleteExtendedStatistics(ConnectorSession session, String tableLocat
{
Path statisticsPath = new Path(new Path(tableLocation, STATISTICS_META_DIR), STATISTICS_FILE);
try {
- FileSystem hdfs = hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session), statisticsPath);
+ FileSystem hdfs = hdfsEnvironment.getFileSystem(new HdfsContext(session), statisticsPath);
if (!hdfs.delete(statisticsPath, false) && hdfs.exists(statisticsPath)) {
throw new TrinoException(GENERIC_INTERNAL_ERROR, format("Failed to delete statistics file %s", statisticsPath));
}
@@ -122,7 +123,7 @@ public void deleteExtendedStatistics(ConnectorSession session, String tableLocat
private void ensureDirectoryExists(ConnectorSession session, Path directoryPath)
{
- HdfsEnvironment.HdfsContext hdfsContext = new HdfsEnvironment.HdfsContext(session);
+ HdfsContext hdfsContext = new HdfsContext(session);
if (!pathExists(hdfsContext, hdfsEnvironment, directoryPath)) {
createDirectory(hdfsContext, hdfsEnvironment, directoryPath);
}
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/TableSnapshot.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/TableSnapshot.java
index 4ea8f29c1809..8f2e60dbfd67 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/TableSnapshot.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/TableSnapshot.java
@@ -15,13 +15,13 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.parquet.ParquetReaderOptions;
import io.trino.plugin.deltalake.transactionlog.checkpoint.CheckpointEntryIterator;
import io.trino.plugin.deltalake.transactionlog.checkpoint.CheckpointSchemaManager;
import io.trino.plugin.deltalake.transactionlog.checkpoint.LastCheckpoint;
import io.trino.plugin.deltalake.transactionlog.checkpoint.TransactionLogTail;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.SchemaTableName;
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/TransactionLogAccess.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/TransactionLogAccess.java
index 9710d5bafaac..764697733c8d 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/TransactionLogAccess.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/TransactionLogAccess.java
@@ -22,6 +22,8 @@
import io.airlift.jmx.CacheStatsMBean;
import io.airlift.log.Logger;
import io.trino.collect.cache.EvictableCacheBuilder;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.parquet.ParquetReaderOptions;
import io.trino.plugin.deltalake.DeltaLakeColumnMetadata;
import io.trino.plugin.deltalake.DeltaLakeConfig;
@@ -29,7 +31,6 @@
import io.trino.plugin.deltalake.transactionlog.checkpoint.CheckpointSchemaManager;
import io.trino.plugin.deltalake.transactionlog.checkpoint.TransactionLogTail;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.parquet.ParquetReaderConfig;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ConnectorSession;
@@ -458,7 +459,7 @@ private FileSystem getFileSystem(TableSnapshot tableSnapshot, ConnectorSession s
protected FileSystem getFileSystem(Path tableLocation, SchemaTableName table, ConnectorSession session)
{
try {
- return hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session), tableLocation);
+ return hdfsEnvironment.getFileSystem(new HdfsContext(session), tableLocation);
}
catch (IOException e) {
throw new TrinoException(DELTA_LAKE_INVALID_SCHEMA, "Failed accessing transaction log for table: " + table, e);
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/checkpoint/CheckpointEntryIterator.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/checkpoint/CheckpointEntryIterator.java
index f029f5aa7364..fff6515f5954 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/checkpoint/CheckpointEntryIterator.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/checkpoint/CheckpointEntryIterator.java
@@ -16,6 +16,8 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.math.LongMath;
import io.airlift.log.Logger;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.parquet.ParquetReaderOptions;
import io.trino.plugin.deltalake.DeltaLakeColumnHandle;
import io.trino.plugin.deltalake.DeltaLakeColumnMetadata;
@@ -28,7 +30,6 @@
import io.trino.plugin.deltalake.transactionlog.TransactionEntry;
import io.trino.plugin.deltalake.transactionlog.statistics.DeltaLakeParquetFileStatistics;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.HiveColumnHandle;
import io.trino.plugin.hive.ReaderPageSource;
import io.trino.plugin.hive.parquet.ParquetPageSourceFactory;
@@ -184,7 +185,7 @@ public CheckpointEntryIterator(
tupleDomain,
true,
hdfsEnvironment,
- hdfsEnvironment.getConfiguration(new HdfsEnvironment.HdfsContext(session), checkpoint),
+ hdfsEnvironment.getConfiguration(new HdfsContext(session), checkpoint),
session.getIdentity(),
DateTimeZone.UTC,
stats,
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/checkpoint/CheckpointWriter.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/checkpoint/CheckpointWriter.java
index 553748871e9e..122c875c87d0 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/checkpoint/CheckpointWriter.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/checkpoint/CheckpointWriter.java
@@ -14,6 +14,8 @@
package io.trino.plugin.deltalake.transactionlog.checkpoint;
import com.google.common.collect.ImmutableList;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.deltalake.transactionlog.AddFileEntry;
import io.trino.plugin.deltalake.transactionlog.MetadataEntry;
import io.trino.plugin.deltalake.transactionlog.ProtocolEntry;
@@ -21,7 +23,6 @@
import io.trino.plugin.deltalake.transactionlog.TransactionEntry;
import io.trino.plugin.deltalake.transactionlog.statistics.DeltaLakeJsonFileStatistics;
import io.trino.plugin.deltalake.transactionlog.statistics.DeltaLakeParquetFileStatistics;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.RecordFileWriter;
import io.trino.spi.PageBuilder;
import io.trino.spi.block.Block;
@@ -49,12 +50,12 @@
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static io.airlift.slice.Slices.utf8Slice;
+import static io.trino.hdfs.ConfigurationUtils.toJobConf;
import static io.trino.plugin.deltalake.DeltaLakeSchemaProperties.buildHiveSchema;
import static io.trino.plugin.hive.HiveCompressionCodec.SNAPPY;
import static io.trino.plugin.hive.HiveStorageFormat.PARQUET;
import static io.trino.plugin.hive.metastore.StorageFormat.fromHiveStorageFormat;
import static io.trino.plugin.hive.util.CompressionConfigUtil.configureCompression;
-import static io.trino.plugin.hive.util.ConfigurationUtils.toJobConf;
import static io.trino.spi.type.Timestamps.MICROSECONDS_PER_MILLISECOND;
import static io.trino.spi.type.TypeUtils.writeNativeValue;
import static java.lang.Math.multiplyExact;
@@ -106,7 +107,7 @@ public void write(ConnectorSession session, CheckpointEntries entries, Path targ
Properties schema = buildHiveSchema(columnNames, columnTypes);
- Configuration conf = hdfsEnvironment.getConfiguration(new HdfsEnvironment.HdfsContext(session), targetPath);
+ Configuration conf = hdfsEnvironment.getConfiguration(new HdfsContext(session), targetPath);
configureCompression(conf, SNAPPY);
JobConf jobConf = toJobConf(conf);
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/checkpoint/CheckpointWriterManager.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/checkpoint/CheckpointWriterManager.java
index 7bf785e099d1..24c39da0b126 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/checkpoint/CheckpointWriterManager.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/checkpoint/CheckpointWriterManager.java
@@ -15,11 +15,12 @@
import com.google.common.collect.ImmutableSet;
import io.airlift.json.JsonCodec;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.deltalake.transactionlog.DeltaLakeTransactionLogEntry;
import io.trino.plugin.deltalake.transactionlog.TableSnapshot;
import io.trino.plugin.deltalake.transactionlog.TransactionLogAccess;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.type.TypeManager;
@@ -87,7 +88,7 @@ public void writeCheckpoint(ConnectorSession session, TableSnapshot snapshot)
CheckpointBuilder checkpointBuilder = new CheckpointBuilder();
- FileSystem fileSystem = hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session), snapshot.getTableLocation());
+ FileSystem fileSystem = hdfsEnvironment.getFileSystem(new HdfsContext(session), snapshot.getTableLocation());
Optional checkpointMetadataLogEntry = snapshot.getCheckpointTransactionLogEntries(
session,
ImmutableSet.of(METADATA),
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/writer/AzureTransactionLogSynchronizer.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/writer/AzureTransactionLogSynchronizer.java
index d408cfa9c7b7..94030ab34222 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/writer/AzureTransactionLogSynchronizer.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/writer/AzureTransactionLogSynchronizer.java
@@ -13,7 +13,8 @@
*/
package io.trino.plugin.deltalake.transactionlog.writer;
-import io.trino.plugin.hive.HdfsEnvironment;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.spi.connector.ConnectorSession;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -46,7 +47,7 @@ public void write(ConnectorSession session, String clusterId, Path newLogEntryPa
FileSystem fs = null;
try {
- fs = hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session), newLogEntryPath);
+ fs = hdfsEnvironment.getFileSystem(new HdfsContext(session), newLogEntryPath);
try (OutputStream outputStream = fs.create(tmpFilePath, false)) {
outputStream.write(entryContents);
}
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/writer/NoIsolationSynchronizer.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/writer/NoIsolationSynchronizer.java
index 080678dbc6e8..0f7e9a455f6b 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/writer/NoIsolationSynchronizer.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/writer/NoIsolationSynchronizer.java
@@ -13,7 +13,8 @@
*/
package io.trino.plugin.deltalake.transactionlog.writer;
-import io.trino.plugin.hive.HdfsEnvironment;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.spi.connector.ConnectorSession;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -42,7 +43,7 @@ public void write(ConnectorSession session, String clusterId, Path newLogEntryPa
throws UncheckedIOException
{
try {
- FileSystem fs = hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session), newLogEntryPath);
+ FileSystem fs = hdfsEnvironment.getFileSystem(new HdfsContext(session), newLogEntryPath);
try (OutputStream outputStream = fs.create(newLogEntryPath, false)) {
outputStream.write(entryContents);
}
diff --git a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/writer/S3TransactionLogSynchronizer.java b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/writer/S3TransactionLogSynchronizer.java
index 0141de1e238e..24b2225ec0ec 100644
--- a/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/writer/S3TransactionLogSynchronizer.java
+++ b/plugin/trino-delta-lake/src/main/java/io/trino/plugin/deltalake/transactionlog/writer/S3TransactionLogSynchronizer.java
@@ -18,7 +18,8 @@
import com.google.common.collect.ImmutableList;
import io.airlift.json.JsonCodec;
import io.airlift.log.Logger;
-import io.trino.plugin.hive.HdfsEnvironment;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.spi.connector.ConnectorSession;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -159,7 +160,7 @@ public void write(ConnectorSession session, String clusterId, Path newLogEntryPa
private FileSystem getFileSystem(ConnectorSession session, Path newLogEntryPath)
{
try {
- return hdfsEnvironment.getFileSystem(new HdfsEnvironment.HdfsContext(session), newLogEntryPath);
+ return hdfsEnvironment.getFileSystem(new HdfsContext(session), newLogEntryPath);
}
catch (Exception e) {
throw new RuntimeException(e);
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/AbstractTestDeltaLakeCreateTableStatistics.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/AbstractTestDeltaLakeCreateTableStatistics.java
index 4e07fba436e6..2e91bc10dccb 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/AbstractTestDeltaLakeCreateTableStatistics.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/AbstractTestDeltaLakeCreateTableStatistics.java
@@ -17,17 +17,17 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.trino.Session;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.deltalake.transactionlog.AddFileEntry;
import io.trino.plugin.deltalake.transactionlog.TransactionLogAccess;
import io.trino.plugin.deltalake.transactionlog.checkpoint.CheckpointSchemaManager;
import io.trino.plugin.deltalake.transactionlog.statistics.DeltaLakeFileStatistics;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.containers.HiveMinioDataLake;
import io.trino.plugin.hive.parquet.ParquetReaderConfig;
import io.trino.spi.connector.SchemaTableName;
@@ -542,7 +542,7 @@ protected List getAddFileEntries(String tableName)
{
TestingConnectorContext context = new TestingConnectorContext();
HdfsConfig hdfsConfig = new HdfsConfig();
- HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
+ HdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
TransactionLogAccess transactionLogAccess = new TransactionLogAccess(
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/AccessTrackingHdfsEnvironment.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/AccessTrackingHdfsEnvironment.java
index e0fbc7b0e7e7..0503fe2fffe5 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/AccessTrackingHdfsEnvironment.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/AccessTrackingHdfsEnvironment.java
@@ -14,10 +14,10 @@
package io.trino.plugin.deltalake;
import com.google.common.collect.ImmutableMap;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.authentication.HdfsAuthentication;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.HdfsAuthentication;
import io.trino.spi.security.ConnectorIdentity;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeTableWithCustomLocation.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeTableWithCustomLocation.java
index 71251488a7ff..3d155f78151a 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeTableWithCustomLocation.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeTableWithCustomLocation.java
@@ -13,7 +13,8 @@
*/
package io.trino.plugin.deltalake;
-import io.trino.plugin.hive.HdfsEnvironment;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.Table;
import io.trino.testing.AbstractTestQueryFramework;
@@ -43,7 +44,7 @@ public abstract class BaseDeltaLakeTableWithCustomLocation
protected File metastoreDir;
protected HiveMetastore metastore;
protected HdfsEnvironment hdfsEnvironment;
- protected HdfsEnvironment.HdfsContext hdfsContext;
+ protected HdfsContext hdfsContext;
@Test
public void testTableHasUuidSuffixInLocation()
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeMetadata.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeMetadata.java
index 73d78cadde20..856d68069626 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeMetadata.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeMetadata.java
@@ -21,6 +21,7 @@
import com.google.inject.Provides;
import io.airlift.bootstrap.Bootstrap;
import io.airlift.json.JsonModule;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.deltalake.metastore.DeltaLakeMetastore;
import io.trino.plugin.deltalake.metastore.DeltaLakeMetastoreModule;
@@ -28,7 +29,6 @@
import io.trino.plugin.deltalake.statistics.CachingExtendedStatisticsAccess;
import io.trino.plugin.deltalake.transactionlog.MetadataEntry;
import io.trino.plugin.deltalake.transactionlog.TransactionLogAccess;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.metastore.Database;
import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeSharedGlueMetastoreWithTableRedirections.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeSharedGlueMetastoreWithTableRedirections.java
index ff7541aa8458..731d9131bcc9 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeSharedGlueMetastoreWithTableRedirections.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeSharedGlueMetastoreWithTableRedirections.java
@@ -18,12 +18,12 @@
import com.google.common.collect.ImmutableSet;
import io.airlift.log.Logger;
import io.trino.Session;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.TestingHivePlugin;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.glue.DefaultGlueColumnStatisticsProviderFactory;
import io.trino.plugin.hive.metastore.glue.GlueHiveMetastore;
@@ -79,7 +79,7 @@ protected QueryRunner createQueryRunner()
HdfsConfig hdfsConfig = new HdfsConfig();
HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(
- new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of()),
+ new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of()),
hdfsConfig,
new NoHdfsAuthentication());
this.glueMetastore = new GlueHiveMetastore(
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeTableWithCustomLocationUsingGlueMetastore.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeTableWithCustomLocationUsingGlueMetastore.java
index 59c99a13664e..2b2100ffcfd0 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeTableWithCustomLocationUsingGlueMetastore.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeTableWithCustomLocationUsingGlueMetastore.java
@@ -18,11 +18,12 @@
import com.google.common.collect.ImmutableSet;
import io.airlift.log.Logger;
import io.trino.Session;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.glue.DefaultGlueColumnStatisticsProviderFactory;
import io.trino.plugin.hive.metastore.glue.GlueHiveMetastore;
import io.trino.plugin.hive.metastore.glue.GlueHiveMetastoreConfig;
@@ -70,7 +71,7 @@ protected QueryRunner createQueryRunner()
HdfsConfig hdfsConfig = new HdfsConfig();
hdfsEnvironment = new HdfsEnvironment(
- new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of()),
+ new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of()),
hdfsConfig,
new NoHdfsAuthentication());
GlueHiveMetastoreConfig glueConfig = new GlueHiveMetastoreConfig()
@@ -83,7 +84,7 @@ protected QueryRunner createQueryRunner()
new DefaultGlueColumnStatisticsProviderFactory(directExecutor(), directExecutor()),
Optional.empty(),
table -> true);
- hdfsContext = new HdfsEnvironment.HdfsContext(queryRunner.getDefaultSession().toConnectorSession());
+ hdfsContext = new HdfsContext(queryRunner.getDefaultSession().toConnectorSession());
queryRunner.execute("CREATE SCHEMA " + SCHEMA + " WITH (location = '" + metastoreDir.getPath() + "')");
return queryRunner;
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeTableWithCustomLocationUsingHiveMetastore.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeTableWithCustomLocationUsingHiveMetastore.java
index 5cc1d1c99826..e33c9a4b90f2 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeTableWithCustomLocationUsingHiveMetastore.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeTableWithCustomLocationUsingHiveMetastore.java
@@ -15,14 +15,14 @@
import com.google.common.collect.ImmutableSet;
import io.trino.Session;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.NodeVersion;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.file.FileHiveMetastore;
import io.trino.plugin.hive.metastore.file.FileHiveMetastoreConfig;
import io.trino.spi.security.ConnectorIdentity;
@@ -53,7 +53,7 @@ protected QueryRunner createQueryRunner()
Map connectorProperties = new HashMap<>();
HdfsConfig hdfsConfig = new HdfsConfig();
- HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
+ HdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
metastoreDir = Files.createTempDirectory("test_delta_lake").toFile();
FileHiveMetastoreConfig config = new FileHiveMetastoreConfig()
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestTransactionLogAccess.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestTransactionLogAccess.java
index 02af6edda114..0b7686af6240 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestTransactionLogAccess.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestTransactionLogAccess.java
@@ -17,6 +17,11 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.airlift.units.Duration;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.deltalake.transactionlog.AddFileEntry;
import io.trino.plugin.deltalake.transactionlog.CommitInfoEntry;
import io.trino.plugin.deltalake.transactionlog.MetadataEntry;
@@ -26,11 +31,6 @@
import io.trino.plugin.deltalake.transactionlog.checkpoint.CheckpointSchemaManager;
import io.trino.plugin.deltalake.transactionlog.statistics.DeltaLakeFileStatistics;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.parquet.ParquetReaderConfig;
import io.trino.spi.connector.ColumnMetadata;
import io.trino.spi.connector.SchemaTableName;
@@ -124,7 +124,7 @@ private void setupTransactionLogAccess(String tableName, Path tableLocation, Del
TypeManager typeManager = context.getTypeManager();
HdfsConfig hdfsConfig = new HdfsConfig();
- HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
+ HdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
hdfsEnvironment = new AccessTrackingHdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
FileFormatDataSourceStats fileFormatDataSourceStats = new FileFormatDataSourceStats();
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TrackingTransactionLogAccess.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TrackingTransactionLogAccess.java
index 9e0440302cf1..cdf1ecf3a807 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TrackingTransactionLogAccess.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TrackingTransactionLogAccess.java
@@ -13,10 +13,10 @@
*/
package io.trino.plugin.deltalake;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.deltalake.transactionlog.TransactionLogAccess;
import io.trino.plugin.deltalake.transactionlog.checkpoint.CheckpointSchemaManager;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.parquet.ParquetReaderConfig;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.SchemaTableName;
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/metastore/TestDeltaLakeMetastoreAccessOperations.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/metastore/TestDeltaLakeMetastoreAccessOperations.java
index 703622973b3f..454d7a3c1bd6 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/metastore/TestDeltaLakeMetastoreAccessOperations.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/metastore/TestDeltaLakeMetastoreAccessOperations.java
@@ -21,14 +21,14 @@
import com.google.inject.Binder;
import io.airlift.configuration.AbstractConfigurationAwareModule;
import io.trino.Session;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.deltalake.TestingDeltaLakePlugin;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
import io.trino.plugin.hive.NodeVersion;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.CountingAccessHiveMetastore;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
@@ -73,7 +73,7 @@ protected DistributedQueryRunner createQueryRunner()
File baseDir = queryRunner.getCoordinator().getBaseDataDir().resolve("delta_lake").toFile();
HdfsConfig hdfsConfig = new HdfsConfig();
- HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
+ HdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
HiveMetastore hiveMetastore = new FileHiveMetastore(
new NodeVersion("testversion"),
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/metastore/TestDeltaLakeMetastoreStatistics.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/metastore/TestDeltaLakeMetastoreStatistics.java
index 7040f8339273..8c393b0e2c66 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/metastore/TestDeltaLakeMetastoreStatistics.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/metastore/TestDeltaLakeMetastoreStatistics.java
@@ -18,6 +18,12 @@
import com.google.common.collect.ImmutableSet;
import com.google.common.io.Resources;
import io.airlift.json.JsonCodecFactory;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.deltalake.DeltaLakeColumnHandle;
import io.trino.plugin.deltalake.DeltaLakeConfig;
import io.trino.plugin.deltalake.DeltaLakeTableHandle;
@@ -29,14 +35,8 @@
import io.trino.plugin.deltalake.transactionlog.TransactionLogAccess;
import io.trino.plugin.deltalake.transactionlog.checkpoint.CheckpointSchemaManager;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
import io.trino.plugin.hive.HiveType;
import io.trino.plugin.hive.NodeVersion;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.Column;
import io.trino.plugin.hive.metastore.Database;
import io.trino.plugin.hive.metastore.HiveMetastore;
@@ -102,7 +102,7 @@ public void setupMetastore()
CheckpointSchemaManager checkpointSchemaManager = new CheckpointSchemaManager(typeManager);
HdfsConfig hdfsConfig = new HdfsConfig();
- HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
+ HdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
FileFormatDataSourceStats fileFormatDataSourceStats = new FileFormatDataSourceStats();
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/metastore/glue/TestDeltaLakeGlueMetastore.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/metastore/glue/TestDeltaLakeGlueMetastore.java
index 2af023ccf817..5fb0caef5b7c 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/metastore/glue/TestDeltaLakeGlueMetastore.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/metastore/glue/TestDeltaLakeGlueMetastore.java
@@ -23,13 +23,13 @@
import io.airlift.bootstrap.Bootstrap;
import io.airlift.bootstrap.LifeCycleManager;
import io.airlift.json.JsonModule;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.base.session.SessionPropertiesProvider;
import io.trino.plugin.deltalake.DeltaLakeMetadata;
import io.trino.plugin.deltalake.DeltaLakeMetadataFactory;
import io.trino.plugin.deltalake.DeltaLakeModule;
import io.trino.plugin.deltalake.metastore.DeltaLakeMetastoreModule;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.metastore.Column;
import io.trino.plugin.hive.metastore.Database;
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/transactionlog/TestTableSnapshot.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/transactionlog/TestTableSnapshot.java
index f18ad7c2a81a..f59a1feaca97 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/transactionlog/TestTableSnapshot.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/transactionlog/TestTableSnapshot.java
@@ -16,16 +16,16 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.parquet.ParquetReaderOptions;
import io.trino.plugin.deltalake.AccessTrackingFileSystem;
import io.trino.plugin.deltalake.transactionlog.checkpoint.CheckpointSchemaManager;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.parquet.ParquetReaderConfig;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.SchemaTableName;
@@ -79,7 +79,7 @@ public void setUp()
accessTrackingFileSystem = new AccessTrackingFileSystem(filesystem);
HdfsConfig hdfsConfig = new HdfsConfig();
- HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
+ HdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
}
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/transactionlog/checkpoint/TestCheckpointEntryIterator.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/transactionlog/checkpoint/TestCheckpointEntryIterator.java
index c596fa132226..c6ea2a39ab7d 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/transactionlog/checkpoint/TestCheckpointEntryIterator.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/transactionlog/checkpoint/TestCheckpointEntryIterator.java
@@ -16,18 +16,18 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.deltalake.transactionlog.AddFileEntry;
import io.trino.plugin.deltalake.transactionlog.DeltaLakeTransactionLogEntry;
import io.trino.plugin.deltalake.transactionlog.MetadataEntry;
import io.trino.plugin.deltalake.transactionlog.ProtocolEntry;
import io.trino.plugin.deltalake.transactionlog.RemoveFileEntry;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.parquet.ParquetReaderConfig;
import org.apache.hadoop.fs.Path;
import org.testng.annotations.AfterClass;
@@ -65,7 +65,7 @@ public class TestCheckpointEntryIterator
public void setUp()
{
HdfsConfig hdfsConfig = new HdfsConfig();
- HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
+ HdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
checkpointSchemaManager = new CheckpointSchemaManager(TESTING_TYPE_MANAGER);
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/transactionlog/checkpoint/TestCheckpointWriter.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/transactionlog/checkpoint/TestCheckpointWriter.java
index 5258891f4ce3..7efe0b231268 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/transactionlog/checkpoint/TestCheckpointWriter.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/transactionlog/checkpoint/TestCheckpointWriter.java
@@ -16,6 +16,13 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.deltalake.transactionlog.AddFileEntry;
import io.trino.plugin.deltalake.transactionlog.DeltaLakeTransactionLogEntry;
import io.trino.plugin.deltalake.transactionlog.MetadataEntry;
@@ -26,12 +33,6 @@
import io.trino.plugin.deltalake.transactionlog.statistics.DeltaLakeJsonFileStatistics;
import io.trino.plugin.deltalake.transactionlog.statistics.DeltaLakeParquetFileStatistics;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.parquet.ParquetReaderConfig;
import io.trino.spi.block.Block;
import io.trino.spi.block.ColumnarRow;
@@ -86,7 +87,7 @@ public void setUp()
{
checkpointSchemaManager = new CheckpointSchemaManager(typeManager);
HdfsConfig hdfsConfig = new HdfsConfig();
- HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), Set.of());
+ HdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), Set.of());
hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
}
@@ -415,7 +416,7 @@ private Optional
+
+ io.trino
+ trino-hdfs
+ runtime
+
+
io.trino.hadoop
hadoop-apache
diff --git a/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileSystemAbfs.java b/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileSystemAbfs.java
index 68c1fb277942..5c4cbaadec01 100644
--- a/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileSystemAbfs.java
+++ b/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileSystemAbfs.java
@@ -16,6 +16,11 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
+import io.trino.hdfs.ConfigurationInitializer;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
import io.trino.plugin.hive.AbstractTestHive.Transaction;
import io.trino.plugin.hive.azure.HiveAzureConfig;
import io.trino.plugin.hive.azure.TrinoAzureConfigurationInitializer;
@@ -99,7 +104,7 @@ private void ensureTableExists(SchemaTableName table, String tableDirectoryName,
private HdfsConfiguration createHdfsConfiguration()
{
ConfigurationInitializer initializer = new TrinoAzureConfigurationInitializer(getConfig());
- return new HiveHdfsConfiguration(new HdfsConfigurationInitializer(new HdfsConfig(), ImmutableSet.of(initializer)), ImmutableSet.of());
+ return new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(new HdfsConfig(), ImmutableSet.of(initializer)), ImmutableSet.of());
}
@Override
diff --git a/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileSystemS3.java b/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileSystemS3.java
index 8ba990dc0b8b..a6b4dcccfa8b 100644
--- a/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileSystemS3.java
+++ b/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileSystemS3.java
@@ -14,6 +14,11 @@
package io.trino.plugin.hive;
import com.google.common.collect.ImmutableSet;
+import io.trino.hdfs.ConfigurationInitializer;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
import io.trino.plugin.hive.s3.HiveS3Config;
import io.trino.plugin.hive.s3.TrinoS3ConfigurationInitializer;
import org.apache.hadoop.fs.FileSystem;
@@ -65,7 +70,7 @@ private HdfsConfiguration createHdfsConfiguration()
.setS3AwsAccessKey(awsAccessKey)
.setS3AwsSecretKey(awsSecretKey));
HdfsConfigurationInitializer initializer = new HdfsConfigurationInitializer(new HdfsConfig(), ImmutableSet.of(s3Config));
- return new HiveHdfsConfiguration(initializer, ImmutableSet.of());
+ return new DynamicHdfsConfiguration(initializer, ImmutableSet.of());
}
@Override
diff --git a/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/TestHiveFileSystemAdl.java b/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/TestHiveFileSystemAdl.java
index 70217919f418..59fe0e7b1be4 100644
--- a/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/TestHiveFileSystemAdl.java
+++ b/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/TestHiveFileSystemAdl.java
@@ -14,6 +14,11 @@
package io.trino.plugin.hive;
import com.google.common.collect.ImmutableSet;
+import io.trino.hdfs.ConfigurationInitializer;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
import io.trino.plugin.hive.azure.HiveAzureConfig;
import io.trino.plugin.hive.azure.TrinoAzureConfigurationInitializer;
import org.apache.hadoop.fs.FileSystem;
@@ -77,7 +82,7 @@ private HdfsConfiguration createHdfsConfiguration()
.setAdlClientId(clientId)
.setAdlCredential(credential)
.setAdlRefreshUrl(refreshUrl));
- return new HiveHdfsConfiguration(new HdfsConfigurationInitializer(new HdfsConfig(), ImmutableSet.of(azureConfig)), ImmutableSet.of());
+ return new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(new HdfsConfig(), ImmutableSet.of(azureConfig)), ImmutableSet.of());
}
@Override
diff --git a/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/TestHiveFileSystemWasb.java b/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/TestHiveFileSystemWasb.java
index ceb7556e2cf3..73b7fcd53c24 100644
--- a/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/TestHiveFileSystemWasb.java
+++ b/plugin/trino-hive-hadoop2/src/test/java/io/trino/plugin/hive/TestHiveFileSystemWasb.java
@@ -14,6 +14,11 @@
package io.trino.plugin.hive;
import com.google.common.collect.ImmutableSet;
+import io.trino.hdfs.ConfigurationInitializer;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
import io.trino.plugin.hive.azure.HiveAzureConfig;
import io.trino.plugin.hive.azure.TrinoAzureConfigurationInitializer;
import org.apache.hadoop.fs.Path;
@@ -64,7 +69,7 @@ private HdfsConfiguration createHdfsConfiguration()
ConfigurationInitializer wasbConfig = new TrinoAzureConfigurationInitializer(new HiveAzureConfig()
.setWasbAccessKey(accessKey)
.setWasbStorageAccount(account));
- return new HiveHdfsConfiguration(new HdfsConfigurationInitializer(new HdfsConfig(), ImmutableSet.of(wasbConfig)), ImmutableSet.of());
+ return new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(new HdfsConfig(), ImmutableSet.of(wasbConfig)), ImmutableSet.of());
}
@Override
diff --git a/plugin/trino-hive/pom.xml b/plugin/trino-hive/pom.xml
index 4ee3071fd584..92718ec6450e 100644
--- a/plugin/trino-hive/pom.xml
+++ b/plugin/trino-hive/pom.xml
@@ -37,6 +37,11 @@
trino-hadoop-toolkit
+
+ io.trino
+ trino-hdfs
+
+
io.trino
trino-memory-context
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/AbstractHiveAcidWriters.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/AbstractHiveAcidWriters.java
index a72079937bc4..4f00581f8eb2 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/AbstractHiveAcidWriters.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/AbstractHiveAcidWriters.java
@@ -36,6 +36,7 @@
import java.util.regex.Pattern;
import static com.google.common.base.Preconditions.checkArgument;
+import static io.trino.hdfs.ConfigurationUtils.toJobConf;
import static io.trino.orc.OrcWriter.OrcOperation.DELETE;
import static io.trino.orc.OrcWriter.OrcOperation.INSERT;
import static io.trino.plugin.hive.HiveStorageFormat.ORC;
@@ -45,7 +46,6 @@
import static io.trino.plugin.hive.acid.AcidSchema.ACID_COLUMN_NAMES;
import static io.trino.plugin.hive.acid.AcidSchema.createAcidSchema;
import static io.trino.plugin.hive.metastore.StorageFormat.fromHiveStorageFormat;
-import static io.trino.plugin.hive.util.ConfigurationUtils.toJobConf;
import static io.trino.spi.block.ColumnarRow.toColumnarRow;
import static io.trino.spi.predicate.Utils.nativeValueToBlock;
import static io.trino.spi.type.BigintType.BIGINT;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/BackgroundHiveSplitLoader.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/BackgroundHiveSplitLoader.java
index f07b750fc91e..ee6830bb2990 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/BackgroundHiveSplitLoader.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/BackgroundHiveSplitLoader.java
@@ -24,7 +24,8 @@
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.units.Duration;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.HiveSplit.BucketConversion;
import io.trino.plugin.hive.HiveSplit.BucketValidation;
import io.trino.plugin.hive.acid.AcidTransaction;
@@ -98,6 +99,7 @@
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static io.airlift.concurrent.MoreFutures.addExceptionCallback;
import static io.airlift.concurrent.MoreFutures.toListenableFuture;
+import static io.trino.hdfs.ConfigurationUtils.toJobConf;
import static io.trino.plugin.hive.HiveErrorCode.HIVE_BAD_DATA;
import static io.trino.plugin.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR;
import static io.trino.plugin.hive.HiveErrorCode.HIVE_INVALID_BUCKET_FILES;
@@ -113,7 +115,6 @@
import static io.trino.plugin.hive.metastore.MetastoreUtil.getHiveSchema;
import static io.trino.plugin.hive.metastore.MetastoreUtil.getPartitionLocation;
import static io.trino.plugin.hive.s3select.S3SelectPushdown.shouldEnablePushdownForTable;
-import static io.trino.plugin.hive.util.ConfigurationUtils.toJobConf;
import static io.trino.plugin.hive.util.HiveUtil.checkCondition;
import static io.trino.plugin.hive.util.HiveUtil.getFooterCount;
import static io.trino.plugin.hive.util.HiveUtil.getHeaderCount;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/GenericHiveRecordCursorProvider.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/GenericHiveRecordCursorProvider.java
index 1077a694f7a7..488ceb9d243f 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/GenericHiveRecordCursorProvider.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/GenericHiveRecordCursorProvider.java
@@ -14,6 +14,7 @@
package io.trino.plugin.hive;
import io.airlift.units.DataSize;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.util.HiveUtil;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ConnectorSession;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveLocationService.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveLocationService.java
index 78d84c4f2106..54878a5ee300 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveLocationService.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveLocationService.java
@@ -13,7 +13,8 @@
*/
package io.trino.plugin.hive;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.LocationHandle.WriteMode;
import io.trino.plugin.hive.metastore.Partition;
import io.trino.plugin.hive.metastore.SemiTransactionalHiveMetastore;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
index 9db9cdb688df..17cc2df29824 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
@@ -28,8 +28,9 @@
import io.airlift.log.Logger;
import io.airlift.slice.Slice;
import io.airlift.units.DataSize;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.base.CatalogName;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import io.trino.plugin.hive.HiveApplyProjectionUtil.ProjectedColumnRepresentation;
import io.trino.plugin.hive.HiveSessionProperties.InsertExistingPartitionsBehavior;
import io.trino.plugin.hive.LocationService.WriteInfo;
@@ -161,6 +162,7 @@
import static com.google.common.collect.Iterables.concat;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.Sets.intersection;
+import static io.trino.hdfs.ConfigurationUtils.toJobConf;
import static io.trino.plugin.hive.HiveAnalyzeProperties.getColumnNames;
import static io.trino.plugin.hive.HiveAnalyzeProperties.getPartitionList;
import static io.trino.plugin.hive.HiveApplyProjectionUtil.extractSupportedProjectedColumns;
@@ -261,7 +263,6 @@
import static io.trino.plugin.hive.metastore.StorageFormat.VIEW_STORAGE_FORMAT;
import static io.trino.plugin.hive.metastore.StorageFormat.fromHiveStorageFormat;
import static io.trino.plugin.hive.util.CompressionConfigUtil.configureCompression;
-import static io.trino.plugin.hive.util.ConfigurationUtils.toJobConf;
import static io.trino.plugin.hive.util.HiveBucketing.getHiveBucketHandle;
import static io.trino.plugin.hive.util.HiveBucketing.isSupportedBucketing;
import static io.trino.plugin.hive.util.HiveUtil.columnExtraInfo;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadataFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadataFactory.java
index 4f763251ab9b..f6bdb51f156f 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadataFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadataFactory.java
@@ -16,6 +16,7 @@
import io.airlift.concurrent.BoundedExecutor;
import io.airlift.json.JsonCodec;
import io.airlift.units.Duration;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.hive.aws.athena.PartitionProjectionService;
import io.trino.plugin.hive.fs.DirectoryLister;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveModule.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveModule.java
index 73af36d1533a..0400a47111f2 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveModule.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveModule.java
@@ -20,10 +20,10 @@
import com.google.inject.Scopes;
import com.google.inject.multibindings.Multibinder;
import io.airlift.event.client.EventClient;
+import io.trino.hdfs.TrinoFileSystemCache;
+import io.trino.hdfs.TrinoFileSystemCacheStats;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.hive.fs.CachingDirectoryLister;
-import io.trino.plugin.hive.fs.TrinoFileSystemCache;
-import io.trino.plugin.hive.fs.TrinoFileSystemCacheStats;
import io.trino.plugin.hive.metastore.HiveMetastoreConfig;
import io.trino.plugin.hive.metastore.thrift.TranslateHiveViews;
import io.trino.plugin.hive.orc.OrcFileWriterFactory;
@@ -77,6 +77,9 @@ public void configure(Binder binder)
binder.bind(CachingDirectoryLister.class).in(Scopes.SINGLETON);
newExporter(binder).export(CachingDirectoryLister.class).withGeneratedName();
+ binder.bind(NamenodeStats.class).in(Scopes.SINGLETON);
+ newExporter(binder).export(NamenodeStats.class).withGeneratedName();
+
binder.bind(HiveWriterStats.class).in(Scopes.SINGLETON);
newExporter(binder).export(HiveWriterStats.class).withGeneratedName();
@@ -106,7 +109,7 @@ public void configure(Binder binder)
binder.bind(TrinoFileSystemCacheStats.class).toInstance(TrinoFileSystemCache.INSTANCE.getFileSystemCacheStats());
newExporter(binder).export(TrinoFileSystemCacheStats.class)
- .as(generator -> generator.generatedNameOf(TrinoFileSystemCache.class));
+ .as(generator -> generator.generatedNameOf(io.trino.plugin.hive.fs.TrinoFileSystemCache.class));
Multibinder pageSourceFactoryBinder = newSetBinder(binder, HivePageSourceFactory.class);
pageSourceFactoryBinder.addBinding().to(OrcPageSourceFactory.class).in(Scopes.SINGLETON);
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSink.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSink.java
index 03b0b03af5e8..1d2c669e9a9a 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSink.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSink.java
@@ -23,6 +23,7 @@
import io.airlift.json.JsonCodec;
import io.airlift.log.Logger;
import io.airlift.slice.Slice;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.util.HiveBucketing.BucketingVersion;
import io.trino.spi.Page;
import io.trino.spi.PageIndexer;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSinkProvider.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSinkProvider.java
index 529563237e06..b19dc39f2200 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSinkProvider.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSinkProvider.java
@@ -20,6 +20,7 @@
import io.airlift.event.client.EventClient;
import io.airlift.json.JsonCodec;
import io.airlift.units.DataSize;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.plugin.hive.metastore.HivePageSinkMetadataProvider;
import io.trino.plugin.hive.metastore.SortingColumn;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSourceProvider.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSourceProvider.java
index 97626ecd3521..3ba556ce1049 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSourceProvider.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePageSourceProvider.java
@@ -18,10 +18,11 @@
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.orc.metadata.ColumnMetadata;
import io.trino.orc.metadata.OrcColumnId;
import io.trino.orc.metadata.OrcType;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import io.trino.plugin.hive.HivePageSource.BucketValidator;
import io.trino.plugin.hive.HiveRecordCursorProvider.ReaderRecordCursorWithProjections;
import io.trino.plugin.hive.HiveSplit.BucketConversion;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveSplitManager.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveSplitManager.java
index 5e171faa1985..039d8c276c17 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveSplitManager.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveSplitManager.java
@@ -21,6 +21,7 @@
import io.airlift.concurrent.BoundedExecutor;
import io.airlift.stats.CounterStat;
import io.airlift.units.DataSize;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.metastore.Column;
import io.trino.plugin.hive.metastore.Partition;
import io.trino.plugin.hive.metastore.SemiTransactionalHiveMetastore;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveWriterFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveWriterFactory.java
index 045229b8ceba..e1db0a850f95 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveWriterFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveWriterFactory.java
@@ -20,7 +20,8 @@
import com.google.common.collect.Sets;
import io.airlift.event.client.EventClient;
import io.airlift.units.DataSize;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.HiveSessionProperties.InsertExistingPartitionsBehavior;
import io.trino.plugin.hive.LocationService.WriteInfo;
import io.trino.plugin.hive.PartitionUpdate.UpdateMode;
@@ -76,6 +77,7 @@
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.collect.Maps.immutableEntry;
+import static io.trino.hdfs.ConfigurationUtils.toJobConf;
import static io.trino.plugin.hive.HiveCompressionCodecs.selectCompressionCodec;
import static io.trino.plugin.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR;
import static io.trino.plugin.hive.HiveErrorCode.HIVE_INVALID_METADATA;
@@ -96,7 +98,6 @@
import static io.trino.plugin.hive.metastore.StorageFormat.fromHiveStorageFormat;
import static io.trino.plugin.hive.util.CompressionConfigUtil.assertCompressionConfigured;
import static io.trino.plugin.hive.util.CompressionConfigUtil.configureCompression;
-import static io.trino.plugin.hive.util.ConfigurationUtils.toJobConf;
import static io.trino.plugin.hive.util.HiveUtil.getColumnNames;
import static io.trino.plugin.hive.util.HiveUtil.getColumnTypes;
import static io.trino.plugin.hive.util.HiveWriteUtils.createPartitionValues;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/InternalHiveConnectorFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/InternalHiveConnectorFactory.java
index 08a7e40bb043..548158145b69 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/InternalHiveConnectorFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/InternalHiveConnectorFactory.java
@@ -23,6 +23,8 @@
import io.airlift.bootstrap.LifeCycleManager;
import io.airlift.event.client.EventModule;
import io.airlift.json.JsonModule;
+import io.trino.hdfs.HdfsModule;
+import io.trino.hdfs.authentication.HdfsAuthenticationModule;
import io.trino.plugin.base.CatalogNameModule;
import io.trino.plugin.base.TypeDeserializerModule;
import io.trino.plugin.base.classloader.ClassLoaderSafeConnectorAccessControl;
@@ -34,7 +36,6 @@
import io.trino.plugin.base.jmx.ConnectorObjectNameGeneratorModule;
import io.trino.plugin.base.jmx.MBeanServerModule;
import io.trino.plugin.base.session.SessionPropertiesProvider;
-import io.trino.plugin.hive.authentication.HdfsAuthenticationModule;
import io.trino.plugin.hive.aws.athena.PartitionProjectionModule;
import io.trino.plugin.hive.azure.HiveAzureModule;
import io.trino.plugin.hive.fs.CachingDirectoryListerModule;
@@ -106,7 +107,7 @@ public static Connector createConnector(
new HiveModule(),
new PartitionProjectionModule(),
new CachingDirectoryListerModule(directoryLister),
- new HiveHdfsModule(),
+ new HdfsModule(),
new HiveS3Module(),
new HiveGcsModule(),
new HiveAzureModule(),
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/RcFileFileWriterFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/RcFileFileWriterFactory.java
index c908d03a0688..5a06d34c2ebf 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/RcFileFileWriterFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/RcFileFileWriterFactory.java
@@ -14,6 +14,7 @@
package io.trino.plugin.hive;
import com.google.common.collect.ImmutableMap;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.acid.AcidTransaction;
import io.trino.plugin.hive.metastore.StorageFormat;
import io.trino.plugin.hive.rcfile.HdfsRcFileDataSource;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/azure/HiveAzureModule.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/azure/HiveAzureModule.java
index a4e0835a2db7..ff3e7540dd8b 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/azure/HiveAzureModule.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/azure/HiveAzureModule.java
@@ -16,7 +16,7 @@
import com.google.inject.Binder;
import com.google.inject.Scopes;
import io.airlift.configuration.AbstractConfigurationAwareModule;
-import io.trino.plugin.hive.ConfigurationInitializer;
+import io.trino.hdfs.ConfigurationInitializer;
import static com.google.inject.multibindings.Multibinder.newSetBinder;
import static io.airlift.configuration.ConfigBinder.configBinder;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/azure/TrinoAzureConfigurationInitializer.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/azure/TrinoAzureConfigurationInitializer.java
index e04daad3479a..db21eaee41a7 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/azure/TrinoAzureConfigurationInitializer.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/azure/TrinoAzureConfigurationInitializer.java
@@ -14,7 +14,7 @@
package io.trino.plugin.hive.azure;
import com.google.common.net.HostAndPort;
-import io.trino.plugin.hive.ConfigurationInitializer;
+import io.trino.hdfs.ConfigurationInitializer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.adl.AdlFileSystem;
import org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/fs/TrinoFileSystemCache.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/fs/TrinoFileSystemCache.java
index 0a418de224ba..70d919bf076b 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/fs/TrinoFileSystemCache.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/fs/TrinoFileSystemCache.java
@@ -13,420 +13,7 @@
*/
package io.trino.plugin.hive.fs;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import io.airlift.log.Logger;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.BlockLocation;
-import org.apache.hadoop.fs.CreateFlag;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileSystemCache;
-import org.apache.hadoop.fs.FilterFileSystem;
-import org.apache.hadoop.fs.LocatedFileStatus;
-import org.apache.hadoop.fs.Options;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.ReflectionUtils;
-
-import javax.annotation.concurrent.GuardedBy;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.net.URI;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicLong;
-
-import static com.google.common.base.MoreObjects.toStringHelper;
-import static com.google.common.base.Strings.nullToEmpty;
-import static java.lang.String.format;
-import static java.util.Locale.ENGLISH;
-import static java.util.Objects.requireNonNull;
-import static org.apache.hadoop.fs.FileSystem.getFileSystemClass;
-import static org.apache.hadoop.security.UserGroupInformationShim.getSubject;
-
-public class TrinoFileSystemCache
- implements FileSystemCache
-{
- private static final Logger log = Logger.get(TrinoFileSystemCache.class);
-
- public static final String CACHE_KEY = "fs.cache.credentials";
-
- public static final TrinoFileSystemCache INSTANCE = new TrinoFileSystemCache();
-
- private final AtomicLong unique = new AtomicLong();
-
- private final TrinoFileSystemCacheStats stats;
-
- @GuardedBy("this")
- private final Map<FileSystemKey, FileSystemHolder> map = new HashMap<>();
-
- @VisibleForTesting
- TrinoFileSystemCache()
- {
- this.stats = new TrinoFileSystemCacheStats(() -> {
- synchronized (this) {
- return map.size();
- }
- });
- }
-
- @Override
- public FileSystem get(URI uri, Configuration conf)
- throws IOException
- {
- stats.newGetCall();
- return getInternal(uri, conf, 0);
- }
-
- @Override
- public FileSystem getUnique(URI uri, Configuration conf)
- throws IOException
- {
- stats.newGetUniqueCall();
- return getInternal(uri, conf, unique.incrementAndGet());
- }
-
- @VisibleForTesting
- int getCacheSize()
- {
- return map.size();
- }
-
- private synchronized FileSystem getInternal(URI uri, Configuration conf, long unique)
- throws IOException
- {
- UserGroupInformation userGroupInformation = UserGroupInformation.getCurrentUser();
- FileSystemKey key = createFileSystemKey(uri, userGroupInformation, unique);
- Set<?> privateCredentials = getPrivateCredentials(userGroupInformation);
-
- FileSystemHolder fileSystemHolder = map.get(key);
- if (fileSystemHolder == null) {
- int maxSize = conf.getInt("fs.cache.max-size", 1000);
- if (map.size() >= maxSize) {
- stats.newGetCallFailed();
- throw new IOException(format("FileSystem max cache size has been reached: %s", maxSize));
- }
- try {
- FileSystem fileSystem = createFileSystem(uri, conf);
- fileSystemHolder = new FileSystemHolder(fileSystem, privateCredentials);
- map.put(key, fileSystemHolder);
- }
- catch (IOException e) {
- stats.newGetCallFailed();
- throw e;
- }
- }
-
- // Update file system instance when credentials change.
- // - Private credentials are only set when using Kerberos authentication.
- // When the user is the same, but the private credentials are different,
- // that means that Kerberos ticket has expired and re-login happened.
- // To prevent cache leak in such situation, the privateCredentials are not
- // a part of the FileSystemKey, but part of the FileSystemHolder. When a
- // Kerberos re-login occurs, re-create the file system and cache it using
- // the same key.
- // - Extra credentials are used to authenticate with certain file systems.
- if ((isHdfs(uri) && !fileSystemHolder.getPrivateCredentials().equals(privateCredentials)) ||
- extraCredentialsChanged(fileSystemHolder.getFileSystem(), conf)) {
- map.remove(key);
- try {
- FileSystem fileSystem = createFileSystem(uri, conf);
- fileSystemHolder = new FileSystemHolder(fileSystem, privateCredentials);
- map.put(key, fileSystemHolder);
- }
- catch (IOException e) {
- stats.newGetCallFailed();
- throw e;
- }
- }
-
- return fileSystemHolder.getFileSystem();
- }
-
- private static FileSystem createFileSystem(URI uri, Configuration conf)
- throws IOException
- {
- Class<? extends FileSystem> clazz = getFileSystemClass(uri.getScheme(), conf);
- if (clazz == null) {
- throw new IOException("No FileSystem for scheme: " + uri.getScheme());
- }
- FileSystem original = (FileSystem) ReflectionUtils.newInstance(clazz, conf);
- original.initialize(uri, conf);
- FilterFileSystem wrapper = new FileSystemWrapper(original);
- FileSystemFinalizerService.getInstance().addFinalizer(wrapper, () -> {
- try {
- original.close();
- }
- catch (IOException e) {
- log.error(e, "Error occurred when finalizing file system");
- }
- });
- return wrapper;
- }
-
- @Override
- public synchronized void remove(FileSystem fileSystem)
- {
- stats.newRemoveCall();
- map.values().removeIf(holder -> holder.getFileSystem().equals(fileSystem));
- }
-
- @Override
- public synchronized void closeAll()
- throws IOException
- {
- for (FileSystemHolder fileSystemHolder : ImmutableList.copyOf(map.values())) {
- fileSystemHolder.getFileSystem().close();
- }
- map.clear();
- }
-
- private static FileSystemKey createFileSystemKey(URI uri, UserGroupInformation userGroupInformation, long unique)
- {
- String scheme = nullToEmpty(uri.getScheme()).toLowerCase(ENGLISH);
- String authority = nullToEmpty(uri.getAuthority()).toLowerCase(ENGLISH);
- String realUser;
- String proxyUser;
- AuthenticationMethod authenticationMethod = userGroupInformation.getAuthenticationMethod();
- switch (authenticationMethod) {
- case SIMPLE:
- case KERBEROS:
- realUser = userGroupInformation.getUserName();
- proxyUser = null;
- break;
- case PROXY:
- realUser = userGroupInformation.getRealUser().getUserName();
- proxyUser = userGroupInformation.getUserName();
- break;
- default:
- throw new IllegalArgumentException("Unsupported authentication method: " + authenticationMethod);
- }
- return new FileSystemKey(scheme, authority, unique, realUser, proxyUser);
- }
-
- private static Set<?> getPrivateCredentials(UserGroupInformation userGroupInformation)
- {
- AuthenticationMethod authenticationMethod = userGroupInformation.getAuthenticationMethod();
- switch (authenticationMethod) {
- case SIMPLE:
- return ImmutableSet.of();
- case KERBEROS:
- return ImmutableSet.copyOf(getSubject(userGroupInformation).getPrivateCredentials());
- case PROXY:
- return getPrivateCredentials(userGroupInformation.getRealUser());
- default:
- throw new IllegalArgumentException("Unsupported authentication method: " + authenticationMethod);
- }
- }
-
- private static boolean isHdfs(URI uri)
- {
- String scheme = uri.getScheme();
- return "hdfs".equals(scheme) || "viewfs".equals(scheme);
- }
-
- private static boolean extraCredentialsChanged(FileSystem fileSystem, Configuration configuration)
- {
- return !configuration.get(CACHE_KEY, "").equals(
- fileSystem.getConf().get(CACHE_KEY, ""));
- }
-
- private static class FileSystemKey
- {
- private final String scheme;
- private final String authority;
- private final long unique;
- private final String realUser;
- private final String proxyUser;
-
- public FileSystemKey(String scheme, String authority, long unique, String realUser, String proxyUser)
- {
- this.scheme = requireNonNull(scheme, "scheme is null");
- this.authority = requireNonNull(authority, "authority is null");
- this.unique = unique;
- this.realUser = requireNonNull(realUser, "realUser");
- this.proxyUser = proxyUser;
- }
-
- @Override
- public boolean equals(Object o)
- {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
- FileSystemKey that = (FileSystemKey) o;
- return Objects.equals(scheme, that.scheme) &&
- Objects.equals(authority, that.authority) &&
- Objects.equals(unique, that.unique) &&
- Objects.equals(realUser, that.realUser) &&
- Objects.equals(proxyUser, that.proxyUser);
- }
-
- @Override
- public int hashCode()
- {
- return Objects.hash(scheme, authority, unique, realUser, proxyUser);
- }
-
- @Override
- public String toString()
- {
- return toStringHelper(this)
- .add("scheme", scheme)
- .add("authority", authority)
- .add("unique", unique)
- .add("realUser", realUser)
- .add("proxyUser", proxyUser)
- .toString();
- }
- }
-
- private static class FileSystemHolder
- {
- private final FileSystem fileSystem;
- private final Set<?> privateCredentials;
-
- public FileSystemHolder(FileSystem fileSystem, Set<?> privateCredentials)
- {
- this.fileSystem = requireNonNull(fileSystem, "fileSystem is null");
- this.privateCredentials = ImmutableSet.copyOf(requireNonNull(privateCredentials, "privateCredentials is null"));
- }
-
- public FileSystem getFileSystem()
- {
- return fileSystem;
- }
-
- public Set<?> getPrivateCredentials()
- {
- return privateCredentials;
- }
-
- @Override
- public String toString()
- {
- return toStringHelper(this)
- .add("fileSystem", fileSystem)
- .add("privateCredentials", privateCredentials)
- .toString();
- }
- }
-
- private static class FileSystemWrapper
- extends FilterFileSystem
- {
- public FileSystemWrapper(FileSystem fs)
- {
- super(fs);
- }
-
- @Override
- public FSDataInputStream open(Path f, int bufferSize)
- throws IOException
- {
- return new InputStreamWrapper(getRawFileSystem().open(f, bufferSize), this);
- }
-
- @Override
- public FSDataOutputStream append(Path f, int bufferSize, Progressable progress)
- throws IOException
- {
- return new OutputStreamWrapper(getRawFileSystem().append(f, bufferSize, progress), this);
- }
-
- @Override
- public FSDataOutputStream create(Path f, FsPermission permission, boolean overwrite, int bufferSize, short replication, long blockSize, Progressable progress)
- throws IOException
- {
- return new OutputStreamWrapper(getRawFileSystem().create(f, permission, overwrite, bufferSize, replication, blockSize, progress), this);
- }
-
- @Override
- public FSDataOutputStream create(Path f, FsPermission permission, EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize, Progressable progress, Options.ChecksumOpt checksumOpt)
- throws IOException
- {
- return new OutputStreamWrapper(getRawFileSystem().create(f, permission, flags, bufferSize, replication, blockSize, progress, checksumOpt), this);
- }
-
- @Override
- public FSDataOutputStream createNonRecursive(Path f, FsPermission permission, EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize, Progressable progress)
- throws IOException
- {
- return new OutputStreamWrapper(getRawFileSystem().createNonRecursive(f, permission, flags, bufferSize, replication, blockSize, progress), this);
- }
-
- // missing in FilterFileSystem (HADOOP-16399)
- @Override
- public BlockLocation[] getFileBlockLocations(Path p, long start, long len)
- throws IOException
- {
- return fs.getFileBlockLocations(p, start, len);
- }
-
- // missing in FilterFileSystem
- @Override
- public RemoteIterator<LocatedFileStatus> listFiles(Path path, boolean recursive)
- throws IOException
- {
- return fs.listFiles(path, recursive);
- }
- }
-
- private static class OutputStreamWrapper
- extends FSDataOutputStream
- {
- @SuppressWarnings({"FieldCanBeLocal", "unused"})
- private final FileSystem fileSystem;
-
- public OutputStreamWrapper(FSDataOutputStream delegate, FileSystem fileSystem)
- {
- super(delegate, null, delegate.getPos());
- this.fileSystem = fileSystem;
- }
-
- @Override
- public OutputStream getWrappedStream()
- {
- return ((FSDataOutputStream) super.getWrappedStream()).getWrappedStream();
- }
- }
-
- private static class InputStreamWrapper
- extends FSDataInputStream
- {
- @SuppressWarnings({"FieldCanBeLocal", "unused"})
- private final FileSystem fileSystem;
-
- public InputStreamWrapper(FSDataInputStream inputStream, FileSystem fileSystem)
- {
- super(inputStream);
- this.fileSystem = fileSystem;
- }
-
- @Override
- public InputStream getWrappedStream()
- {
- return ((FSDataInputStream) super.getWrappedStream()).getWrappedStream();
- }
- }
-
- public TrinoFileSystemCacheStats getFileSystemCacheStats()
- {
- return stats;
- }
-}
+/**
+ * Dummy class needed to preserve the legacy JMX object name.
+ */
+public final class TrinoFileSystemCache {}
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/gcs/GcsConfigurationProvider.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/gcs/GcsConfigurationProvider.java
index 4a48865477b2..ee5300e0f526 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/gcs/GcsConfigurationProvider.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/gcs/GcsConfigurationProvider.java
@@ -14,13 +14,13 @@
package io.trino.plugin.hive.gcs;
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageFileSystem;
-import io.trino.plugin.hive.DynamicConfigurationProvider;
+import io.trino.hdfs.DynamicConfigurationProvider;
+import io.trino.hdfs.HdfsContext;
import org.apache.hadoop.conf.Configuration;
import java.net.URI;
-import static io.trino.plugin.hive.DynamicConfigurationProvider.setCacheKey;
-import static io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import static io.trino.hdfs.DynamicConfigurationProvider.setCacheKey;
import static io.trino.plugin.hive.gcs.GcsAccessTokenProvider.GCS_ACCESS_TOKEN_CONF;
public class GcsConfigurationProvider
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/gcs/GoogleGcsConfigurationInitializer.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/gcs/GoogleGcsConfigurationInitializer.java
index 3690d3260cf2..5adeb649f24d 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/gcs/GoogleGcsConfigurationInitializer.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/gcs/GoogleGcsConfigurationInitializer.java
@@ -14,7 +14,7 @@
package io.trino.plugin.hive.gcs;
import com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem;
-import io.trino.plugin.hive.ConfigurationInitializer;
+import io.trino.hdfs.ConfigurationInitializer;
import org.apache.hadoop.conf.Configuration;
import javax.inject.Inject;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/gcs/HiveGcsModule.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/gcs/HiveGcsModule.java
index b10867248875..2bcada7ba1ce 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/gcs/HiveGcsModule.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/gcs/HiveGcsModule.java
@@ -16,8 +16,8 @@
import com.google.inject.Binder;
import com.google.inject.Scopes;
import io.airlift.configuration.AbstractConfigurationAwareModule;
-import io.trino.plugin.hive.ConfigurationInitializer;
-import io.trino.plugin.hive.DynamicConfigurationProvider;
+import io.trino.hdfs.ConfigurationInitializer;
+import io.trino.hdfs.DynamicConfigurationProvider;
import io.trino.plugin.hive.rubix.RubixEnabledConfig;
import static com.google.common.base.Preconditions.checkArgument;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SemiTransactionalHiveMetastore.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SemiTransactionalHiveMetastore.java
index b7a085d4804d..da27d90380f4 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SemiTransactionalHiveMetastore.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SemiTransactionalHiveMetastore.java
@@ -23,8 +23,8 @@
import com.google.errorprone.annotations.FormatMethod;
import io.airlift.log.Logger;
import io.airlift.units.Duration;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.HiveBasicStatistics;
import io.trino.plugin.hive.HiveMetastoreClosure;
import io.trino.plugin.hive.HiveTableHandle;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/file/FileHiveMetastore.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/file/FileHiveMetastore.java
index c9062a4f57be..9a599cfa4af4 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/file/FileHiveMetastore.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/file/FileHiveMetastore.java
@@ -21,13 +21,14 @@
import com.google.common.collect.Sets;
import com.google.common.io.ByteStreams;
import io.airlift.json.JsonCodec;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.HiveBasicStatistics;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
import io.trino.plugin.hive.HiveType;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.PartitionNotFoundException;
@@ -35,7 +36,6 @@
import io.trino.plugin.hive.SchemaAlreadyExistsException;
import io.trino.plugin.hive.TableAlreadyExistsException;
import io.trino.plugin.hive.acid.AcidTransaction;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.Column;
import io.trino.plugin.hive.metastore.Database;
import io.trino.plugin.hive.metastore.HiveColumnStatistics;
@@ -163,7 +163,7 @@ public class FileHiveMetastore
public static FileHiveMetastore createTestingFileHiveMetastore(File catalogDirectory)
{
HdfsConfig hdfsConfig = new HdfsConfig();
- HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
+ HdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
return new FileHiveMetastore(
new NodeVersion("testversion"),
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/file/FileHiveMetastoreFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/file/FileHiveMetastoreFactory.java
index 56ce940a2b4a..57f44d5192cd 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/file/FileHiveMetastoreFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/file/FileHiveMetastoreFactory.java
@@ -13,7 +13,7 @@
*/
package io.trino.plugin.hive.metastore.file;
-import io.trino.plugin.hive.HdfsEnvironment;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.HideDeltaLakeTables;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.metastore.HiveMetastore;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/glue/GlueHiveMetastore.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/glue/GlueHiveMetastore.java
index 9881e6fabb27..50681311172f 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/glue/GlueHiveMetastore.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/glue/GlueHiveMetastore.java
@@ -66,8 +66,8 @@
import com.google.common.collect.Lists;
import io.airlift.concurrent.MoreFutures;
import io.airlift.log.Logger;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.HiveType;
import io.trino.plugin.hive.PartitionNotFoundException;
import io.trino.plugin.hive.PartitionStatistics;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/DefaultThriftMetastoreClientFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/DefaultThriftMetastoreClientFactory.java
index dfaf89507a08..e79e70afa5bc 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/DefaultThriftMetastoreClientFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/DefaultThriftMetastoreClientFactory.java
@@ -16,7 +16,6 @@
import com.google.common.net.HostAndPort;
import io.airlift.security.pem.PemReader;
import io.airlift.units.Duration;
-import io.trino.plugin.hive.authentication.HiveMetastoreAuthentication;
import io.trino.spi.NodeManager;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HiveMetastoreAuthentication.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/HiveMetastoreAuthentication.java
similarity index 94%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HiveMetastoreAuthentication.java
rename to plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/HiveMetastoreAuthentication.java
index 5b0b6d584695..cef59523be40 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/HiveMetastoreAuthentication.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/HiveMetastoreAuthentication.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.plugin.hive.metastore.thrift;
import org.apache.thrift.transport.TTransport;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/KerberosHiveMetastoreAuthentication.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/KerberosHiveMetastoreAuthentication.java
index bf9f6a076f47..2db61a890592 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/KerberosHiveMetastoreAuthentication.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/KerberosHiveMetastoreAuthentication.java
@@ -14,10 +14,8 @@
package io.trino.plugin.hive.metastore.thrift;
import com.google.common.collect.ImmutableMap;
+import io.trino.hdfs.authentication.HadoopAuthentication;
import io.trino.plugin.hive.ForHiveMetastore;
-import io.trino.plugin.hive.authentication.HadoopAuthentication;
-import io.trino.plugin.hive.authentication.HiveMetastoreAuthentication;
-import io.trino.plugin.hive.authentication.MetastoreKerberosConfig;
import org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier;
import org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport;
import org.apache.hadoop.security.SaslRpcServer;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/MetastoreKerberosConfig.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/MetastoreKerberosConfig.java
similarity index 97%
rename from plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/MetastoreKerberosConfig.java
rename to plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/MetastoreKerberosConfig.java
index ee6b720cdff4..1983938bf288 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/authentication/MetastoreKerberosConfig.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/MetastoreKerberosConfig.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.plugin.hive.metastore.thrift;
import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/NoHiveMetastoreAuthentication.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/NoHiveMetastoreAuthentication.java
index 3c1d59fd1772..2248ac8942fb 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/NoHiveMetastoreAuthentication.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/NoHiveMetastoreAuthentication.java
@@ -13,7 +13,6 @@
*/
package io.trino.plugin.hive.metastore.thrift;
-import io.trino.plugin.hive.authentication.HiveMetastoreAuthentication;
import org.apache.thrift.transport.TTransport;
import java.util.Optional;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastore.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastore.java
index 53fca0d8425d..a37d3a496aed 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastore.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastore.java
@@ -19,8 +19,8 @@
import com.google.common.collect.ImmutableSet;
import io.airlift.log.Logger;
import io.airlift.units.Duration;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.HiveBasicStatistics;
import io.trino.plugin.hive.HivePartition;
import io.trino.plugin.hive.HiveType;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastoreFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastoreFactory.java
index 632c5b533a06..cefb6431a5b4 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastoreFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastoreFactory.java
@@ -14,7 +14,7 @@
package io.trino.plugin.hive.metastore.thrift;
import io.airlift.units.Duration;
-import io.trino.plugin.hive.HdfsEnvironment;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.HideDeltaLakeTables;
import io.trino.spi.security.ConnectorIdentity;
import org.weakref.jmx.Flatten;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreAuthenticationModule.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreAuthenticationModule.java
index 89c8df7f72a1..36e8f9c41db8 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreAuthenticationModule.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreAuthenticationModule.java
@@ -18,15 +18,13 @@
import com.google.inject.Provides;
import com.google.inject.Singleton;
import io.airlift.configuration.AbstractConfigurationAwareModule;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.authentication.HadoopAuthentication;
import io.trino.plugin.hive.ForHiveMetastore;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.authentication.HadoopAuthentication;
-import io.trino.plugin.hive.authentication.HiveMetastoreAuthentication;
-import io.trino.plugin.hive.authentication.MetastoreKerberosConfig;
import static com.google.inject.Scopes.SINGLETON;
import static io.airlift.configuration.ConfigBinder.configBinder;
-import static io.trino.plugin.hive.authentication.AuthenticationModules.createCachingKerberosHadoopAuthentication;
+import static io.trino.hdfs.authentication.AuthenticationModules.createCachingKerberosHadoopAuthentication;
public class ThriftMetastoreAuthenticationModule
extends AbstractConfigurationAwareModule
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/TokenDelegationThriftMetastoreFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/TokenDelegationThriftMetastoreFactory.java
index 72a792dafbd6..b48dc22d50e3 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/TokenDelegationThriftMetastoreFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/TokenDelegationThriftMetastoreFactory.java
@@ -17,7 +17,7 @@
import com.google.common.cache.CacheLoader;
import com.google.common.util.concurrent.UncheckedExecutionException;
import io.trino.collect.cache.NonEvictableLoadingCache;
-import io.trino.plugin.hive.HdfsEnvironment;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.metastore.thrift.ThriftMetastoreAuthenticationConfig.ThriftMetastoreAuthenticationType;
import io.trino.spi.TrinoException;
import io.trino.spi.security.ConnectorIdentity;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/Transport.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/Transport.java
index 09ef73c307aa..523ca5e24914 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/Transport.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/Transport.java
@@ -14,7 +14,6 @@
package io.trino.plugin.hive.metastore.thrift;
import com.google.common.net.HostAndPort;
-import io.trino.plugin.hive.authentication.HiveMetastoreAuthentication;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/HdfsOrcDataSource.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/HdfsOrcDataSource.java
index e81d8d12c4bf..75c40078d5f8 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/HdfsOrcDataSource.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/HdfsOrcDataSource.java
@@ -14,11 +14,11 @@
package io.trino.plugin.hive.orc;
import io.airlift.slice.Slice;
+import io.trino.hdfs.FSDataInputStreamTail;
import io.trino.orc.AbstractOrcDataSource;
import io.trino.orc.OrcDataSourceId;
import io.trino.orc.OrcReaderOptions;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.util.FSDataInputStreamTail;
import io.trino.spi.TrinoException;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.hdfs.BlockMissingException;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcDeleteDeltaPageSource.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcDeleteDeltaPageSource.java
index 4fb1d4a051d9..6d5ab8a4ca46 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcDeleteDeltaPageSource.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcDeleteDeltaPageSource.java
@@ -14,6 +14,7 @@
package io.trino.plugin.hive.orc;
import com.google.common.collect.ImmutableList;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.memory.context.AggregatedMemoryContext;
import io.trino.orc.NameBasedFieldMapper;
import io.trino.orc.OrcColumn;
@@ -25,7 +26,6 @@
import io.trino.orc.OrcReaderOptions;
import io.trino.orc.OrcRecordReader;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.spi.Page;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ConnectorPageSource;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcDeleteDeltaPageSourceFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcDeleteDeltaPageSourceFactory.java
index 868e888be652..9411d6048e0b 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcDeleteDeltaPageSourceFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcDeleteDeltaPageSourceFactory.java
@@ -13,9 +13,9 @@
*/
package io.trino.plugin.hive.orc;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.orc.OrcReaderOptions;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.spi.connector.ConnectorPageSource;
import io.trino.spi.security.ConnectorIdentity;
import org.apache.hadoop.conf.Configuration;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcDeletedRows.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcDeletedRows.java
index a65933878d7e..526c3b500f62 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcDeletedRows.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcDeletedRows.java
@@ -14,11 +14,11 @@
package io.trino.plugin.hive.orc;
import com.google.common.collect.ImmutableSet;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.memory.context.AggregatedMemoryContext;
import io.trino.memory.context.LocalMemoryContext;
import io.trino.orc.OrcCorruptionException;
import io.trino.plugin.hive.AcidInfo;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.spi.Page;
import io.trino.spi.TrinoException;
import io.trino.spi.block.Block;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcFileWriterFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcFileWriterFactory.java
index b597a5031f90..e188ad9ea7f0 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcFileWriterFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcFileWriterFactory.java
@@ -14,6 +14,7 @@
package io.trino.plugin.hive.orc;
import com.google.common.collect.ImmutableMap;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.hive.orc.OrcConf;
import io.trino.orc.OrcDataSink;
import io.trino.orc.OrcDataSource;
@@ -25,7 +26,6 @@
import io.trino.orc.metadata.CompressionKind;
import io.trino.plugin.hive.FileFormatDataSourceStats;
import io.trino.plugin.hive.FileWriter;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.HiveFileWriterFactory;
import io.trino.plugin.hive.HiveType;
import io.trino.plugin.hive.NodeVersion;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSourceFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSourceFactory.java
index 34ffd6a45896..196857fcc435 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSourceFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OrcPageSourceFactory.java
@@ -17,6 +17,7 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import io.airlift.slice.Slice;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.memory.context.AggregatedMemoryContext;
import io.trino.orc.NameBasedFieldMapper;
import io.trino.orc.OrcColumn;
@@ -30,7 +31,6 @@
import io.trino.orc.metadata.OrcType.OrcTypeKind;
import io.trino.plugin.hive.AcidInfo;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.HiveColumnHandle;
import io.trino.plugin.hive.HiveColumnProjectionInfo;
import io.trino.plugin.hive.HiveConfig;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OriginalFilesUtils.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OriginalFilesUtils.java
index 8b3aa405b37d..573c902fb9fd 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OriginalFilesUtils.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/orc/OriginalFilesUtils.java
@@ -13,12 +13,12 @@
*/
package io.trino.plugin.hive.orc;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.orc.OrcDataSource;
import io.trino.orc.OrcDataSourceId;
import io.trino.orc.OrcReader;
import io.trino.orc.OrcReaderOptions;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.spi.TrinoException;
import io.trino.spi.security.ConnectorIdentity;
import org.apache.hadoop.conf.Configuration;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/HdfsParquetDataSource.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/HdfsParquetDataSource.java
index 5f8dfb2eed9a..971fd502bb47 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/HdfsParquetDataSource.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/HdfsParquetDataSource.java
@@ -14,11 +14,11 @@
package io.trino.plugin.hive.parquet;
import io.airlift.slice.Slice;
+import io.trino.hdfs.FSDataInputStreamTail;
import io.trino.parquet.AbstractParquetDataSource;
import io.trino.parquet.ParquetDataSourceId;
import io.trino.parquet.ParquetReaderOptions;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.util.FSDataInputStreamTail;
import io.trino.spi.TrinoException;
import org.apache.hadoop.fs.FSDataInputStream;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/ParquetFileWriterFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/ParquetFileWriterFactory.java
index bc47c23f031f..186900c09973 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/ParquetFileWriterFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/ParquetFileWriterFactory.java
@@ -13,10 +13,10 @@
*/
package io.trino.plugin.hive.parquet;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.parquet.writer.ParquetSchemaConverter;
import io.trino.parquet.writer.ParquetWriterOptions;
import io.trino.plugin.hive.FileWriter;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.HiveConfig;
import io.trino.plugin.hive.HiveFileWriterFactory;
import io.trino.plugin.hive.HiveSessionProperties;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/ParquetPageSourceFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/ParquetPageSourceFactory.java
index 0e2f44e1e331..075c68867097 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/ParquetPageSourceFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/parquet/ParquetPageSourceFactory.java
@@ -16,6 +16,7 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.parquet.Field;
import io.trino.parquet.ParquetCorruptionException;
import io.trino.parquet.ParquetDataSource;
@@ -28,7 +29,6 @@
import io.trino.parquet.reader.TrinoColumnIndexStore;
import io.trino.plugin.hive.AcidInfo;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.HiveColumnHandle;
import io.trino.plugin.hive.HiveConfig;
import io.trino.plugin.hive.HivePageSourceFactory;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/procedure/RegisterPartitionProcedure.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/procedure/RegisterPartitionProcedure.java
index b82d1d7bc74e..521ffdc8b1de 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/procedure/RegisterPartitionProcedure.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/procedure/RegisterPartitionProcedure.java
@@ -15,8 +15,8 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.HiveConfig;
import io.trino.plugin.hive.PartitionStatistics;
import io.trino.plugin.hive.TransactionalMetadataFactory;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/procedure/SyncPartitionMetadataProcedure.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/procedure/SyncPartitionMetadataProcedure.java
index 0ae129fd6201..cbd1d8585e9e 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/procedure/SyncPartitionMetadataProcedure.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/procedure/SyncPartitionMetadataProcedure.java
@@ -17,7 +17,8 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
-import io.trino.plugin.hive.HdfsEnvironment;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.PartitionStatistics;
import io.trino.plugin.hive.TransactionalMetadataFactory;
import io.trino.plugin.hive.metastore.Column;
@@ -48,7 +49,6 @@
import java.util.stream.Stream;
import static com.google.common.collect.ImmutableList.toImmutableList;
-import static io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import static io.trino.plugin.hive.HiveErrorCode.HIVE_FILESYSTEM_ERROR;
import static io.trino.plugin.hive.HiveMetadata.PRESTO_QUERY_ID_NAME;
import static io.trino.plugin.hive.HivePartitionManager.extractPartitionValues;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rcfile/RcFilePageSourceFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rcfile/RcFilePageSourceFactory.java
index 446ae68cf521..6b9ccfca130e 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rcfile/RcFilePageSourceFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rcfile/RcFilePageSourceFactory.java
@@ -18,9 +18,10 @@
import io.airlift.slice.Slices;
import io.airlift.units.DataSize;
import io.airlift.units.DataSize.Unit;
+import io.trino.hdfs.FSDataInputStreamTail;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.AcidInfo;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.HiveColumnHandle;
import io.trino.plugin.hive.HiveConfig;
import io.trino.plugin.hive.HivePageSourceFactory;
@@ -28,7 +29,6 @@
import io.trino.plugin.hive.ReaderColumns;
import io.trino.plugin.hive.ReaderPageSource;
import io.trino.plugin.hive.acid.AcidTransaction;
-import io.trino.plugin.hive.util.FSDataInputStreamTail;
import io.trino.rcfile.AircompressorCodecFactory;
import io.trino.rcfile.HadoopCodecFactory;
import io.trino.rcfile.MemoryRcFileDataSource;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixConfigurationInitializer.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixConfigurationInitializer.java
index d710a31af0c8..85ad19737ab9 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixConfigurationInitializer.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixConfigurationInitializer.java
@@ -13,8 +13,8 @@
*/
package io.trino.plugin.hive.rubix;
-import io.trino.plugin.hive.DynamicConfigurationProvider;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import io.trino.hdfs.DynamicConfigurationProvider;
+import io.trino.hdfs.HdfsContext;
import org.apache.hadoop.conf.Configuration;
import javax.inject.Inject;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixHdfsInitializer.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixHdfsInitializer.java
index dea418441c09..39b09a67d053 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixHdfsInitializer.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixHdfsInitializer.java
@@ -13,7 +13,7 @@
*/
package io.trino.plugin.hive.rubix;
-import io.trino.plugin.hive.ConfigurationInitializer;
+import io.trino.hdfs.ConfigurationInitializer;
public interface RubixHdfsInitializer
extends ConfigurationInitializer
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixInitializer.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixInitializer.java
index b416b6043f07..c3c0ac2e361e 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixInitializer.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixInitializer.java
@@ -29,8 +29,8 @@
import com.qubole.rubix.prestosql.CachingPrestoSecureNativeAzureFileSystem;
import io.airlift.log.Logger;
import io.airlift.units.Duration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
import io.trino.plugin.base.CatalogName;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
import io.trino.plugin.hive.util.RetryDriver;
import io.trino.spi.HostAddress;
import io.trino.spi.Node;
@@ -62,10 +62,10 @@
import static com.qubole.rubix.spi.CacheConfig.setMetricsReporters;
import static com.qubole.rubix.spi.CacheConfig.setOnMaster;
import static com.qubole.rubix.spi.CacheConfig.setPrestoClusterManager;
-import static io.trino.plugin.hive.DynamicConfigurationProvider.setCacheKey;
+import static io.trino.hdfs.ConfigurationUtils.getInitialConfiguration;
+import static io.trino.hdfs.DynamicConfigurationProvider.setCacheKey;
import static io.trino.plugin.hive.rubix.RubixInitializer.Owner.PRESTO;
import static io.trino.plugin.hive.rubix.RubixInitializer.Owner.RUBIX;
-import static io.trino.plugin.hive.util.ConfigurationUtils.getInitialConfiguration;
import static io.trino.plugin.hive.util.RetryDriver.DEFAULT_SCALE_FACTOR;
import static io.trino.plugin.hive.util.RetryDriver.retry;
import static io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixModule.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixModule.java
index de8d9d1161e1..53893caa0505 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixModule.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/rubix/RubixModule.java
@@ -19,8 +19,8 @@
import com.google.inject.Module;
import com.google.inject.Scopes;
import com.qubole.rubix.prestosql.CachingPrestoDistributedFileSystem;
-import io.trino.plugin.hive.DynamicConfigurationProvider;
-import io.trino.plugin.hive.authentication.HdfsAuthenticationConfig;
+import io.trino.hdfs.DynamicConfigurationProvider;
+import io.trino.hdfs.authentication.HdfsAuthenticationConfig;
import org.apache.hadoop.conf.Configuration;
import java.util.Set;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/HiveS3Module.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/HiveS3Module.java
index 30a7fcfb5404..56dadc0f6bdf 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/HiveS3Module.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/HiveS3Module.java
@@ -17,8 +17,8 @@
import com.google.inject.Scopes;
import io.airlift.configuration.AbstractConfigurationAwareModule;
import io.airlift.units.Duration;
-import io.trino.plugin.hive.ConfigurationInitializer;
-import io.trino.plugin.hive.DynamicConfigurationProvider;
+import io.trino.hdfs.ConfigurationInitializer;
+import io.trino.hdfs.DynamicConfigurationProvider;
import io.trino.plugin.hive.HiveConfig;
import io.trino.plugin.hive.rubix.RubixEnabledConfig;
import org.apache.hadoop.conf.Configuration;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/S3SecurityMappingConfigurationProvider.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/S3SecurityMappingConfigurationProvider.java
index 0f17042bea06..f7f63b80ed88 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/S3SecurityMappingConfigurationProvider.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/S3SecurityMappingConfigurationProvider.java
@@ -18,8 +18,8 @@
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import io.airlift.log.Logger;
-import io.trino.plugin.hive.DynamicConfigurationProvider;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import io.trino.hdfs.DynamicConfigurationProvider;
+import io.trino.hdfs.HdfsContext;
import io.trino.spi.security.AccessDeniedException;
import org.apache.hadoop.conf.Configuration;
@@ -31,7 +31,7 @@
import java.util.function.Supplier;
import static com.google.common.base.Verify.verify;
-import static io.trino.plugin.hive.DynamicConfigurationProvider.setCacheKey;
+import static io.trino.hdfs.DynamicConfigurationProvider.setCacheKey;
import static io.trino.plugin.hive.s3.TrinoS3FileSystem.S3_ACCESS_KEY;
import static io.trino.plugin.hive.s3.TrinoS3FileSystem.S3_ENDPOINT;
import static io.trino.plugin.hive.s3.TrinoS3FileSystem.S3_IAM_ROLE;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/TrinoS3ConfigurationInitializer.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/TrinoS3ConfigurationInitializer.java
index 8c3ccfc24d4b..0dd7c9f5e59c 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/TrinoS3ConfigurationInitializer.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/TrinoS3ConfigurationInitializer.java
@@ -15,7 +15,7 @@
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
-import io.trino.plugin.hive.ConfigurationInitializer;
+import io.trino.hdfs.ConfigurationInitializer;
import org.apache.hadoop.conf.Configuration;
import javax.inject.Inject;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/TrinoS3FileSystem.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/TrinoS3FileSystem.java
index b97e87e73360..eeb83d5b7852 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/TrinoS3FileSystem.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3/TrinoS3FileSystem.java
@@ -77,7 +77,7 @@
import io.airlift.log.Logger;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
-import io.trino.plugin.hive.util.FSDataInputStreamTail;
+import io.trino.hdfs.FSDataInputStreamTail;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3select/S3SelectRecordCursorProvider.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3select/S3SelectRecordCursorProvider.java
index 8b06009ecd93..3b1a43d80a51 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3select/S3SelectRecordCursorProvider.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/s3select/S3SelectRecordCursorProvider.java
@@ -13,7 +13,7 @@
*/
package io.trino.plugin.hive.s3select;
-import io.trino.plugin.hive.HdfsEnvironment;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.HiveColumnHandle;
import io.trino.plugin.hive.HiveRecordCursorProvider;
import io.trino.plugin.hive.ReaderColumns;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveUtil.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveUtil.java
index 7828c18f6d53..02945e194231 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveUtil.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveUtil.java
@@ -104,6 +104,8 @@
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Lists.newArrayList;
import static io.airlift.slice.Slices.utf8Slice;
+import static io.trino.hdfs.ConfigurationUtils.copy;
+import static io.trino.hdfs.ConfigurationUtils.toJobConf;
import static io.trino.plugin.hive.HiveColumnHandle.ColumnType.PARTITION_KEY;
import static io.trino.plugin.hive.HiveColumnHandle.ColumnType.REGULAR;
import static io.trino.plugin.hive.HiveColumnHandle.bucketColumnHandle;
@@ -133,8 +135,6 @@
import static io.trino.plugin.hive.HiveType.toHiveTypes;
import static io.trino.plugin.hive.metastore.SortingColumn.Order.ASCENDING;
import static io.trino.plugin.hive.metastore.SortingColumn.Order.DESCENDING;
-import static io.trino.plugin.hive.util.ConfigurationUtils.copy;
-import static io.trino.plugin.hive.util.ConfigurationUtils.toJobConf;
import static io.trino.plugin.hive.util.HiveBucketing.isSupportedBucketing;
import static io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveWriteUtils.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveWriteUtils.java
index a55e9f96a29d..cd58aee6e5fa 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveWriteUtils.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveWriteUtils.java
@@ -17,8 +17,8 @@
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Shorts;
import com.google.common.primitives.SignedBytes;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.HiveReadOnlyException;
import io.trino.plugin.hive.HiveTimestampPrecision;
import io.trino.plugin.hive.HiveType;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java
index ce75b099bfcb..f9d8f9a4aa32 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java
@@ -24,12 +24,14 @@
import io.airlift.stats.CounterStat;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.operator.GroupByHashPageIndexerFactory;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.base.metrics.LongCount;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import io.trino.plugin.hive.LocationService.WriteInfo;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.aws.athena.PartitionProjectionService;
import io.trino.plugin.hive.fs.DirectoryLister;
import io.trino.plugin.hive.metastore.Column;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileSystem.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileSystem.java
index 4ca986bfad49..6c84fca00681 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileSystem.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileSystem.java
@@ -23,12 +23,15 @@
import io.airlift.json.JsonCodec;
import io.airlift.slice.Slice;
import io.airlift.stats.CounterStat;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.operator.GroupByHashPageIndexerFactory;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.hive.AbstractTestHive.HiveTransaction;
import io.trino.plugin.hive.AbstractTestHive.Transaction;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.aws.athena.PartitionProjectionService;
import io.trino.plugin.hive.fs.FileSystemDirectoryLister;
import io.trino.plugin.hive.fs.HiveFileIterator;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/HiveTestUtils.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/HiveTestUtils.java
index b55e98c42bab..bd526d39eccc 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/HiveTestUtils.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/HiveTestUtils.java
@@ -17,9 +17,13 @@
import com.google.common.collect.ImmutableSet;
import com.google.common.net.HostAndPort;
import io.airlift.units.DataSize;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.operator.PagesIndex;
import io.trino.operator.PagesIndexPageSorter;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.azure.HiveAzureConfig;
import io.trino.plugin.hive.azure.TrinoAzureConfigurationInitializer;
import io.trino.plugin.hive.gcs.GoogleGcsConfigurationInitializer;
@@ -77,7 +81,7 @@ private HiveTestUtils() {}
public static final Optional SOCKS_PROXY = Optional.ofNullable(System.getProperty("hive.metastore.thrift.client.socks-proxy"))
.map(HostAndPort::fromString);
- public static final HiveHdfsConfiguration HDFS_CONFIGURATION = new HiveHdfsConfiguration(
+ public static final DynamicHdfsConfiguration HDFS_CONFIGURATION = new DynamicHdfsConfiguration(
new HdfsConfigurationInitializer(
new HdfsConfig()
.setSocksProxy(SOCKS_PROXY.orElse(null)),
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestBackgroundHiveSplitLoader.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestBackgroundHiveSplitLoader.java
index 71647dda3ced..f6fdb9c20530 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestBackgroundHiveSplitLoader.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestBackgroundHiveSplitLoader.java
@@ -23,8 +23,12 @@
import io.airlift.stats.CounterStat;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.HiveColumnHandle.ColumnType;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.fs.CachingDirectoryLister;
import io.trino.plugin.hive.fs.DirectoryLister;
import io.trino.plugin.hive.metastore.Column;
@@ -1326,7 +1330,7 @@ public static class TestingHdfsEnvironment
public TestingHdfsEnvironment(List files)
{
super(
- new HiveHdfsConfiguration(
+ new DynamicHdfsConfiguration(
new HdfsConfigurationInitializer(new HdfsConfig()),
ImmutableSet.of()),
new HdfsConfig(),
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveLocationService.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveLocationService.java
index 7b57c03838f5..46f827e65bf1 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveLocationService.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveLocationService.java
@@ -14,6 +14,7 @@
package io.trino.plugin.hive;
import com.google.common.collect.ImmutableList;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.LocationService.WriteInfo;
import io.trino.plugin.hive.TestBackgroundHiveSplitLoader.TestingHdfsEnvironment;
import io.trino.spi.TrinoException;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestingThriftHiveMetastoreBuilder.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestingThriftHiveMetastoreBuilder.java
index 07eb9e9b309d..1e506755c64f 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestingThriftHiveMetastoreBuilder.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestingThriftHiveMetastoreBuilder.java
@@ -15,7 +15,11 @@
import com.google.common.collect.ImmutableSet;
import com.google.common.net.HostAndPort;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.azure.HiveAzureConfig;
import io.trino.plugin.hive.azure.TrinoAzureConfigurationInitializer;
import io.trino.plugin.hive.gcs.GoogleGcsConfigurationInitializer;
@@ -40,7 +44,7 @@
public final class TestingThriftHiveMetastoreBuilder
{
private static final HdfsEnvironment HDFS_ENVIRONMENT = new HdfsEnvironment(
- new HiveHdfsConfiguration(
+ new DynamicHdfsConfiguration(
new HdfsConfigurationInitializer(
new HdfsConfig()
.setSocksProxy(HiveTestUtils.SOCKS_PROXY.orElse(null)),
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java
index 44ff2f4d51af..c61aab051f71 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java
@@ -16,8 +16,8 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.GenericHiveRecordCursorProvider;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.HiveColumnHandle;
import io.trino.plugin.hive.HiveConfig;
import io.trino.plugin.hive.HivePageSourceFactory;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/FileFormat.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/FileFormat.java
index 9e327a30a6d0..81e4a9dcde93 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/FileFormat.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/FileFormat.java
@@ -13,7 +13,7 @@
*/
package io.trino.plugin.hive.benchmark;
-import io.trino.plugin.hive.HdfsEnvironment;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.hive.HiveCompressionCodec;
import io.trino.plugin.hive.HivePageSourceFactory;
import io.trino.plugin.hive.HiveRecordCursorProvider;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/StandardFileFormats.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/StandardFileFormats.java
index 2483c0f0f7fa..cb0213bba519 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/StandardFileFormats.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/StandardFileFormats.java
@@ -15,6 +15,7 @@
import com.google.common.collect.ImmutableMap;
import io.airlift.slice.OutputStreamSliceOutput;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.orc.OrcReaderOptions;
import io.trino.orc.OrcWriter;
import io.trino.orc.OrcWriterOptions;
@@ -25,7 +26,6 @@
import io.trino.parquet.writer.ParquetWriter;
import io.trino.parquet.writer.ParquetWriterOptions;
import io.trino.plugin.hive.FileFormatDataSourceStats;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.HiveCompressionCodec;
import io.trino.plugin.hive.HiveConfig;
import io.trino.plugin.hive.HivePageSourceFactory;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/authentication/TestMetastoreKerberosConfig.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestMetastoreKerberosConfig.java
similarity index 97%
rename from plugin/trino-hive/src/test/java/io/trino/plugin/hive/authentication/TestMetastoreKerberosConfig.java
rename to plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestMetastoreKerberosConfig.java
index 4625dd81424e..bd10dea053f1 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/authentication/TestMetastoreKerberosConfig.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestMetastoreKerberosConfig.java
@@ -11,7 +11,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.hive.authentication;
+package io.trino.plugin.hive.metastore.thrift;
import com.google.common.collect.ImmutableMap;
import org.testng.annotations.Test;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestingMetastoreLocator.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestingMetastoreLocator.java
index 3bd28e163e3d..c3fdb1fb4926 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestingMetastoreLocator.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestingMetastoreLocator.java
@@ -15,7 +15,6 @@
import com.google.common.net.HostAndPort;
import io.airlift.units.Duration;
-import io.trino.plugin.hive.authentication.HiveMetastoreAuthentication;
import org.apache.thrift.TException;
import java.util.Optional;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java
index 062d401d19bd..4d93f627c4e9 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java
@@ -18,19 +18,19 @@
import com.google.common.collect.ImmutableSet;
import io.trino.Session;
import io.trino.cost.ScalarStatsCalculator;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.metadata.TableHandle;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.HiveColumnHandle;
import io.trino.plugin.hive.HiveColumnProjectionInfo;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
import io.trino.plugin.hive.HiveTableHandle;
import io.trino.plugin.hive.HiveTransactionHandle;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.TestingHiveConnectorFactory;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.Database;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.HiveMetastoreConfig;
@@ -112,7 +112,7 @@ protected Optional createLocalQueryRunner()
}
HdfsConfig config = new HdfsConfig();
- HdfsConfiguration configuration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(config), ImmutableSet.of());
+ HdfsConfiguration configuration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(config), ImmutableSet.of());
HdfsEnvironment environment = new HdfsEnvironment(configuration, config, new NoHdfsAuthentication());
metastore = new FileHiveMetastore(
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHivePlans.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHivePlans.java
index c1cdf28e4033..453d0c0bb7e6 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHivePlans.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHivePlans.java
@@ -15,14 +15,14 @@
import com.google.common.collect.ImmutableSet;
import io.trino.Session;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.TestingHiveConnectorFactory;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.Database;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.HiveMetastoreConfig;
@@ -89,7 +89,7 @@ protected LocalQueryRunner createLocalQueryRunner()
throw new UncheckedIOException(e);
}
HdfsConfig config = new HdfsConfig();
- HdfsConfiguration configuration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(config), ImmutableSet.of());
+ HdfsConfiguration configuration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(config), ImmutableSet.of());
HdfsEnvironment environment = new HdfsEnvironment(configuration, config, new NoHdfsAuthentication());
HiveMetastore metastore = new FileHiveMetastore(
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java
index fcfdf37c7570..b6fc6ec1e05b 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java
@@ -17,18 +17,18 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.trino.Session;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.metadata.QualifiedObjectName;
import io.trino.metadata.TableHandle;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
import io.trino.plugin.hive.HiveColumnHandle;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
import io.trino.plugin.hive.HiveTableHandle;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.TestingHiveConnectorFactory;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.Database;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.HiveMetastoreConfig;
@@ -90,7 +90,7 @@ protected LocalQueryRunner createLocalQueryRunner()
throw new UncheckedIOException(e);
}
HdfsConfig config = new HdfsConfig();
- HdfsConfiguration configuration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(config), ImmutableSet.of());
+ HdfsConfiguration configuration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(config), ImmutableSet.of());
HdfsEnvironment environment = new HdfsEnvironment(configuration, config, new NoHdfsAuthentication());
HiveMetastore metastore = new FileHiveMetastore(
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/rubix/TestRubixCaching.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/rubix/TestRubixCaching.java
index c56f7bd524d9..211505b85814 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/rubix/TestRubixCaching.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/rubix/TestRubixCaching.java
@@ -26,16 +26,16 @@
import com.qubole.rubix.prestosql.CachingPrestoSecureAzureBlobFileSystem;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.HdfsAuthenticationConfig;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.metadata.InternalNode;
import io.trino.plugin.base.CatalogName;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import io.trino.plugin.hive.HiveConfig;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
-import io.trino.plugin.hive.authentication.HdfsAuthenticationConfig;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.orc.OrcReaderConfig;
import io.trino.plugin.hive.rubix.RubixConfig.ReadMode;
import io.trino.plugin.hive.rubix.RubixModule.DefaultRubixHdfsInitializer;
@@ -141,7 +141,7 @@ private FileSystem getNonCachingFileSystem()
throws IOException
{
HdfsConfigurationInitializer configurationInitializer = new HdfsConfigurationInitializer(config);
- HiveHdfsConfiguration configuration = new HiveHdfsConfiguration(configurationInitializer, ImmutableSet.of());
+ DynamicHdfsConfiguration configuration = new DynamicHdfsConfiguration(configurationInitializer, ImmutableSet.of());
HdfsEnvironment environment = new HdfsEnvironment(configuration, config, new NoHdfsAuthentication());
return environment.getFileSystem(context, cacheStoragePath);
}
@@ -215,7 +215,7 @@ private FileSystem getCachingFileSystem(HdfsContext context, Path path)
throws IOException
{
HdfsConfigurationInitializer configurationInitializer = new HdfsConfigurationInitializer(config, ImmutableSet.of());
- HiveHdfsConfiguration configuration = new HiveHdfsConfiguration(
+ DynamicHdfsConfiguration configuration = new DynamicHdfsConfiguration(
configurationInitializer,
ImmutableSet.of(
rubixConfigInitializer,
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/TestS3SecurityMapping.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/TestS3SecurityMapping.java
index 63d6a0468780..41b70ecd97cb 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/TestS3SecurityMapping.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/TestS3SecurityMapping.java
@@ -15,8 +15,8 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
-import io.trino.plugin.hive.DynamicConfigurationProvider;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import io.trino.hdfs.DynamicConfigurationProvider;
+import io.trino.hdfs.HdfsContext;
import io.trino.plugin.hive.HiveConfig;
import io.trino.plugin.hive.HiveSessionProperties;
import io.trino.spi.connector.ConnectorSession;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestHiveWriteUtils.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestHiveWriteUtils.java
index 32823483e018..0d33079c758b 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestHiveWriteUtils.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestHiveWriteUtils.java
@@ -13,7 +13,7 @@
*/
package io.trino.plugin.hive.util;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import io.trino.hdfs.HdfsContext;
import org.apache.hadoop.fs.Path;
import org.testng.annotations.Test;
diff --git a/plugin/trino-iceberg/pom.xml b/plugin/trino-iceberg/pom.xml
index 7134b7791be2..d4e50c1a6092 100644
--- a/plugin/trino-iceberg/pom.xml
+++ b/plugin/trino-iceberg/pom.xml
@@ -15,7 +15,6 @@
${project.parent.basedir}
- 0.14.0
org.apache.iceberg
iceberg-api
- ${dep.iceberg.version}
-
-
- org.slf4j
- slf4j-api
-
-
org.apache.iceberg
iceberg-core
- ${dep.iceberg.version}
-
-
- org.apache.avro
- avro
-
-
- org.slf4j
- slf4j-api
-
-
org.apache.iceberg
iceberg-hive-metastore
- ${dep.iceberg.version}
-
-
- org.slf4j
- slf4j-api
-
-
org.apache.iceberg
iceberg-parquet
- ${dep.iceberg.version}
-
-
- org.apache.parquet
- parquet-avro
-
-
- org.slf4j
- slf4j-api
-
-
@@ -251,19 +223,7 @@
org.apache.iceberg
iceberg-orc
- ${dep.iceberg.version}
runtime
-
-
-
- org.apache.avro
- avro
-
-
- org.slf4j
- slf4j-api
-
-
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergFileWriterFactory.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergFileWriterFactory.java
index fbd48f4546e2..4b75db370d02 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergFileWriterFactory.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergFileWriterFactory.java
@@ -17,6 +17,8 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.airlift.units.DataSize;
+import io.trino.filesystem.TrinoFileSystem;
+import io.trino.filesystem.TrinoInputFile;
import io.trino.orc.OrcDataSink;
import io.trino.orc.OrcDataSource;
import io.trino.orc.OrcReaderOptions;
@@ -27,9 +29,6 @@
import io.trino.plugin.hive.FileFormatDataSourceStats;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.orc.OrcWriterConfig;
-import io.trino.plugin.iceberg.io.TrinoFileSystem;
-import io.trino.plugin.iceberg.io.TrinoInputFile;
-import io.trino.plugin.iceberg.io.TrinoOrcDataSource;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.type.Type;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMergeSink.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMergeSink.java
index 85aa83e2268a..b9451cfaf8d8 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMergeSink.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMergeSink.java
@@ -17,8 +17,8 @@
import com.google.common.collect.ImmutableMap;
import io.airlift.json.JsonCodec;
import io.airlift.slice.Slice;
+import io.trino.filesystem.TrinoFileSystem;
import io.trino.plugin.iceberg.delete.IcebergPositionDeletePageSink;
-import io.trino.plugin.iceberg.io.TrinoFileSystem;
import io.trino.spi.Page;
import io.trino.spi.PageBuilder;
import io.trino.spi.block.ColumnarRow;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java
index 8aea85478fed..fe33f05f1602 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadata.java
@@ -25,15 +25,15 @@
import io.airlift.slice.Slice;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
+import io.trino.filesystem.FileEntry;
+import io.trino.filesystem.FileIterator;
+import io.trino.filesystem.TrinoFileSystem;
+import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.plugin.base.classloader.ClassLoaderSafeSystemTable;
import io.trino.plugin.hive.HiveApplyProjectionUtil;
import io.trino.plugin.hive.HiveApplyProjectionUtil.ProjectedColumnRepresentation;
import io.trino.plugin.hive.HiveWrittenPartitions;
import io.trino.plugin.iceberg.catalog.TrinoCatalog;
-import io.trino.plugin.iceberg.io.FileEntry;
-import io.trino.plugin.iceberg.io.FileIterator;
-import io.trino.plugin.iceberg.io.TrinoFileSystem;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
import io.trino.plugin.iceberg.procedure.IcebergExpireSnapshotsHandle;
import io.trino.plugin.iceberg.procedure.IcebergOptimizeHandle;
import io.trino.plugin.iceberg.procedure.IcebergRemoveOrphanFilesHandle;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadataFactory.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadataFactory.java
index aa214231fd52..6c0c9936ca34 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadataFactory.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergMetadataFactory.java
@@ -14,8 +14,8 @@
package io.trino.plugin.iceberg;
import io.airlift.json.JsonCodec;
+import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.plugin.iceberg.catalog.TrinoCatalogFactory;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
import io.trino.spi.security.ConnectorIdentity;
import io.trino.spi.type.TypeManager;
import io.trino.spi.type.TypeOperators;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSink.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSink.java
index 0a4137f8e3fb..9499b7ccca9b 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSink.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSink.java
@@ -17,8 +17,8 @@
import com.google.common.collect.Iterables;
import io.airlift.json.JsonCodec;
import io.airlift.slice.Slice;
+import io.trino.filesystem.TrinoFileSystem;
import io.trino.plugin.iceberg.PartitionTransforms.ColumnTransform;
-import io.trino.plugin.iceberg.io.TrinoFileSystem;
import io.trino.spi.Page;
import io.trino.spi.PageIndexer;
import io.trino.spi.PageIndexerFactory;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSinkProvider.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSinkProvider.java
index 5254d5c52a43..fc68120d5560 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSinkProvider.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSinkProvider.java
@@ -14,7 +14,7 @@
package io.trino.plugin.iceberg;
import io.airlift.json.JsonCodec;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
+import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.plugin.iceberg.procedure.IcebergOptimizeHandle;
import io.trino.plugin.iceberg.procedure.IcebergTableExecuteHandle;
import io.trino.spi.PageIndexerFactory;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSourceProvider.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSourceProvider.java
index 5946bbd59a98..1c7c3e40541e 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSourceProvider.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergPageSourceProvider.java
@@ -21,6 +21,9 @@
import com.google.common.graph.Traverser;
import io.airlift.json.JsonCodec;
import io.airlift.slice.Slice;
+import io.trino.filesystem.TrinoFileSystem;
+import io.trino.filesystem.TrinoFileSystemFactory;
+import io.trino.filesystem.TrinoInputFile;
import io.trino.memory.context.AggregatedMemoryContext;
import io.trino.orc.OrcColumn;
import io.trino.orc.OrcCorruptionException;
@@ -56,11 +59,6 @@
import io.trino.plugin.iceberg.delete.IcebergPositionDeletePageSink;
import io.trino.plugin.iceberg.delete.PositionDeleteFilter;
import io.trino.plugin.iceberg.delete.RowPredicate;
-import io.trino.plugin.iceberg.io.TrinoFileSystem;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
-import io.trino.plugin.iceberg.io.TrinoInputFile;
-import io.trino.plugin.iceberg.io.TrinoOrcDataSource;
-import io.trino.plugin.iceberg.io.TrinoParquetDataSource;
import io.trino.spi.PageIndexerFactory;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ColumnHandle;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergParquetFileWriter.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergParquetFileWriter.java
index 01a03c702114..446fe586a2be 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergParquetFileWriter.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergParquetFileWriter.java
@@ -13,9 +13,9 @@
*/
package io.trino.plugin.iceberg;
+import io.trino.filesystem.TrinoFileSystem;
import io.trino.parquet.writer.ParquetWriterOptions;
import io.trino.plugin.hive.parquet.ParquetFileWriter;
-import io.trino.plugin.iceberg.io.TrinoFileSystem;
import io.trino.spi.type.Type;
import org.apache.iceberg.Metrics;
import org.apache.iceberg.MetricsConfig;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergSplitManager.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergSplitManager.java
index be88fc3e877a..5b64f64582ce 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergSplitManager.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergSplitManager.java
@@ -15,9 +15,9 @@
import com.google.common.collect.ImmutableList;
import io.airlift.units.Duration;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.base.classloader.ClassLoaderSafeConnectorSplitSource;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.ConnectorSplitManager;
import io.trino.spi.connector.ConnectorSplitSource;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergSplitSource.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergSplitSource.java
index 412a0982d340..6265e877c6bf 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergSplitSource.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergSplitSource.java
@@ -21,8 +21,8 @@
import com.google.common.io.Closer;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
import io.trino.plugin.iceberg.delete.DeleteFile;
import io.trino.plugin.iceberg.util.DataFileWithDeleteFiles;
import io.trino.spi.SplitWeight;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/InternalIcebergConnectorFactory.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/InternalIcebergConnectorFactory.java
index 16481713f2f0..59c856debb35 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/InternalIcebergConnectorFactory.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/InternalIcebergConnectorFactory.java
@@ -21,6 +21,10 @@
import io.airlift.bootstrap.LifeCycleManager;
import io.airlift.event.client.EventModule;
import io.airlift.json.JsonModule;
+import io.trino.filesystem.TrinoFileSystemFactory;
+import io.trino.filesystem.hdfs.HdfsFileSystemFactory;
+import io.trino.hdfs.HdfsModule;
+import io.trino.hdfs.authentication.HdfsAuthenticationModule;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.base.classloader.ClassLoaderSafeConnectorPageSinkProvider;
import io.trino.plugin.base.classloader.ClassLoaderSafeConnectorPageSourceProvider;
@@ -29,16 +33,12 @@
import io.trino.plugin.base.jmx.ConnectorObjectNameGeneratorModule;
import io.trino.plugin.base.jmx.MBeanServerModule;
import io.trino.plugin.base.session.SessionPropertiesProvider;
-import io.trino.plugin.hive.HiveHdfsModule;
import io.trino.plugin.hive.NodeVersion;
-import io.trino.plugin.hive.authentication.HdfsAuthenticationModule;
import io.trino.plugin.hive.azure.HiveAzureModule;
import io.trino.plugin.hive.gcs.HiveGcsModule;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.s3.HiveS3Module;
import io.trino.plugin.iceberg.catalog.IcebergCatalogModule;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
-import io.trino.plugin.iceberg.io.hdfs.HdfsFileSystemFactory;
import io.trino.spi.NodeManager;
import io.trino.spi.PageIndexerFactory;
import io.trino.spi.classloader.ThreadContextClassLoader;
@@ -87,7 +87,7 @@ public static Connector createConnector(
new IcebergModule(),
new IcebergSecurityModule(),
new IcebergCatalogModule(metastore),
- new HiveHdfsModule(),
+ new HdfsModule(),
new HiveS3Module(),
new HiveGcsModule(),
new HiveAzureModule(),
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoOrcDataSource.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/TrinoOrcDataSource.java
similarity index 94%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoOrcDataSource.java
rename to plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/TrinoOrcDataSource.java
index 789f6239083a..ce49c2cb7b5c 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoOrcDataSource.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/TrinoOrcDataSource.java
@@ -11,9 +11,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io;
+package io.trino.plugin.iceberg;
import io.airlift.slice.Slice;
+import io.trino.filesystem.TrinoInput;
+import io.trino.filesystem.TrinoInputFile;
import io.trino.orc.AbstractOrcDataSource;
import io.trino.orc.OrcDataSourceId;
import io.trino.orc.OrcReaderOptions;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoParquetDataSource.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/TrinoParquetDataSource.java
similarity index 94%
rename from plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoParquetDataSource.java
rename to plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/TrinoParquetDataSource.java
index 9aee5944df78..d60af594bef4 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/io/TrinoParquetDataSource.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/TrinoParquetDataSource.java
@@ -11,9 +11,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package io.trino.plugin.iceberg.io;
+package io.trino.plugin.iceberg;
import io.airlift.slice.Slice;
+import io.trino.filesystem.TrinoInput;
+import io.trino.filesystem.TrinoInputFile;
import io.trino.parquet.AbstractParquetDataSource;
import io.trino.parquet.ParquetDataSourceId;
import io.trino.parquet.ParquetReaderOptions;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/AbstractTrinoCatalog.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/AbstractTrinoCatalog.java
index 3755c2f55622..fd4bfbde91c9 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/AbstractTrinoCatalog.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/AbstractTrinoCatalog.java
@@ -14,6 +14,7 @@
package io.trino.plugin.iceberg.catalog;
import com.google.common.collect.ImmutableMap;
+import io.trino.filesystem.TrinoFileSystem;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.hive.HiveMetadata;
import io.trino.plugin.hive.HiveViewNotSupportedException;
@@ -21,7 +22,6 @@
import io.trino.plugin.iceberg.ColumnIdentity;
import io.trino.plugin.iceberg.IcebergMaterializedViewDefinition;
import io.trino.plugin.iceberg.IcebergUtil;
-import io.trino.plugin.iceberg.io.TrinoFileSystem;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.CatalogSchemaTableName;
import io.trino.spi.connector.ColumnMetadata;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/file/FileMetastoreTableOperationsProvider.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/file/FileMetastoreTableOperationsProvider.java
index 91c1ad0cd4df..f0e78dee3754 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/file/FileMetastoreTableOperationsProvider.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/file/FileMetastoreTableOperationsProvider.java
@@ -13,11 +13,11 @@
*/
package io.trino.plugin.iceberg.catalog.file;
+import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.plugin.iceberg.catalog.IcebergTableOperations;
import io.trino.plugin.iceberg.catalog.IcebergTableOperationsProvider;
import io.trino.plugin.iceberg.catalog.TrinoCatalog;
import io.trino.plugin.iceberg.catalog.hms.TrinoHiveCatalog;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
import io.trino.spi.connector.ConnectorSession;
import javax.inject.Inject;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/GlueIcebergTableOperationsProvider.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/GlueIcebergTableOperationsProvider.java
index 1f28b67d5c5d..ca8a05e3222e 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/GlueIcebergTableOperationsProvider.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/GlueIcebergTableOperationsProvider.java
@@ -15,12 +15,12 @@
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.glue.AWSGlueAsync;
+import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.plugin.hive.metastore.glue.GlueHiveMetastoreConfig;
import io.trino.plugin.hive.metastore.glue.GlueMetastoreStats;
import io.trino.plugin.iceberg.catalog.IcebergTableOperations;
import io.trino.plugin.iceberg.catalog.IcebergTableOperationsProvider;
import io.trino.plugin.iceberg.catalog.TrinoCatalog;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
import io.trino.spi.connector.ConnectorSession;
import javax.inject.Inject;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/TrinoGlueCatalog.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/TrinoGlueCatalog.java
index 058013f81fd3..8144c8a3e46b 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/TrinoGlueCatalog.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/TrinoGlueCatalog.java
@@ -34,13 +34,13 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.log.Logger;
+import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.hive.SchemaAlreadyExistsException;
import io.trino.plugin.hive.ViewAlreadyExistsException;
import io.trino.plugin.hive.metastore.glue.GlueMetastoreStats;
import io.trino.plugin.iceberg.catalog.AbstractTrinoCatalog;
import io.trino.plugin.iceberg.catalog.IcebergTableOperationsProvider;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.CatalogSchemaTableName;
import io.trino.spi.connector.ConnectorMaterializedViewDefinition;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/TrinoGlueCatalogFactory.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/TrinoGlueCatalogFactory.java
index ea188bdb1404..5e2b523ea034 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/TrinoGlueCatalogFactory.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/glue/TrinoGlueCatalogFactory.java
@@ -15,6 +15,7 @@
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.glue.AWSGlueAsync;
+import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.metastore.glue.GlueHiveMetastoreConfig;
@@ -23,7 +24,6 @@
import io.trino.plugin.iceberg.catalog.IcebergTableOperationsProvider;
import io.trino.plugin.iceberg.catalog.TrinoCatalog;
import io.trino.plugin.iceberg.catalog.TrinoCatalogFactory;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
import io.trino.spi.security.ConnectorIdentity;
import io.trino.spi.type.TypeManager;
import org.weakref.jmx.Flatten;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/hms/HiveMetastoreTableOperationsProvider.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/hms/HiveMetastoreTableOperationsProvider.java
index 5815c746cacd..9d9f57e50b67 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/hms/HiveMetastoreTableOperationsProvider.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/hms/HiveMetastoreTableOperationsProvider.java
@@ -13,11 +13,11 @@
*/
package io.trino.plugin.iceberg.catalog.hms;
+import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.plugin.hive.metastore.thrift.ThriftMetastoreFactory;
import io.trino.plugin.iceberg.catalog.IcebergTableOperations;
import io.trino.plugin.iceberg.catalog.IcebergTableOperationsProvider;
import io.trino.plugin.iceberg.catalog.TrinoCatalog;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
import io.trino.spi.connector.ConnectorSession;
import javax.inject.Inject;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/hms/TrinoHiveCatalog.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/hms/TrinoHiveCatalog.java
index b31d5d82a3c4..ffe9a7782c7a 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/hms/TrinoHiveCatalog.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/hms/TrinoHiveCatalog.java
@@ -16,6 +16,7 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import io.airlift.log.Logger;
+import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.hive.HiveSchemaProperties;
import io.trino.plugin.hive.TableAlreadyExistsException;
@@ -29,7 +30,6 @@
import io.trino.plugin.iceberg.ColumnIdentity;
import io.trino.plugin.iceberg.catalog.AbstractTrinoCatalog;
import io.trino.plugin.iceberg.catalog.IcebergTableOperationsProvider;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.CatalogSchemaTableName;
import io.trino.spi.connector.ConnectorMaterializedViewDefinition;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/hms/TrinoHiveCatalogFactory.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/hms/TrinoHiveCatalogFactory.java
index 0c92aa21c1c0..4cb3228ad00e 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/hms/TrinoHiveCatalogFactory.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/catalog/hms/TrinoHiveCatalogFactory.java
@@ -13,6 +13,7 @@
*/
package io.trino.plugin.iceberg.catalog.hms;
+import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
@@ -21,7 +22,6 @@
import io.trino.plugin.iceberg.catalog.IcebergTableOperationsProvider;
import io.trino.plugin.iceberg.catalog.TrinoCatalog;
import io.trino.plugin.iceberg.catalog.TrinoCatalogFactory;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
import io.trino.spi.security.ConnectorIdentity;
import io.trino.spi.type.TypeManager;
diff --git a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/delete/IcebergPositionDeletePageSink.java b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/delete/IcebergPositionDeletePageSink.java
index 704fddb9e37f..60e78391dd98 100644
--- a/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/delete/IcebergPositionDeletePageSink.java
+++ b/plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/delete/IcebergPositionDeletePageSink.java
@@ -15,13 +15,13 @@
import io.airlift.json.JsonCodec;
import io.airlift.slice.Slice;
+import io.trino.filesystem.TrinoFileSystem;
import io.trino.plugin.iceberg.CommitTaskData;
import io.trino.plugin.iceberg.IcebergFileFormat;
import io.trino.plugin.iceberg.IcebergFileWriter;
import io.trino.plugin.iceberg.IcebergFileWriterFactory;
import io.trino.plugin.iceberg.MetricsWrapper;
import io.trino.plugin.iceberg.PartitionData;
-import io.trino.plugin.iceberg.io.TrinoFileSystem;
import io.trino.spi.Page;
import io.trino.spi.block.Block;
import io.trino.spi.block.RunLengthEncodedBlock;
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
index 9b4776c84063..c2636e76d9b6 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
@@ -18,6 +18,7 @@
import com.google.common.collect.ImmutableSet;
import io.airlift.units.DataSize;
import io.trino.Session;
+import io.trino.hdfs.HdfsContext;
import io.trino.metadata.Metadata;
import io.trino.metadata.QualifiedObjectName;
import io.trino.metadata.TableHandle;
@@ -89,7 +90,6 @@
import static com.google.common.util.concurrent.Uninterruptibles.sleepUninterruptibly;
import static io.trino.SystemSessionProperties.PREFERRED_WRITE_PARTITIONING_MIN_NUMBER_OF_PARTITIONS;
import static io.trino.SystemSessionProperties.SCALE_WRITERS;
-import static io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import static io.trino.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT;
import static io.trino.plugin.iceberg.IcebergFileFormat.AVRO;
import static io.trino.plugin.iceberg.IcebergFileFormat.ORC;
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMergeAppend.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMergeAppend.java
index 086d2db9cde6..74a35a398f56 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMergeAppend.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMergeAppend.java
@@ -13,6 +13,8 @@
*/
package io.trino.plugin.iceberg;
+import io.trino.filesystem.TrinoFileSystemFactory;
+import io.trino.filesystem.hdfs.HdfsFileSystemFactory;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.metastore.HiveMetastore;
@@ -22,8 +24,6 @@
import io.trino.plugin.iceberg.catalog.TrinoCatalog;
import io.trino.plugin.iceberg.catalog.file.FileMetastoreTableOperationsProvider;
import io.trino.plugin.iceberg.catalog.hms.TrinoHiveCatalog;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
-import io.trino.plugin.iceberg.io.hdfs.HdfsFileSystemFactory;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.type.TestingTypeManager;
import io.trino.testing.AbstractTestQueryFramework;
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMetadataFileOperations.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMetadataFileOperations.java
index eab5e7ad5e21..6df4e01a8c96 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMetadataFileOperations.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMetadataFileOperations.java
@@ -17,10 +17,10 @@
import com.google.common.collect.ImmutableMultiset;
import com.google.common.collect.Multiset;
import io.trino.Session;
+import io.trino.filesystem.hdfs.HdfsFileSystemFactory;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.iceberg.TrackingFileSystemFactory.OperationContext;
import io.trino.plugin.iceberg.TrackingFileSystemFactory.OperationType;
-import io.trino.plugin.iceberg.io.hdfs.HdfsFileSystemFactory;
import io.trino.plugin.tpch.TpchPlugin;
import io.trino.testing.AbstractTestQueryFramework;
import io.trino.testing.DistributedQueryRunner;
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMetadataListing.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMetadataListing.java
index d4a320adfa7a..1ec03e91a347 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMetadataListing.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMetadataListing.java
@@ -16,16 +16,16 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.trino.Session;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.metadata.MaterializedViewDefinition;
import io.trino.metadata.QualifiedObjectName;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.TestingHivePlugin;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.file.FileHiveMetastore;
import io.trino.plugin.hive.metastore.file.FileHiveMetastoreConfig;
@@ -69,7 +69,7 @@ protected DistributedQueryRunner createQueryRunner()
File baseDir = queryRunner.getCoordinator().getBaseDataDir().resolve("iceberg_data").toFile();
HdfsConfig hdfsConfig = new HdfsConfig();
- HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
+ HdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
metastore = new FileHiveMetastore(
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMetastoreAccessOperations.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMetastoreAccessOperations.java
index 507ed6f4c64c..822af432d103 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMetastoreAccessOperations.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergMetastoreAccessOperations.java
@@ -18,13 +18,13 @@
import com.google.common.collect.Multiset;
import com.google.common.collect.Sets;
import io.trino.Session;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.NodeVersion;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.CountingAccessHiveMetastore;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.file.FileHiveMetastore;
@@ -77,7 +77,7 @@ protected DistributedQueryRunner createQueryRunner()
File baseDir = queryRunner.getCoordinator().getBaseDataDir().resolve("iceberg_data").toFile();
HdfsConfig hdfsConfig = new HdfsConfig();
- HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
+ HdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
HiveMetastore hiveMetastore = new FileHiveMetastore(
new NodeVersion("testversion"),
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergNodeLocalDynamicSplitPruning.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergNodeLocalDynamicSplitPruning.java
index c95de5dd3440..6cc8bb17ff2d 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergNodeLocalDynamicSplitPruning.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergNodeLocalDynamicSplitPruning.java
@@ -18,6 +18,7 @@
import com.google.common.collect.ImmutableSet;
import io.airlift.json.JsonCodecFactory;
import io.airlift.testing.TempFile;
+import io.trino.filesystem.hdfs.HdfsFileSystemFactory;
import io.trino.metadata.TableHandle;
import io.trino.operator.GroupByHashPageIndexerFactory;
import io.trino.orc.OrcWriteValidation;
@@ -33,7 +34,6 @@
import io.trino.plugin.hive.orc.OrcWriterConfig;
import io.trino.plugin.hive.parquet.ParquetReaderConfig;
import io.trino.plugin.hive.parquet.ParquetWriterConfig;
-import io.trino.plugin.iceberg.io.hdfs.HdfsFileSystemFactory;
import io.trino.spi.Page;
import io.trino.spi.SplitWeight;
import io.trino.spi.block.BlockBuilder;
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergOrcMetricsCollection.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergOrcMetricsCollection.java
index 214b89bb177b..8f235f316852 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergOrcMetricsCollection.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergOrcMetricsCollection.java
@@ -14,6 +14,8 @@
package io.trino.plugin.iceberg;
import io.trino.Session;
+import io.trino.filesystem.TrinoFileSystemFactory;
+import io.trino.filesystem.hdfs.HdfsFileSystemFactory;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.metastore.HiveMetastore;
@@ -24,8 +26,6 @@
import io.trino.plugin.iceberg.catalog.TrinoCatalog;
import io.trino.plugin.iceberg.catalog.file.FileMetastoreTableOperationsProvider;
import io.trino.plugin.iceberg.catalog.hms.TrinoHiveCatalog;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
-import io.trino.plugin.iceberg.io.hdfs.HdfsFileSystemFactory;
import io.trino.plugin.tpch.TpchPlugin;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.type.TestingTypeManager;
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergSplitSource.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergSplitSource.java
index 85943dc5c3c2..5c23ca01896c 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergSplitSource.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergSplitSource.java
@@ -17,14 +17,14 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.airlift.units.Duration;
+import io.trino.filesystem.TrinoFileSystemFactory;
+import io.trino.filesystem.hdfs.HdfsFileSystemFactory;
+import io.trino.hdfs.HdfsContext;
import io.trino.plugin.base.CatalogName;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.iceberg.catalog.TrinoCatalog;
import io.trino.plugin.iceberg.catalog.file.FileMetastoreTableOperationsProvider;
import io.trino.plugin.iceberg.catalog.hms.TrinoHiveCatalog;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
-import io.trino.plugin.iceberg.io.hdfs.HdfsFileSystemFactory;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.DynamicFilter;
import io.trino.spi.connector.SchemaTableName;
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergTableWithCustomLocation.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergTableWithCustomLocation.java
index e821579f07aa..5e6fbe72cdab 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergTableWithCustomLocation.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergTableWithCustomLocation.java
@@ -14,14 +14,14 @@
package io.trino.plugin.iceberg;
import com.google.common.collect.ImmutableSet;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.NodeVersion;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.Table;
import io.trino.plugin.hive.metastore.file.FileHiveMetastore;
import io.trino.plugin.hive.metastore.file.FileHiveMetastoreConfig;
@@ -66,7 +66,7 @@ protected DistributedQueryRunner createQueryRunner()
{
metastoreDir = Files.createTempDirectory("test_iceberg").toFile();
HdfsConfig hdfsConfig = new HdfsConfig();
- HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
+ HdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
FileHiveMetastoreConfig config = new FileHiveMetastoreConfig()
.setCatalogDirectory(metastoreDir.toURI().toString())
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergTableWithExternalLocation.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergTableWithExternalLocation.java
index a10a84ca4eef..fa0a5c708368 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergTableWithExternalLocation.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergTableWithExternalLocation.java
@@ -14,14 +14,14 @@
package io.trino.plugin.iceberg;
import com.google.common.collect.ImmutableSet;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.NodeVersion;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.Table;
import io.trino.plugin.hive.metastore.file.FileHiveMetastore;
import io.trino.plugin.hive.metastore.file.FileHiveMetastoreConfig;
@@ -64,7 +64,7 @@ protected DistributedQueryRunner createQueryRunner()
{
metastoreDir = Files.createTempDirectory("test_iceberg").toFile();
HdfsConfig hdfsConfig = new HdfsConfig();
- HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
+ HdfsConfiguration hdfsConfiguration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of());
hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hdfsConfig, new NoHdfsAuthentication());
FileHiveMetastoreConfig config = new FileHiveMetastoreConfig()
.setCatalogDirectory(metastoreDir.toURI().toString())
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergV2.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergV2.java
index 590f03e6ab58..0845a19a8de2 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergV2.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergV2.java
@@ -15,21 +15,22 @@
import com.google.common.collect.ImmutableSet;
import io.trino.Session;
+import io.trino.filesystem.TrinoFileSystemFactory;
+import io.trino.filesystem.hdfs.HdfsFileSystemFactory;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsContext;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.base.CatalogName;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.cache.CachingHiveMetastore;
import io.trino.plugin.iceberg.catalog.IcebergTableOperationsProvider;
import io.trino.plugin.iceberg.catalog.TrinoCatalog;
import io.trino.plugin.iceberg.catalog.file.FileMetastoreTableOperationsProvider;
import io.trino.plugin.iceberg.catalog.hms.TrinoHiveCatalog;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
-import io.trino.plugin.iceberg.io.hdfs.HdfsFileSystemFactory;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.type.TestingTypeManager;
import io.trino.testing.AbstractTestQueryFramework;
@@ -66,7 +67,6 @@
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
-import static io.trino.plugin.hive.HdfsEnvironment.HdfsContext;
import static io.trino.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT;
import static io.trino.plugin.hive.metastore.file.FileHiveMetastore.createTestingFileHiveMetastore;
import static io.trino.plugin.iceberg.IcebergUtil.loadIcebergTable;
@@ -93,7 +93,7 @@ protected QueryRunner createQueryRunner()
throws Exception
{
HdfsConfig config = new HdfsConfig();
- HdfsConfiguration configuration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(config), ImmutableSet.of());
+ HdfsConfiguration configuration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(config), ImmutableSet.of());
hdfsEnvironment = new HdfsEnvironment(configuration, config, new NoHdfsAuthentication());
tempDir = Files.createTempDirectory("test_iceberg_v2");
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestSharedHiveMetastore.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestSharedHiveMetastore.java
index 376728854835..222d197a03b5 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestSharedHiveMetastore.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestSharedHiveMetastore.java
@@ -17,13 +17,13 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.trino.Session;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.NodeVersion;
import io.trino.plugin.hive.TestingHivePlugin;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.HiveMetastoreConfig;
import io.trino.plugin.hive.metastore.file.FileHiveMetastore;
@@ -86,7 +86,7 @@ protected QueryRunner createQueryRunner()
HdfsConfig hdfsConfig = new HdfsConfig();
HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(
- new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of()),
+ new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of()),
hdfsConfig,
new NoHdfsAuthentication());
HiveMetastore metastore = new FileHiveMetastore(
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestTrinoHiveCatalogTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestTrinoHiveCatalogTest.java
index 033d6420b0d2..7810ac471bf8 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestTrinoHiveCatalogTest.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestTrinoHiveCatalogTest.java
@@ -13,13 +13,13 @@
*/
package io.trino.plugin.iceberg;
+import io.trino.filesystem.TrinoFileSystemFactory;
+import io.trino.filesystem.hdfs.HdfsFileSystemFactory;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.iceberg.catalog.TrinoCatalog;
import io.trino.plugin.iceberg.catalog.file.FileMetastoreTableOperationsProvider;
import io.trino.plugin.iceberg.catalog.hms.TrinoHiveCatalog;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
-import io.trino.plugin.iceberg.io.hdfs.HdfsFileSystemFactory;
import io.trino.spi.type.TestingTypeManager;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestingIcebergConnectorFactory.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestingIcebergConnectorFactory.java
index e46a2727c521..1a3ed135981e 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestingIcebergConnectorFactory.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestingIcebergConnectorFactory.java
@@ -14,8 +14,8 @@
package io.trino.plugin.iceberg;
import com.google.inject.Module;
+import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.plugin.hive.metastore.HiveMetastore;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
import io.trino.spi.connector.Connector;
import io.trino.spi.connector.ConnectorContext;
import io.trino.spi.connector.ConnectorFactory;
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestingIcebergPlugin.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestingIcebergPlugin.java
index 3bb82875d4a1..3a876535d8cc 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestingIcebergPlugin.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestingIcebergPlugin.java
@@ -15,8 +15,8 @@
import com.google.common.collect.ImmutableList;
import com.google.inject.Module;
+import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.plugin.hive.metastore.HiveMetastore;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
import io.trino.spi.connector.ConnectorFactory;
import java.util.List;
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TrackingFileSystemFactory.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TrackingFileSystemFactory.java
index eeba58e635bf..a5e5c3a75217 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TrackingFileSystemFactory.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TrackingFileSystemFactory.java
@@ -14,11 +14,11 @@
package io.trino.plugin.iceberg;
import com.google.common.collect.ImmutableMap;
-import io.trino.plugin.iceberg.io.FileIterator;
-import io.trino.plugin.iceberg.io.TrinoFileSystem;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
-import io.trino.plugin.iceberg.io.TrinoInputFile;
-import io.trino.plugin.iceberg.io.TrinoOutputFile;
+import io.trino.filesystem.FileIterator;
+import io.trino.filesystem.TrinoFileSystem;
+import io.trino.filesystem.TrinoFileSystemFactory;
+import io.trino.filesystem.TrinoInputFile;
+import io.trino.filesystem.TrinoOutputFile;
import io.trino.spi.security.ConnectorIdentity;
import org.apache.iceberg.io.FileIO;
import org.apache.iceberg.io.InputFile;
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestSharedGlueMetastore.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestSharedGlueMetastore.java
index da6188ec3cd3..6b0ad05f724e 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestSharedGlueMetastore.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestSharedGlueMetastore.java
@@ -19,12 +19,12 @@
import com.google.common.collect.ImmutableSet;
import io.airlift.log.Logger;
import io.trino.Session;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.TestingHivePlugin;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.glue.DefaultGlueColumnStatisticsProviderFactory;
import io.trino.plugin.hive.metastore.glue.GlueHiveMetastore;
@@ -100,7 +100,7 @@ protected QueryRunner createQueryRunner()
HdfsConfig hdfsConfig = new HdfsConfig();
HdfsEnvironment hdfsEnvironment = new HdfsEnvironment(
- new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of()),
+ new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(hdfsConfig), ImmutableSet.of()),
hdfsConfig,
new NoHdfsAuthentication());
this.glueMetastore = new GlueHiveMetastore(
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestTrinoGlueCatalogTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestTrinoGlueCatalogTest.java
index a1aefc324e6b..9a572471f64d 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestTrinoGlueCatalogTest.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestTrinoGlueCatalogTest.java
@@ -17,13 +17,13 @@
import com.amazonaws.services.glue.AWSGlueAsyncClientBuilder;
import com.google.common.collect.ImmutableMap;
import io.airlift.log.Logger;
+import io.trino.filesystem.TrinoFileSystemFactory;
+import io.trino.filesystem.hdfs.HdfsFileSystemFactory;
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.hive.metastore.glue.GlueHiveMetastoreConfig;
import io.trino.plugin.hive.metastore.glue.GlueMetastoreStats;
import io.trino.plugin.iceberg.BaseTrinoCatalogTest;
import io.trino.plugin.iceberg.catalog.TrinoCatalog;
-import io.trino.plugin.iceberg.io.TrinoFileSystemFactory;
-import io.trino.plugin.iceberg.io.hdfs.HdfsFileSystemFactory;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.security.PrincipalType;
import io.trino.spi.security.TrinoPrincipal;
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/optimizer/TestConnectorPushdownRulesWithIceberg.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/optimizer/TestConnectorPushdownRulesWithIceberg.java
index d6b5d95806c0..5b4cd4a3fc2c 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/optimizer/TestConnectorPushdownRulesWithIceberg.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/optimizer/TestConnectorPushdownRulesWithIceberg.java
@@ -18,15 +18,15 @@
import com.google.common.collect.ImmutableSet;
import io.trino.Session;
import io.trino.cost.ScalarStatsCalculator;
+import io.trino.hdfs.DynamicHdfsConfiguration;
+import io.trino.hdfs.HdfsConfig;
+import io.trino.hdfs.HdfsConfiguration;
+import io.trino.hdfs.HdfsConfigurationInitializer;
+import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hdfs.authentication.NoHdfsAuthentication;
import io.trino.metadata.TableHandle;
-import io.trino.plugin.hive.HdfsConfig;
-import io.trino.plugin.hive.HdfsConfiguration;
-import io.trino.plugin.hive.HdfsConfigurationInitializer;
-import io.trino.plugin.hive.HdfsEnvironment;
-import io.trino.plugin.hive.HiveHdfsConfiguration;
import io.trino.plugin.hive.HiveTransactionHandle;
import io.trino.plugin.hive.NodeVersion;
-import io.trino.plugin.hive.authentication.NoHdfsAuthentication;
import io.trino.plugin.hive.metastore.Database;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.file.FileHiveMetastore;
@@ -113,7 +113,7 @@ protected Optional createLocalQueryRunner()
throw new UncheckedIOException(e);
}
HdfsConfig config = new HdfsConfig();
- HdfsConfiguration configuration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(config), ImmutableSet.of());
+ HdfsConfiguration configuration = new DynamicHdfsConfiguration(new HdfsConfigurationInitializer(config), ImmutableSet.of());
HdfsEnvironment environment = new HdfsEnvironment(configuration, config, new NoHdfsAuthentication());
metastore = new FileHiveMetastore(
diff --git a/pom.xml b/pom.xml
index d98d7e280ec5..b78868439e0c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -67,6 +67,7 @@
5.5.2
4.14.0
7.1.4
+        <dep.iceberg.version>0.14.0</dep.iceberg.version>
65
@@ -99,8 +100,10 @@
docs
lib/trino-array
lib/trino-collect
+        <module>lib/trino-filesystem</module>
lib/trino-geospatial-toolkit
lib/trino-hadoop-toolkit
+        <module>lib/trino-hdfs</module>
lib/trino-matching
lib/trino-memory-context
lib/trino-orc
@@ -287,6 +290,12 @@
${project.version}
+            <dependency>
+                <groupId>io.trino</groupId>
+                <artifactId>trino-filesystem</artifactId>
+                <version>${project.version}</version>
+            </dependency>
+
io.trino
trino-geospatial
@@ -305,6 +314,12 @@
${project.version}
+            <dependency>
+                <groupId>io.trino</groupId>
+                <artifactId>trino-hdfs</artifactId>
+                <version>${project.version}</version>
+            </dependency>
+
io.trino
trino-hive
@@ -1512,6 +1527,78 @@
3.6.1
+            <dependency>
+                <groupId>org.apache.iceberg</groupId>
+                <artifactId>iceberg-api</artifactId>
+                <version>${dep.iceberg.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <groupId>org.slf4j</groupId>
+                        <artifactId>slf4j-api</artifactId>
+                    </exclusion>
+                </exclusions>
+            </dependency>
+
+            <dependency>
+                <groupId>org.apache.iceberg</groupId>
+                <artifactId>iceberg-core</artifactId>
+                <version>${dep.iceberg.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <groupId>org.apache.avro</groupId>
+                        <artifactId>avro</artifactId>
+                    </exclusion>
+                    <exclusion>
+                        <groupId>org.slf4j</groupId>
+                        <artifactId>slf4j-api</artifactId>
+                    </exclusion>
+                </exclusions>
+            </dependency>
+
+            <dependency>
+                <groupId>org.apache.iceberg</groupId>
+                <artifactId>iceberg-hive-metastore</artifactId>
+                <version>${dep.iceberg.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <groupId>org.slf4j</groupId>
+                        <artifactId>slf4j-api</artifactId>
+                    </exclusion>
+                </exclusions>
+            </dependency>
+
+            <dependency>
+                <groupId>org.apache.iceberg</groupId>
+                <artifactId>iceberg-orc</artifactId>
+                <version>${dep.iceberg.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <groupId>org.apache.avro</groupId>
+                        <artifactId>avro</artifactId>
+                    </exclusion>
+                    <exclusion>
+                        <groupId>org.slf4j</groupId>
+                        <artifactId>slf4j-api</artifactId>
+                    </exclusion>
+                </exclusions>
+            </dependency>
+
+            <dependency>
+                <groupId>org.apache.iceberg</groupId>
+                <artifactId>iceberg-parquet</artifactId>
+                <version>${dep.iceberg.version}</version>
+                <exclusions>
+                    <exclusion>
+                        <groupId>org.apache.parquet</groupId>
+                        <artifactId>parquet-avro</artifactId>
+                    </exclusion>
+                    <exclusion>
+                        <groupId>org.slf4j</groupId>
+                        <artifactId>slf4j-api</artifactId>
+                    </exclusion>
+                </exclusions>
+            </dependency>
+
org.apache.kafka
kafka-clients
diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeJmx.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeJmx.java
index 8c9542d490f7..2fe0a81f609c 100644
--- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeJmx.java
+++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/deltalake/TestDeltaLakeJmx.java
@@ -34,7 +34,6 @@ public void testJmxTablesExposedByDeltaLakeConnectorBackedByGlueMetastore()
row("io.trino.plugin.hive.metastore.glue:name=delta,type=gluehivemetastore"),
row("io.trino.plugin.hive.s3:name=delta,type=trinos3filesystem"),
row("io.trino.plugin.hive:catalog=delta,name=delta,type=fileformatdatasourcestats"),
- row("io.trino.plugin.hive:name=delta,type=namenodestats"),
row("trino.plugin.deltalake.transactionlog:catalog=delta,name=delta,type=transactionlogaccess"));
}
@@ -46,7 +45,6 @@ public void testJmxTablesExposedByDeltaLakeConnectorBackedByThriftMetastore()
row("io.trino.plugin.hive.metastore.thrift:name=delta,type=thrifthivemetastore"),
row("io.trino.plugin.hive.s3:name=delta,type=trinos3filesystem"),
row("io.trino.plugin.hive:catalog=delta,name=delta,type=fileformatdatasourcestats"),
- row("io.trino.plugin.hive:name=delta,type=namenodestats"),
row("trino.plugin.deltalake.transactionlog:catalog=delta,name=delta,type=transactionlogaccess"));
}
}