diff --git a/spark/v3.0/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java b/spark/v3.0/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
index ae921c5200a8..d80b9bf5476d 100644
--- a/spark/v3.0/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
+++ b/spark/v3.0/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
@@ -488,6 +488,10 @@ private Pair load(Identifier ident) {
return Pair.of(icebergCatalog.loadTable(buildIdentifier(ident)), null);
} catch (org.apache.iceberg.exceptions.NoSuchTableException e) {
+ if (ident.namespace().length == 0) {
+ throw e;
+ }
+
// if the original load didn't work, the identifier may be extended and include a snapshot selector
TableIdentifier namespaceAsIdent = buildIdentifier(namespaceToIdentifier(ident.namespace()));
Table table;
@@ -567,6 +571,8 @@ private Pair loadFromPathIdentifier(PathIdentifier ident) {
}
private Identifier namespaceToIdentifier(String[] namespace) {
+ Preconditions.checkArgument(namespace.length > 0,
+ "Cannot convert empty namespace to identifier");
String[] ns = Arrays.copyOf(namespace, namespace.length - 1);
String name = namespace[ns.length];
return Identifier.of(ns, name);
diff --git a/spark/v3.0/spark/src/test/java/org/apache/iceberg/spark/sql/TestCreateTable.java b/spark/v3.0/spark/src/test/java/org/apache/iceberg/spark/sql/TestCreateTable.java
index 303cbb5f932b..0a4c9368cb96 100644
--- a/spark/v3.0/spark/src/test/java/org/apache/iceberg/spark/sql/TestCreateTable.java
+++ b/spark/v3.0/spark/src/test/java/org/apache/iceberg/spark/sql/TestCreateTable.java
@@ -67,6 +67,17 @@ public void testCreateTable() {
table.properties().get(TableProperties.DEFAULT_FILE_FORMAT));
}
+ @Test
+ public void testCreateTableInRootNamespace() {
+ Assume.assumeTrue("Hadoop has no default namespace configured", "testhadoop".equals(catalogName));
+
+ try {
+ sql("CREATE TABLE %s.table (id bigint) USING iceberg", catalogName);
+ } finally {
+ sql("DROP TABLE IF EXISTS %s.table", catalogName);
+ }
+ }
+
@Test
public void testCreateTableUsingParquet() {
Assume.assumeTrue(
diff --git a/spark/v3.1/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java b/spark/v3.1/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
index ae921c5200a8..d80b9bf5476d 100644
--- a/spark/v3.1/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
+++ b/spark/v3.1/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
@@ -488,6 +488,10 @@ private Pair load(Identifier ident) {
return Pair.of(icebergCatalog.loadTable(buildIdentifier(ident)), null);
} catch (org.apache.iceberg.exceptions.NoSuchTableException e) {
+ if (ident.namespace().length == 0) {
+ throw e;
+ }
+
// if the original load didn't work, the identifier may be extended and include a snapshot selector
TableIdentifier namespaceAsIdent = buildIdentifier(namespaceToIdentifier(ident.namespace()));
Table table;
@@ -567,6 +571,8 @@ private Pair loadFromPathIdentifier(PathIdentifier ident) {
}
private Identifier namespaceToIdentifier(String[] namespace) {
+ Preconditions.checkArgument(namespace.length > 0,
+ "Cannot convert empty namespace to identifier");
String[] ns = Arrays.copyOf(namespace, namespace.length - 1);
String name = namespace[ns.length];
return Identifier.of(ns, name);
diff --git a/spark/v3.1/spark/src/test/java/org/apache/iceberg/spark/sql/TestCreateTable.java b/spark/v3.1/spark/src/test/java/org/apache/iceberg/spark/sql/TestCreateTable.java
index 303cbb5f932b..0a4c9368cb96 100644
--- a/spark/v3.1/spark/src/test/java/org/apache/iceberg/spark/sql/TestCreateTable.java
+++ b/spark/v3.1/spark/src/test/java/org/apache/iceberg/spark/sql/TestCreateTable.java
@@ -67,6 +67,17 @@ public void testCreateTable() {
table.properties().get(TableProperties.DEFAULT_FILE_FORMAT));
}
+ @Test
+ public void testCreateTableInRootNamespace() {
+ Assume.assumeTrue("Hadoop has no default namespace configured", "testhadoop".equals(catalogName));
+
+ try {
+ sql("CREATE TABLE %s.table (id bigint) USING iceberg", catalogName);
+ } finally {
+ sql("DROP TABLE IF EXISTS %s.table", catalogName);
+ }
+ }
+
@Test
public void testCreateTableUsingParquet() {
Assume.assumeTrue(
diff --git a/spark/v3.2/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java b/spark/v3.2/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
index ae921c5200a8..d80b9bf5476d 100644
--- a/spark/v3.2/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
+++ b/spark/v3.2/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
@@ -488,6 +488,10 @@ private Pair load(Identifier ident) {
return Pair.of(icebergCatalog.loadTable(buildIdentifier(ident)), null);
} catch (org.apache.iceberg.exceptions.NoSuchTableException e) {
+ if (ident.namespace().length == 0) {
+ throw e;
+ }
+
// if the original load didn't work, the identifier may be extended and include a snapshot selector
TableIdentifier namespaceAsIdent = buildIdentifier(namespaceToIdentifier(ident.namespace()));
Table table;
@@ -567,6 +571,8 @@ private Pair loadFromPathIdentifier(PathIdentifier ident) {
}
private Identifier namespaceToIdentifier(String[] namespace) {
+ Preconditions.checkArgument(namespace.length > 0,
+ "Cannot convert empty namespace to identifier");
String[] ns = Arrays.copyOf(namespace, namespace.length - 1);
String name = namespace[ns.length];
return Identifier.of(ns, name);
diff --git a/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/sql/TestCreateTable.java b/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/sql/TestCreateTable.java
index 303cbb5f932b..0a4c9368cb96 100644
--- a/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/sql/TestCreateTable.java
+++ b/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/sql/TestCreateTable.java
@@ -67,6 +67,17 @@ public void testCreateTable() {
table.properties().get(TableProperties.DEFAULT_FILE_FORMAT));
}
+ @Test
+ public void testCreateTableInRootNamespace() {
+ Assume.assumeTrue("Hadoop has no default namespace configured", "testhadoop".equals(catalogName));
+
+ try {
+ sql("CREATE TABLE %s.table (id bigint) USING iceberg", catalogName);
+ } finally {
+ sql("DROP TABLE IF EXISTS %s.table", catalogName);
+ }
+ }
+
@Test
public void testCreateTableUsingParquet() {
Assume.assumeTrue(