diff --git a/mr/src/main/java/org/apache/iceberg/mr/Catalogs.java b/mr/src/main/java/org/apache/iceberg/mr/Catalogs.java
index 526f340165a5..75797580cd9b 100644
--- a/mr/src/main/java/org/apache/iceberg/mr/Catalogs.java
+++ b/mr/src/main/java/org/apache/iceberg/mr/Catalogs.java
@@ -52,20 +52,14 @@
*
* <p>In case the value of catalog type is null, iceberg.catalog.<code>catalogName</code>
* .catalog-impl config is used to determine the catalog implementation class.
*
- * <p>If catalog name is null, get the catalog type from {@link InputFormatConfig#CATALOG
- * iceberg.mr.catalog} config:
+ * <p>If catalog name is null, get the catalog type from {@link CatalogUtil#ICEBERG_CATALOG_TYPE
+ * catalog type} config:
*
* <ul>
*   <li>hive: HiveCatalog
*   <li>location: HadoopTables
*   <li>hadoop: HadoopCatalog
* </ul>
- *
- * <p>In case the value of catalog type is null, {@link InputFormatConfig#CATALOG_LOADER_CLASS
- * iceberg.mr.catalog.loader.class} is used to determine the catalog implementation class.
- *
- * <p>Note: null catalog name mode is only supported for backwards compatibility. Using this mode is
- * NOT RECOMMENDED.
*/
public final class Catalogs {
@@ -259,41 +253,12 @@ static Optional<Catalog> loadCatalog(Configuration conf, String catalogName) {
private static Map<String, String> getCatalogProperties(
Configuration conf, String catalogName, String catalogType) {
String keyPrefix = InputFormatConfig.CATALOG_CONFIG_PREFIX + catalogName;
- Map<String, String> catalogProperties =
- Streams.stream(conf.iterator())
- .filter(e -> e.getKey().startsWith(keyPrefix))
- .collect(
- Collectors.toMap(
- e -> e.getKey().substring(keyPrefix.length() + 1), Map.Entry::getValue));
- return addCatalogPropertiesIfMissing(conf, catalogType, catalogProperties);
- }
- /**
- * This method is used for backward-compatible catalog configuration. Collect all the catalog
- * specific configuration from the global hive configuration. Note: this should be removed when
- * the old catalog configuration is deprecated.
- *
- * @param conf global hive configuration
- * @param catalogType type of the catalog
- * @param catalogProperties pre-populated catalog properties
- * @return complete map of catalog properties
- */
- private static Map<String, String> addCatalogPropertiesIfMissing(
- Configuration conf, String catalogType, Map<String, String> catalogProperties) {
- if (catalogType != null) {
- catalogProperties.putIfAbsent(CatalogUtil.ICEBERG_CATALOG_TYPE, catalogType);
- }
-
- String legacyCatalogImpl = conf.get(InputFormatConfig.CATALOG_LOADER_CLASS);
- if (legacyCatalogImpl != null) {
- catalogProperties.putIfAbsent(CatalogProperties.CATALOG_IMPL, legacyCatalogImpl);
- }
-
- String legacyWarehouseLocation = conf.get(InputFormatConfig.HADOOP_CATALOG_WAREHOUSE_LOCATION);
- if (legacyWarehouseLocation != null) {
- catalogProperties.putIfAbsent(CatalogProperties.WAREHOUSE_LOCATION, legacyWarehouseLocation);
- }
- return catalogProperties;
+ return Streams.stream(conf.iterator())
+ .filter(e -> e.getKey().startsWith(keyPrefix))
+ .collect(
+ Collectors.toMap(
+ e -> e.getKey().substring(keyPrefix.length() + 1), Map.Entry::getValue));
}
/**
@@ -317,7 +282,7 @@ private static String getCatalogType(Configuration conf, String catalogName) {
return catalogType;
}
} else {
- String catalogType = conf.get(InputFormatConfig.CATALOG);
+ String catalogType = conf.get(CatalogUtil.ICEBERG_CATALOG_TYPE);
if (catalogType != null && catalogType.equals(LOCATION)) {
return NO_CATALOG_TYPE;
} else {
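
The rewritten javadoc and getCatalogType above resolve everything through the per-catalog "iceberg.catalog.<name>.*" keys instead of the removed iceberg.mr.* fallbacks. A minimal sketch of that configuration style, assuming a plain Hadoop Configuration and an illustrative catalog name "my_catalog" (the helper catalogPropertyConfigKey and the CatalogUtil/CatalogProperties constants are the ones referenced in this diff):

import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.mr.InputFormatConfig;

public class CatalogConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    String catalogName = "my_catalog"; // hypothetical catalog name

    // iceberg.catalog.my_catalog.type=hadoop
    conf.set(
        InputFormatConfig.catalogPropertyConfigKey(catalogName, CatalogUtil.ICEBERG_CATALOG_TYPE),
        CatalogUtil.ICEBERG_CATALOG_TYPE_HADOOP);

    // iceberg.catalog.my_catalog.warehouse=/tmp/mylocation
    conf.set(
        InputFormatConfig.catalogPropertyConfigKey(catalogName, CatalogProperties.WAREHOUSE_LOCATION),
        "/tmp/mylocation");

    // After this change, getCatalogProperties(conf, catalogName, catalogType) only strips the
    // "iceberg.catalog.my_catalog." prefix from matching keys; no legacy iceberg.mr.* values
    // are merged in via addCatalogPropertiesIfMissing any more.
  }
}

These are the same keys the surviving testLoadCatalogHive/Hadoop/Custom tests set.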
diff --git a/mr/src/main/java/org/apache/iceberg/mr/InputFormatConfig.java b/mr/src/main/java/org/apache/iceberg/mr/InputFormatConfig.java
index a53fabe08318..185617aec258 100644
--- a/mr/src/main/java/org/apache/iceberg/mr/InputFormatConfig.java
+++ b/mr/src/main/java/org/apache/iceberg/mr/InputFormatConfig.java
@@ -48,28 +48,6 @@ private InputFormatConfig() {}
public static final String TABLE_CATALOG_PREFIX = "iceberg.mr.table.catalog.";
public static final String LOCALITY = "iceberg.mr.locality";
- /**
- * @deprecated please use {@link #catalogPropertyConfigKey(String, String)} with config key {@link
- * org.apache.iceberg.CatalogUtil#ICEBERG_CATALOG_TYPE} to specify the type of a catalog.
- */
- @Deprecated public static final String CATALOG = "iceberg.mr.catalog";
-
- /**
- * @deprecated please use {@link #catalogPropertyConfigKey(String, String)} with config key {@link
- * org.apache.iceberg.CatalogProperties#WAREHOUSE_LOCATION} to specify the warehouse location
- * of a catalog.
- */
- @Deprecated
- public static final String HADOOP_CATALOG_WAREHOUSE_LOCATION =
- "iceberg.mr.catalog.hadoop.warehouse.location";
-
- /**
- * @deprecated please use {@link #catalogPropertyConfigKey(String, String)} with config key {@link
- * org.apache.iceberg.CatalogProperties#CATALOG_IMPL} to specify the implementation of a
- * catalog.
- */
- @Deprecated public static final String CATALOG_LOADER_CLASS = "iceberg.mr.catalog.loader.class";
-
public static final String SELECTED_COLUMNS = "iceberg.mr.selected.columns";
public static final String EXTERNAL_TABLE_PURGE = "external.table.purge";
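
The @deprecated notices removed above already name the replacements; as a quick reference, a sketch of the old-to-new key mapping (the exact key strings in the comments assume the "iceberg.catalog." prefix described in the Catalogs javadoc and an illustrative catalog name "my_catalog"):

import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.mr.InputFormatConfig;

public class LegacyKeyMigrationSketch {
  public static void main(String[] args) {
    String name = "my_catalog"; // hypothetical catalog name

    // iceberg.mr.catalog                           -> iceberg.catalog.my_catalog.type
    String typeKey =
        InputFormatConfig.catalogPropertyConfigKey(name, CatalogUtil.ICEBERG_CATALOG_TYPE);

    // iceberg.mr.catalog.hadoop.warehouse.location -> iceberg.catalog.my_catalog.warehouse
    String warehouseKey =
        InputFormatConfig.catalogPropertyConfigKey(name, CatalogProperties.WAREHOUSE_LOCATION);

    // iceberg.mr.catalog.loader.class              -> iceberg.catalog.my_catalog.catalog-impl
    String implKey =
        InputFormatConfig.catalogPropertyConfigKey(name, CatalogProperties.CATALOG_IMPL);

    System.out.println(typeKey + "\n" + warehouseKey + "\n" + implKey);
  }
}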
diff --git a/mr/src/test/java/org/apache/iceberg/mr/TestCatalogs.java b/mr/src/test/java/org/apache/iceberg/mr/TestCatalogs.java
index 47b633528cf9..3ad68512e684 100644
--- a/mr/src/test/java/org/apache/iceberg/mr/TestCatalogs.java
+++ b/mr/src/test/java/org/apache/iceberg/mr/TestCatalogs.java
@@ -64,7 +64,7 @@ public void before() {
@Test
public void testLoadTableFromLocation() throws IOException {
- conf.set(InputFormatConfig.CATALOG, Catalogs.LOCATION);
+ conf.set(CatalogUtil.ICEBERG_CATALOG_TYPE, Catalogs.LOCATION);
AssertHelpers.assertThrows(
"Should complain about table location not set",
IllegalArgumentException.class,
@@ -109,7 +109,7 @@ public void testCreateDropTableToLocation() throws IOException {
"schema not set",
() -> Catalogs.createTable(conf, missingSchema));
- conf.set(InputFormatConfig.CATALOG, Catalogs.LOCATION);
+ conf.set(CatalogUtil.ICEBERG_CATALOG_TYPE, Catalogs.LOCATION);
Properties missingLocation = new Properties();
missingLocation.put(InputFormatConfig.TABLE_SCHEMA, SchemaParser.toJson(SCHEMA));
AssertHelpers.assertThrows(
@@ -211,60 +211,6 @@ public void testCreateDropTableToCatalog() throws IOException {
() -> Catalogs.loadTable(conf, dropProperties));
}
- @Test
- public void testLegacyLoadCatalogDefault() {
- Optional<Catalog> defaultCatalog = Catalogs.loadCatalog(conf, null);
- Assert.assertTrue(defaultCatalog.isPresent());
- Assertions.assertThat(defaultCatalog.get()).isInstanceOf(HiveCatalog.class);
- Assert.assertTrue(Catalogs.hiveCatalog(conf, new Properties()));
- }
-
- @Test
- public void testLegacyLoadCatalogHive() {
- conf.set(InputFormatConfig.CATALOG, CatalogUtil.ICEBERG_CATALOG_TYPE_HIVE);
- Optional<Catalog> hiveCatalog = Catalogs.loadCatalog(conf, null);
- Assert.assertTrue(hiveCatalog.isPresent());
- Assertions.assertThat(hiveCatalog.get()).isInstanceOf(HiveCatalog.class);
- Assert.assertTrue(Catalogs.hiveCatalog(conf, new Properties()));
- }
-
- @Test
- public void testLegacyLoadCatalogHadoop() {
- conf.set(InputFormatConfig.CATALOG, CatalogUtil.ICEBERG_CATALOG_TYPE_HADOOP);
- conf.set(InputFormatConfig.HADOOP_CATALOG_WAREHOUSE_LOCATION, "/tmp/mylocation");
- Optional<Catalog> hadoopCatalog = Catalogs.loadCatalog(conf, null);
- Assert.assertTrue(hadoopCatalog.isPresent());
- Assertions.assertThat(hadoopCatalog.get()).isInstanceOf(HadoopCatalog.class);
- Assert.assertFalse(Catalogs.hiveCatalog(conf, new Properties()));
- }
-
- @Test
- public void testLegacyLoadCatalogCustom() {
- conf.set(InputFormatConfig.CATALOG_LOADER_CLASS, CustomHadoopCatalog.class.getName());
- conf.set(InputFormatConfig.HADOOP_CATALOG_WAREHOUSE_LOCATION, "/tmp/mylocation");
- Optional<Catalog> customHadoopCatalog = Catalogs.loadCatalog(conf, null);
- Assert.assertTrue(customHadoopCatalog.isPresent());
- Assertions.assertThat(customHadoopCatalog.get()).isInstanceOf(CustomHadoopCatalog.class);
- Assert.assertFalse(Catalogs.hiveCatalog(conf, new Properties()));
- }
-
- @Test
- public void testLegacyLoadCatalogLocation() {
- conf.set(InputFormatConfig.CATALOG, Catalogs.LOCATION);
- Assert.assertFalse(Catalogs.loadCatalog(conf, null).isPresent());
- Assert.assertFalse(Catalogs.hiveCatalog(conf, new Properties()));
- }
-
- @Test
- public void testLegacyLoadCatalogUnknown() {
- conf.set(InputFormatConfig.CATALOG, "fooType");
- AssertHelpers.assertThrows(
- "should complain about catalog not supported",
- UnsupportedOperationException.class,
- "Unknown catalog type",
- () -> Catalogs.loadCatalog(conf, null));
- }
-
@Test
public void testLoadCatalogDefault() {
String catalogName = "barCatalog";
@@ -290,21 +236,6 @@ public void testLoadCatalogHive() {
Assert.assertTrue(Catalogs.hiveCatalog(conf, properties));
}
- @Test
- public void testLegacyLoadCustomCatalogWithHiveCatalogTypeSet() {
- String catalogName = "barCatalog";
- conf.set(
- InputFormatConfig.catalogPropertyConfigKey(catalogName, CatalogUtil.ICEBERG_CATALOG_TYPE),
- CatalogUtil.ICEBERG_CATALOG_TYPE_HIVE);
- conf.set(InputFormatConfig.CATALOG_LOADER_CLASS, CustomHadoopCatalog.class.getName());
- conf.set(InputFormatConfig.HADOOP_CATALOG_WAREHOUSE_LOCATION, "/tmp/mylocation");
- AssertHelpers.assertThrows(
- "Should complain about both configs being set",
- IllegalArgumentException.class,
- "both type and catalog-impl are set",
- () -> Catalogs.loadCatalog(conf, catalogName));
- }
-
@Test
public void testLoadCatalogHadoop() {
String catalogName = "barCatalog";
@@ -325,23 +256,6 @@ public void testLoadCatalogHadoop() {
Assert.assertFalse(Catalogs.hiveCatalog(conf, properties));
}
- @Test
- public void testLoadCatalogHadoopWithLegacyWarehouseLocation() {
- String catalogName = "barCatalog";
- conf.set(
- InputFormatConfig.catalogPropertyConfigKey(catalogName, CatalogUtil.ICEBERG_CATALOG_TYPE),
- CatalogUtil.ICEBERG_CATALOG_TYPE_HADOOP);
- conf.set(InputFormatConfig.HADOOP_CATALOG_WAREHOUSE_LOCATION, "/tmp/mylocation");
- Optional<Catalog> hadoopCatalog = Catalogs.loadCatalog(conf, catalogName);
- Assert.assertTrue(hadoopCatalog.isPresent());
- Assertions.assertThat(hadoopCatalog.get()).isInstanceOf(HadoopCatalog.class);
- Assert.assertEquals(
- "HadoopCatalog{name=barCatalog, location=/tmp/mylocation}", hadoopCatalog.get().toString());
- Properties properties = new Properties();
- properties.put(InputFormatConfig.CATALOG_NAME, catalogName);
- Assert.assertFalse(Catalogs.hiveCatalog(conf, properties));
- }
-
@Test
public void testLoadCatalogCustom() {
String catalogName = "barCatalog";
diff --git a/mr/src/test/java/org/apache/iceberg/mr/TestIcebergInputFormats.java b/mr/src/test/java/org/apache/iceberg/mr/TestIcebergInputFormats.java
index b8199927d052..c16ab18934b0 100644
--- a/mr/src/test/java/org/apache/iceberg/mr/TestIcebergInputFormats.java
+++ b/mr/src/test/java/org/apache/iceberg/mr/TestIcebergInputFormats.java
@@ -105,7 +105,7 @@ public class TestIcebergInputFormats {
@Before
public void before() throws IOException {
conf = new Configuration();
- conf.set(InputFormatConfig.CATALOG, Catalogs.LOCATION);
+ conf.set(CatalogUtil.ICEBERG_CATALOG_TYPE, Catalogs.LOCATION);
HadoopTables tables = new HadoopTables(conf);
File location = temp.newFolder(testInputFormat.name(), fileFormat.name());
diff --git a/mr/src/test/java/org/apache/iceberg/mr/TestInputFormatReaderDeletes.java b/mr/src/test/java/org/apache/iceberg/mr/TestInputFormatReaderDeletes.java
index 45b74f897cb7..73b01da519a3 100644
--- a/mr/src/test/java/org/apache/iceberg/mr/TestInputFormatReaderDeletes.java
+++ b/mr/src/test/java/org/apache/iceberg/mr/TestInputFormatReaderDeletes.java
@@ -24,6 +24,7 @@
import java.util.stream.Collectors;
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.BaseTable;
+import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.FileFormat;
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.Schema;
@@ -64,7 +65,7 @@ public static Object[][] parameters() {
@Before
@Override
public void writeTestDataFile() throws IOException {
- conf.set(InputFormatConfig.CATALOG, Catalogs.LOCATION);
+ conf.set(CatalogUtil.ICEBERG_CATALOG_TYPE, Catalogs.LOCATION);
super.writeTestDataFile();
}
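
The two test changes above cover the location-based (HadoopTables) path: the global CatalogUtil.ICEBERG_CATALOG_TYPE key now carries the "location" marker that iceberg.mr.catalog used to. A sketch of how a location-based table would be addressed under the new config, assuming the table already exists at the given (hypothetical) path and that Catalogs.loadTable(Configuration, Properties) is used as in the tests:

import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.Table;
import org.apache.iceberg.mr.Catalogs;

public class LocationBasedTableSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();

    // What the updated tests do: mark the name-less catalog as location based,
    // which routes table access through HadoopTables instead of a Catalog.
    conf.set(CatalogUtil.ICEBERG_CATALOG_TYPE, Catalogs.LOCATION);

    Properties props = new Properties();
    props.put(Catalogs.LOCATION, "file:///tmp/my_table"); // hypothetical existing table location

    Table table = Catalogs.loadTable(conf, props);
    System.out.println(table.location());
  }
}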