
Commit c350d05

umehrot2, codope, and vinothchandar authored
Restore 0.8.0 config keys with deprecated annotation (apache#3506)
Co-authored-by: Sagar Sumit <[email protected]>
Co-authored-by: Vinoth Chandar <[email protected]>
1 parent 37c29e7 commit c350d05
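
Across the 137 touched files the change follows one pattern: each config constant that was renamed for 0.9.0 keeps its 0.8.0 name as a deprecated alias resolving to the same underlying key, while internal call sites (the diffs below) move to the new names. The following is a minimal, self-contained sketch of that aliasing pattern; the ConfigProperty class here is a simplified stand-in for Hudi's real one, and "hoodie.table.name" is the actual key string behind TBL_NAME:

public class HoodieWriteConfigSketch {

  // Simplified stand-in for Hudi's ConfigProperty: a typed key with a default.
  static final class ConfigProperty<T> {
    private final String key;
    private final T defaultValue;

    ConfigProperty(String key, T defaultValue) {
      this.key = key;
      this.defaultValue = defaultValue;
    }

    String key() {
      return key;
    }

    T defaultValue() {
      return defaultValue;
    }
  }

  // New 0.9.0-style name.
  public static final ConfigProperty<String> TBL_NAME =
      new ConfigProperty<>("hoodie.table.name", null);

  // 0.8.0 name restored as a deprecated alias; both constants resolve to the
  // same underlying key, so code written against either name keeps compiling.
  @Deprecated
  public static final ConfigProperty<String> TABLE_NAME = TBL_NAME;

  public static void main(String[] args) {
    System.out.println(TABLE_NAME.key().equals(TBL_NAME.key())); // prints: true
  }
}
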

File tree: 137 files changed, +3460 −1527 lines changed


docker/demo/sparksql-incremental.commands

+2 −2

@@ -42,7 +42,7 @@ spark.sql("select key, `_hoodie_partition_path` as datestr, symbol, ts, open, cl
   option(DataSourceWriteOptions.RECORDKEY_FIELD.key(), "key").
   option(DataSourceWriteOptions.PARTITIONPATH_FIELD.key(), "datestr").
   option(DataSourceWriteOptions.PRECOMBINE_FIELD.key(), "ts").
-  option(HoodieWriteConfig.TABLE_NAME.key(), "stock_ticks_derived_mor").
+  option(HoodieWriteConfig.TBL_NAME.key(), "stock_ticks_derived_mor").
   option(DataSourceWriteOptions.HIVE_TABLE.key(), "stock_ticks_derived_mor").
   option(DataSourceWriteOptions.HIVE_DATABASE.key(), "default").
   option(DataSourceWriteOptions.HIVE_URL.key(), "jdbc:hive2://hiveserver:10000").
@@ -74,7 +74,7 @@ spark.sql("select key, `_hoodie_partition_path` as datestr, symbol, ts, open, cl
   option(DataSourceWriteOptions.RECORDKEY_FIELD.key(), "key").
   option(DataSourceWriteOptions.PARTITIONPATH_FIELD.key(), "datestr").
   option(DataSourceWriteOptions.PRECOMBINE_FIELD.key(), "ts").
-  option(HoodieWriteConfig.TABLE_NAME.key(), "stock_ticks_derived_mor_bs").
+  option(HoodieWriteConfig.TBL_NAME.key(), "stock_ticks_derived_mor_bs").
   option(DataSourceWriteOptions.HIVE_TABLE.key(), "stock_ticks_derived_mor_bs").
   option(DataSourceWriteOptions.HIVE_DATABASE.key(), "default").
   option(DataSourceWriteOptions.HIVE_URL.key(), "jdbc:hive2://hiveserver:10000").
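
For reference, a hedged Java equivalent of the Spark-SQL write above, using the literal key strings behind the renamed constants; the Dataset df and the output path are assumed for illustration:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;

public class DerivedTableWriter {
  // Sketch only: the option keys are the literal strings behind the
  // DataSourceWriteOptions and HoodieWriteConfig constants used in the diff.
  static void writeDerived(Dataset<Row> df) {
    df.write().format("hudi")
        .option("hoodie.datasource.write.recordkey.field", "key")
        .option("hoodie.datasource.write.partitionpath.field", "datestr")
        .option("hoodie.datasource.write.precombine.field", "ts")
        // Key behind HoodieWriteConfig.TBL_NAME (and the deprecated TABLE_NAME).
        .option("hoodie.table.name", "stock_ticks_derived_mor")
        .mode(SaveMode.Append)
        .save("/tmp/hudi/stock_ticks_derived_mor"); // assumed output path
  }
}
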

hudi-cli/src/main/java/org/apache/hudi/cli/commands/HoodieLogFileCommand.java

+2 −2

@@ -204,10 +204,10 @@ public String showLogFileRecords(
           .getCommitTimeline().lastInstant().get().getTimestamp())
       .withReadBlocksLazily(
           Boolean.parseBoolean(
-              HoodieCompactionConfig.COMPACTION_LAZY_BLOCK_READ_ENABLED.defaultValue()))
+              HoodieCompactionConfig.COMPACTION_LAZY_BLOCK_READ_ENABLE.defaultValue()))
       .withReverseReader(
           Boolean.parseBoolean(
-              HoodieCompactionConfig.COMPACTION_REVERSE_LOG_READ_ENABLED.defaultValue()))
+              HoodieCompactionConfig.COMPACTION_REVERSE_LOG_READ_ENABLE.defaultValue()))
       .withBufferSize(HoodieMemoryConfig.MAX_DFS_STREAM_BUFFER_SIZE.defaultValue())
       .withMaxMemorySizeInBytes(
           HoodieMemoryConfig.DEFAULT_MAX_MEMORY_FOR_SPILLABLE_MAP_IN_BYTES)

hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java

+8 −8

@@ -18,8 +18,8 @@

 package org.apache.hudi.cli.commands;

-import org.apache.hudi.cli.DeDupeType;
 import org.apache.hudi.DataSourceWriteOptions;
+import org.apache.hudi.cli.DeDupeType;
 import org.apache.hudi.cli.DedupeSparkJob;
 import org.apache.hudi.cli.utils.SparkUtil;
 import org.apache.hudi.client.SparkRDDWriteClient;
@@ -361,17 +361,17 @@ private static int doBootstrap(JavaSparkContext jsc, String tableName, String ta
     TypedProperties properties = propsFilePath == null ? UtilHelpers.buildProperties(configs)
         : UtilHelpers.readConfig(FSUtils.getFs(propsFilePath, jsc.hadoopConfiguration()), new Path(propsFilePath), configs).getConfig();

-    properties.setProperty(HoodieBootstrapConfig.BOOTSTRAP_BASE_PATH.key(), sourcePath);
+    properties.setProperty(HoodieBootstrapConfig.BASE_PATH.key(), sourcePath);

-    if (!StringUtils.isNullOrEmpty(keyGenerator) && KeyGeneratorType.getNames().contains(keyGenerator.toUpperCase(Locale.ROOT))) {
-      properties.setProperty(HoodieBootstrapConfig.BOOTSTRAP_KEYGEN_TYPE.key(), keyGenerator.toUpperCase(Locale.ROOT));
+    if (!StringUtils.isNullOrEmpty(keyGenerator) && KeyGeneratorType.getNames().contains(keyGenerator.toUpperCase(Locale.ROOT))) {
+      properties.setProperty(HoodieBootstrapConfig.KEYGEN_TYPE.key(), keyGenerator.toUpperCase(Locale.ROOT));
     } else {
-      properties.setProperty(HoodieBootstrapConfig.BOOTSTRAP_KEYGEN_CLASS.key(), keyGenerator);
+      properties.setProperty(HoodieBootstrapConfig.KEYGEN_CLASS_NAME.key(), keyGenerator);
     }

-    properties.setProperty(HoodieBootstrapConfig.FULL_BOOTSTRAP_INPUT_PROVIDER.key(), fullBootstrapInputProvider);
-    properties.setProperty(HoodieBootstrapConfig.BOOTSTRAP_PARALLELISM.key(), parallelism);
-    properties.setProperty(HoodieBootstrapConfig.BOOTSTRAP_MODE_SELECTOR.key(), selectorClass);
+    properties.setProperty(HoodieBootstrapConfig.FULL_BOOTSTRAP_INPUT_PROVIDER_CLASS_NAME.key(), fullBootstrapInputProvider);
+    properties.setProperty(HoodieBootstrapConfig.PARALLELISM_VALUE.key(), parallelism);
+    properties.setProperty(HoodieBootstrapConfig.MODE_SELECTOR_CLASS_NAME.key(), selectorClass);
     properties.setProperty(DataSourceWriteOptions.RECORDKEY_FIELD().key(), recordKeyCols);
     properties.setProperty(DataSourceWriteOptions.PARTITIONPATH_FIELD().key(), partitionFields);

hudi-cli/src/main/scala/org/apache/hudi/cli/SparkHelpers.scala

+4 −5

@@ -25,8 +25,7 @@ import org.apache.hudi.avro.HoodieAvroWriteSupport
 import org.apache.hudi.client.SparkTaskContextSupplier
 import org.apache.hudi.common.HoodieJsonPayload
 import org.apache.hudi.common.bloom.{BloomFilter, BloomFilterFactory}
-import org.apache.hudi.common.model.HoodieFileFormat
-import org.apache.hudi.common.model.HoodieRecord
+import org.apache.hudi.common.model.{HoodieFileFormat, HoodieRecord}
 import org.apache.hudi.common.util.BaseFileUtils
 import org.apache.hudi.config.{HoodieIndexConfig, HoodieStorageConfig}
 import org.apache.hudi.io.storage.{HoodieAvroParquetConfig, HoodieParquetWriter}
@@ -43,10 +42,10 @@ object SparkHelpers {
   def skipKeysAndWriteNewFile(instantTime: String, fs: FileSystem, sourceFile: Path, destinationFile: Path, keysToSkip: Set[String]) {
     val sourceRecords = BaseFileUtils.getInstance(HoodieFileFormat.PARQUET).readAvroRecords(fs.getConf, sourceFile)
     val schema: Schema = sourceRecords.get(0).getSchema
-    val filter: BloomFilter = BloomFilterFactory.createBloomFilter(HoodieIndexConfig.BLOOM_FILTER_NUM_ENTRIES.defaultValue.toInt, HoodieIndexConfig.BLOOM_FILTER_FPP.defaultValue.toDouble,
-      HoodieIndexConfig.HOODIE_BLOOM_INDEX_FILTER_DYNAMIC_MAX_ENTRIES.defaultValue.toInt, HoodieIndexConfig.BLOOM_INDEX_FILTER_TYPE.defaultValue);
+    val filter: BloomFilter = BloomFilterFactory.createBloomFilter(HoodieIndexConfig.BLOOM_FILTER_NUM_ENTRIES_VALUE.defaultValue.toInt, HoodieIndexConfig.BLOOM_FILTER_FPP_VALUE.defaultValue.toDouble,
+      HoodieIndexConfig.BLOOM_INDEX_FILTER_DYNAMIC_MAX_ENTRIES.defaultValue.toInt, HoodieIndexConfig.BLOOM_FILTER_TYPE.defaultValue);
     val writeSupport: HoodieAvroWriteSupport = new HoodieAvroWriteSupport(new AvroSchemaConverter(fs.getConf).convert(schema), schema, org.apache.hudi.common.util.Option.of(filter))
-    val parquetConfig: HoodieAvroParquetConfig = new HoodieAvroParquetConfig(writeSupport, CompressionCodecName.GZIP, HoodieStorageConfig.PARQUET_BLOCK_SIZE_BYTES.defaultValue.toInt, HoodieStorageConfig.PARQUET_PAGE_SIZE_BYTES.defaultValue.toInt, HoodieStorageConfig.PARQUET_FILE_MAX_BYTES.defaultValue.toInt, fs.getConf, HoodieStorageConfig.PARQUET_COMPRESSION_RATIO.defaultValue.toDouble)
+    val parquetConfig: HoodieAvroParquetConfig = new HoodieAvroParquetConfig(writeSupport, CompressionCodecName.GZIP, HoodieStorageConfig.PARQUET_BLOCK_SIZE.defaultValue.toInt, HoodieStorageConfig.PARQUET_PAGE_SIZE.defaultValue.toInt, HoodieStorageConfig.PARQUET_MAX_FILE_SIZE.defaultValue.toInt, fs.getConf, HoodieStorageConfig.PARQUET_COMPRESSION_RATIO_FRACTION.defaultValue.toDouble)

     // Add current classLoad for config, if not will throw classNotFound of 'HoodieWrapperFileSystem'.
     parquetConfig.getHadoopConf().setClassLoader(Thread.currentThread.getContextClassLoader)

hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestHoodieLogFileCommand.java

+2 −2

@@ -208,10 +208,10 @@ public void testShowLogFileRecordsWithMerge() throws IOException, InterruptedExc
           HoodieMemoryConfig.DEFAULT_MAX_MEMORY_FOR_SPILLABLE_MAP_IN_BYTES)
       .withReadBlocksLazily(
           Boolean.parseBoolean(
-              HoodieCompactionConfig.COMPACTION_LAZY_BLOCK_READ_ENABLED.defaultValue()))
+              HoodieCompactionConfig.COMPACTION_LAZY_BLOCK_READ_ENABLE.defaultValue()))
       .withReverseReader(
           Boolean.parseBoolean(
-              HoodieCompactionConfig.COMPACTION_REVERSE_LOG_READ_ENABLED.defaultValue()))
+              HoodieCompactionConfig.COMPACTION_REVERSE_LOG_READ_ENABLE.defaultValue()))
       .withBufferSize(HoodieMemoryConfig.MAX_DFS_STREAM_BUFFER_SIZE.defaultValue())
       .withSpillableMapBasePath(HoodieMemoryConfig.SPILLABLE_MAP_BASE_PATH.defaultValue())
       .withDiskMapType(HoodieCommonConfig.SPILLABLE_DISK_MAP_TYPE.defaultValue())

hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestRepairsCommand.java

+1 −1

@@ -67,7 +67,7 @@ public void init() throws IOException {
     // Create table and connect
     new TableCommand().createTable(
         tablePath, tableName, HoodieTableType.COPY_ON_WRITE.name(),
-        HoodieTableConfig.HOODIE_ARCHIVELOG_FOLDER_PROP.defaultValue(), TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload");
+        HoodieTableConfig.ARCHIVELOG_FOLDER.defaultValue(), TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload");
   }

   /**

hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestUpgradeDowngradeCommand.java

+1 −1

@@ -112,6 +112,6 @@ private void assertTableVersionFromPropertyFile() throws IOException {
     HoodieConfig hoodieConfig = HoodieConfig.create(fsDataInputStream);
     fsDataInputStream.close();
     assertEquals(Integer.toString(HoodieTableVersion.ZERO.versionCode()), hoodieConfig
-        .getString(HoodieTableConfig.HOODIE_TABLE_VERSION_PROP));
+        .getString(HoodieTableConfig.VERSION));
   }
 }

hudi-client/hudi-client-common/src/main/java/org/apache/hudi/callback/client/http/HoodieWriteCommitHttpCallbackClient.java

+2 −2

@@ -79,7 +79,7 @@ public void send(String callbackMsg) {
   }

   private String getApiKey() {
-    return writeConfig.getString(HoodieWriteCommitCallbackConfig.CALLBACK_HTTP_API_KEY);
+    return writeConfig.getString(HoodieWriteCommitCallbackConfig.CALLBACK_HTTP_API_KEY_VALUE);
   }

   private String getUrl() {
@@ -97,7 +97,7 @@ private CloseableHttpClient getClient() {
   }

   private Integer getHttpTimeoutSeconds() {
-    return writeConfig.getInt(HoodieWriteCommitCallbackConfig.CALLBACK_HTTP_TIMEOUT_SECONDS);
+    return writeConfig.getInt(HoodieWriteCommitCallbackConfig.CALLBACK_HTTP_TIMEOUT_IN_SECONDS);
   }

   @Override

hudi-client/hudi-client-common/src/main/java/org/apache/hudi/callback/util/HoodieCommitCallbackFactory.java

+1 −1

@@ -39,7 +39,7 @@ public static HoodieWriteCommitCallback create(HoodieWriteConfig config) {
       return (HoodieWriteCommitCallback) instance;
     } else {
       throw new HoodieCommitCallbackException(String.format("The value of the config option %s can not be null or "
-          + "empty", HoodieWriteCommitCallbackConfig.CALLBACK_CLASS.key()));
+          + "empty", HoodieWriteCommitCallbackConfig.CALLBACK_CLASS_NAME.key()));
     }
   }