@@ -164,9 +164,9 @@ private void assertTableVersionFromPropertyFile(HoodieTableVersion expectedVersi
     Path propertyFile = new Path(metaClient.getMetaPath() + "/" + HoodieTableConfig.HOODIE_PROPERTIES_FILE);
     // Load the properties and verify
     FSDataInputStream fsDataInputStream = metaClient.getFs().open(propertyFile);
-    HoodieConfig hoodieConfig = HoodieConfig.create(fsDataInputStream);
+    HoodieConfig config = new HoodieConfig();
+    config.getProps().load(fsDataInputStream);
     fsDataInputStream.close();
-    assertEquals(Integer.toString(expectedVersion.versionCode()), hoodieConfig
-        .getString(HoodieTableConfig.VERSION));
+    assertEquals(Integer.toString(expectedVersion.versionCode()), config.getString(HoodieTableConfig.VERSION));
   }
 }
@@ -770,9 +770,9 @@ private void assertTableVersionFromPropertyFile(HoodieTableVersion expectedVersi
     Path propertyFile = new Path(metaClient.getMetaPath() + "/" + HoodieTableConfig.HOODIE_PROPERTIES_FILE);
     // Load the properties and verify
     FSDataInputStream fsDataInputStream = metaClient.getFs().open(propertyFile);
-    HoodieConfig hoodieConfig = HoodieConfig.create(fsDataInputStream);
+    HoodieConfig config = new HoodieConfig();
+    config.getProps().load(fsDataInputStream);
     fsDataInputStream.close();
-    assertEquals(Integer.toString(expectedVersion.versionCode()), hoodieConfig
-        .getString(HoodieTableConfig.VERSION));
+    assertEquals(Integer.toString(expectedVersion.versionCode()), config.getString(HoodieTableConfig.VERSION));
   }
 }
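Both test hunks above make the same substitution inside `assertTableVersionFromPropertyFile`: instead of the removed `HoodieConfig.create(FSDataInputStream)` factory, the properties file is loaded directly into a fresh `HoodieConfig`. A minimal sketch of the new pattern, assuming a `metaClient` set up as in these tests (the helper name `readTableVersion` is illustrative only, not part of the codebase):

```java
import java.io.IOException;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.common.config.HoodieConfig;
import org.apache.hudi.common.table.HoodieTableConfig;
import org.apache.hudi.common.table.HoodieTableMetaClient;

// Illustrative helper mirroring the updated test code: read hoodie.properties into a
// HoodieConfig without the removed HoodieConfig.create(FSDataInputStream) factory.
static String readTableVersion(HoodieTableMetaClient metaClient) throws IOException {
  Path propertyFile = new Path(metaClient.getMetaPath() + "/" + HoodieTableConfig.HOODIE_PROPERTIES_FILE);
  try (FSDataInputStream in = metaClient.getFs().open(propertyFile)) {
    HoodieConfig config = new HoodieConfig();
    // TypedProperties extends java.util.Properties, so Properties.load(InputStream) applies here.
    config.getProps().load(in);
    return config.getString(HoodieTableConfig.VERSION);
  }
}
```

The try-with-resources block stands in for the explicit `fsDataInputStream.close()` call used in the tests.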
@@ -18,15 +18,14 @@

 package org.apache.hudi.common.config;

-import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.ReflectionUtils;
 import org.apache.hudi.common.util.StringUtils;
 import org.apache.hudi.exception.HoodieException;

 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;

-import java.io.IOException;
 import java.io.Serializable;
 import java.lang.reflect.Modifier;
 import java.util.Arrays;
@@ -42,12 +41,6 @@ public class HoodieConfig implements Serializable {

   protected static final String CONFIG_VALUES_DELIMITER = ",";

-  public static HoodieConfig create(FSDataInputStream inputStream) throws IOException {
-    HoodieConfig config = new HoodieConfig();
-    config.props.load(inputStream);
-    return config;
-  }
-
   protected TypedProperties props;

   public HoodieConfig() {
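With the factory gone, `HoodieConfig` no longer compiles against Hadoop's `FSDataInputStream` (hence the two dropped imports above). Any caller that still wants the old one-liner can reproduce it with plain `java.io` types; a hypothetical caller-side equivalent (the name `fromInputStream` is not part of the class, and it uses the public `getProps()` accessor rather than the protected `props` field):

```java
import java.io.IOException;
import java.io.InputStream;

import org.apache.hudi.common.config.HoodieConfig;

// Hypothetical stand-in for the removed factory: two lines of standard
// java.util.Properties loading, with no Hadoop types involved.
static HoodieConfig fromInputStream(InputStream in) throws IOException {
  HoodieConfig config = new HoodieConfig();
  config.getProps().load(in);
  return config;
}
```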
hudi-kafka-connect/README.md (11 changes: 7 additions & 4 deletions)
@@ -36,10 +36,10 @@ After installing these dependencies, follow steps based on your requirement.

 ### 1 - Starting the environment

-For runtime dependencies, we encourage using the confluent HDFS connector jars. We have tested our setup with
-version `10.1.0`. Either use confluent-hub to install the connector or download it
-from [here](https://tinyurl.com/yb472f79). You can install the confluent-hub command-line tool by downloading Confluent
-Platform from [here](https://tinyurl.com/s2jjby53).
+For runtime dependencies, we encourage using the Confluent HDFS connector jars. We have tested our setup with version `10.1.0`
+(essentially, the `hadoop-common` dependency at version 2.10.1 is required, and it comes as part of the Confluent HDFS connector).
+Either use confluent-hub to install the connector or download it from [here](https://tinyurl.com/yb472f79).
+You can install the confluent-hub command-line tool by downloading Confluent Platform from [here](https://tinyurl.com/s2jjby53).

 Copy the entire folder to the classpath that will be used by the Hudi Kafka Connector.
@@ -145,6 +145,9 @@ successful running of the workers.
 cd $KAFKA_HOME
 ./bin/connect-distributed.sh $HUDI_DIR/hudi-kafka-connect/demo/connect-distributed.properties
 ```
+Ensure that the `plugin.path` property points to the location where all Connect plugins are installed.
+For this doc, it is `/usr/local/share/kafka/plugins`. If your plugins are installed at a different location,
+then change that property accordingly in `$HUDI_DIR/hudi-kafka-connect/demo/connect-distributed.properties`.

 ### 6 - To add the Hudi Sink to the Connector (delete it if you want to re-configure)

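The `plugin.path` note added above comes down to a single worker property; a sketch of the relevant line in `connect-distributed.properties`, assuming the plugin location used in this doc:

```properties
# In $HUDI_DIR/hudi-kafka-connect/demo/connect-distributed.properties:
# point plugin.path at the directory where the Connect plugins (including the Hudi sink) are installed.
plugin.path=/usr/local/share/kafka/plugins
```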
@@ -87,10 +87,11 @@ class TestUpgradeOrDowngradeProcedure extends HoodieSparkProcedureTestBase {
     val propertyFile = new Path(metaClient.getMetaPath + "/" + HoodieTableConfig.HOODIE_PROPERTIES_FILE)
     // Load the properties and verify
     val fsDataInputStream = metaClient.getFs.open(propertyFile)
-    val hoodieConfig = HoodieConfig.create(fsDataInputStream)
+    val config = new HoodieConfig
+    config.getProps.load(fsDataInputStream)
     fsDataInputStream.close()
     assertResult(Integer.toString(versionCode)) {
-      hoodieConfig.getString(HoodieTableConfig.VERSION)
+      config.getString(HoodieTableConfig.VERSION)
     }
   }
 }