Merged
@@ -37,8 +37,7 @@ public class SparkEnvCommand implements CommandMarker {
   public static Map<String, String> env = new HashMap<String, String>();
 
   @CliCommand(value = "set", help = "Set spark launcher env to cli")
-  public void setEnv(@CliOption(key = {"conf"}, help = "Env config to be set") final String confMap)
-      throws IllegalArgumentException {
+  public void setEnv(@CliOption(key = {"conf"}, help = "Env config to be set") final String confMap) {
     String[] map = confMap.split("=");
     if (map.length != 2) {
       throw new IllegalArgumentException("Illegal set parameter, please use like [set --conf SPARK_HOME=/usr/etc/spark]");
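Note: every hunk in this change follows the same pattern: a throws clause naming an unchecked exception is removed from a method signature. Java only requires checked exceptions to be declared; IllegalArgumentException extends RuntimeException, and the Hudi-specific exceptions in the hunks below appear to be RuntimeException subclasses as well, so the clauses had no effect on callers. A minimal, self-contained sketch of the principle (hypothetical class, not part of this PR):

public class EnvParser {
  // No "throws IllegalArgumentException" is required (or useful) here:
  // unchecked exceptions propagate whether or not they are declared,
  // and callers are never forced to catch them.
  public static String[] parse(String confMap) {
    String[] kv = confMap.split("=");
    if (kv.length != 2) {
      throw new IllegalArgumentException("expected KEY=VALUE, got: " + confMap);
    }
    return kv;
  }

  public static void main(String[] args) {
    System.out.println(EnvParser.parse("SPARK_HOME=/usr/etc/spark")[1]);
  }
}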
@@ -143,15 +143,13 @@ public Object convertToAvro(Object value, String name, Schema schema) {
       return res.getRight();
     }
 
-    protected abstract Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-        throws HoodieJsonToAvroConversionException;
+    protected abstract Pair<Boolean, Object> convert(Object value, String name, Schema schema);
   }
 
   private static JsonToAvroFieldProcessor generateBooleanTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         if (value instanceof Boolean) {
           return Pair.of(true, value);
         }
@@ -163,8 +161,7 @@ public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
   private static JsonToAvroFieldProcessor generateIntTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         if (value instanceof Number) {
           return Pair.of(true, ((Number) value).intValue());
         } else if (value instanceof String) {
@@ -178,8 +175,7 @@ public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
   private static JsonToAvroFieldProcessor generateDoubleTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         if (value instanceof Number) {
           return Pair.of(true, ((Number) value).doubleValue());
         } else if (value instanceof String) {
@@ -193,8 +189,7 @@ public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
   private static JsonToAvroFieldProcessor generateFloatTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         if (value instanceof Number) {
           return Pair.of(true, ((Number) value).floatValue());
         } else if (value instanceof String) {
@@ -208,8 +203,7 @@ public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
   private static JsonToAvroFieldProcessor generateLongTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         if (value instanceof Number) {
           return Pair.of(true, ((Number) value).longValue());
         } else if (value instanceof String) {
@@ -223,8 +217,7 @@ public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
   private static JsonToAvroFieldProcessor generateStringTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         return Pair.of(true, value.toString());
       }
     };
@@ -233,8 +226,7 @@ public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
   private static JsonToAvroFieldProcessor generateBytesTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         return Pair.of(true, value.toString().getBytes());
       }
     };
@@ -243,8 +235,7 @@ public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
   private static JsonToAvroFieldProcessor generateFixedTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         byte[] src = value.toString().getBytes();
         byte[] dst = new byte[schema.getFixedSize()];
         System.arraycopy(src, 0, dst, 0, Math.min(schema.getFixedSize(), src.length));
@@ -256,8 +247,7 @@ public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
   private static JsonToAvroFieldProcessor generateEnumTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         if (schema.getEnumSymbols().contains(value.toString())) {
           return Pair.of(true, new GenericData.EnumSymbol(schema, value.toString()));
         }
@@ -270,8 +260,7 @@ public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
   private static JsonToAvroFieldProcessor generateRecordTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         GenericRecord result = new GenericData.Record(schema);
         return Pair.of(true, convertJsonToAvro((Map<String, Object>) value, schema));
       }
@@ -281,8 +270,7 @@ public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
   private static JsonToAvroFieldProcessor generateArrayTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
        Schema elementSchema = schema.getElementType();
         List<Object> listRes = new ArrayList<>();
         for (Object v : (List) value) {
@@ -296,8 +284,7 @@ public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
   private static JsonToAvroFieldProcessor generateMapTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema schema) {
         Schema valueSchema = schema.getValueType();
         Map<String, Object> mapRes = new HashMap<>();
         for (Map.Entry<String, Object> v : ((Map<String, Object>) value).entrySet()) {
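Aside on the converter hunks above: every handler implements the same contract, returning Pair.of(true, converted) when it recognizes the incoming JSON value, so the caller can distinguish a failed conversion from a legitimately null result. A standalone sketch of that contract for the int case (the body of the String branch is truncated in the hunk; parsing via Integer.valueOf is an assumption):

import org.apache.commons.lang3.tuple.Pair;

class IntHandlerSketch {
  // left = "did this handler recognize the value", right = the converted value.
  static Pair<Boolean, Object> convertInt(Object value) {
    if (value instanceof Number) {
      return Pair.of(true, ((Number) value).intValue());
    } else if (value instanceof String) {
      return Pair.of(true, Integer.valueOf((String) value)); // assumed parsing
    }
    return Pair.of(false, null);
  }
}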
@@ -84,7 +84,7 @@ public class HoodieTableMetaClient implements Serializable {
   private HoodieArchivedTimeline archivedTimeline;
   private ConsistencyGuardConfig consistencyGuardConfig = ConsistencyGuardConfig.newBuilder().build();
 
-  public HoodieTableMetaClient(Configuration conf, String basePath) throws TableNotFoundException {
+  public HoodieTableMetaClient(Configuration conf, String basePath) {
     // Do not load any timeline by default
     this(conf, basePath, false);
   }
@@ -104,8 +104,7 @@ public HoodieTableMetaClient(Configuration conf, String basePath, boolean loadAc
   }
 
   public HoodieTableMetaClient(Configuration conf, String basePath, boolean loadActiveTimelineOnLoad,
-      ConsistencyGuardConfig consistencyGuardConfig, Option<TimelineLayoutVersion> layoutVersion, String payloadClassName)
-      throws TableNotFoundException {
+      ConsistencyGuardConfig consistencyGuardConfig, Option<TimelineLayoutVersion> layoutVersion, String payloadClassName) {
     LOG.info("Loading HoodieTableMetaClient from " + basePath);
     this.basePath = basePath;
     this.consistencyGuardConfig = consistencyGuardConfig;
@@ -83,7 +83,7 @@ private RocksDB getRocksDB() {
   /**
    * Initialized Rocks DB instance.
    */
-  private void init() throws HoodieException {
+  private void init() {
     try {
       LOG.info("DELETING RocksDB persisted at " + rocksDBBasePath);
       FileIOUtils.deleteDirectory(new File(rocksDBBasePath));
@@ -38,8 +38,7 @@ private static String getErrorMessage(String basePath) {
     return "Hoodie table not found in path " + basePath;
   }
 
-  public static void checkTableValidity(FileSystem fs, Path basePathDir, Path metaPathDir)
-      throws TableNotFoundException {
+  public static void checkTableValidity(FileSystem fs, Path basePathDir, Path metaPathDir) {
     // Check if the base path is found
     try {
       if (!fs.exists(basePathDir) || !fs.isDirectory(basePathDir)) {
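Dropping the self-referential throws clause on checkTableValidity changes nothing for callers: an unchecked TableNotFoundException can still be caught explicitly wherever a missing table is expected. A hedged sketch, assuming checkTableValidity is the static helper shown in this hunk and that fs, basePathDir, metaPathDir, and LOG are in scope:

try {
  TableNotFoundException.checkTableValidity(fs, basePathDir, metaPathDir);
} catch (TableNotFoundException e) {
  // No Hoodie table has been initialized at this path; handle or rethrow.
  LOG.warn("No Hoodie table found at " + basePathDir, e);
}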
@@ -498,12 +498,12 @@ public void updateHiveSQL(String s) {
    *
    * @param sql SQL statement to execute
    */
-  public CommandProcessorResponse updateHiveSQLUsingHiveDriver(String sql) throws HoodieHiveSyncException {
+  public CommandProcessorResponse updateHiveSQLUsingHiveDriver(String sql) {
     List<CommandProcessorResponse> responses = updateHiveSQLs(Collections.singletonList(sql));
     return responses.get(responses.size() - 1);
   }
 
-  private List<CommandProcessorResponse> updateHiveSQLs(List<String> sqls) throws HoodieHiveSyncException {
+  private List<CommandProcessorResponse> updateHiveSQLs(List<String> sqls) {
     SessionState ss = null;
     org.apache.hadoop.hive.ql.Driver hiveDriver = null;
     List<CommandProcessorResponse> responses = new ArrayList<>();
@@ -217,7 +217,7 @@ public static class CommandValidator implements IValueValidator<String> {
     List<String> validCommands = Arrays.asList("insert", "upsert", "bulkinsert");
 
     @Override
-    public void validate(String name, String value) throws ParameterException {
+    public void validate(String name, String value) {
       if (value == null || !validCommands.contains(value.toLowerCase())) {
         throw new ParameterException(
             String.format("Invalid command: value:%s: supported commands:%s", value, validCommands));
@@ -230,7 +230,7 @@ public static class FormatValidator implements IValueValidator<String> {
     List<String> validFormats = Collections.singletonList("parquet");
 
     @Override
-    public void validate(String name, String value) throws ParameterException {
+    public void validate(String name, String value) {
       if (value == null || !validFormats.contains(value)) {
         throw new ParameterException(
             String.format("Invalid format type: value:%s: supported formats:%s", value, validFormats));
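The validator hunks lean on a JCommander detail worth spelling out: IValueValidator.validate is declared with throws ParameterException, but ParameterException extends RuntimeException, and an overriding method may always declare fewer exceptions than the method it overrides, so the clause can simply be dropped. A minimal sketch with a hypothetical validator (not from this PR):

import com.beust.jcommander.IValueValidator;
import com.beust.jcommander.ParameterException;

public class PositiveIntegerValidator implements IValueValidator<Integer> {
  @Override
  public void validate(String name, Integer value) { // no throws clause needed
    if (value == null || value <= 0) {
      throw new ParameterException(
          String.format("%s must be a positive integer, got: %s", name, value));
    }
  }
}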
@@ -90,7 +90,7 @@ public void run(JavaSparkContext jsc) throws UnknownHostException {
     Preconditions.checkArgument(gotMessages.equals(messages), "Got expected reply from Server");
   }
 
-  public String sendRequest(String driverHost, int port) throws RuntimeException {
+  public String sendRequest(String driverHost, int port) {
     String url = String.format("http://%s:%d/", driverHost, port);
     try (CloseableHttpClient client = HttpClientBuilder.create().build()) {
 