diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieClusteringJob.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieClusteringJob.java
index 5cb6b9bbb150d..ef7de13b34f37 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieClusteringJob.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieClusteringJob.java
@@ -29,6 +29,7 @@
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.StringUtils;
 import org.apache.hudi.config.HoodieCleanConfig;
+import org.apache.hudi.exception.HoodieException;
 import org.apache.hudi.table.HoodieSparkTable;
 
 import com.beust.jcommander.JCommander;
@@ -146,19 +147,17 @@ public static void main(String[] args) {
 
     if (cfg.help || args.length == 0) {
       cmd.usage();
-      System.exit(1);
+      throw new HoodieException("Clustering failed for basePath: " + cfg.basePath);
     }
 
     final JavaSparkContext jsc = UtilHelpers.buildSparkContext("clustering-" + cfg.tableName, cfg.sparkMaster, cfg.sparkMemory);
-    HoodieClusteringJob clusteringJob = new HoodieClusteringJob(jsc, cfg);
-    int result = clusteringJob.cluster(cfg.retry);
+    int result = new HoodieClusteringJob(jsc, cfg).cluster(cfg.retry);
     String resultMsg = String.format("Clustering with basePath: %s, tableName: %s, runningMode: %s",
         cfg.basePath, cfg.tableName, cfg.runningMode);
-    if (result == -1) {
-      LOG.error(resultMsg + " failed");
-    } else {
-      LOG.info(resultMsg + " success");
+    if (result != 0) {
+      throw new HoodieException(resultMsg + " failed");
     }
+    LOG.info(resultMsg + " success");
     jsc.stop();
   }
 
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java
index c8bdf0da3a084..74229be7ca677 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java
@@ -172,18 +172,12 @@ public static void main(String[] args) {
       throw new HoodieException("Fail to run compaction for " + cfg.tableName + ", return code: " + 1);
     }
     final JavaSparkContext jsc = UtilHelpers.buildSparkContext("compactor-" + cfg.tableName, cfg.sparkMaster, cfg.sparkMemory);
-    int ret = 0;
-    try {
-      ret = new HoodieCompactor(jsc, cfg).compact(cfg.retry);
-    } catch (Throwable throwable) {
-      throw new HoodieException("Fail to run compaction for " + cfg.tableName + ", return code: " + ret, throwable);
-    } finally {
-      jsc.stop();
-    }
-
+    int ret = new HoodieCompactor(jsc, cfg).compact(cfg.retry);
     if (ret != 0) {
       throw new HoodieException("Fail to run compaction for " + cfg.tableName + ", return code: " + ret);
     }
+    LOG.info("Success to run compaction for " + cfg.tableName);
+    jsc.stop();
   }
 
   public int compact(int retry) {
diff --git a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieIndexer.java b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieIndexer.java
index 58c4eb46992f1..5c626a53ae7ef 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieIndexer.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieIndexer.java
@@ -149,7 +149,7 @@ public static void main(String[] args) {
 
     if (cfg.help || args.length == 0) {
       cmd.usage();
-      System.exit(1);
+      throw new HoodieException("Indexing failed for basePath : " + cfg.basePath);
     }
 
     final JavaSparkContext jsc = UtilHelpers.buildSparkContext("indexing-" + cfg.tableName, cfg.sparkMaster, cfg.sparkMemory);
@@ -157,11 +157,10 @@ public static void main(String[] args) {
     int result = indexer.start(cfg.retry);
     String resultMsg = String.format("Indexing with basePath: %s, tableName: %s, runningMode: %s",
         cfg.basePath, cfg.tableName, cfg.runningMode);
-    if (result == -1) {
-      LOG.error(resultMsg + " failed");
-    } else {
-      LOG.info(resultMsg + " success");
+    if (result != 0) {
+      throw new HoodieException(resultMsg + " failed");
     }
+    LOG.info(resultMsg + " success");
     jsc.stop();
   }
 