diff --git a/docker/demo/compaction-bootstrap.commands b/docker/demo/compaction-bootstrap.commands index 6c246be747124..a44a26ff35e14 100644 --- a/docker/demo/compaction-bootstrap.commands +++ b/docker/demo/compaction-bootstrap.commands @@ -1,19 +1,19 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. connect --path /user/hive/warehouse/stock_ticks_mor_bs compactions show all diff --git a/docker/demo/compaction.commands b/docker/demo/compaction.commands index a8baaff3ed33d..e8d7f39e6b4b4 100644 --- a/docker/demo/compaction.commands +++ b/docker/demo/compaction.commands @@ -1,19 +1,19 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. connect --path /user/hive/warehouse/stock_ticks_mor compactions show all diff --git a/docker/demo/sync-validate.commands b/docker/demo/sync-validate.commands index 32c334eee01ad..e629a049a346f 100644 --- a/docker/demo/sync-validate.commands +++ b/docker/demo/sync-validate.commands @@ -1,18 +1,18 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
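The header rewrite above (`#` → `//`) in the demo `.commands` scripts tracks the script syntax of Spring Shell 2, which this PR migrates to: its script runner appears to treat only `//`-prefixed lines as comments, so the old `#` header lines would presumably be fed to the shell as commands. A minimal sketch of such a script under that assumption, reusing commands that already appear in these demo files:

```
// Spring Shell 2 script: lines starting with "//" are ignored
connect --path /user/hive/warehouse/stock_ticks_mor
compactions show all
```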
connect --path /docker_hoodie_sync_valid_test commits sync --path /docker_hoodie_sync_valid_test_2 diff --git a/hudi-cli/hudi-cli.sh b/hudi-cli/hudi-cli.sh index bbfba85a8010e..df309ca0b8327 100755 --- a/hudi-cli/hudi-cli.sh +++ b/hudi-cli/hudi-cli.sh @@ -27,5 +27,5 @@ fi OTHER_JARS=`ls ${DIR}/target/lib/* | grep -v 'hudi-[^/]*jar' | tr '\n' ':'` -echo "Running : java -cp ${HADOOP_CONF_DIR}:${SPARK_CONF_DIR}:${HOODIE_JAR}:${OTHER_JARS}:${CLIENT_JAR} -DSPARK_CONF_DIR=${SPARK_CONF_DIR} -DHADOOP_CONF_DIR=${HADOOP_CONF_DIR} org.springframework.shell.Bootstrap $@" -java -cp ${HADOOP_CONF_DIR}:${SPARK_CONF_DIR}:${HOODIE_JAR}:${OTHER_JARS}:${CLIENT_JAR} -DSPARK_CONF_DIR=${SPARK_CONF_DIR} -DHADOOP_CONF_DIR=${HADOOP_CONF_DIR} org.springframework.shell.Bootstrap $@ +echo "Running : java -cp ${HADOOP_CONF_DIR}:${SPARK_CONF_DIR}:${HOODIE_JAR}:${OTHER_JARS}:${CLIENT_JAR} -DSPARK_CONF_DIR=${SPARK_CONF_DIR} -DHADOOP_CONF_DIR=${HADOOP_CONF_DIR} org.apache.hudi.cli.Main $@" +java -cp ${HADOOP_CONF_DIR}:${SPARK_CONF_DIR}:${HOODIE_JAR}:${OTHER_JARS}:${CLIENT_JAR} -DSPARK_CONF_DIR=${SPARK_CONF_DIR} -DHADOOP_CONF_DIR=${HADOOP_CONF_DIR} org.apache.hudi.cli.Main $@ diff --git a/hudi-cli/pom.xml b/hudi-cli/pom.xml index d283d17ea93e1..67e31c34de0fe 100644 --- a/hudi-cli/pom.xml +++ b/hudi-cli/pom.xml @@ -27,8 +27,7 @@ jar - 1.2.0.RELEASE - org.springframework.shell.Bootstrap + org.apache.hudi.cli.Main ${project.parent.basedir} @@ -130,8 +129,26 @@ - + + + org.springframework.boot + spring-boot-starter-test + test + + + org.springframework.boot + spring-boot-starter-logging + + + + + org.junit.platform + junit-platform-launcher + ${junit.platform.version} + test + + org.scala-lang @@ -213,7 +230,10 @@ org.apache.logging.log4j log4j-core - compile + + + org.apache.logging.log4j + log4j-api @@ -239,8 +259,7 @@ org.springframework.shell - spring-shell - ${spring.shell.version} + spring-shell-starter com.google.guava @@ -264,12 +283,24 @@ org.apache.hadoop hadoop-common + + + com.google.code.gson + gson + + org.apache.hadoop hadoop-hdfs + + com.google.code.gson + gson + 2.6.2 + + diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/HoodiePrompt.java b/hudi-cli/src/main/java/org/apache/hudi/cli/HoodiePrompt.java index 44405df5bfac6..347b81cdbf499 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/HoodiePrompt.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/HoodiePrompt.java @@ -20,37 +20,33 @@ import org.springframework.core.Ordered; import org.springframework.core.annotation.Order; -import org.springframework.shell.plugin.support.DefaultPromptProvider; +import org.springframework.shell.jline.PromptProvider; import org.springframework.stereotype.Component; +import org.jline.utils.AttributedString; + /** * This class deals with displaying prompt on CLI based on the state. 
*/ @Component @Order(Ordered.HIGHEST_PRECEDENCE) -public class HoodiePrompt extends DefaultPromptProvider { +public class HoodiePrompt implements PromptProvider { @Override - public String getPrompt() { + public AttributedString getPrompt() { if (HoodieCLI.tableMetadata != null) { String tableName = HoodieCLI.tableMetadata.getTableConfig().getTableName(); switch (HoodieCLI.state) { case INIT: - return "hudi->"; + return new AttributedString("hudi->"); case TABLE: - return "hudi:" + tableName + "->"; + return new AttributedString("hudi:" + tableName + "->"); case SYNC: - return "hudi:" + tableName + " <==> " + HoodieCLI.syncTableMetadata.getTableConfig().getTableName() + "->"; + return new AttributedString("hudi:" + tableName + " <==> " + HoodieCLI.syncTableMetadata.getTableConfig().getTableName() + "->"); default: - return "hudi:" + tableName + "->"; + return new AttributedString("hudi:" + tableName + "->"); } } - return "hudi->"; + return new AttributedString("hudi->"); } - - @Override - public String getProviderName() { - return "Hoodie provider"; - } - } diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/HoodieSplashScreen.java b/hudi-cli/src/main/java/org/apache/hudi/cli/HoodieSplashScreen.java deleted file mode 100644 index f2a458c196c94..0000000000000 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/HoodieSplashScreen.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hudi.cli; - -import org.springframework.core.Ordered; -import org.springframework.core.annotation.Order; -import org.springframework.shell.plugin.support.DefaultBannerProvider; -import org.springframework.shell.support.util.OsUtils; -import org.springframework.stereotype.Component; - -/** - * This class is responsible to print the splash screen at the start of the application. 
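The splash screen deleted below relied on Spring Shell 1's `DefaultBannerProvider`, which has no direct equivalent once the CLI runs as a Spring Boot application. A hypothetical sketch of how a custom banner could still be kept via Spring Boot's `Banner` hook (this PR itself shows no replacement; the class name and banner text here are illustrative, and a `banner.txt` resource would work as well):

```java
package org.apache.hudi.cli;

import org.springframework.boot.SpringApplication;

// Illustrative only: prints a fixed banner line in place of the removed
// HoodieSplashScreen ASCII art before starting the shell.
public class BannerDemo {
  public static void main(String[] args) {
    SpringApplication app = new SpringApplication(Main.class);
    app.setBanner((env, sourceClass, out) -> out.println("Apache Hudi CLI"));
    app.run(args);
  }
}
```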
- */ -@Component -@Order(Ordered.HIGHEST_PRECEDENCE) -public class HoodieSplashScreen extends DefaultBannerProvider { - - static { - System.out.println("HoodieSplashScreen loaded"); - } - - private static String screen = "===================================================================" + OsUtils.LINE_SEPARATOR - + "* ___ ___ *" + OsUtils.LINE_SEPARATOR - + "* /\\__\\ ___ /\\ \\ ___ *" + OsUtils.LINE_SEPARATOR - + "* / / / /\\__\\ / \\ \\ /\\ \\ *" + OsUtils.LINE_SEPARATOR - + "* / /__/ / / / / /\\ \\ \\ \\ \\ \\ *" + OsUtils.LINE_SEPARATOR - + "* / \\ \\ ___ / / / / / \\ \\__\\ / \\__\\ *" + OsUtils.LINE_SEPARATOR - + "* / /\\ \\ /\\__\\ / /__/ ___ / /__/ \\ |__| / /\\/__/ *" + OsUtils.LINE_SEPARATOR - + "* \\/ \\ \\/ / / \\ \\ \\ /\\__\\ \\ \\ \\ / / / /\\/ / / *" + OsUtils.LINE_SEPARATOR - + "* \\ / / \\ \\ / / / \\ \\ / / / \\ /__/ *" + OsUtils.LINE_SEPARATOR - + "* / / / \\ \\/ / / \\ \\/ / / \\ \\__\\ *" + OsUtils.LINE_SEPARATOR - + "* / / / \\ / / \\ / / \\/__/ *" + OsUtils.LINE_SEPARATOR - + "* \\/__/ \\/__/ \\/__/ Apache Hudi CLI *" + OsUtils.LINE_SEPARATOR - + "* *" + OsUtils.LINE_SEPARATOR - + "===================================================================" + OsUtils.LINE_SEPARATOR; - - @Override - public String getBanner() { - return screen; - } - - @Override - public String getVersion() { - return "1.0"; - } - - @Override - public String getWelcomeMessage() { - return "Welcome to Apache Hudi CLI. Please type help if you are looking for help. "; - } - - @Override - public String getProviderName() { - return "Hoodie Banner"; - } -} diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/Main.java b/hudi-cli/src/main/java/org/apache/hudi/cli/Main.java index e924be9e50f52..e98707800196e 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/Main.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/Main.java @@ -18,18 +18,19 @@ package org.apache.hudi.cli; -import org.springframework.shell.Bootstrap; +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; import java.io.IOException; /** - * Main class that delegates to Spring Shell's Bootstrap class in order to simplify debugging inside an IDE. + * Main class that starts the Spring Boot application hosting the Hudi CLI shell; kept as an explicit entry point to simplify debugging inside an IDE. 
*/ +@SpringBootApplication public class Main { public static void main(String[] args) throws IOException { System.out.println("Main called"); - new HoodieSplashScreen(); - Bootstrap.main(args); + SpringApplication.run(Main.class, args); } } diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ArchivedCommitsCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ArchivedCommitsCommand.java index 337d6e2a305c6..dcd6a2cf3c8e9 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ArchivedCommitsCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ArchivedCommitsCommand.java @@ -18,6 +18,11 @@ package org.apache.hudi.cli.commands; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.generic.IndexedRecord; +import org.apache.avro.specific.SpecificData; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.Path; import org.apache.hudi.avro.model.HoodieArchivedMetaEntry; import org.apache.hudi.avro.model.HoodieCommitMetadata; import org.apache.hudi.cli.HoodieCLI; @@ -32,16 +37,9 @@ import org.apache.hudi.common.table.timeline.HoodieTimeline; import org.apache.hudi.common.util.ClosableIterator; import org.apache.hudi.common.util.Option; - -import org.apache.avro.generic.GenericRecord; -import org.apache.avro.generic.IndexedRecord; -import org.apache.avro.specific.SpecificData; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.Path; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import java.io.IOException; import java.util.ArrayList; @@ -52,17 +50,17 @@ /** * CLI command to display archived commits and stats if available. 
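The command-class hunks that follow all apply one mechanical mapping: `@Component` plus `implements CommandMarker` becomes `@ShellComponent`, `@CliCommand(value, help)` becomes `@ShellMethod(key, value)`, and `@CliOption(key, unspecifiedDefaultValue, help)` becomes `@ShellOption(value, defaultValue, help)` with option names now carrying an explicit `--` prefix. Options declared without a `defaultValue` are mandatory in Spring Shell 2, which is why the old `mandatory = true` flags simply disappear. A minimal before/after sketch of the pattern (the command and option names are illustrative, not from this PR):

```java
// Before (Spring Shell 1.x, the style removed by this PR):
//
//   @Component
//   public class ExampleCommand implements CommandMarker {
//     @CliCommand(value = "example show", help = "Show example details")
//     public String show(
//         @CliOption(key = {"limit"}, unspecifiedDefaultValue = "-1",
//             help = "Limit rows") final Integer limit) { ... }
//   }

import org.springframework.shell.standard.ShellComponent;
import org.springframework.shell.standard.ShellMethod;
import org.springframework.shell.standard.ShellOption;

// After (Spring Shell 2.x): no marker interface, option keys carry "--".
@ShellComponent
public class ExampleCommand {

  @ShellMethod(key = "example show", value = "Show example details")
  public String show(
      @ShellOption(value = {"--limit"}, defaultValue = "-1",
          help = "Limit rows") final Integer limit) {
    return "showing up to " + limit + " rows";
  }
}
```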
*/ -@Component -public class ArchivedCommitsCommand implements CommandMarker { +@ShellComponent +public class ArchivedCommitsCommand { - @CliCommand(value = "show archived commit stats", help = "Read commits from archived files and show details") + @ShellMethod(key = "show archived commit stats", value = "Read commits from archived files and show details") public String showArchivedCommits( - @CliOption(key = {"archiveFolderPattern"}, help = "Archive Folder", unspecifiedDefaultValue = "") String folder, - @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly) + @ShellOption(value = {"--archiveFolderPattern"}, help = "Archive Folder", defaultValue = "") String folder, + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly) throws IOException { System.out.println("===============> Showing only " + limit + " archived commits <==============="); String basePath = HoodieCLI.getTableMetaClient().getBasePath(); @@ -128,15 +126,15 @@ public String showArchivedCommits( return HoodiePrintHelper.print(header, new HashMap<>(), sortByField, descending, limit, headerOnly, allStats); } - @CliCommand(value = "show archived commits", help = "Read commits from archived files and show details") + @ShellMethod(key = "show archived commits", value = "Read commits from archived files and show details") public String showCommits( - @CliOption(key = {"skipMetadata"}, help = "Skip displaying commit metadata", - unspecifiedDefaultValue = "true") boolean skipMetadata, - @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "10") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly) + @ShellOption(value = {"--skipMetadata"}, help = "Skip displaying commit metadata", + defaultValue = "true") boolean skipMetadata, + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "10") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly) throws IOException { System.out.println("===============> Showing only " + limit + " archived commits <==============="); diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/BootstrapCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/BootstrapCommand.java index 
f4ef55943cdf4..98cf9fc0d9067 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/BootstrapCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/BootstrapCommand.java @@ -30,13 +30,12 @@ import org.apache.hudi.common.table.HoodieTableMetaClient; import org.apache.hudi.exception.HoodieException; import org.apache.hudi.utilities.UtilHelpers; - import org.apache.spark.launcher.SparkLauncher; import org.apache.spark.util.Utils; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; +import scala.collection.JavaConverters; import java.io.IOException; import java.net.URISyntaxException; @@ -46,44 +45,41 @@ import java.util.List; import java.util.stream.Collectors; -import scala.collection.JavaConverters; - /** * CLI command to perform bootstrap action & display bootstrap index. */ -@Component -public class BootstrapCommand implements CommandMarker { +@ShellComponent +public class BootstrapCommand { - @CliCommand(value = "bootstrap run", help = "Run a bootstrap action for current Hudi table") + @ShellMethod(key = "bootstrap run", value = "Run a bootstrap action for current Hudi table") public String bootstrap( - @CliOption(key = {"srcPath"}, mandatory = true, help = "Bootstrap source data path of the table") final String srcPath, - @CliOption(key = {"targetPath"}, mandatory = true, - help = "Base path for the target hoodie table") final String targetPath, - @CliOption(key = {"tableName"}, mandatory = true, help = "Hoodie table name") final String tableName, - @CliOption(key = {"tableType"}, mandatory = true, help = "Hoodie table type") final String tableType, - @CliOption(key = {"rowKeyField"}, mandatory = true, help = "Record key columns for bootstrap data") final String rowKeyField, - @CliOption(key = {"partitionPathField"}, unspecifiedDefaultValue = "", + @ShellOption(value = {"--srcPath"}, help = "Bootstrap source data path of the table") final String srcPath, + @ShellOption(value = {"--targetPath"}, help = "Base path for the target hoodie table") final String targetPath, + @ShellOption(value = {"--tableName"}, help = "Hoodie table name") final String tableName, + @ShellOption(value = {"--tableType"}, help = "Hoodie table type") final String tableType, + @ShellOption(value = {"--rowKeyField"}, help = "Record key columns for bootstrap data") final String rowKeyField, + @ShellOption(value = {"--partitionPathField"}, defaultValue = "", help = "Partition fields for bootstrap source data") final String partitionPathField, - @CliOption(key = {"bootstrapIndexClass"}, unspecifiedDefaultValue = "org.apache.hudi.common.bootstrap.index.HFileBootstrapIndex", + @ShellOption(value = {"--bootstrapIndexClass"}, defaultValue = "org.apache.hudi.common.bootstrap.index.HFileBootstrapIndex", help = "Bootstrap Index Class") final String bootstrapIndexClass, - @CliOption(key = {"selectorClass"}, unspecifiedDefaultValue = "org.apache.hudi.client.bootstrap.selector.MetadataOnlyBootstrapModeSelector", + @ShellOption(value = {"--selectorClass"}, defaultValue = "org.apache.hudi.client.bootstrap.selector.MetadataOnlyBootstrapModeSelector", help = "Selector class for bootstrap") final String selectorClass, - @CliOption(key = {"keyGeneratorClass"}, 
unspecifiedDefaultValue = "org.apache.hudi.keygen.SimpleKeyGenerator", + @ShellOption(value = {"--keyGeneratorClass"}, defaultValue = "org.apache.hudi.keygen.SimpleKeyGenerator", help = "Key generator class for bootstrap") final String keyGeneratorClass, - @CliOption(key = {"fullBootstrapInputProvider"}, unspecifiedDefaultValue = "org.apache.hudi.bootstrap.SparkParquetBootstrapDataProvider", + @ShellOption(value = {"--fullBootstrapInputProvider"}, defaultValue = "org.apache.hudi.bootstrap.SparkParquetBootstrapDataProvider", help = "Class for Full bootstrap input provider") final String fullBootstrapInputProvider, - @CliOption(key = {"schemaProviderClass"}, unspecifiedDefaultValue = "", + @ShellOption(value = {"--schemaProviderClass"}, defaultValue = "", help = "SchemaProvider to attach schemas to bootstrap source data") final String schemaProviderClass, - @CliOption(key = {"payloadClass"}, unspecifiedDefaultValue = "org.apache.hudi.common.model.OverwriteWithLatestAvroPayload", + @ShellOption(value = {"--payloadClass"}, defaultValue = "org.apache.hudi.common.model.OverwriteWithLatestAvroPayload", help = "Payload Class") final String payloadClass, - @CliOption(key = {"parallelism"}, unspecifiedDefaultValue = "1500", help = "Bootstrap writer parallelism") final int parallelism, - @CliOption(key = {"sparkMaster"}, unspecifiedDefaultValue = "", help = "Spark Master") String master, - @CliOption(key = {"sparkMemory"}, unspecifiedDefaultValue = "4G", help = "Spark executor memory") final String sparkMemory, - @CliOption(key = {"enableHiveSync"}, unspecifiedDefaultValue = "false", help = "Enable Hive sync") final Boolean enableHiveSync, - @CliOption(key = {"propsFilePath"}, help = "path to properties file on localfs or dfs with configurations for hoodie client for importing", - unspecifiedDefaultValue = "") final String propsFilePath, - @CliOption(key = {"hoodieConfigs"}, help = "Any configuration that can be set in the properties file can be passed here in the form of an array", - unspecifiedDefaultValue = "") final String[] configs) + @ShellOption(value = {"--parallelism"}, defaultValue = "1500", help = "Bootstrap writer parallelism") final int parallelism, + @ShellOption(value = {"--sparkMaster"}, defaultValue = "", help = "Spark Master") String master, + @ShellOption(value = {"--sparkMemory"}, defaultValue = "4G", help = "Spark executor memory") final String sparkMemory, + @ShellOption(value = {"--enableHiveSync"}, defaultValue = "false", help = "Enable Hive sync") final Boolean enableHiveSync, + @ShellOption(value = {"--propsFilePath"}, help = "path to properties file on localfs or dfs with configurations for hoodie client for importing", + defaultValue = "") final String propsFilePath, + @ShellOption(value = {"--hoodieConfigs"}, help = "Any configuration that can be set in the properties file can be passed here in the form of an array", + defaultValue = "") final String[] configs) throws IOException, InterruptedException, URISyntaxException { String sparkPropertiesPath = @@ -106,14 +102,14 @@ public String bootstrap( return "Bootstrapped source data as Hudi dataset"; } - @CliCommand(value = "bootstrap index showmapping", help = "Show bootstrap index mapping") + @ShellMethod(key = "bootstrap index showmapping", value = "Show bootstrap index mapping") public String showBootstrapIndexMapping( - @CliOption(key = {"partitionPath"}, unspecifiedDefaultValue = "", help = "A valid partition path") String partitionPath, - @CliOption(key = {"fileIds"}, unspecifiedDefaultValue = "", help = "Valid 
fileIds split by comma") String fileIds, - @CliOption(key = {"limit"}, unspecifiedDefaultValue = "-1", help = "Limit rows to be displayed") Integer limit, - @CliOption(key = {"sortBy"}, unspecifiedDefaultValue = "", help = "Sorting Field") final String sortByField, - @CliOption(key = {"desc"}, unspecifiedDefaultValue = "false", help = "Ordering") final boolean descending, - @CliOption(key = {"headeronly"}, unspecifiedDefaultValue = "false", help = "Print Header Only") final boolean headerOnly) { + @ShellOption(value = {"--partitionPath"}, defaultValue = "", help = "A valid partition path") String partitionPath, + @ShellOption(value = {"--fileIds"}, defaultValue = "", help = "Valid fileIds split by comma") String fileIds, + @ShellOption(value = {"--limit"}, defaultValue = "-1", help = "Limit rows to be displayed") Integer limit, + @ShellOption(value = {"--sortBy"}, defaultValue = "", help = "Sorting Field") final String sortByField, + @ShellOption(value = {"--desc"}, defaultValue = "false", help = "Ordering") final boolean descending, + @ShellOption(value = {"--headeronly"}, defaultValue = "false", help = "Print Header Only") final boolean headerOnly) { if (partitionPath.isEmpty() && !fileIds.isEmpty()) { throw new IllegalStateException("PartitionPath is mandatory when passing fileIds."); @@ -151,7 +147,7 @@ public String showBootstrapIndexMapping( limit, headerOnly, rows); } - @CliCommand(value = "bootstrap index showpartitions", help = "Show bootstrap indexed partitions") + @ShellMethod(key = "bootstrap index showpartitions", value = "Show bootstrap indexed partitions") public String showBootstrapIndexPartitions() { BootstrapIndex.IndexReader indexReader = createBootstrapIndexReader(); diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CleansCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CleansCommand.java index 4e827dc562c4a..de0e4aa109894 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CleansCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CleansCommand.java @@ -32,13 +32,12 @@ import org.apache.hudi.common.table.timeline.HoodieTimeline; import org.apache.hudi.common.table.timeline.TimelineMetadataUtils; import org.apache.hudi.utilities.UtilHelpers; - import org.apache.spark.launcher.SparkLauncher; import org.apache.spark.util.Utils; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; +import scala.collection.JavaConverters; import java.io.IOException; import java.net.URISyntaxException; @@ -48,21 +47,19 @@ import java.util.Map; import java.util.stream.Collectors; -import scala.collection.JavaConverters; - /** * CLI command to show cleans options. 
*/ -@Component -public class CleansCommand implements CommandMarker { +@ShellComponent +public class CleansCommand { - @CliCommand(value = "cleans show", help = "Show the cleans") + @ShellMethod(key = "cleans show", value = "Show the cleans") public String showCleans( - @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly) + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly) throws IOException { HoodieActiveTimeline activeTimeline = HoodieCLI.getTableMetaClient().getActiveTimeline(); @@ -84,14 +81,14 @@ public String showCleans( return HoodiePrintHelper.print(header, new HashMap<>(), sortByField, descending, limit, headerOnly, rows); } - @CliCommand(value = "clean showpartitions", help = "Show partition level details of a clean") + @ShellMethod(key = "clean showpartitions", value = "Show partition level details of a clean") public String showCleanPartitions( - @CliOption(key = {"clean"}, help = "clean to show") final String instantTime, - @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly) + @ShellOption(value = {"--clean"}, help = "clean to show") final String instantTime, + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly) throws Exception { HoodieActiveTimeline activeTimeline = HoodieCLI.getTableMetaClient().getActiveTimeline(); @@ -122,15 +119,15 @@ public String showCleanPartitions( } - @CliCommand(value = "cleans run", help = "run clean") + @ShellMethod(key = "cleans run", value = "run clean") public String runClean( - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G", + @ShellOption(value = "--sparkMemory", defaultValue = "4G", help = "Spark executor memory") final String sparkMemory, - @CliOption(key = "propsFilePath", help = "path to properties file on localfs or dfs with configurations for hoodie client for cleaning", - unspecifiedDefaultValue = "") final String propsFilePath, - @CliOption(key = "hoodieConfigs", help = "Any configuration that can be set in the properties file can be passed here in the form of an array", - 
unspecifiedDefaultValue = "") final String[] configs, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master ") String master) throws IOException, InterruptedException, URISyntaxException { + @ShellOption(value = "--propsFilePath", help = "path to properties file on localfs or dfs with configurations for hoodie client for cleaning", + defaultValue = "") final String propsFilePath, + @ShellOption(value = "--hoodieConfigs", help = "Any configuration that can be set in the properties file can be passed here in the form of an array", + defaultValue = "") final String[] configs, + @ShellOption(value = "--sparkMaster", defaultValue = "", help = "Spark Master ") String master) throws IOException, InterruptedException, URISyntaxException { boolean initialized = HoodieCLI.initConf(); HoodieCLI.initFS(initialized); HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient(); diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ClusteringCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ClusteringCommand.java index 8b2a95b55729e..963411bf98a1e 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ClusteringCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ClusteringCommand.java @@ -25,22 +25,15 @@ import org.apache.hudi.common.table.HoodieTableMetaClient; import org.apache.hudi.common.table.timeline.HoodieActiveTimeline; import org.apache.hudi.utilities.UtilHelpers; - -import org.apache.log4j.LogManager; -import org.apache.log4j.Logger; import org.apache.spark.launcher.SparkLauncher; import org.apache.spark.util.Utils; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; - +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import scala.collection.JavaConverters; -@Component -public class ClusteringCommand implements CommandMarker { - - private static final Logger LOG = LogManager.getLogger(ClusteringCommand.class); +@ShellComponent +public class ClusteringCommand { /** * Schedule clustering table service. 
@@ -49,14 +42,14 @@ public class ClusteringCommand implements CommandMarker { * > connect --path {path to hudi table} * > clustering schedule --sparkMaster local --sparkMemory 2g */ - @CliCommand(value = "clustering schedule", help = "Schedule Clustering") + @ShellMethod(key = "clustering schedule", value = "Schedule Clustering") public String scheduleClustering( - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = SparkUtil.DEFAULT_SPARK_MASTER, help = "Spark master") final String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "1g", help = "Spark executor memory") final String sparkMemory, - @CliOption(key = "propsFilePath", help = "path to properties file on localfs or dfs with configurations " - + "for hoodie client for clustering", unspecifiedDefaultValue = "") final String propsFilePath, - @CliOption(key = "hoodieConfigs", help = "Any configuration that can be set in the properties file can " - + "be passed here in the form of an array", unspecifiedDefaultValue = "") final String[] configs) throws Exception { + @ShellOption(value = "--sparkMaster", defaultValue = SparkUtil.DEFAULT_SPARK_MASTER, help = "Spark master") final String master, + @ShellOption(value = "--sparkMemory", defaultValue = "1g", help = "Spark executor memory") final String sparkMemory, + @ShellOption(value = "--propsFilePath", help = "path to properties file on localfs or dfs with configurations " + + "for hoodie client for clustering", defaultValue = "") final String propsFilePath, + @ShellOption(value = "--hoodieConfigs", help = "Any configuration that can be set in the properties file can " + + "be passed here in the form of an array", defaultValue = "") final String[] configs) throws Exception { HoodieTableMetaClient client = HoodieCLI.getTableMetaClient(); boolean initialized = HoodieCLI.initConf(); HoodieCLI.initFS(initialized); @@ -88,17 +81,18 @@ public String scheduleClustering( * > clustering schedule --sparkMaster local --sparkMemory 2g * > clustering run --sparkMaster local --sparkMemory 2g --clusteringInstant 20211124005208 */ - @CliCommand(value = "clustering run", help = "Run Clustering") + @ShellMethod(key = "clustering run", value = "Run Clustering") public String runClustering( - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = SparkUtil.DEFAULT_SPARK_MASTER, help = "Spark master") final String master, - @CliOption(key = "sparkMemory", help = "Spark executor memory", unspecifiedDefaultValue = "4g") final String sparkMemory, - @CliOption(key = "parallelism", help = "Parallelism for hoodie clustering", unspecifiedDefaultValue = "1") final String parallelism, - @CliOption(key = "retry", help = "Number of retries", unspecifiedDefaultValue = "1") final String retry, - @CliOption(key = "clusteringInstant", help = "Clustering instant time", mandatory = true) final String clusteringInstantTime, - @CliOption(key = "propsFilePath", help = "path to properties file on localfs or dfs with configurations for " - + "hoodie client for compacting", unspecifiedDefaultValue = "") final String propsFilePath, - @CliOption(key = "hoodieConfigs", help = "Any configuration that can be set in the properties file can be " - + "passed here in the form of an array", unspecifiedDefaultValue = "") final String[] configs) throws Exception { + @ShellOption(value = "--sparkMaster", defaultValue = SparkUtil.DEFAULT_SPARK_MASTER, help = "Spark master") final String master, + @ShellOption(value = "--sparkMemory", help = "Spark executor memory", defaultValue = "4g") final String sparkMemory, + 
@ShellOption(value = "--parallelism", help = "Parallelism for hoodie clustering", defaultValue = "1") final String parallelism, + @ShellOption(value = "--retry", help = "Number of retries", defaultValue = "1") final String retry, + @ShellOption(value = "--clusteringInstant", help = "Clustering instant time", + defaultValue = ShellOption.NULL) final String clusteringInstantTime, + @ShellOption(value = "--propsFilePath", help = "path to properties file on localfs or dfs with configurations for " + + "hoodie client for compacting", defaultValue = "") final String propsFilePath, + @ShellOption(value = "--hoodieConfigs", help = "Any configuration that can be set in the properties file can be " + + "passed here in the form of an array", defaultValue = "") final String[] configs) throws Exception { HoodieTableMetaClient client = HoodieCLI.getTableMetaClient(); boolean initialized = HoodieCLI.initConf(); HoodieCLI.initFS(initialized); @@ -126,16 +120,16 @@ public String runClustering( * > connect --path {path to hudi table} * > clustering scheduleAndExecute --sparkMaster local --sparkMemory 2g */ - @CliCommand(value = "clustering scheduleAndExecute", help = "Run Clustering. Make a cluster plan first and execute that plan immediately") + @ShellMethod(key = "clustering scheduleAndExecute", value = "Run Clustering. Make a cluster plan first and execute that plan immediately") public String runClustering( - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = SparkUtil.DEFAULT_SPARK_MASTER, help = "Spark master") final String master, - @CliOption(key = "sparkMemory", help = "Spark executor memory", unspecifiedDefaultValue = "4g") final String sparkMemory, - @CliOption(key = "parallelism", help = "Parallelism for hoodie clustering", unspecifiedDefaultValue = "1") final String parallelism, - @CliOption(key = "retry", help = "Number of retries", unspecifiedDefaultValue = "1") final String retry, - @CliOption(key = "propsFilePath", help = "path to properties file on localfs or dfs with configurations for " - + "hoodie client for compacting", unspecifiedDefaultValue = "") final String propsFilePath, - @CliOption(key = "hoodieConfigs", help = "Any configuration that can be set in the properties file can be " - + "passed here in the form of an array", unspecifiedDefaultValue = "") final String[] configs) throws Exception { + @ShellOption(value = "--sparkMaster", defaultValue = SparkUtil.DEFAULT_SPARK_MASTER, help = "Spark master") final String master, + @ShellOption(value = "--sparkMemory", help = "Spark executor memory", defaultValue = "4g") final String sparkMemory, + @ShellOption(value = "--parallelism", help = "Parallelism for hoodie clustering", defaultValue = "1") final String parallelism, + @ShellOption(value = "--retry", help = "Number of retries", defaultValue = "1") final String retry, + @ShellOption(value = "--propsFilePath", help = "path to properties file on localfs or dfs with configurations for " + + "hoodie client for compacting", defaultValue = "") final String propsFilePath, + @ShellOption(value = "--hoodieConfigs", help = "Any configuration that can be set in the properties file can be " + + "passed here in the form of an array", defaultValue = "") final String[] configs) throws Exception { HoodieTableMetaClient client = HoodieCLI.getTableMetaClient(); boolean initialized = HoodieCLI.initConf(); HoodieCLI.initFS(initialized); diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CommitsCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CommitsCommand.java 
index c1ed884315f17..e269f8da0cba8 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CommitsCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CommitsCommand.java @@ -34,10 +34,9 @@ import org.apache.hudi.common.util.Option; import org.apache.hudi.common.util.StringUtils; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import java.io.IOException; import java.util.ArrayList; @@ -54,8 +53,8 @@ /** * CLI command to display commits options. */ -@Component -public class CommitsCommand implements CommandMarker { +@ShellComponent +public class CommitsCommand { private String printCommits(HoodieDefaultTimeline timeline, final Integer limit, @@ -139,21 +138,23 @@ private String printCommitsWithMetadata(HoodieDefaultTimeline timeline, fieldNameToConverterMap, sortByField, descending, limit, headerOnly, rows, tempTableName); } - @CliCommand(value = "commits show", help = "Show the commits") + @ShellMethod(key = "commits show", value = "Show the commits") public String showCommits( - @CliOption(key = {"includeExtraMetadata"}, help = "Include extra metadata", - unspecifiedDefaultValue = "false") final boolean includeExtraMetadata, - @CliOption(key = {"createView"}, help = "view name to store output table", - unspecifiedDefaultValue = "") final String exportTableName, - @CliOption(key = {"limit"}, help = "Limit commits", - unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly, - @CliOption(key = {"partition"}, help = "Partition value") final String partition, - @CliOption(key = {"includeArchivedTimeline"}, help = "Include archived commits as well", - unspecifiedDefaultValue = "false") final boolean includeArchivedTimeline) throws IOException { + @ShellOption(value = {"--includeExtraMetadata"}, help = "Include extra metadata", + defaultValue = "false") final boolean includeExtraMetadata, + @ShellOption(value = {"--createView"}, help = "view name to store output table", + defaultValue = "") final String exportTableName, + @ShellOption(value = {"--limit"}, help = "Limit commits", + defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly, + @ShellOption(value = {"--partition"}, help = "Partition value", defaultValue = ShellOption.NULL) final String partition, + @ShellOption(value = {"--includeArchivedTimeline"}, help = "Include archived commits as well", + defaultValue = "false") final boolean includeArchivedTimeline) + throws IOException { + HoodieDefaultTimeline timeline = getTimeline(HoodieCLI.getTableMetaClient(), includeArchivedTimeline); if (includeExtraMetadata) { return 
printCommitsWithMetadata(timeline, limit, sortByField, descending, headerOnly, exportTableName, partition); @@ -162,21 +163,21 @@ public String showCommits( } } - @CliCommand(value = "commits showarchived", help = "Show the archived commits") + @ShellMethod(key = "commits showarchived", value = "Show the archived commits") public String showArchivedCommits( - @CliOption(key = {"includeExtraMetadata"}, help = "Include extra metadata", - unspecifiedDefaultValue = "false") final boolean includeExtraMetadata, - @CliOption(key = {"createView"}, mandatory = false, help = "view name to store output table", - unspecifiedDefaultValue = "") final String exportTableName, - @CliOption(key = {"startTs"}, mandatory = false, help = "start time for commits, default: now - 10 days") + @ShellOption(value = {"--includeExtraMetadata"}, help = "Include extra metadata", + defaultValue = "false") final boolean includeExtraMetadata, + @ShellOption(value = {"--createView"}, help = "view name to store output table", + defaultValue = "") final String exportTableName, + @ShellOption(value = {"--startTs"}, defaultValue = ShellOption.NULL, help = "start time for commits, default: now - 10 days") String startTs, - @CliOption(key = {"endTs"}, mandatory = false, help = "end time for commits, default: now - 1 day") + @ShellOption(value = {"--endTs"}, defaultValue = ShellOption.NULL, help = "end time for commits, default: now - 1 day") String endTs, - @CliOption(key = {"limit"}, mandatory = false, help = "Limit commits", unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", unspecifiedDefaultValue = "false") final boolean headerOnly, - @CliOption(key = {"partition"}, help = "Partition value") final String partition) + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", defaultValue = "false") final boolean headerOnly, + @ShellOption(value = {"--partition"}, help = "Partition value", defaultValue = ShellOption.NULL) final String partition) throws IOException { if (StringUtils.isNullOrEmpty(startTs)) { startTs = getTimeDaysAgo(10); @@ -199,18 +200,20 @@ public String showArchivedCommits( } } - @CliCommand(value = "commit showpartitions", help = "Show partition level details of a commit") + @ShellMethod(key = "commit showpartitions", value = "Show partition level details of a commit") public String showCommitPartitions( - @CliOption(key = {"createView"}, help = "view name to store output table", - unspecifiedDefaultValue = "") final String exportTableName, - @CliOption(key = {"commit"}, help = "Commit to show") final String instantTime, - @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - 
unspecifiedDefaultValue = "false") final boolean headerOnly, - @CliOption(key = {"includeArchivedTimeline"}, help = "Include archived commits as well", - unspecifiedDefaultValue = "false") final boolean includeArchivedTimeline) throws Exception { + @ShellOption(value = {"--createView"}, help = "view name to store output table", + defaultValue = "") final String exportTableName, + @ShellOption(value = {"--commit"}, help = "Commit to show") final String instantTime, + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly, + @ShellOption(value = {"includeArchivedTimeline"}, help = "Include archived commits as well", + defaultValue = "false") final boolean includeArchivedTimeline) + throws Exception { + HoodieDefaultTimeline defaultTimeline = getTimeline(HoodieCLI.getTableMetaClient(), includeArchivedTimeline); HoodieTimeline timeline = defaultTimeline.getCommitsTimeline().filterCompletedInstants(); @@ -265,18 +268,20 @@ public String showCommitPartitions( limit, headerOnly, rows, exportTableName); } - @CliCommand(value = "commit show_write_stats", help = "Show write stats of a commit") + @ShellMethod(key = "commit show_write_stats", value = "Show write stats of a commit") public String showWriteStats( - @CliOption(key = {"createView"}, help = "view name to store output table", - unspecifiedDefaultValue = "") final String exportTableName, - @CliOption(key = {"commit"}, help = "Commit to show") final String instantTime, - @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly, - @CliOption(key = {"includeArchivedTimeline"}, help = "Include archived commits as well", - unspecifiedDefaultValue = "false") final boolean includeArchivedTimeline) throws Exception { + @ShellOption(value = {"--createView"}, help = "view name to store output table", + defaultValue = "") final String exportTableName, + @ShellOption(value = {"--commit"}, help = "Commit to show") final String instantTime, + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly, + @ShellOption(value = {"includeArchivedTimeline"}, help = "Include archived commits as well", + defaultValue = "false") final boolean includeArchivedTimeline) + throws Exception { + HoodieDefaultTimeline defaultTimeline = getTimeline(HoodieCLI.getTableMetaClient(), includeArchivedTimeline); HoodieTimeline timeline = defaultTimeline.getCommitsTimeline().filterCompletedInstants(); @@ -309,18 +314,20 @@ public String showWriteStats( limit, 
headerOnly, rows, exportTableName); } - @CliCommand(value = "commit showfiles", help = "Show file level details of a commit") + @ShellMethod(key = "commit showfiles", value = "Show file level details of a commit") public String showCommitFiles( - @CliOption(key = {"createView"}, mandatory = false, help = "view name to store output table", - unspecifiedDefaultValue = "") final String exportTableName, - @CliOption(key = {"commit"}, help = "Commit to show") final String instantTime, - @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly, - @CliOption(key = {"includeArchivedTimeline"}, help = "Include archived commits as well", - unspecifiedDefaultValue = "false") final boolean includeArchivedTimeline) throws Exception { + @ShellOption(value = {"--createView"}, help = "view name to store output table", + defaultValue = "") final String exportTableName, + @ShellOption(value = {"--commit"}, help = "Commit to show") final String instantTime, + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly, + @ShellOption(value = {"--includeArchivedTimeline"}, help = "Include archived commits as well", + defaultValue = "false") final boolean includeArchivedTimeline) + throws Exception { + HoodieDefaultTimeline defaultTimeline = getTimeline(HoodieCLI.getTableMetaClient(), includeArchivedTimeline); HoodieTimeline timeline = defaultTimeline.getCommitsTimeline().filterCompletedInstants(); @@ -357,8 +364,8 @@ public String showCommitFiles( limit, headerOnly, rows, exportTableName); } - @CliCommand(value = "commits compare", help = "Compare commits with another Hoodie table") - public String compareCommits(@CliOption(key = {"path"}, help = "Path of the table to compare to") final String path) { + @ShellMethod(key = "commits compare", value = "Compare commits with another Hoodie table") + public String compareCommits(@ShellOption(value = {"--path"}, help = "Path of the table to compare to") final String path) { HoodieTableMetaClient source = HoodieCLI.getTableMetaClient(); HoodieTableMetaClient target = HoodieTableMetaClient.builder().setConf(HoodieCLI.conf).setBasePath(path).build(); @@ -384,8 +391,8 @@ public String compareCommits(@CliOption(key = {"path"}, help = "Path of the tabl } } - @CliCommand(value = "commits sync", help = "Sync commits with another Hoodie table") - public String syncCommits(@CliOption(key = {"path"}, help = "Path of the table to sync to") final String path) { + @ShellMethod(key = "commits sync", value = "Sync commits with another Hoodie table") + public String syncCommits(@ShellOption(value = {"--path"}, help = "Path of the table to sync to") final String path) { HoodieCLI.syncTableMetadata = HoodieTableMetaClient.builder().setConf(HoodieCLI.conf).setBasePath(path).build(); HoodieCLI.state = HoodieCLI.CLIState.SYNC; return "Load 
sync state between " + HoodieCLI.getTableMetaClient().getTableConfig().getTableName() + " and " diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CompactionCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CompactionCommand.java index 136546ddaef42..cc2dd42c2c627 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CompactionCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CompactionCommand.java @@ -48,14 +48,13 @@ import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.log4j.LogManager; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.spark.launcher.SparkLauncher; import org.apache.spark.util.Utils; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import java.io.IOException; import java.io.ObjectInputStream; @@ -75,8 +74,8 @@ /** * CLI command to display compaction related options. */ -@Component -public class CompactionCommand implements CommandMarker { +@ShellComponent +public class CompactionCommand { private static final Logger LOG = LogManager.getLogger(CompactionCommand.class); @@ -90,16 +89,16 @@ private HoodieTableMetaClient checkAndGetMetaClient() { return client; } - @CliCommand(value = "compactions show all", help = "Shows all compactions that are in active timeline") + @ShellMethod(key = "compactions show all", value = "Shows all compactions that are in active timeline") public String compactionsAll( - @CliOption(key = {"includeExtraMetadata"}, help = "Include extra metadata", - unspecifiedDefaultValue = "false") final boolean includeExtraMetadata, - @CliOption(key = {"limit"}, help = "Limit commits", - unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly) { + @ShellOption(value = {"--includeExtraMetadata"}, help = "Include extra metadata", + defaultValue = "false") final boolean includeExtraMetadata, + @ShellOption(value = {"--limit"}, help = "Limit commits", + defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly) { HoodieTableMetaClient client = checkAndGetMetaClient(); HoodieActiveTimeline activeTimeline = client.getActiveTimeline(); return printAllCompactions(activeTimeline, @@ -107,17 +106,16 @@ public String compactionsAll( includeExtraMetadata, sortByField, descending, limit, headerOnly); } - @CliCommand(value = "compaction show", help = "Shows compaction details for a specific compaction instant") + @ShellMethod(key = "compaction show", value 
= "Shows compaction details for a specific compaction instant") public String compactionShow( - @CliOption(key = "instant", mandatory = true, - help = "Base path for the target hoodie table") final String compactionInstantTime, - @CliOption(key = {"limit"}, help = "Limit commits", - unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly, - @CliOption(key = {"partition"}, help = "Partition value") final String partition) + @ShellOption(value = "--instant", + help = "Base path for the target hoodie table") final String compactionInstantTime, + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly, + @ShellOption(value = {"--partition"}, help = "Partition value", defaultValue = ShellOption.NULL) final String partition) throws Exception { HoodieTableMetaClient client = checkAndGetMetaClient(); HoodieActiveTimeline activeTimeline = client.getActiveTimeline(); @@ -128,20 +126,19 @@ public String compactionShow( return printCompaction(compactionPlan, sortByField, descending, limit, headerOnly, partition); } - @CliCommand(value = "compactions showarchived", help = "Shows compaction details for specified time window") + @ShellMethod(key = "compactions showarchived", value = "Shows compaction details for specified time window") public String compactionsShowArchived( - @CliOption(key = {"includeExtraMetadata"}, help = "Include extra metadata", - unspecifiedDefaultValue = "false") final boolean includeExtraMetadata, - @CliOption(key = {"startTs"}, help = "start time for compactions, default: now - 10 days") - String startTs, - @CliOption(key = {"endTs"}, help = "end time for compactions, default: now - 1 day") - String endTs, - @CliOption(key = {"limit"}, help = "Limit compactions", - unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly) { + @ShellOption(value = {"--includeExtraMetadata"}, help = "Include extra metadata", + defaultValue = "false") final boolean includeExtraMetadata, + @ShellOption(value = {"--startTs"}, defaultValue = ShellOption.NULL, + help = "start time for compactions, default: now - 10 days") String startTs, + @ShellOption(value = {"--endTs"}, defaultValue = ShellOption.NULL, + help = "end time for compactions, default: now - 1 day") String endTs, + @ShellOption(value = {"--limit"}, help = "Limit compactions", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") 
final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly) { if (StringUtils.isNullOrEmpty(startTs)) { startTs = getTimeDaysAgo(10); } @@ -161,17 +158,15 @@ public String compactionsShowArchived( } } - @CliCommand(value = "compaction showarchived", help = "Shows compaction details for a specific compaction instant") + @ShellMethod(key = "compaction showarchived", value = "Shows compaction details for a specific compaction instant") public String compactionShowArchived( - @CliOption(key = "instant", mandatory = true, - help = "instant time") final String compactionInstantTime, - @CliOption(key = {"limit"}, help = "Limit commits", - unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly, - @CliOption(key = {"partition"}, help = "Partition value") final String partition) + @ShellOption(value = "--instant", help = "instant time") final String compactionInstantTime, + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly, + @ShellOption(value = {"--partition"}, help = "Partition value", defaultValue = ShellOption.NULL) final String partition) throws Exception { HoodieTableMetaClient client = checkAndGetMetaClient(); HoodieArchivedTimeline archivedTimeline = client.getArchivedTimeline(); @@ -187,15 +182,15 @@ public String compactionShowArchived( } } - @CliCommand(value = "compaction schedule", help = "Schedule Compaction") + @ShellMethod(key = "compaction schedule", value = "Schedule Compaction") public String scheduleCompact( - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "1G", + @ShellOption(value = "--sparkMemory", defaultValue = "1G", help = "Spark executor memory") final String sparkMemory, - @CliOption(key = "propsFilePath", help = "path to properties file on localfs or dfs with configurations for hoodie client for compacting", - unspecifiedDefaultValue = "") final String propsFilePath, - @CliOption(key = "hoodieConfigs", help = "Any configuration that can be set in the properties file can be passed here in the form of an array", - unspecifiedDefaultValue = "") final String[] configs, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "local", help = "Spark Master") String master) + @ShellOption(value = "--propsFilePath", help = "path to properties file on localfs or dfs with configurations for hoodie client for compacting", + defaultValue = "") final String propsFilePath, + @ShellOption(value = "--hoodieConfigs", help = "Any configuration that can be set in the properties file can be passed here in the form of an array", + defaultValue = "") final String[] configs, + @ShellOption(value = "--sparkMaster", defaultValue = "local", help = "Spark Master") String master) throws Exception { HoodieTableMetaClient client = checkAndGetMetaClient(); boolean initialized = 
HoodieCLI.initConf(); @@ -220,22 +215,23 @@ public String scheduleCompact( return "Attempted to schedule compaction for " + compactionInstantTime; } - @CliCommand(value = "compaction run", help = "Run Compaction for given instant time") + @ShellMethod(key = "compaction run", value = "Run Compaction for given instant time") public String compact( - @CliOption(key = {"parallelism"}, mandatory = true, + @ShellOption(value = {"--parallelism"}, defaultValue = "3", help = "Parallelism for hoodie compaction") final String parallelism, - @CliOption(key = "schemaFilePath", mandatory = true, - help = "Path for Avro schema file") final String schemaFilePath, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "local", + @ShellOption(value = "--schemaFilePath", + help = "Path for Avro schema file", defaultValue = ShellOption.NULL) final String schemaFilePath, + @ShellOption(value = "--sparkMaster", defaultValue = "local", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G", + @ShellOption(value = "--sparkMemory", defaultValue = "4G", help = "Spark executor memory") final String sparkMemory, - @CliOption(key = "retry", unspecifiedDefaultValue = "1", help = "Number of retries") final String retry, - @CliOption(key = "compactionInstant", help = "Base path for the target hoodie table") String compactionInstantTime, - @CliOption(key = "propsFilePath", help = "path to properties file on localfs or dfs with configurations for hoodie client for compacting", - unspecifiedDefaultValue = "") final String propsFilePath, - @CliOption(key = "hoodieConfigs", help = "Any configuration that can be set in the properties file can be passed here in the form of an array", - unspecifiedDefaultValue = "") final String[] configs) + @ShellOption(value = "--retry", defaultValue = "1", help = "Number of retries") final String retry, + @ShellOption(value = "--compactionInstant", help = "Base path for the target hoodie table", + defaultValue = ShellOption.NULL) String compactionInstantTime, + @ShellOption(value = "--propsFilePath", help = "path to properties file on localfs or dfs with configurations for hoodie client for compacting", + defaultValue = "") final String propsFilePath, + @ShellOption(value = "--hoodieConfigs", help = "Any configuration that can be set in the properties file can be passed here in the form of an array", + defaultValue = "") final String[] configs) throws Exception { HoodieTableMetaClient client = checkAndGetMetaClient(); boolean initialized = HoodieCLI.initConf(); @@ -268,21 +264,21 @@ public String compact( return "Compaction successfully completed for " + compactionInstantTime; } - @CliCommand(value = "compaction scheduleAndExecute", help = "Schedule compaction plan and execute this plan") + @ShellMethod(key = "compaction scheduleAndExecute", value = "Schedule compaction plan and execute this plan") public String compact( - @CliOption(key = {"parallelism"}, mandatory = true, + @ShellOption(value = {"--parallelism"}, defaultValue = "3", help = "Parallelism for hoodie compaction") final String parallelism, - @CliOption(key = "schemaFilePath", mandatory = true, - help = "Path for Avro schema file") final String schemaFilePath, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "local", + @ShellOption(value = "--schemaFilePath", + help = "Path for Avro schema file", defaultValue = ShellOption.NULL) final String schemaFilePath, + @ShellOption(value = "--sparkMaster", defaultValue = "local", help = "Spark Master") String master, - 
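// ---------------------------------------------------------------------------
// Aside: how "mandatory = true" translates, sketched under the assumption that
// Spring Shell 2.x treats any option without a declared defaultValue as
// required. A formerly optional 1.x option keeps its optionality through the
// ShellOption.NULL sentinel, which injects a genuine Java null that the method
// body must guard against -- hence parameters such as schemaFilePath and
// compactionInstantTime above. ResolveCommand is hypothetical.
import org.springframework.shell.standard.ShellComponent;
import org.springframework.shell.standard.ShellMethod;
import org.springframework.shell.standard.ShellOption;

@ShellComponent
public class ResolveCommand {

  @ShellMethod(key = "resolve", value = "Contrast required and optional options")
  public String resolve(
      // No defaultValue: the shell rejects the command when --instant is omitted.
      @ShellOption(value = "--instant", help = "instant time") final String instant,
      // ShellOption.NULL: optional; the parameter arrives as null when omitted.
      @ShellOption(value = "--schemaFilePath", defaultValue = ShellOption.NULL,
          help = "Path for Avro schema file") final String schemaFilePath) {
    String schema = (schemaFilePath == null) ? "<derived from table>" : schemaFilePath;
    return "instant=" + instant + ", schema=" + schema;
  }
}
// ---------------------------------------------------------------------------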
@CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G", + @ShellOption(value = "--sparkMemory", defaultValue = "4G", help = "Spark executor memory") final String sparkMemory, - @CliOption(key = "retry", unspecifiedDefaultValue = "1", help = "Number of retries") final String retry, - @CliOption(key = "propsFilePath", help = "path to properties file on localfs or dfs with configurations for hoodie client for compacting", - unspecifiedDefaultValue = "") final String propsFilePath, - @CliOption(key = "hoodieConfigs", help = "Any configuration that can be set in the properties file can be passed here in the form of an array", - unspecifiedDefaultValue = "") final String[] configs) + @ShellOption(value = "--retry", defaultValue = "1", help = "Number of retries") final String retry, + @ShellOption(value = "--propsFilePath", help = "path to properties file on localfs or dfs with configurations for hoodie client for compacting", + defaultValue = "") final String propsFilePath, + @ShellOption(value = "--hoodieConfigs", help = "Any configuration that can be set in the properties file can be passed here in the form of an array", + defaultValue = "") final String[] configs) throws Exception { HoodieTableMetaClient client = checkAndGetMetaClient(); boolean initialized = HoodieCLI.initConf(); @@ -453,17 +449,17 @@ private T deSerializeOperationResult(String inputP, FileSystem fs) throws Ex } } - @CliCommand(value = "compaction validate", help = "Validate Compaction") + @ShellMethod(key = "compaction validate", value = "Validate Compaction") public String validateCompaction( - @CliOption(key = "instant", mandatory = true, help = "Compaction Instant") String compactionInstant, - @CliOption(key = {"parallelism"}, unspecifiedDefaultValue = "3", help = "Parallelism") String parallelism, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "local", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "2G", help = "executor memory") String sparkMemory, - @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") boolean headerOnly) + @ShellOption(value = "--instant", help = "Compaction Instant") String compactionInstant, + @ShellOption(value = {"--parallelism"}, defaultValue = "3", help = "Parallelism") String parallelism, + @ShellOption(value = "--sparkMaster", defaultValue = "local", help = "Spark Master") String master, + @ShellOption(value = "--sparkMemory", defaultValue = "2G", help = "executor memory") String sparkMemory, + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") boolean headerOnly) throws Exception { HoodieTableMetaClient client = checkAndGetMetaClient(); boolean initialized = HoodieCLI.initConf(); @@ -516,19 +512,19 @@ public String validateCompaction( return output; } - @CliCommand(value = "compaction unschedule", help = "Unschedule Compaction") + 
@ShellMethod(key = "compaction unschedule", value = "Unschedule Compaction") public String unscheduleCompaction( - @CliOption(key = "instant", mandatory = true, help = "Compaction Instant") String compactionInstant, - @CliOption(key = {"parallelism"}, unspecifiedDefaultValue = "3", help = "Parallelism") String parallelism, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "local", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "2G", help = "executor memory") String sparkMemory, - @CliOption(key = {"skipValidation"}, help = "skip validation", unspecifiedDefaultValue = "false") boolean skipV, - @CliOption(key = {"dryRun"}, help = "Dry Run Mode", unspecifiedDefaultValue = "false") boolean dryRun, - @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") boolean headerOnly) + @ShellOption(value = "--instant", help = "Compaction Instant") String compactionInstant, + @ShellOption(value = {"--parallelism"}, defaultValue = "3", help = "Parallelism") String parallelism, + @ShellOption(value = "--sparkMaster", defaultValue = "local", help = "Spark Master") String master, + @ShellOption(value = "--sparkMemory", defaultValue = "2G", help = "executor memory") String sparkMemory, + @ShellOption(value = {"--skipValidation"}, help = "skip validation", defaultValue = "false") boolean skipV, + @ShellOption(value = {"--dryRun"}, help = "Dry Run Mode", defaultValue = "false") boolean dryRun, + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") boolean headerOnly) throws Exception { HoodieTableMetaClient client = checkAndGetMetaClient(); boolean initialized = HoodieCLI.initConf(); @@ -562,18 +558,18 @@ public String unscheduleCompaction( return output; } - @CliCommand(value = "compaction unscheduleFileId", help = "UnSchedule Compaction for a fileId") + @ShellMethod(key = "compaction unscheduleFileId", value = "UnSchedule Compaction for a fileId") public String unscheduleCompactFile( - @CliOption(key = "fileId", mandatory = true, help = "File Id") final String fileId, - @CliOption(key = "partitionPath", unspecifiedDefaultValue = "", help = "partition path") final String partitionPath, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "local", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "2G", help = "executor memory") String sparkMemory, - @CliOption(key = {"skipValidation"}, help = "skip validation", unspecifiedDefaultValue = "false") boolean skipV, - @CliOption(key = {"dryRun"}, help = "Dry Run Mode", unspecifiedDefaultValue = "false") boolean dryRun, - @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", 
unspecifiedDefaultValue = "false") boolean descending, - @CliOption(key = {"headeronly"}, help = "Header Only", unspecifiedDefaultValue = "false") boolean headerOnly) + @ShellOption(value = "--fileId", help = "File Id") final String fileId, + @ShellOption(value = "--partitionPath", defaultValue = "", help = "partition path") final String partitionPath, + @ShellOption(value = "--sparkMaster", defaultValue = "local", help = "Spark Master") String master, + @ShellOption(value = "--sparkMemory", defaultValue = "2G", help = "executor memory") String sparkMemory, + @ShellOption(value = {"--skipValidation"}, help = "skip validation", defaultValue = "false") boolean skipV, + @ShellOption(value = {"--dryRun"}, help = "Dry Run Mode", defaultValue = "false") boolean dryRun, + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Header Only", defaultValue = "false") boolean headerOnly) throws Exception { HoodieTableMetaClient client = checkAndGetMetaClient(); boolean initialized = HoodieCLI.initConf(); @@ -607,19 +603,19 @@ public String unscheduleCompactFile( return output; } - @CliCommand(value = "compaction repair", help = "Renames the files to make them consistent with the timeline as " + @ShellMethod(key = "compaction repair", value = "Renames the files to make them consistent with the timeline as " + "dictated by Hoodie metadata. Use when compaction unschedule fails partially.") public String repairCompaction( - @CliOption(key = "instant", mandatory = true, help = "Compaction Instant") String compactionInstant, - @CliOption(key = {"parallelism"}, unspecifiedDefaultValue = "3", help = "Parallelism") String parallelism, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "local", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "2G", help = "executor memory") String sparkMemory, - @CliOption(key = {"dryRun"}, help = "Dry Run Mode", unspecifiedDefaultValue = "false") boolean dryRun, - @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") boolean headerOnly) + @ShellOption(value = "--instant", help = "Compaction Instant") String compactionInstant, + @ShellOption(value = {"--parallelism"}, defaultValue = "3", help = "Parallelism") String parallelism, + @ShellOption(value = "--sparkMaster", defaultValue = "local", help = "Spark Master") String master, + @ShellOption(value = "--sparkMemory", defaultValue = "2G", help = "executor memory") String sparkMemory, + @ShellOption(value = {"--dryRun"}, help = "Dry Run Mode", defaultValue = "false") boolean dryRun, + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + 
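// ---------------------------------------------------------------------------
// Aside: behind each of these commands sits the same launch pattern -- build a
// SparkLauncher, stream its output, and report the exit code. A condensed
// sketch under assumed paths (the jar location and the InputStreamConsumer
// wiring differ in the real commands):
import org.apache.spark.launcher.SparkLauncher;

public class LaunchSketch {

  public static int runSparkJob(String master, String sparkMemory) throws Exception {
    SparkLauncher launcher = new SparkLauncher()
        .setMaster(master)                                        // e.g. "local"
        .setConf("spark.executor.memory", sparkMemory)            // e.g. "2G"
        .setAppResource("hudi-cli-bundle.jar")                    // placeholder path
        .setMainClass("org.apache.hudi.cli.commands.SparkMain");
    Process process = launcher.launch();
    // The real commands attach a consumer to relay stdout/stderr;
    // here we simply wait for the job to finish.
    return process.waitFor();
  }
}
// ---------------------------------------------------------------------------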
defaultValue = "false") boolean headerOnly) throws Exception { HoodieTableMetaClient client = checkAndGetMetaClient(); boolean initialized = HoodieCLI.initConf(); diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/DiffCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/DiffCommand.java index 29b5c6e51c3dc..07d21fe022668 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/DiffCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/DiffCommand.java @@ -32,10 +32,9 @@ import org.apache.hudi.common.util.NumericUtils; import org.apache.hudi.common.util.Option; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import java.io.IOException; import java.util.ArrayList; @@ -55,38 +54,42 @@ * Given a file id or partition value, this command line utility tracks the changes to the file group or partition across range of commits. * Usage: diff file --fileId */ -@Component -public class DiffCommand implements CommandMarker { +@ShellComponent +public class DiffCommand { private static final BiFunction FILE_ID_CHECKER = (writeStat, fileId) -> fileId.equals(writeStat.getFileId()); private static final BiFunction PARTITION_CHECKER = (writeStat, partitionPath) -> partitionPath.equals(writeStat.getPartitionPath()); - @CliCommand(value = "diff file", help = "Check how file differs across range of commits") + @ShellMethod(key = "diff file", value = "Check how file differs across range of commits") public String diffFile( - @CliOption(key = {"fileId"}, help = "File ID to diff across range of commits", mandatory = true) String fileId, - @CliOption(key = {"startTs"}, help = "start time for compactions, default: now - 10 days") String startTs, - @CliOption(key = {"endTs"}, help = "end time for compactions, default: now - 1 day") String endTs, - @CliOption(key = {"limit"}, help = "Limit compactions", unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", unspecifiedDefaultValue = "false") final boolean headerOnly, - @CliOption(key = {"includeArchivedTimeline"}, help = "Include archived commits as well", - unspecifiedDefaultValue = "false") final boolean includeArchivedTimeline) throws IOException { + @ShellOption(value = {"--fileId"}, help = "File ID to diff across range of commits") String fileId, + @ShellOption(value = {"--startTs"}, help = "start time for compactions, default: now - 10 days", + defaultValue = ShellOption.NULL) String startTs, + @ShellOption(value = {"--endTs"}, help = "end time for compactions, default: now - 1 day", + defaultValue = ShellOption.NULL) String endTs, + @ShellOption(value = {"--limit"}, help = "Limit compactions", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = 
"Print Header Only", defaultValue = "false") final boolean headerOnly, + @ShellOption(value = {"--includeArchivedTimeline"}, help = "Include archived commits as well", + defaultValue = "false") final boolean includeArchivedTimeline) throws IOException { HoodieDefaultTimeline timeline = getTimelineInRange(startTs, endTs, includeArchivedTimeline); return printCommitsWithMetadataForFileId(timeline, limit, sortByField, descending, headerOnly, "", fileId); } - @CliCommand(value = "diff partition", help = "Check how file differs across range of commits. It is meant to be used only for partitioned tables.") + @ShellMethod(key = "diff partition", value = "Check how file differs across range of commits. It is meant to be used only for partitioned tables.") public String diffPartition( - @CliOption(key = {"partitionPath"}, help = "Relative partition path to diff across range of commits", mandatory = true) String partitionPath, - @CliOption(key = {"startTs"}, help = "start time for compactions, default: now - 10 days") String startTs, - @CliOption(key = {"endTs"}, help = "end time for compactions, default: now - 1 day") String endTs, - @CliOption(key = {"limit"}, help = "Limit compactions", unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", unspecifiedDefaultValue = "false") final boolean headerOnly, - @CliOption(key = {"includeArchivedTimeline"}, help = "Include archived commits as well", - unspecifiedDefaultValue = "false") final boolean includeArchivedTimeline) throws IOException { + @ShellOption(value = {"--partitionPath"}, help = "Relative partition path to diff across range of commits") String partitionPath, + @ShellOption(value = {"--startTs"}, help = "start time for compactions, default: now - 10 days", + defaultValue = ShellOption.NULL) String startTs, + @ShellOption(value = {"--endTs"}, help = "end time for compactions, default: now - 1 day", + defaultValue = ShellOption.NULL) String endTs, + @ShellOption(value = {"--limit"}, help = "Limit compactions", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", defaultValue = "false") final boolean headerOnly, + @ShellOption(value = {"--includeArchivedTimeline"}, help = "Include archived commits as well", + defaultValue = "false") final boolean includeArchivedTimeline) throws IOException { HoodieDefaultTimeline timeline = getTimelineInRange(startTs, endTs, includeArchivedTimeline); return printCommitsWithMetadataForPartition(timeline, limit, sortByField, descending, headerOnly, "", partitionPath); } diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ExportCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ExportCommand.java index 91d13bcd17967..2406eddacf320 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ExportCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/ExportCommand.java @@ -18,6 +18,12 @@ package org.apache.hudi.cli.commands; +import org.apache.avro.generic.GenericRecord; +import org.apache.avro.generic.IndexedRecord; +import 
org.apache.avro.specific.SpecificData; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; import org.apache.hudi.avro.HoodieAvroUtils; import org.apache.hudi.avro.model.HoodieArchivedMetaEntry; import org.apache.hudi.avro.model.HoodieCleanMetadata; @@ -36,17 +42,9 @@ import org.apache.hudi.common.table.timeline.TimelineMetadataUtils; import org.apache.hudi.common.util.ClosableIterator; import org.apache.hudi.exception.HoodieException; - -import org.apache.avro.generic.GenericRecord; -import org.apache.avro.generic.IndexedRecord; -import org.apache.avro.specific.SpecificData; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import java.io.File; import java.io.FileOutputStream; @@ -64,16 +62,16 @@ * directory specified by the parameter --localFolder * The instants are exported in the json format. */ -@Component -public class ExportCommand implements CommandMarker { +@ShellComponent +public class ExportCommand { - @CliCommand(value = "export instants", help = "Export Instants and their metadata from the Timeline") + @ShellMethod(key = "export instants", value = "Export Instants and their metadata from the Timeline") public String exportInstants( - @CliOption(key = {"limit"}, help = "Limit Instants", unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"actions"}, help = "Comma separated list of Instant actions to export", - unspecifiedDefaultValue = "clean,commit,deltacommit,rollback,savepoint,restore") final String filter, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"localFolder"}, help = "Local Folder to export to", mandatory = true) String localFolder) + @ShellOption(value = {"--limit"}, help = "Limit Instants", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--actions"}, help = "Comma separated list of Instant actions to export", + defaultValue = "clean,commit,deltacommit,rollback,savepoint,restore") final String filter, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--localFolder"}, help = "Local Folder to export to") String localFolder) throws Exception { final String basePath = HoodieCLI.getTableMetaClient().getBasePath(); diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/FileSystemViewCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/FileSystemViewCommand.java index d5647d860ddff..78e7d90195d58 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/FileSystemViewCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/FileSystemViewCommand.java @@ -18,6 +18,9 @@ package org.apache.hudi.cli.commands; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodieTableHeaderFields; @@ -32,14 +35,9 @@ import 
org.apache.hudi.common.table.view.HoodieTableFileSystemView; import org.apache.hudi.common.util.NumericUtils; import org.apache.hudi.common.util.Option; - -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import java.io.IOException; import java.io.Serializable; @@ -55,35 +53,31 @@ /** * CLI command to display file system options. */ -@Component -public class FileSystemViewCommand implements CommandMarker { +@ShellComponent +public class FileSystemViewCommand { - @CliCommand(value = "show fsview all", help = "Show entire file-system view") + @ShellMethod(key = "show fsview all", value = "Show entire file-system view") public String showAllFileSlices( - @CliOption(key = {"pathRegex"}, help = "regex to select files, eg: 2016/08/02", - unspecifiedDefaultValue = "") String globRegex, - @CliOption(key = {"baseFileOnly"}, help = "Only display base files view", - unspecifiedDefaultValue = "false") boolean baseFileOnly, - @CliOption(key = {"maxInstant"}, help = "File-Slices upto this instant are displayed", - unspecifiedDefaultValue = "") String maxInstant, - @CliOption(key = {"includeMax"}, help = "Include Max Instant", - unspecifiedDefaultValue = "false") boolean includeMaxInstant, - @CliOption(key = {"includeInflight"}, help = "Include Inflight Instants", - unspecifiedDefaultValue = "false") boolean includeInflight, - @CliOption(key = {"excludeCompaction"}, help = "Exclude compaction Instants", - unspecifiedDefaultValue = "false") boolean excludeCompaction, - @CliOption(key = {"limit"}, help = "Limit rows to be displayed", unspecifiedDefaultValue = "-1") Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly) + @ShellOption(value = {"--pathRegex"}, help = "regex to select files, eg: par1", + defaultValue = "*") String globRegex, + @ShellOption(value = {"--baseFileOnly"}, help = "Only display base files view", + defaultValue = "false") boolean baseFileOnly, + @ShellOption(value = {"--maxInstant"}, help = "File-Slices upto this instant are displayed", + defaultValue = "") String maxInstant, + @ShellOption(value = {"--includeMax"}, help = "Include Max Instant", + defaultValue = "false") boolean includeMaxInstant, + @ShellOption(value = {"--includeInflight"}, help = "Include Inflight Instants", + defaultValue = "false") boolean includeInflight, + @ShellOption(value = {"--excludeCompaction"}, help = "Exclude compaction Instants", + defaultValue = "false") boolean excludeCompaction, + @ShellOption(value = {"--limit"}, help = "Limit rows to be displayed", defaultValue = "-1") Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = 
"Print Header Only", + defaultValue = "false") final boolean headerOnly) throws IOException { globRegex = globRegex == null ? "" : globRegex; - // TODO: There is a bug in spring shell, if we pass */*/* to pathRegex, the last '/' will be lost, pathRegex will be */** - if (globRegex.endsWith("**")) { - globRegex = globRegex.replace("**", "*/*"); - } HoodieTableFileSystemView fsView = buildFileSystemView(globRegex, maxInstant, baseFileOnly, includeMaxInstant, includeInflight, excludeCompaction); @@ -123,26 +117,26 @@ public String showAllFileSlices( return HoodiePrintHelper.print(header, fieldNameToConverterMap, sortByField, descending, limit, headerOnly, rows); } - @CliCommand(value = "show fsview latest", help = "Show latest file-system view") + @ShellMethod(key = "show fsview latest", value = "Show latest file-system view") public String showLatestFileSlices( - @CliOption(key = {"partitionPath"}, help = "A valid partition path", unspecifiedDefaultValue = "") String partition, - @CliOption(key = {"baseFileOnly"}, help = "Only display base file view", - unspecifiedDefaultValue = "false") boolean baseFileOnly, - @CliOption(key = {"maxInstant"}, help = "File-Slices upto this instant are displayed", - unspecifiedDefaultValue = "") String maxInstant, - @CliOption(key = {"merge"}, help = "Merge File Slices due to pending compaction", - unspecifiedDefaultValue = "true") final boolean merge, - @CliOption(key = {"includeMax"}, help = "Include Max Instant", - unspecifiedDefaultValue = "false") boolean includeMaxInstant, - @CliOption(key = {"includeInflight"}, help = "Include Inflight Instants", - unspecifiedDefaultValue = "false") boolean includeInflight, - @CliOption(key = {"excludeCompaction"}, help = "Exclude compaction Instants", - unspecifiedDefaultValue = "false") boolean excludeCompaction, - @CliOption(key = {"limit"}, help = "Limit rows to be displayed", unspecifiedDefaultValue = "-1") Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly) + @ShellOption(value = {"--partitionPath"}, help = "A valid partition path", defaultValue = "") String partition, + @ShellOption(value = {"--baseFileOnly"}, help = "Only display base file view", + defaultValue = "false") boolean baseFileOnly, + @ShellOption(value = {"--maxInstant"}, help = "File-Slices upto this instant are displayed", + defaultValue = "") String maxInstant, + @ShellOption(value = {"--merge"}, help = "Merge File Slices due to pending compaction", + defaultValue = "true") final boolean merge, + @ShellOption(value = {"--includeMax"}, help = "Include Max Instant", + defaultValue = "false") boolean includeMaxInstant, + @ShellOption(value = {"--includeInflight"}, help = "Include Inflight Instants", + defaultValue = "false") boolean includeInflight, + @ShellOption(value = {"--excludeCompaction"}, help = "Exclude compaction Instants", + defaultValue = "false") boolean excludeCompaction, + @ShellOption(value = {"--limit"}, help = "Limit rows to be displayed", defaultValue = "-1") Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = 
"Print Header Only", + defaultValue = "false") final boolean headerOnly) throws IOException { HoodieTableFileSystemView fsView = buildFileSystemView(partition, maxInstant, baseFileOnly, includeMaxInstant, diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HDFSParquetImportCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HDFSParquetImportCommand.java index dc59f8a650fa7..9ea5bbab04bda 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HDFSParquetImportCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HDFSParquetImportCommand.java @@ -24,14 +24,11 @@ import org.apache.hudi.utilities.HDFSParquetImporter.FormatValidator; import org.apache.hudi.utilities.UtilHelpers; import org.apache.hudi.utilities.deltastreamer.HoodieDeltaStreamer; - import org.apache.spark.launcher.SparkLauncher; import org.apache.spark.util.Utils; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; - +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import scala.collection.JavaConverters; /** @@ -40,33 +37,33 @@ * @see HoodieDeltaStreamer * @deprecated This utility is deprecated in 0.10.0 and will be removed in 0.11.0. Use {@link HoodieDeltaStreamer.Config#runBootstrap} instead. */ -@Component -public class HDFSParquetImportCommand implements CommandMarker { +@ShellComponent +public class HDFSParquetImportCommand { - @CliCommand(value = "hdfsparquetimport", help = "Imports Parquet table to a hoodie table") + @ShellMethod(key = "hdfsparquetimport", value = "Imports Parquet table to a hoodie table") public String convert( - @CliOption(key = "upsert", unspecifiedDefaultValue = "false", + @ShellOption(value = "--upsert", defaultValue = "false", help = "Uses upsert API instead of the default insert API of WriteClient") boolean useUpsert, - @CliOption(key = "srcPath", mandatory = true, help = "Base path for the input table") final String srcPath, - @CliOption(key = "targetPath", mandatory = true, + @ShellOption(value = "--srcPath", help = "Base path for the input table") final String srcPath, + @ShellOption(value = "--targetPath", help = "Base path for the target hoodie table") final String targetPath, - @CliOption(key = "tableName", mandatory = true, help = "Table name") final String tableName, - @CliOption(key = "tableType", mandatory = true, help = "Table type") final String tableType, - @CliOption(key = "rowKeyField", mandatory = true, help = "Row key field name") final String rowKeyField, - @CliOption(key = "partitionPathField", unspecifiedDefaultValue = "", + @ShellOption(value = "--tableName", help = "Table name") final String tableName, + @ShellOption(value = "--tableType", help = "Table type") final String tableType, + @ShellOption(value = "--rowKeyField", help = "Row key field name") final String rowKeyField, + @ShellOption(value = "--partitionPathField", defaultValue = "", help = "Partition path field name") final String partitionPathField, - @CliOption(key = {"parallelism"}, mandatory = true, + @ShellOption(value = {"--parallelism"}, help = "Parallelism for hoodie insert") final String parallelism, - @CliOption(key = "schemaFilePath", mandatory = true, + @ShellOption(value = "--schemaFilePath", help = "Path for Avro schema file") final String schemaFilePath, - 
@CliOption(key = "format", mandatory = true, help = "Format for the input data") final String format, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", mandatory = true, help = "Spark executor memory") final String sparkMemory, - @CliOption(key = "retry", mandatory = true, help = "Number of retries") final String retry, - @CliOption(key = "propsFilePath", help = "path to properties file on localfs or dfs with configurations for hoodie client for importing", - unspecifiedDefaultValue = "") final String propsFilePath, - @CliOption(key = "hoodieConfigs", help = "Any configuration that can be set in the properties file can be passed here in the form of an array", - unspecifiedDefaultValue = "") final String[] configs) throws Exception { + @ShellOption(value = "--format", help = "Format for the input data") final String format, + @ShellOption(value = "--sparkMaster", defaultValue = "", help = "Spark Master") String master, + @ShellOption(value = "--sparkMemory", help = "Spark executor memory") final String sparkMemory, + @ShellOption(value = "--retry", help = "Number of retries") final String retry, + @ShellOption(value = "--propsFilePath", help = "path to properties file on localfs or dfs with configurations for hoodie client for importing", + defaultValue = "") final String propsFilePath, + @ShellOption(value = "--hoodieConfigs", help = "Any configuration that can be set in the properties file can be passed here in the form of an array", + defaultValue = "") final String[] configs) throws Exception { (new FormatValidator()).validate("format", format); diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HoodieLogFileCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HoodieLogFileCommand.java index 49cc25b895730..56e00aa24cd7c 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HoodieLogFileCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HoodieLogFileCommand.java @@ -18,6 +18,12 @@ package org.apache.hudi.cli.commands; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.avro.Schema; +import org.apache.avro.generic.IndexedRecord; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodieTableHeaderFields; @@ -41,18 +47,12 @@ import org.apache.hudi.common.util.Option; import org.apache.hudi.config.HoodieCompactionConfig; import org.apache.hudi.config.HoodieMemoryConfig; - -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.avro.Schema; -import org.apache.avro.generic.IndexedRecord; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; import org.apache.parquet.avro.AvroSchemaConverter; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; +import scala.Tuple2; +import scala.Tuple3; import java.io.IOException; import java.util.ArrayList; @@ -64,26 +64,23 @@ import java.util.concurrent.atomic.AtomicInteger; import 
java.util.stream.Collectors; -import scala.Tuple2; -import scala.Tuple3; - import static org.apache.hudi.common.util.ValidationUtils.checkArgument; /** * CLI command to display log file options. */ -@Component -public class HoodieLogFileCommand implements CommandMarker { +@ShellComponent +public class HoodieLogFileCommand { - @CliCommand(value = "show logfile metadata", help = "Read commit metadata from log files") + @ShellMethod(key = "show logfile metadata", value = "Read commit metadata from log files") public String showLogFileCommits( - @CliOption(key = "logFilePathPattern", mandatory = true, + @ShellOption(value = "--logFilePathPattern", help = "Fully qualified path for the log file") final String logFilePathPattern, - @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly) + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly) throws IOException { FileSystem fs = HoodieCLI.getTableMetaClient().getFs(); @@ -168,14 +165,14 @@ public String showLogFileCommits( return HoodiePrintHelper.print(header, new HashMap<>(), sortByField, descending, limit, headerOnly, rows); } - @CliCommand(value = "show logfile records", help = "Read records from log files") + @ShellMethod(key = "show logfile records", value = "Read records from log files") public String showLogFileRecords( - @CliOption(key = {"limit"}, help = "Limit commits", - unspecifiedDefaultValue = "10") final Integer limit, - @CliOption(key = "logFilePathPattern", mandatory = true, + @ShellOption(value = {"--limit"}, help = "Limit commits", + defaultValue = "10") final Integer limit, + @ShellOption(value = "--logFilePathPattern", help = "Fully qualified paths for the log files") final String logFilePathPattern, - @CliOption(key = "mergeRecords", help = "If the records in the log files should be merged", - unspecifiedDefaultValue = "false") final Boolean shouldMerge) + @ShellOption(value = "--mergeRecords", help = "If the records in the log files should be merged", + defaultValue = "false") final Boolean shouldMerge) throws IOException { System.out.println("===============> Showing only " + limit + " records <==============="); diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HoodieSyncValidateCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HoodieSyncValidateCommand.java index 35e9b2b016b67..0fc26a55b8990 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HoodieSyncValidateCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HoodieSyncValidateCommand.java @@ -24,11 +24,9 @@ import org.apache.hudi.common.table.timeline.HoodieInstant; import org.apache.hudi.common.table.timeline.HoodieTimeline; import org.apache.hudi.exception.HoodieException; - -import org.springframework.shell.core.CommandMarker; -import 
org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import java.io.IOException; import java.util.List; @@ -39,22 +37,22 @@ /** * CLI command to display sync options. */ -@Component -public class HoodieSyncValidateCommand implements CommandMarker { +@ShellComponent +public class HoodieSyncValidateCommand { - @CliCommand(value = "sync validate", help = "Validate the sync by counting the number of records") + @ShellMethod(key = "sync validate", value = "Validate the sync by counting the number of records") public String validateSync( - @CliOption(key = {"mode"}, unspecifiedDefaultValue = "complete", help = "Check mode") final String mode, - @CliOption(key = {"sourceDb"}, unspecifiedDefaultValue = "rawdata", help = "source database") final String srcDb, - @CliOption(key = {"targetDb"}, unspecifiedDefaultValue = "dwh_hoodie", + @ShellOption(value = {"--mode"}, defaultValue = "complete", help = "Check mode") final String mode, + @ShellOption(value = {"--sourceDb"}, defaultValue = "rawdata", help = "source database") final String srcDb, + @ShellOption(value = {"--targetDb"}, defaultValue = "dwh_hoodie", help = "target database") final String tgtDb, - @CliOption(key = {"partitionCount"}, unspecifiedDefaultValue = "5", + @ShellOption(value = {"--partitionCount"}, defaultValue = "5", help = "total number of recent partitions to validate") final int partitionCount, - @CliOption(key = {"hiveServerUrl"}, mandatory = true, + @ShellOption(value = {"--hiveServerUrl"}, help = "hiveServerURL to connect to") final String hiveServerUrl, - @CliOption(key = {"hiveUser"}, unspecifiedDefaultValue = "", + @ShellOption(value = {"--hiveUser"}, defaultValue = "", help = "hive username to connect to") final String hiveUser, - @CliOption(key = {"hivePass"}, mandatory = true, unspecifiedDefaultValue = "", + @ShellOption(value = {"--hivePass"}, defaultValue = "", help = "hive password to connect to") final String hivePass) throws Exception { if (HoodieCLI.syncTableMetadata == null) { diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/MarkersCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/MarkersCommand.java index d229fe1a71f03..008c61aa9a84b 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/MarkersCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/MarkersCommand.java @@ -22,25 +22,24 @@ import org.apache.hudi.cli.utils.InputStreamConsumer; import org.apache.hudi.cli.utils.SparkUtil; import org.apache.hudi.common.table.HoodieTableMetaClient; - import org.apache.spark.launcher.SparkLauncher; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; /** * CLI command for marker options. 
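// ---------------------------------------------------------------------------
// Aside: alongside the shell migration, these classes switch from the Log4j
// 1.x to the Log4j 2 API. Only the imports change; the getLogger/log call
// sites keep the same shape. Minimal sketch:
//
//   Before: import org.apache.log4j.LogManager;
//           import org.apache.log4j.Logger;
//   After:
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class LoggingSketch {

  private static final Logger LOG = LogManager.getLogger(LoggingSketch.class);

  public static void main(String[] args) {
    LOG.info("Log4j 2 API call sites are source-compatible here");
  }
}
// ---------------------------------------------------------------------------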
*/ -@Component -public class MarkersCommand implements CommandMarker { +@ShellComponent +public class MarkersCommand { - @CliCommand(value = "marker delete", help = "Delete the marker") + @ShellMethod(key = "marker delete", value = "Delete the marker") public String deleteMarker( - @CliOption(key = {"commit"}, help = "Delete a marker") final String instantTime, - @CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path") final String sparkPropertiesPath, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "1G", + @ShellOption(value = {"--commit"}, help = "Delete a marker") final String instantTime, + @ShellOption(value = {"--sparkProperties"}, help = "Spark Properties File Path", + defaultValue = "") final String sparkPropertiesPath, + @ShellOption(value = "--sparkMaster", defaultValue = "", help = "Spark Master") String master, + @ShellOption(value = "--sparkMemory", defaultValue = "1G", help = "Spark executor memory") final String sparkMemory) throws Exception { HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient(); diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/MetadataCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/MetadataCommand.java index d9ef1d04cee98..65b01bb2545e4 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/MetadataCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/MetadataCommand.java @@ -18,6 +18,8 @@ package org.apache.hudi.cli.commands; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.Path; import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.TableHeader; @@ -33,16 +35,12 @@ import org.apache.hudi.metadata.HoodieBackedTableMetadata; import org.apache.hudi.metadata.HoodieTableMetadata; import org.apache.hudi.metadata.SparkHoodieBackedTableMetadataWriter; - -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.Path; -import org.apache.log4j.LogManager; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.spark.api.java.JavaSparkContext; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import java.io.FileNotFoundException; import java.io.IOException; @@ -71,8 +69,8 @@ * Run metadata commands * > metadata list-partitions */ -@Component -public class MetadataCommand implements CommandMarker { +@ShellComponent +public class MetadataCommand { private static final Logger LOG = LogManager.getLogger(MetadataCommand.class); private static String metadataBaseDirectory; @@ -98,9 +96,9 @@ public static String getMetadataTableBasePath(String tableBasePath) { return HoodieTableMetadata.getMetadataTableBasePath(tableBasePath); } - @CliCommand(value = "metadata set", help = "Set options for Metadata Table") - public String set(@CliOption(key = {"metadataDir"}, - help = "Directory to read/write metadata table (can be different from dataset)", unspecifiedDefaultValue = "") final String metadataDir) { + @ShellMethod(key = "metadata set", value = "Set options for 
Metadata Table") + public String set(@ShellOption(value = {"--metadataDir"}, + help = "Directory to read/write metadata table (can be different from dataset)", defaultValue = "") final String metadataDir) { if (!metadataDir.isEmpty()) { setMetadataBaseDirectory(metadataDir); } @@ -108,9 +106,9 @@ public String set(@CliOption(key = {"metadataDir"}, return "Ok"; } - @CliCommand(value = "metadata create", help = "Create the Metadata Table if it does not exist") + @ShellMethod(key = "metadata create", value = "Create the Metadata Table if it does not exist") public String create( - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = SparkUtil.DEFAULT_SPARK_MASTER, help = "Spark master") final String master + @ShellOption(value = "--sparkMaster", defaultValue = SparkUtil.DEFAULT_SPARK_MASTER, help = "Spark master") final String master ) throws IOException { HoodieCLI.getTableMetaClient(); Path metadataPath = new Path(getMetadataTableBasePath(HoodieCLI.basePath)); @@ -131,7 +129,7 @@ public String create( return String.format("Created Metadata Table in %s (duration=%.2f secs)", metadataPath, timer.endTimer() / 1000.0); } - @CliCommand(value = "metadata delete", help = "Remove the Metadata Table") + @ShellMethod(key = "metadata delete", value = "Remove the Metadata Table") public String delete() throws Exception { HoodieCLI.getTableMetaClient(); Path metadataPath = new Path(getMetadataTableBasePath(HoodieCLI.basePath)); @@ -147,9 +145,9 @@ public String delete() throws Exception { return String.format("Removed Metadata Table from %s", metadataPath); } - @CliCommand(value = "metadata init", help = "Update the metadata table from commits since the creation") - public String init(@CliOption(key = "sparkMaster", unspecifiedDefaultValue = SparkUtil.DEFAULT_SPARK_MASTER, help = "Spark master") final String master, - @CliOption(key = {"readonly"}, unspecifiedDefaultValue = "false", + @ShellMethod(key = "metadata init", value = "Update the metadata table from commits since the creation") + public String init(@ShellOption(value = "--sparkMaster", defaultValue = SparkUtil.DEFAULT_SPARK_MASTER, help = "Spark master") final String master, + @ShellOption(value = {"--readonly"}, defaultValue = "false", help = "Open in read-only mode") final boolean readOnly) throws Exception { HoodieCLI.getTableMetaClient(); Path metadataPath = new Path(getMetadataTableBasePath(HoodieCLI.basePath)); @@ -171,7 +169,7 @@ public String init(@CliOption(key = "sparkMaster", unspecifiedDefaultValue = Spa return String.format(action + " Metadata Table in %s (duration=%.2fsec)", metadataPath, (timer.endTimer()) / 1000.0); } - @CliCommand(value = "metadata stats", help = "Print stats about the metadata") + @ShellMethod(key = "metadata stats", value = "Print stats about the metadata") public String stats() throws IOException { HoodieCLI.getTableMetaClient(); HoodieMetadataConfig config = HoodieMetadataConfig.newBuilder().enable(true).build(); @@ -194,9 +192,9 @@ public String stats() throws IOException { false, Integer.MAX_VALUE, false, rows); } - @CliCommand(value = "metadata list-partitions", help = "List all partitions from metadata") + @ShellMethod(key = "metadata list-partitions", value = "List all partitions from metadata") public String listPartitions( - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = SparkUtil.DEFAULT_SPARK_MASTER, help = "Spark master") final String master + @ShellOption(value = "--sparkMaster", defaultValue = SparkUtil.DEFAULT_SPARK_MASTER, help = "Spark master") final String master ) throws 
IOException { HoodieCLI.getTableMetaClient(); initJavaSparkContext(Option.of(master)); @@ -224,9 +222,9 @@ public String listPartitions( false, Integer.MAX_VALUE, false, rows); } - @CliCommand(value = "metadata list-files", help = "Print a list of all files in a partition from the metadata") + @ShellMethod(key = "metadata list-files", value = "Print a list of all files in a partition from the metadata") public String listFiles( - @CliOption(key = {"partition"}, help = "Name of the partition to list files", unspecifiedDefaultValue = "") final String partition) throws IOException { + @ShellOption(value = {"--partition"}, help = "Name of the partition to list files", defaultValue = "") final String partition) throws IOException { HoodieCLI.getTableMetaClient(); HoodieMetadataConfig config = HoodieMetadataConfig.newBuilder().enable(true).build(); HoodieBackedTableMetadata metaReader = new HoodieBackedTableMetadata( @@ -257,9 +255,9 @@ public String listFiles( false, Integer.MAX_VALUE, false, rows); } - @CliCommand(value = "metadata validate-files", help = "Validate all files in all partitions from the metadata") + @ShellMethod(key = "metadata validate-files", value = "Validate all files in all partitions from the metadata") public String validateFiles( - @CliOption(key = {"verbose"}, help = "Print all file details", unspecifiedDefaultValue = "false") final boolean verbose) throws IOException { + @ShellOption(value = {"--verbose"}, help = "Print all file details", defaultValue = "false") final boolean verbose) throws IOException { HoodieCLI.getTableMetaClient(); HoodieMetadataConfig config = HoodieMetadataConfig.newBuilder().enable(true).build(); HoodieBackedTableMetadata metadataReader = new HoodieBackedTableMetadata( diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RepairsCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RepairsCommand.java index c18d3a93fe349..f0ff924e227ff 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RepairsCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RepairsCommand.java @@ -18,6 +18,8 @@ package org.apache.hudi.cli.commands; +import org.apache.avro.AvroRuntimeException; +import org.apache.hadoop.fs.Path; import org.apache.hudi.cli.DeDupeType; import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.HoodiePrintHelper; @@ -36,16 +38,14 @@ import org.apache.hudi.common.util.PartitionPathEncodeUtils; import org.apache.hudi.common.util.StringUtils; import org.apache.hudi.exception.HoodieIOException; - -import org.apache.avro.AvroRuntimeException; -import org.apache.hadoop.fs.Path; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.spark.launcher.SparkLauncher; import org.apache.spark.util.Utils; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; +import scala.collection.JavaConverters; import java.io.FileInputStream; import java.io.IOException; @@ -55,36 +55,34 @@ import java.util.TreeSet; import java.util.stream.Collectors; -import scala.collection.JavaConverters; - import static org.apache.hudi.common.table.HoodieTableMetaClient.METAFOLDER_NAME; /** * CLI command to 
display and trigger repair options. */ -@Component -public class RepairsCommand implements CommandMarker { +@ShellComponent +public class RepairsCommand { - private static final Logger LOG = Logger.getLogger(RepairsCommand.class); + private static final Logger LOG = LogManager.getLogger(RepairsCommand.class); public static final String DEDUPLICATE_RETURN_PREFIX = "Deduplicated files placed in: "; - @CliCommand(value = "repair deduplicate", - help = "De-duplicate a partition path contains duplicates & produce repaired files to replace with") + @ShellMethod(key = "repair deduplicate", + value = "De-duplicate a partition path contains duplicates & produce repaired files to replace with") public String deduplicate( - @CliOption(key = {"duplicatedPartitionPath"}, help = "Partition Path containing the duplicates", - mandatory = true) final String duplicatedPartitionPath, - @CliOption(key = {"repairedOutputPath"}, help = "Location to place the repaired files", - mandatory = true) final String repairedOutputPath, - @CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path", - unspecifiedDefaultValue = "") String sparkPropertiesPath, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G", + @ShellOption(value = {"--duplicatedPartitionPath"}, help = "Partition Path containing the duplicates") + final String duplicatedPartitionPath, + @ShellOption(value = {"--repairedOutputPath"}, help = "Location to place the repaired files") + final String repairedOutputPath, + @ShellOption(value = {"--sparkProperties"}, help = "Spark Properties File Path", + defaultValue = "") String sparkPropertiesPath, + @ShellOption(value = "--sparkMaster", defaultValue = "", help = "Spark Master") String master, + @ShellOption(value = "--sparkMemory", defaultValue = "4G", help = "Spark executor memory") final String sparkMemory, - @CliOption(key = {"dryrun"}, + @ShellOption(value = {"--dryrun"}, help = "Should we actually remove duplicates or just run and store result to repairedOutputPath", - unspecifiedDefaultValue = "true") final boolean dryRun, - @CliOption(key = {"dedupeType"}, help = "Valid values are - insert_type, update_type and upsert_type", - unspecifiedDefaultValue = "insert_type") final String dedupeType) + defaultValue = "true") final boolean dryRun, + @ShellOption(value = {"--dedupeType"}, help = "Valid values are - insert_type, update_type and upsert_type", + defaultValue = "insert_type") final String dedupeType) throws Exception { if (!DeDupeType.values().contains(DeDupeType.withName(dedupeType))) { throw new IllegalArgumentException("Please provide valid dedupe type!"); @@ -112,10 +110,10 @@ public String deduplicate( } } - @CliCommand(value = "repair addpartitionmeta", help = "Add partition metadata to a table, if not present") + @ShellMethod(key = "repair addpartitionmeta", value = "Add partition metadata to a table, if not present") public String addPartitionMeta( - @CliOption(key = {"dryrun"}, help = "Should we actually add or just print what would be done", - unspecifiedDefaultValue = "true") final boolean dryRun) + @ShellOption(value = {"--dryrun"}, help = "Should we actually add or just print what would be done", + defaultValue = "true") final boolean dryRun) throws IOException { HoodieTableMetaClient client = HoodieCLI.getTableMetaClient(); @@ -150,9 +148,12 @@ public String addPartitionMeta( HoodieTableHeaderFields.HEADER_METADATA_PRESENT, 
HoodieTableHeaderFields.HEADER_ACTION}, rows); } - @CliCommand(value = "repair overwrite-hoodie-props", help = "Overwrite hoodie.properties with provided file. Risky operation. Proceed with caution!") + @ShellMethod(key = "repair overwrite-hoodie-props", + value = "Overwrite hoodie.properties with provided file. Risky operation. Proceed with caution!") public String overwriteHoodieProperties( - @CliOption(key = {"new-props-file"}, help = "Path to a properties file on local filesystem to overwrite the table's hoodie.properties with") final String overwriteFilePath) throws IOException { + @ShellOption(value = {"--new-props-file"}, + help = "Path to a properties file on local filesystem to overwrite the table's hoodie.properties with") + final String overwriteFilePath) throws IOException { HoodieTableMetaClient client = HoodieCLI.getTableMetaClient(); Properties newProps = new Properties(); @@ -181,7 +182,7 @@ public String overwriteHoodieProperties( HoodieTableHeaderFields.HEADER_OLD_VALUE, HoodieTableHeaderFields.HEADER_NEW_VALUE}, rows); } - @CliCommand(value = "repair corrupted clean files", help = "repair corrupted clean files") + @ShellMethod(key = "repair corrupted clean files", value = "repair corrupted clean files") public void removeCorruptedPendingCleanAction() { HoodieTableMetaClient client = HoodieCLI.getTableMetaClient(); @@ -204,10 +205,11 @@ public void removeCorruptedPendingCleanAction() { }); } - @CliCommand(value = "repair migrate-partition-meta", help = "Migrate all partition meta file currently stored in text format " + @ShellMethod(key = "repair migrate-partition-meta", value = "Migrate all partition meta file currently stored in text format " + "to be stored in base file format. See HoodieTableConfig#PARTITION_METAFILE_USE_DATA_FORMAT.") public String migratePartitionMeta( - @CliOption(key = {"dryrun"}, help = "dry run without modifying anything.", unspecifiedDefaultValue = "true") final boolean dryRun) + @ShellOption(value = {"--dryrun"}, help = "dry run without modifying anything.", defaultValue = "true") + final boolean dryRun) throws IOException { HoodieLocalEngineContext engineContext = new HoodieLocalEngineContext(HoodieCLI.conf); @@ -264,13 +266,13 @@ public String migratePartitionMeta( }, rows); } - @CliCommand(value = "repair deprecated partition", - help = "Repair deprecated partition (\"default\"). Re-writes data from the deprecated partition into " + PartitionPathEncodeUtils.DEFAULT_PARTITION_PATH) + @ShellMethod(key = "repair deprecated partition", + value = "Repair deprecated partition (\"default\"). 
Re-writes data from the deprecated partition into " + PartitionPathEncodeUtils.DEFAULT_PARTITION_PATH) public String repairDeprecatePartition( - @CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path", - unspecifiedDefaultValue = "") String sparkPropertiesPath, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G", + @ShellOption(value = {"--sparkProperties"}, help = "Spark Properties File Path", + defaultValue = "") String sparkPropertiesPath, + @ShellOption(value = "--sparkMaster", defaultValue = "", help = "Spark Master") String master, + @ShellOption(value = "--sparkMemory", defaultValue = "4G", help = "Spark executor memory") final String sparkMemory) throws Exception { if (StringUtils.isNullOrEmpty(sparkPropertiesPath)) { sparkPropertiesPath = @@ -290,17 +292,15 @@ public String repairDeprecatePartition( return "Repair succeeded"; } - @CliCommand(value = "rename partition", - help = "Rename partition. Usage: rename partition --oldPartition <oldPartition> --newPartition <newPartition>") + @ShellMethod(key = "rename partition", + value = "Rename partition. Usage: rename partition --oldPartition <oldPartition> --newPartition <newPartition>") public String renamePartition( - @CliOption(key = {"oldPartition"}, help = "Partition value to be renamed", mandatory = true, - unspecifiedDefaultValue = "") String oldPartition, - @CliOption(key = {"newPartition"}, help = "New partition value after rename", mandatory = true, - unspecifiedDefaultValue = "") String newPartition, - @CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path", - unspecifiedDefaultValue = "") String sparkPropertiesPath, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G", + @ShellOption(value = {"--oldPartition"}, help = "Partition value to be renamed") String oldPartition, + @ShellOption(value = {"--newPartition"}, help = "New partition value after rename") String newPartition, + @ShellOption(value = {"--sparkProperties"}, help = "Spark Properties File Path", + defaultValue = "") String sparkPropertiesPath, + @ShellOption(value = "--sparkMaster", defaultValue = "", help = "Spark Master") String master, + @ShellOption(value = "--sparkMemory", defaultValue = "4G", help = "Spark executor memory") final String sparkMemory) throws Exception { if (StringUtils.isNullOrEmpty(sparkPropertiesPath)) { sparkPropertiesPath = diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RollbacksCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RollbacksCommand.java index 3040e0f6a1c12..e0fad70d99b3e 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RollbacksCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/RollbacksCommand.java @@ -33,12 +33,10 @@ import org.apache.hudi.common.table.timeline.TimelineMetadataUtils; import org.apache.hudi.common.util.CollectionUtils; import org.apache.hudi.common.util.collection.Pair; - import org.apache.spark.launcher.SparkLauncher; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import
java.io.IOException; import java.util.ArrayList; @@ -51,16 +49,16 @@ /** * CLI command to display rollback options. */ -@Component -public class RollbacksCommand implements CommandMarker { +@ShellComponent +public class RollbacksCommand { - @CliCommand(value = "show rollbacks", help = "List all rollback instants") + @ShellMethod(key = "show rollbacks", value = "List all rollback instants") public String showRollbacks( - @CliOption(key = {"limit"}, help = "Limit #rows to be displayed", unspecifiedDefaultValue = "10") Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly) { + @ShellOption(value = {"--limit"}, help = "Limit #rows to be displayed", defaultValue = "10") Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly) { HoodieActiveTimeline activeTimeline = new RollbackTimeline(HoodieCLI.getTableMetaClient()); HoodieTimeline rollback = activeTimeline.getRollbackTimeline().filterCompletedInstants(); @@ -90,14 +88,14 @@ public String showRollbacks( return HoodiePrintHelper.print(header, new HashMap<>(), sortByField, descending, limit, headerOnly, rows); } - @CliCommand(value = "show rollback", help = "Show details of a rollback instant") + @ShellMethod(key = "show rollback", value = "Show details of a rollback instant") public String showRollback( - @CliOption(key = {"instant"}, help = "Rollback instant", mandatory = true) String rollbackInstant, - @CliOption(key = {"limit"}, help = "Limit #rows to be displayed", unspecifiedDefaultValue = "10") Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly) + @ShellOption(value = {"--instant"}, help = "Rollback instant") String rollbackInstant, + @ShellOption(value = {"--limit"}, help = "Limit #rows to be displayed", defaultValue = "10") Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly) throws IOException { HoodieActiveTimeline activeTimeline = new RollbackTimeline(HoodieCLI.getTableMetaClient()); final List<Comparable[]> rows = new ArrayList<>(); @@ -125,14 +123,15 @@ public String showRollback( return HoodiePrintHelper.print(header, new HashMap<>(), sortByField, descending, limit, headerOnly, rows); } - @CliCommand(value = "commit rollback", help = "Rollback a commit") + @ShellMethod(key = "commit rollback", value = "Rollback a commit") public String rollbackCommit( - @CliOption(key = {"commit"}, help = "Commit to rollback") final String instantTime, - @CliOption(key =
{"sparkProperties"}, help = "Spark Properties File Path") final String sparkPropertiesPath, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G", + @ShellOption(value = {"--commit"}, help = "Commit to rollback") final String instantTime, + @ShellOption(value = {"--sparkProperties"}, help = "Spark Properties File Path", + defaultValue = "") final String sparkPropertiesPath, + @ShellOption(value = "--sparkMaster", defaultValue = "", help = "Spark Master") String master, + @ShellOption(value = "--sparkMemory", defaultValue = "4G", help = "Spark executor memory") final String sparkMemory, - @CliOption(key = "rollbackUsingMarkers", unspecifiedDefaultValue = "false", + @ShellOption(value = "--rollbackUsingMarkers", defaultValue = "false", help = "Enabling marker based rollback") final String rollbackUsingMarkers) throws Exception { HoodieActiveTimeline activeTimeline = HoodieCLI.getTableMetaClient().getActiveTimeline(); diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SavepointsCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SavepointsCommand.java index 5b775e5f3135b..73f94acda8787 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SavepointsCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SavepointsCommand.java @@ -28,12 +28,10 @@ import org.apache.hudi.common.table.timeline.HoodieInstant; import org.apache.hudi.common.table.timeline.HoodieTimeline; import org.apache.hudi.exception.HoodieException; - import org.apache.spark.launcher.SparkLauncher; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import java.util.List; import java.util.stream.Collectors; @@ -41,10 +39,10 @@ /** * CLI command to display savepoint options. 
*/ -@Component -public class SavepointsCommand implements CommandMarker { +@ShellComponent +public class SavepointsCommand { - @CliCommand(value = "savepoints show", help = "Show the savepoints") + @ShellMethod(key = "savepoints show", value = "Show the savepoints") public String showSavepoints() { HoodieActiveTimeline activeTimeline = HoodieCLI.getTableMetaClient().getActiveTimeline(); HoodieTimeline timeline = activeTimeline.getSavePointTimeline().filterCompletedInstants(); @@ -57,16 +55,17 @@ public String showSavepoints() { return HoodiePrintHelper.print(new String[] {HoodieTableHeaderFields.HEADER_SAVEPOINT_TIME}, rows); } - @CliCommand(value = "savepoint create", help = "Savepoint a commit") + @ShellMethod(key = "savepoint create", value = "Savepoint a commit") public String savepoint( - @CliOption(key = {"commit"}, help = "Commit to savepoint") final String commitTime, - @CliOption(key = {"user"}, unspecifiedDefaultValue = "default", + @ShellOption(value = {"--commit"}, help = "Commit to savepoint") final String commitTime, + @ShellOption(value = {"--user"}, defaultValue = "default", help = "User who is creating the savepoint") final String user, - @CliOption(key = {"comments"}, unspecifiedDefaultValue = "default", + @ShellOption(value = {"--comments"}, defaultValue = "default", help = "Comments for creating the savepoint") final String comments, - @CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path") final String sparkPropertiesPath, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G", + @ShellOption(value = {"--sparkProperties"}, help = "Spark Properties File Path", + defaultValue = "") final String sparkPropertiesPath, + @ShellOption(value = "--sparkMaster", defaultValue = "", help = "Spark Master") String master, + @ShellOption(value = "--sparkMemory", defaultValue = "4G", help = "Spark executor memory") final String sparkMemory) throws Exception { HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient(); @@ -90,14 +89,15 @@ public String savepoint( return String.format("The commit \"%s\" has been savepointed.", commitTime); } - @CliCommand(value = "savepoint rollback", help = "Savepoint a commit") + @ShellMethod(key = "savepoint rollback", value = "Rollback to a savepoint") public String rollbackToSavepoint( - @CliOption(key = {"savepoint"}, help = "Savepoint to rollback") final String instantTime, - @CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path") final String sparkPropertiesPath, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master, - @CliOption(key = {"lazyFailedWritesCleanPolicy"}, help = "True if FailedWriteCleanPolicy is lazy", - unspecifiedDefaultValue = "false") final String lazyFailedWritesCleanPolicy, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G", + @ShellOption(value = {"--savepoint"}, help = "Savepoint to rollback") final String instantTime, + @ShellOption(value = {"--sparkProperties"}, help = "Spark Properties File Path", + defaultValue = "") final String sparkPropertiesPath, + @ShellOption(value = "--sparkMaster", defaultValue = "", help = "Spark Master") String master, + @ShellOption(value = {"--lazyFailedWritesCleanPolicy"}, help = "True if FailedWriteCleanPolicy is lazy", + defaultValue = "false") final String lazyFailedWritesCleanPolicy, + @ShellOption(value = "--sparkMemory", defaultValue = "4G", help = "Spark executor memory")
final String sparkMemory) throws Exception { HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient(); @@ -126,12 +126,13 @@ public String rollbackToSavepoint( return String.format("Savepoint \"%s\" rolled back", instantTime); } - @CliCommand(value = "savepoint delete", help = "Delete the savepoint") + @ShellMethod(key = "savepoint delete", value = "Delete the savepoint") public String deleteSavepoint( - @CliOption(key = {"commit"}, help = "Delete a savepoint") final String instantTime, - @CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path") final String sparkPropertiesPath, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G", + @ShellOption(value = {"--commit"}, help = "Delete a savepoint") final String instantTime, + @ShellOption(value = {"--sparkProperties"}, help = "Spark Properties File Path", + defaultValue = "") final String sparkPropertiesPath, + @ShellOption(value = "--sparkMaster", defaultValue = "", help = "Spark Master") String master, + @ShellOption(value = "--sparkMemory", defaultValue = "4G", help = "Spark executor memory") final String sparkMemory) throws Exception { HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient(); diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkEnvCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkEnvCommand.java index aed404c300709..5c21fe43e155f 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkEnvCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkEnvCommand.java @@ -19,11 +19,9 @@ package org.apache.hudi.cli.commands; import org.apache.hudi.cli.HoodiePrintHelper; - -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import java.util.HashMap; import java.util.Map; @@ -31,13 +29,13 @@ /** * CLI command to set and show spark launcher init env. 
*/ -@Component -public class SparkEnvCommand implements CommandMarker { +@ShellComponent +public class SparkEnvCommand { public static Map<String, String> env = new HashMap<>(); - @CliCommand(value = "set", help = "Set spark launcher env to cli") - public void setEnv(@CliOption(key = {"conf"}, help = "Env config to be set") final String confMap) { + @ShellMethod(key = "set", value = "Set spark launcher env to cli") + public void setEnv(@ShellOption(value = {"--conf"}, help = "Env config to be set") final String confMap) { String[] map = confMap.split("="); if (map.length != 2) { throw new IllegalArgumentException("Illegal set parameter, please use like [set --conf SPARK_HOME=/usr/etc/spark]"); @@ -46,7 +44,7 @@ public void setEnv(@CliOption(key = {"conf"}, help = "Env config to be set") fin System.setProperty(map[0].trim(), map[1].trim()); } - @CliCommand(value = "show envs all", help = "Show spark launcher envs") + @ShellMethod(key = "show envs all", value = "Show spark launcher envs") public String showAllEnv() { String[][] rows = new String[env.size()][2]; int i = 0; @@ -57,8 +55,8 @@ public String showAllEnv() { return HoodiePrintHelper.print(new String[] {"key", "value"}, rows); } - @CliCommand(value = "show env", help = "Show spark launcher env by key") - public String showEnvByKey(@CliOption(key = {"key"}, help = "Which env conf want to show") final String key) { + @ShellMethod(key = "show env", value = "Show spark launcher env by key") + public String showEnvByKey(@ShellOption(value = {"--key"}, help = "Which env conf want to show") final String key) { if (key == null || key.isEmpty()) { return showAllEnv(); } else { diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java index 2a49ed2c4b65c..e43a5d037eef0 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java @@ -60,7 +60,8 @@ import org.apache.hudi.utilities.deltastreamer.HoodieDeltaStreamer; import org.apache.hadoop.fs.Path; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; @@ -86,7 +87,7 @@ */ public class SparkMain { - private static final Logger LOG = Logger.getLogger(SparkMain.class); + private static final Logger LOG = LogManager.getLogger(SparkMain.class); /** * Commands.
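Every command class in this patch follows the same mechanical mapping: @Component plus the CommandMarker interface become @ShellComponent, @CliCommand(value = ..., help = ...) becomes @ShellMethod(key = ..., value = ...), and @CliOption(key = {"opt"}) becomes @ShellOption(value = {"--opt"}). The one behavioral change worth noting is defaulting: in Spring Shell 1.x an option was optional unless marked mandatory = true, whereas in Spring Shell 2.x an option is required unless it declares a defaultValue (with ShellOption.NULL standing in for a null default). That is why the hunks drop mandatory = true and add explicit defaultValues to options that used to be implicitly optional. As a minimal before/after sketch (the GreetingCommand class and its --name option are invented for illustration and are not part of this patch):

// Spring Shell 1.x shape, as removed by the '-' lines:
import org.springframework.shell.core.CommandMarker;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.springframework.stereotype.Component;

@Component
public class GreetingCommand implements CommandMarker {

  @CliCommand(value = "greet", help = "Print a greeting")
  public String greet(
      // Optional by default in Shell 1.x; unspecifiedDefaultValue applies when the flag is omitted.
      @CliOption(key = {"name"}, unspecifiedDefaultValue = "world", help = "Who to greet") final String name) {
    return "Hello " + name;
  }
}

// Spring Shell 2.x shape, as added by the '+' lines:
import org.springframework.shell.standard.ShellComponent;
import org.springframework.shell.standard.ShellMethod;
import org.springframework.shell.standard.ShellOption;

@ShellComponent
public class GreetingCommand {

  @ShellMethod(key = "greet", value = "Print a greeting")
  public String greet(
      // Required by default in Shell 2.x; defaultValue makes the option optional again.
      @ShellOption(value = {"--name"}, defaultValue = "world", help = "Who to greet") final String name) {
    return "Hello " + name;
  }
}

Invoked as "greet --name hudi", both versions behave identically; the difference only shows when an option is omitted, which is exactly the case the added defaultValues cover.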
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/StatsCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/StatsCommand.java index 66c5563102848..c9034d03d5fcd 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/StatsCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/StatsCommand.java @@ -18,6 +18,12 @@ package org.apache.hudi.cli.commands; +import com.codahale.metrics.Histogram; +import com.codahale.metrics.Snapshot; +import com.codahale.metrics.UniformReservoir; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodieTableHeaderFields; @@ -28,17 +34,9 @@ import org.apache.hudi.common.table.timeline.HoodieInstant; import org.apache.hudi.common.table.timeline.HoodieTimeline; import org.apache.hudi.common.util.NumericUtils; - -import com.codahale.metrics.Histogram; -import com.codahale.metrics.Snapshot; -import com.codahale.metrics.UniformReservoir; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import java.io.IOException; import java.text.DecimalFormat; @@ -52,19 +50,19 @@ /** * CLI command to displays stats options. */ -@Component -public class StatsCommand implements CommandMarker { +@ShellComponent +public class StatsCommand { public static final int MAX_FILES = 1000000; - @CliCommand(value = "stats wa", help = "Write Amplification. Ratio of how many records were upserted to how many " + @ShellMethod(key = "stats wa", value = "Write Amplification. Ratio of how many records were upserted to how many " + "records were actually written") public String writeAmplificationStats( - @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly) + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly) throws IOException { long totalRecordsUpserted = 0; @@ -105,15 +103,15 @@ public Comparable[] printFileSizeHistogram(String instantTime, Snapshot s) { s.getMax(), s.size(), s.getStdDev()}; } - @CliCommand(value = "stats filesizes", help = "File Sizes. Display summary stats on sizes of files") + @ShellMethod(key = "stats filesizes", value = "File Sizes. 
Display summary stats on sizes of files") public String fileSizeStats( - @CliOption(key = {"partitionPath"}, help = "regex to select files, eg: 2016/08/02", - unspecifiedDefaultValue = "*/*/*") final String globRegex, - @CliOption(key = {"limit"}, help = "Limit commits", unspecifiedDefaultValue = "-1") final Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly) + @ShellOption(value = {"--partitionPath"}, help = "regex to select files, eg: 2016/08/02", + defaultValue = "*/*/*") final String globRegex, + @ShellOption(value = {"--limit"}, help = "Limit commits", defaultValue = "-1") final Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly) throws IOException { FileSystem fs = HoodieCLI.fs; diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/TableCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/TableCommand.java index d9b1d16d65e88..b3dfaf5ab73bc 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/TableCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/TableCommand.java @@ -18,6 +18,8 @@ package org.apache.hudi.cli.commands; +import org.apache.avro.Schema; +import org.apache.hadoop.fs.Path; import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodieTableHeaderFields; @@ -27,15 +29,11 @@ import org.apache.hudi.common.table.HoodieTableMetaClient; import org.apache.hudi.common.table.TableSchemaResolver; import org.apache.hudi.exception.TableNotFoundException; - -import org.apache.avro.Schema; -import org.apache.hadoop.fs.Path; -import org.apache.log4j.LogManager; -import org.apache.log4j.Logger; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import java.io.File; import java.io.FileInputStream; @@ -57,8 +55,8 @@ /** * CLI command to display hudi table options. 
*/ -@Component -public class TableCommand implements CommandMarker { +@ShellComponent +public class TableCommand { private static final Logger LOG = LogManager.getLogger(TableCommand.class); @@ -66,17 +64,17 @@ public class TableCommand implements CommandMarker { System.out.println("Table command getting loaded"); } - @CliCommand(value = "connect", help = "Connect to a hoodie table") + @ShellMethod(key = "connect", value = "Connect to a hoodie table") public String connect( - @CliOption(key = {"path"}, mandatory = true, help = "Base Path of the table") final String path, - @CliOption(key = {"layoutVersion"}, help = "Timeline Layout version") Integer layoutVersion, - @CliOption(key = {"eventuallyConsistent"}, unspecifiedDefaultValue = "false", + @ShellOption(value = {"--path"}, help = "Base Path of the table") final String path, + @ShellOption(value = {"--layoutVersion"}, help = "Timeline Layout version", defaultValue = ShellOption.NULL) Integer layoutVersion, + @ShellOption(value = {"--eventuallyConsistent"}, defaultValue = "false", help = "Enable eventual consistency") final boolean eventuallyConsistent, - @CliOption(key = {"initialCheckIntervalMs"}, unspecifiedDefaultValue = "2000", + @ShellOption(value = {"--initialCheckIntervalMs"}, defaultValue = "2000", help = "Initial wait time for eventual consistency") final Integer initialConsistencyIntervalMs, - @CliOption(key = {"maxWaitIntervalMs"}, unspecifiedDefaultValue = "300000", + @ShellOption(value = {"--maxWaitIntervalMs"}, defaultValue = "300000", help = "Max wait time for eventual consistency") final Integer maxConsistencyIntervalMs, - @CliOption(key = {"maxCheckIntervalMs"}, unspecifiedDefaultValue = "7", + @ShellOption(value = {"--maxCheckIntervalMs"}, defaultValue = "7", help = "Max checks for eventual consistency") final Integer maxConsistencyChecks) throws IOException { HoodieCLI @@ -99,15 +97,17 @@ public String connect( * @param tableTypeStr Hoodie Table Type * @param payloadClass Payload Class */ - @CliCommand(value = "create", help = "Create a hoodie table if not present") + @ShellMethod(key = "create", value = "Create a hoodie table if not present") public String createTable( - @CliOption(key = {"path"}, mandatory = true, help = "Base Path of the table") final String path, - @CliOption(key = {"tableName"}, mandatory = true, help = "Hoodie Table Name") final String name, - @CliOption(key = {"tableType"}, unspecifiedDefaultValue = "COPY_ON_WRITE", + @ShellOption(value = {"--path"}, help = "Base Path of the table") final String path, + @ShellOption(value = {"--tableName"}, help = "Hoodie Table Name") final String name, + @ShellOption(value = {"--tableType"}, defaultValue = "COPY_ON_WRITE", help = "Hoodie Table Type. 
Must be one of : COPY_ON_WRITE or MERGE_ON_READ") final String tableTypeStr, - @CliOption(key = {"archiveLogFolder"}, help = "Folder Name for storing archived timeline") String archiveFolder, - @CliOption(key = {"layoutVersion"}, help = "Specific Layout Version to use") Integer layoutVersion, - @CliOption(key = {"payloadClass"}, unspecifiedDefaultValue = "org.apache.hudi.common.model.HoodieAvroPayload", + @ShellOption(value = {"--archiveLogFolder"}, help = "Folder Name for storing archived timeline", + defaultValue = ShellOption.NULL) String archiveFolder, + @ShellOption(value = {"--layoutVersion"}, help = "Specific Layout Version to use", + defaultValue = ShellOption.NULL) Integer layoutVersion, + @ShellOption(value = {"--payloadClass"}, defaultValue = "org.apache.hudi.common.model.HoodieAvroPayload", help = "Payload Class") final String payloadClass) throws IOException { boolean initialized = HoodieCLI.initConf(); @@ -140,7 +140,7 @@ public String createTable( /** * Describes table properties. */ - @CliCommand(value = "desc", help = "Describe Hoodie Table properties") + @ShellMethod(key = "desc", value = "Describe Hoodie Table properties") public String descTable() { HoodieTableMetaClient client = HoodieCLI.getTableMetaClient(); TableHeader header = new TableHeader().addTableHeaderField("Property").addTableHeaderField("Value"); @@ -157,8 +157,8 @@ public String descTable() { /** * Refresh table metadata. */ - @CliCommand(value = {"refresh", "metadata refresh", "commits refresh", "cleans refresh", "savepoints refresh"}, - help = "Refresh table metadata") + @ShellMethod(key = {"refresh", "metadata refresh", "commits refresh", "cleans refresh", "savepoints refresh"}, + value = "Refresh table metadata") public String refreshMetadata() { HoodieCLI.refreshTableMetadata(); return "Metadata for table " + HoodieCLI.getTableMetaClient().getTableConfig().getTableName() + " refreshed."; @@ -167,9 +167,10 @@ public String refreshMetadata() { /** * Fetches table schema in avro format. 
*/ - @CliCommand(value = "fetch table schema", help = "Fetches latest table schema") + @ShellMethod(key = "fetch table schema", value = "Fetches latest table schema") public String fetchTableSchema( - @CliOption(key = {"outputFilePath"}, mandatory = false, help = "File path to write schema") final String outputFilePath) throws Exception { + @ShellOption(value = {"--outputFilePath"}, defaultValue = ShellOption.NULL, + help = "File path to write schema") final String outputFilePath) throws Exception { HoodieTableMetaClient client = HoodieCLI.getTableMetaClient(); TableSchemaResolver tableSchemaResolver = new TableSchemaResolver(client); Schema schema = tableSchemaResolver.getTableAvroSchema(); @@ -182,7 +183,7 @@ public String fetchTableSchema( } } - @CliCommand(value = "table recover-configs", help = "Recover table configs, from update/delete that failed midway.") + @ShellMethod(key = "table recover-configs", value = "Recover table configs, from update/delete that failed midway.") public String recoverTableConfig() throws IOException { HoodieCLI.refreshTableMetadata(); HoodieTableMetaClient client = HoodieCLI.getTableMetaClient(); @@ -191,9 +192,10 @@ public String recoverTableConfig() throws IOException { return descTable(); } - @CliCommand(value = "table update-configs", help = "Update the table configs with configs with provided file.") + @ShellMethod(key = "table update-configs", value = "Update the table configs with configs with provided file.") public String updateTableConfig( - @CliOption(key = {"props-file"}, mandatory = true, help = "Path to a properties file on local filesystem") final String updatePropsFilePath) throws IOException { + @ShellOption(value = {"--props-file"}, help = "Path to a properties file on local filesystem") + final String updatePropsFilePath) throws IOException { HoodieTableMetaClient client = HoodieCLI.getTableMetaClient(); Map oldProps = client.getTableConfig().propsMap(); @@ -207,9 +209,10 @@ public String updateTableConfig( return renderOldNewProps(newProps, oldProps); } - @CliCommand(value = "table delete-configs", help = "Delete the supplied table configs from the table.") + @ShellMethod(key = "table delete-configs", value = "Delete the supplied table configs from the table.") public String deleteTableConfig( - @CliOption(key = {"comma-separated-configs"}, mandatory = true, help = "Comma separated list of configs to delete.") final String csConfigs) { + @ShellOption(value = {"--comma-separated-configs"}, + help = "Comma separated list of configs to delete.") final String csConfigs) { HoodieTableMetaClient client = HoodieCLI.getTableMetaClient(); Map oldProps = client.getTableConfig().propsMap(); diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/TempViewCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/TempViewCommand.java index 6fda6bd7034f4..3f88532b568ec 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/TempViewCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/TempViewCommand.java @@ -20,25 +20,23 @@ import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.exception.HoodieException; - -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; /** 
* CLI command to query/delete temp views. */ -@Component -public class TempViewCommand implements CommandMarker { +@ShellComponent +public class TempViewCommand { public static final String QUERY_SUCCESS = "Query ran successfully!"; public static final String QUERY_FAIL = "Query ran failed!"; public static final String SHOW_SUCCESS = "Show all views name successfully!"; - @CliCommand(value = {"temp_query", "temp query"}, help = "query against created temp view") + @ShellMethod(key = {"temp_query", "temp query"}, value = "query against created temp view") public String query( - @CliOption(key = {"sql"}, mandatory = true, help = "select query to run against view") final String sql) { + @ShellOption(value = {"--sql"}, help = "select query to run against view") final String sql) { try { HoodieCLI.getTempViewProvider().runQuery(sql); @@ -49,7 +47,7 @@ public String query( } - @CliCommand(value = {"temps_show", "temps show"}, help = "Show all views name") + @ShellMethod(key = {"temps_show", "temps show"}, value = "Show all views name") public String showAll() { try { @@ -60,9 +58,9 @@ public String showAll() { } } - @CliCommand(value = {"temp_delete", "temp delete"}, help = "Delete view name") + @ShellMethod(key = {"temp_delete", "temp delete"}, value = "Delete view name") public String delete( - @CliOption(key = {"view"}, mandatory = true, help = "view name") final String tableName) { + @ShellOption(value = {"--view"}, help = "view name") final String tableName) { try { HoodieCLI.getTempViewProvider().deleteTable(tableName); diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/TimelineCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/TimelineCommand.java index 9af04d155bcba..bf7e5397cab93 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/TimelineCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/TimelineCommand.java @@ -19,6 +19,9 @@ package org.apache.hudi.cli.commands; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; import org.apache.hudi.avro.model.HoodieRollbackMetadata; import org.apache.hudi.avro.model.HoodieRollbackPlan; import org.apache.hudi.cli.HoodieCLI; @@ -32,16 +35,11 @@ import org.apache.hudi.common.table.timeline.TimelineMetadataUtils; import org.apache.hudi.common.util.Option; import org.apache.hudi.metadata.HoodieTableMetadata; - -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.log4j.LogManager; -import org.apache.log4j.Logger; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; import java.io.IOException; import java.text.SimpleDateFormat; @@ -60,26 +58,26 @@ /** * CLI command to display timeline options. 
*/ -@Component -public class TimelineCommand implements CommandMarker { +@ShellComponent +public class TimelineCommand { private static final Logger LOG = LogManager.getLogger(TimelineCommand.class); private static final SimpleDateFormat DATE_FORMAT_DEFAULT = new SimpleDateFormat("MM-dd HH:mm"); private static final SimpleDateFormat DATE_FORMAT_SECONDS = new SimpleDateFormat("MM-dd HH:mm:ss"); - @CliCommand(value = "timeline show active", help = "List all instants in active timeline") + @ShellMethod(key = "timeline show active", value = "List all instants in active timeline") public String showActive( - @CliOption(key = {"limit"}, help = "Limit #rows to be displayed", unspecifiedDefaultValue = "10") Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly, - @CliOption(key = {"with-metadata-table"}, help = "Show metadata table timeline together with data table", - unspecifiedDefaultValue = "false") final boolean withMetadataTable, - @CliOption(key = {"show-rollback-info"}, help = "Show instant to rollback for rollbacks", - unspecifiedDefaultValue = "false") final boolean showRollbackInfo, - @CliOption(key = {"show-time-seconds"}, help = "Show seconds in instant file modification time", - unspecifiedDefaultValue = "false") final boolean showTimeSeconds) { + @ShellOption(value = {"--limit"}, help = "Limit #rows to be displayed", defaultValue = "10") Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly, + @ShellOption(value = {"--with-metadata-table"}, help = "Show metadata table timeline together with data table", + defaultValue = "false") final boolean withMetadataTable, + @ShellOption(value = {"--show-rollback-info"}, help = "Show instant to rollback for rollbacks", + defaultValue = "false") final boolean showRollbackInfo, + @ShellOption(value = {"--show-time-seconds"}, help = "Show seconds in instant file modification time", + defaultValue = "false") final boolean showTimeSeconds) { HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient(); try { if (withMetadataTable) { @@ -100,17 +98,17 @@ public String showActive( } } - @CliCommand(value = "timeline show incomplete", help = "List all incomplete instants in active timeline") + @ShellMethod(key = "timeline show incomplete", value = "List all incomplete instants in active timeline") public String showIncomplete( - @CliOption(key = {"limit"}, help = "Limit #rows to be displayed", unspecifiedDefaultValue = "10") Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly, - @CliOption(key = {"show-rollback-info"}, help = "Show instant to rollback for rollbacks", - unspecifiedDefaultValue = "false") final boolean showRollbackInfo, - @CliOption(key = 
{"show-time-seconds"}, help = "Show seconds in instant file modification time", - unspecifiedDefaultValue = "false") final boolean showTimeSeconds) { + @ShellOption(value = {"--limit"}, help = "Limit #rows to be displayed", defaultValue = "10") Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly, + @ShellOption(value = {"--show-rollback-info"}, help = "Show instant to rollback for rollbacks", + defaultValue = "false") final boolean showRollbackInfo, + @ShellOption(value = {"--show-time-seconds"}, help = "Show seconds in instant file modification time", + defaultValue = "false") final boolean showTimeSeconds) { HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient(); try { return printTimelineInfo( @@ -123,16 +121,16 @@ public String showIncomplete( } } - @CliCommand(value = "metadata timeline show active", - help = "List all instants in active timeline of metadata table") + @ShellMethod(key = "metadata timeline show active", + value = "List all instants in active timeline of metadata table") public String metadataShowActive( - @CliOption(key = {"limit"}, help = "Limit #rows to be displayed", unspecifiedDefaultValue = "10") Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly, - @CliOption(key = {"show-time-seconds"}, help = "Show seconds in instant file modification time", - unspecifiedDefaultValue = "false") final boolean showTimeSeconds) { + @ShellOption(value = {"--limit"}, help = "Limit #rows to be displayed", defaultValue = "10") Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly, + @ShellOption(value = {"--show-time-seconds"}, help = "Show seconds in instant file modification time", + defaultValue = "false") final boolean showTimeSeconds) { HoodieTableMetaClient metaClient = getMetadataTableMetaClient(HoodieCLI.getTableMetaClient()); try { return printTimelineInfo( @@ -145,16 +143,16 @@ public String metadataShowActive( } } - @CliCommand(value = "metadata timeline show incomplete", - help = "List all incomplete instants in active timeline of metadata table") + @ShellMethod(key = "metadata timeline show incomplete", + value = "List all incomplete instants in active timeline of metadata table") public String metadataShowIncomplete( - @CliOption(key = {"limit"}, help = "Limit #rows to be displayed", unspecifiedDefaultValue = "10") Integer limit, - @CliOption(key = {"sortBy"}, help = "Sorting Field", unspecifiedDefaultValue = "") final String sortByField, - @CliOption(key = {"desc"}, help = "Ordering", unspecifiedDefaultValue = "false") final boolean descending, - @CliOption(key = {"headeronly"}, help = "Print Header Only", - unspecifiedDefaultValue = "false") final boolean headerOnly, - 
@CliOption(key = {"show-time-seconds"}, help = "Show seconds in instant file modification time", - unspecifiedDefaultValue = "false") final boolean showTimeSeconds) { + @ShellOption(value = {"--limit"}, help = "Limit #rows to be displayed", defaultValue = "10") Integer limit, + @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, + @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, + @ShellOption(value = {"--headeronly"}, help = "Print Header Only", + defaultValue = "false") final boolean headerOnly, + @ShellOption(value = {"--show-time-seconds"}, help = "Show seconds in instant file modification time", + defaultValue = "false") final boolean showTimeSeconds) { HoodieTableMetaClient metaClient = getMetadataTableMetaClient(HoodieCLI.getTableMetaClient()); try { return printTimelineInfo( diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/UpgradeOrDowngradeCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/UpgradeOrDowngradeCommand.java index 259b04e630612..5561723d7a57a 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/UpgradeOrDowngradeCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/UpgradeOrDowngradeCommand.java @@ -25,25 +25,24 @@ import org.apache.hudi.common.table.HoodieTableMetaClient; import org.apache.hudi.common.table.HoodieTableVersion; import org.apache.hudi.common.util.StringUtils; - import org.apache.spark.launcher.SparkLauncher; -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; /** * CLI command to assist in upgrading/downgrading Hoodie table to a different version. 
*/ -@Component -public class UpgradeOrDowngradeCommand implements CommandMarker { +@ShellComponent +public class UpgradeOrDowngradeCommand { - @CliCommand(value = "upgrade table", help = "Upgrades a table") + @ShellMethod(key = "upgrade table", value = "Upgrades a table") public String upgradeHoodieTable( - @CliOption(key = {"toVersion"}, help = "To version of Hoodie table to be upgraded/downgraded to", unspecifiedDefaultValue = "") final String toVersion, - @CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path") final String sparkPropertiesPath, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G", + @ShellOption(value = {"--toVersion"}, help = "To version of Hoodie table to be upgraded/downgraded to", defaultValue = "") final String toVersion, + @ShellOption(value = {"--sparkProperties"}, help = "Spark Properties File Path", + defaultValue = "") final String sparkPropertiesPath, + @ShellOption(value = "--sparkMaster", defaultValue = "", help = "Spark Master") String master, + @ShellOption(value = "--sparkMemory", defaultValue = "4G", help = "Spark executor memory") final String sparkMemory) throws Exception { @@ -62,12 +61,13 @@ public String upgradeHoodieTable( return String.format("Hoodie table upgraded/downgraded to %s", toVersionName); } - @CliCommand(value = "downgrade table", help = "Downgrades a table") + @ShellMethod(key = "downgrade table", value = "Downgrades a table") public String downgradeHoodieTable( - @CliOption(key = {"toVersion"}, help = "To version of Hoodie table to be upgraded/downgraded to", unspecifiedDefaultValue = "") final String toVersion, - @CliOption(key = {"sparkProperties"}, help = "Spark Properties File Path") final String sparkPropertiesPath, - @CliOption(key = "sparkMaster", unspecifiedDefaultValue = "", help = "Spark Master") String master, - @CliOption(key = "sparkMemory", unspecifiedDefaultValue = "4G", + @ShellOption(value = {"--toVersion"}, help = "To version of Hoodie table to be upgraded/downgraded to", defaultValue = "") final String toVersion, + @ShellOption(value = {"--sparkProperties"}, help = "Spark Properties File Path", + defaultValue = "") final String sparkPropertiesPath, + @ShellOption(value = "--sparkMaster", defaultValue = "", help = "Spark Master") String master, + @ShellOption(value = "--sparkMemory", defaultValue = "4G", help = "Spark executor memory") final String sparkMemory) throws Exception { diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/UtilsCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/UtilsCommand.java index 5662be382abd7..2861b05b44d09 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/UtilsCommand.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/UtilsCommand.java @@ -19,20 +19,18 @@ package org.apache.hudi.cli.commands; import org.apache.hudi.common.util.StringUtils; - -import org.springframework.shell.core.CommandMarker; -import org.springframework.shell.core.annotation.CliCommand; -import org.springframework.shell.core.annotation.CliOption; -import org.springframework.stereotype.Component; +import org.springframework.shell.standard.ShellComponent; +import org.springframework.shell.standard.ShellMethod; +import org.springframework.shell.standard.ShellOption; /** * CLI command to display utils. 
*/ -@Component -public class UtilsCommand implements CommandMarker { +@ShellComponent +public class UtilsCommand { - @CliCommand(value = "utils loadClass", help = "Load a class") - public String loadClass(@CliOption(key = {"class"}, help = "Check mode") final String clazz) { + @ShellMethod(key = "utils loadClass", value = "Load a class") + public String loadClass(@ShellOption(value = {"--class"}, help = "Check mode") final String clazz) { if (StringUtils.isNullOrEmpty(clazz)) { return "Class to be loaded can not be null!"; } diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java index 43636f6c1a3a0..a2ebe5769d488 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java @@ -18,17 +18,19 @@ package org.apache.hudi.cli.utils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + import java.io.BufferedReader; import java.io.InputStream; import java.io.InputStreamReader; -import java.util.logging.Logger; /** * This class is responsible to read a Process output. */ public class InputStreamConsumer extends Thread { - private static final Logger LOG = Logger.getLogger(InputStreamConsumer.class.getName()); + private static final Logger LOG = LogManager.getLogger(InputStreamConsumer.class); private InputStream is; public InputStreamConsumer(InputStream is) { @@ -42,7 +44,7 @@ public void run() { BufferedReader br = new BufferedReader(isr); br.lines().forEach(LOG::info); } catch (Exception e) { - LOG.severe(e.toString()); + LOG.fatal(e.toString()); e.printStackTrace(); } } diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkTempViewProvider.java b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkTempViewProvider.java index 6f5a11ad6657f..4f9e4b0d9a9c0 100644 --- a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkTempViewProvider.java +++ b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkTempViewProvider.java @@ -19,7 +19,8 @@ package org.apache.hudi.cli.utils; import org.apache.hudi.exception.HoodieException; - +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.spark.SparkConf; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.sql.Dataset; @@ -29,34 +30,26 @@ import org.apache.spark.sql.types.DataType; import org.apache.spark.sql.types.DataTypes; import org.apache.spark.sql.types.StructType; -import org.springframework.shell.support.logging.HandlerUtils; import java.util.List; -import java.util.logging.Handler; -import java.util.logging.Level; -import java.util.logging.Logger; import java.util.stream.Collectors; public class SparkTempViewProvider implements TempViewProvider { - private static final Logger LOG = HandlerUtils.getLogger(SparkTempViewProvider.class); + + private static final Logger LOG = LogManager.getLogger(SparkTempViewProvider.class); private JavaSparkContext jsc; private SQLContext sqlContext; public SparkTempViewProvider(String appName) { try { - Handler handler = LOG.getParent().getHandlers()[0]; SparkConf sparkConf = new SparkConf().setAppName(appName) .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer").setMaster("local[8]"); jsc = new JavaSparkContext(sparkConf); sqlContext = new SQLContext(jsc); - if (handler != null) { - LOG.getParent().removeHandler(LOG.getParent().getHandlers()[0]); - 
LOG.getParent().addHandler(handler); - } } catch (Throwable ex) { // log full stack trace and rethrow. Without this its difficult to debug failures, if any - LOG.log(Level.WARNING, "unable to initialize spark context ", ex); + LOG.warn("unable to initialize spark context ", ex); throw new HoodieException(ex); } } @@ -95,7 +88,7 @@ public void createOrReplace(String tableName, List<String> headers, List<List<Comparable>> rows) { - - - - - - - diff --git a/hudi-cli/src/main/resources/application.yml new file mode 100644 index 0000000000000..036524c58d5ed --- /dev/null +++ b/hudi-cli/src/main/resources/application.yml @@ -0,0 +1,23 @@ +### +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +### + +spring: + shell: + history: + enabled: true + name: hoodie-cmd.log \ No newline at end of file diff --git a/hudi-cli/src/main/resources/banner.txt new file mode 100644 index 0000000000000..be572b83eb277 --- /dev/null +++ b/hudi-cli/src/main/resources/banner.txt @@ -0,0 +1,14 @@ +=================================================================== +* ___ ___ * +* /\__\ ___ /\ \ ___ * +* / / / /\__\ / \ \ /\ \ * +* / /__/ / / / / /\ \ \ \ \ \ * +* / \ \ ___ / / / / / \ \__\ / \__\ * +* / /\ \ /\__\ / /__/ ___ / /__/ \ |__| / /\/__/ * +* \/ \ \/ / / \ \ \ /\__\ \ \ \ / / / /\/ / / * +* \ / / \ \ / / / \ \ / / / \ /__/ * +* / / / \ \/ / / \ \/ / / \ \__\ * +* / / / \ / / \ / / \/__/ * +* \/__/ \/__/ \/__/ Apache Hudi CLI * +* * +=================================================================== \ No newline at end of file diff --git a/hudi-cli/src/main/resources/log4j2.properties new file mode 100644 index 0000000000000..bc8e5ad56c7d6 --- /dev/null +++ b/hudi-cli/src/main/resources/log4j2.properties @@ -0,0 +1,38 @@ +### +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+### + +status = INFO +name = HudiCliLog4j2 + +appender.console.type = Console +appender.console.name = CONSOLE +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %-4r [%t] %-5p %c %x - %m%n + +# Root logger level +rootLogger.level = warn +# Root logger referring to console appender +rootLogger.appenderRef.stdout.ref = CONSOLE + +logger.hudi_cli.name = org.apache.hudi.cli +logger.hudi_cli.level = info +logger.hudi_common.name = org.apache.hudi.common +logger.hudi_common.level = info + +logger.spark.name = org.apache.spark +logger.spark.level = info diff --git a/hudi-cli/src/main/scala/org/apache/hudi/cli/DedupeSparkJob.scala b/hudi-cli/src/main/scala/org/apache/hudi/cli/DedupeSparkJob.scala index 25d1d7c21b155..00e96a3487504 100644 --- a/hudi-cli/src/main/scala/org/apache/hudi/cli/DedupeSparkJob.scala +++ b/hudi-cli/src/main/scala/org/apache/hudi/cli/DedupeSparkJob.scala @@ -18,14 +18,14 @@ package org.apache.hudi.cli import java.util.stream.Collectors - import org.apache.hadoop.fs.{FileSystem, FileUtil, Path} import org.apache.hudi.common.fs.FSUtils import org.apache.hudi.common.model.{HoodieBaseFile, HoodieRecord} import org.apache.hudi.common.table.HoodieTableMetaClient import org.apache.hudi.common.table.view.HoodieTableFileSystemView import org.apache.hudi.exception.HoodieException -import org.apache.log4j.Logger +import org.apache.logging.log4j.LogManager +import org.apache.logging.log4j.Logger import org.apache.spark.sql.{DataFrame, Row, SQLContext} import scala.collection.JavaConversions._ @@ -42,7 +42,7 @@ class DedupeSparkJob(basePath: String, dedupeType: DeDupeType.Value) { val sparkHelper = new SparkHelper(sqlContext, fs) - val LOG = Logger.getLogger(this.getClass) + val LOG = LogManager.getLogger(this.getClass) /** diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestArchivedCommitsCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestArchivedCommitsCommand.java index 31dfad81a06ae..b642c1b3f8ebb 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestArchivedCommitsCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestArchivedCommitsCommand.java @@ -24,6 +24,7 @@ import org.apache.hudi.cli.functional.CLIFunctionalTestHarness; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; import org.apache.hudi.cli.testutils.HoodieTestCommitUtilities; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.client.HoodieTimelineArchiver; import org.apache.hudi.common.model.HoodieCommitMetadata; import org.apache.hudi.common.table.HoodieTableMetaClient; @@ -39,7 +40,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.util.ArrayList; import java.util.HashMap; @@ -52,8 +55,12 @@ * Test Cases for {@link ArchivedCommitsCommand}. 
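The logging changes above follow the same recipe in Java and Scala alike: java.util.logging and log4j 1.x loggers are swapped for log4j2's LogManager, with j.u.l. severe(...) mapped to fatal(...) and LOG.log(Level.WARNING, msg, ex) collapsed to warn(msg, ex). A minimal sketch of the resulting usage (the class name is illustrative, not part of the patch):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class LoggingExample {
  // Replaces java.util.logging.Logger.getLogger(...) and org.apache.log4j.Logger.getLogger(...)
  private static final Logger LOG = LogManager.getLogger(LoggingExample.class);

  public void onFailure(Exception e) {
    LOG.warn("unable to initialize spark context ", e); // was LOG.log(Level.WARNING, msg, ex)
    LOG.fatal(e.toString());                            // the PR maps j.u.l. severe(...) to fatal(...)
  }
}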
*/ @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestArchivedCommitsCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; + private String tablePath; @BeforeEach @@ -111,8 +118,8 @@ public void init() throws Exception { */ @Test public void testShowArchivedCommits() { - CommandResult cr = shell().executeCommand("show archived commit stats"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "show archived commit stats"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); TableHeader header = new TableHeader().addTableHeaderField("action").addTableHeaderField("instant") .addTableHeaderField("partition").addTableHeaderField("file_id").addTableHeaderField("prev_instant") @@ -153,7 +160,7 @@ public void testShowArchivedCommits() { String expectedResult = HoodiePrintHelper.print( header, new HashMap<>(), "", false, -1, false, rows); expectedResult = removeNonWordAndStripSpace(expectedResult); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expectedResult, got); } @@ -162,8 +169,8 @@ public void testShowArchivedCommits() { */ @Test public void testShowCommits() throws Exception { - CommandResult cr = shell().executeCommand("show archived commits"); - assertTrue(cr.isSuccess()); + Object cmdResult = shell.evaluate(() -> "show archived commits"); + assertTrue(ShellEvaluationResultUtil.isSuccess(cmdResult)); final List rows = new ArrayList<>(); // Test default skipMetadata and limit 10 @@ -178,12 +185,12 @@ public void testShowCommits() throws Exception { rows.add(new Comparable[] {"103", "commit"}); String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, 10, false, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(cmdResult.toString()); assertEquals(expected, got); // Test with Metadata and no limit - cr = shell().executeCommand("show archived commits --skipMetadata false --limit -1"); - assertTrue(cr.isSuccess()); + cmdResult = shell.evaluate(() -> "show archived commits --skipMetadata false --limit 0"); + assertTrue(ShellEvaluationResultUtil.isSuccess(cmdResult)); rows.clear(); @@ -198,9 +205,9 @@ public void testShowCommits() throws Exception { rows.add(result); } header = header.addTableHeaderField("CommitDetails"); - expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows); + expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, 0, false, rows); expected = removeNonWordAndStripSpace(expected); - got = removeNonWordAndStripSpace(cr.getResult().toString()); + got = removeNonWordAndStripSpace(cmdResult.toString()); assertEquals(expected, got); } } diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestCleansCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestCleansCommand.java index cac4f1341b458..f0ed1787e21f8 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestCleansCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestCleansCommand.java @@ -25,6 +25,7 @@ import org.apache.hudi.cli.TableHeader; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; +import 
org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.common.fs.FSUtils; import org.apache.hudi.common.model.HoodieCleaningPolicy; import org.apache.hudi.common.model.HoodieCommitMetadata; @@ -44,7 +45,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.IOException; import java.net.URL; @@ -63,8 +66,12 @@ * Test Cases for {@link CleansCommand}. */ @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestCleansCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; + private URL propsFilePath; private HoodieTableMetaClient metaClient; @@ -123,8 +130,8 @@ public void testShowCleans() throws Exception { assertEquals(1, metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().count(), "Loaded 1 clean and the count should match"); - CommandResult cr = shell().executeCommand("cleans show"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "cleans show"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); HoodieInstant clean = metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().findFirst().orElse(null); assertNotNull(clean); @@ -142,7 +149,7 @@ public void testShowCleans() throws Exception { String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -161,8 +168,8 @@ public void testShowCleanPartitions() { HoodieInstant clean = metaClient.getActiveTimeline().reload().getCleanerTimeline().getInstants().findFirst().get(); - CommandResult cr = shell().executeCommand("clean showpartitions --clean " + clean.getTimestamp()); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "clean showpartitions --clean " + clean.getTimestamp()); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_PARTITION_PATH) .addTableHeaderField(HoodieTableHeaderFields.HEADER_CLEANING_POLICY) @@ -180,7 +187,7 @@ public void testShowCleanPartitions() { String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestCommitsCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestCommitsCommand.java index 0a06749523e8f..7e504488a2dfd 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestCommitsCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestCommitsCommand.java @@ -25,6 +25,7 @@ import org.apache.hudi.cli.functional.CLIFunctionalTestHarness; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; import org.apache.hudi.cli.testutils.HoodieTestReplaceCommitMetadataGenerator; +import 
org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.client.HoodieTimelineArchiver; import org.apache.hudi.common.config.HoodieMetadataConfig; import org.apache.hudi.common.fs.FSUtils; @@ -50,7 +51,9 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.EnumSource; import org.junit.jupiter.params.provider.ValueSource; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.IOException; import java.util.ArrayList; @@ -70,8 +73,12 @@ * Test class for {@link org.apache.hudi.cli.commands.CommitsCommand}. */ @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestCommitsCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; + private String tableName1; private String tableName2; private String tablePath1; @@ -183,12 +190,12 @@ private String generateExpectData(int records, Map data) thro public void testShowCommits() throws Exception { Map data = generateData(); - CommandResult cr = shell().executeCommand("commits show"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "commits show"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); String expected = generateExpectData(1, data); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -198,12 +205,12 @@ public void testShowCommitsIncludingArchivedTimeline() throws Exception { data.remove("101"); data.remove("102"); - CommandResult cr = shell().executeCommand("commits show --includeExtraMetadata true --includeArchivedTimeline true --partition 2015/03/16"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "commits show --includeExtraMetadata true --includeArchivedTimeline true --partition 2015/03/16"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); String expected = generateExpectDataWithExtraMetadata(1, data); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -236,8 +243,8 @@ private String generateExpectDataWithExtraMetadata(int records, Map data = generateDataAndArchive(enableMetadataTable); - CommandResult cr = shell().executeCommand(String.format("commits showarchived --startTs %s --endTs %s", "100", "104")); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> String.format("commits showarchived --startTs %s --endTs %s", "100", "104")); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); // archived 101 and 102 instant, generate expect data assertEquals(2, metaClient.reloadActiveTimeline().getCommitsTimeline().countInstants(), @@ -248,7 +255,7 @@ public void testShowArchivedCommits(boolean enableMetadataTable) throws Exceptio data.remove("104"); String expected = generateExpectData(1, data); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -331,8 +338,8 @@ public void 
testShowArchivedCommitsWithMultiCommitsFile(boolean enableMetadataTa archiver.archiveIfRequired(context()); } - CommandResult cr = shell().executeCommand(String.format("commits showarchived --startTs %s --endTs %s", "160", "174")); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> String.format("commits showarchived --startTs %s --endTs %s", "160", "174")); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); assertEquals(3, metaClient.reloadActiveTimeline().getCommitsTimeline().countInstants(), "There should 3 instants not be archived!"); @@ -342,7 +349,7 @@ public void testShowArchivedCommitsWithMultiCommitsFile(boolean enableMetadataTa } String expected = generateExpectData(1, data2); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -354,8 +361,8 @@ public void testShowCommitPartitions() throws Exception { Map data = generateData(); String commitInstant = "101"; - CommandResult cr = shell().executeCommand(String.format("commit showpartitions --commit %s", commitInstant)); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> String.format("commit showpartitions --commit %s", commitInstant)); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); Integer[] value = data.get(commitInstant); List rows = new ArrayList<>(); @@ -380,7 +387,7 @@ public void testShowCommitPartitions() throws Exception { String expected = HoodiePrintHelper.print(header, fieldNameToConverterMap, "", false, -1, false, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -389,9 +396,10 @@ public void testShowCommitPartitionsWithReplaceCommits() throws Exception { Map data = generateMixedData(); for (HoodieInstant commitInstant : data.keySet()) { - CommandResult cr = shell().executeCommand(String.format("commit showpartitions --commit %s", commitInstant.getTimestamp())); + Object result = shell.evaluate(() -> + String.format("commit showpartitions --commit %s", commitInstant.getTimestamp())); - assertTrue(cr.isSuccess()); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); Integer[] value = data.get(commitInstant); List rows = new ArrayList<>(); @@ -416,7 +424,7 @@ public void testShowCommitPartitionsWithReplaceCommits() throws Exception { String expected = HoodiePrintHelper.print(header, fieldNameToConverterMap, "", false, -1, false, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } } @@ -429,8 +437,8 @@ public void testShowCommitFiles() throws Exception { Map data = generateData(); String commitInstant = "101"; - CommandResult cr = shell().executeCommand(String.format("commit showfiles --commit %s", commitInstant)); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> String.format("commit showfiles --commit %s", commitInstant)); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); Integer[] value = data.get(commitInstant); List rows = new ArrayList<>(); @@ -453,7 +461,7 @@ public void testShowCommitFiles() throws Exception { String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows); expected = 
removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -462,8 +470,8 @@ public void testShowCommitFilesWithReplaceCommits() throws Exception { Map data = generateMixedData(); for (HoodieInstant commitInstant : data.keySet()) { - CommandResult cr = shell().executeCommand(String.format("commit showfiles --commit %s", commitInstant.getTimestamp())); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> String.format("commit showfiles --commit %s", commitInstant.getTimestamp())); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); Integer[] value = data.get(commitInstant); List rows = new ArrayList<>(); @@ -486,7 +494,7 @@ public void testShowCommitFilesWithReplaceCommits() throws Exception { String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } } @@ -508,15 +516,15 @@ public void testCompareCommits(HoodieTableType tableType) throws Exception { Option.of(value[0]), Option.of(value[1])); } - CommandResult cr = shell().executeCommand(String.format("commits compare --path %s", tablePath2)); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> String.format("commits compare --path %s", tablePath2)); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); // the latest instant of test_table2 is 101 List commitsToCatchup = metaClient.getActiveTimeline().findInstantsAfter("101", Integer.MAX_VALUE) .getInstants().map(HoodieInstant::getTimestamp).collect(Collectors.toList()); String expected = String.format("Source %s is ahead by %d commits. Commits to catch up - %s", tableName1, commitsToCatchup.size(), commitsToCatchup); - assertEquals(expected, cr.getResult().toString()); + assertEquals(expected, result.toString()); } /** @@ -537,10 +545,10 @@ public void testSyncCommits(HoodieTableType tableType) throws Exception { Option.of(value[0]), Option.of(value[1])); } - CommandResult cr = shell().executeCommand(String.format("commits sync --path %s", tablePath2)); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> String.format("commits sync --path %s", tablePath2)); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); String expected = String.format("Load sync state between %s and %s", tableName1, tableName2); - assertEquals(expected, cr.getResult().toString()); + assertEquals(expected, result.toString()); } } diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestCompactionCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestCompactionCommand.java index e909e5c9ea28b..f1ea09470d35c 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestCompactionCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestCompactionCommand.java @@ -49,7 +49,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.IOException; import java.util.ArrayList; @@ -69,8 +71,12 @@ * Test Cases for {@link CompactionCommand}. 
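Every test class in this patch is converted the same way: the JLine-based shell()/CommandResult harness gives way to a Spring Boot test that autowires the Spring Shell 2 Shell bean, disables the interactive REPL and startup-script evaluation via properties, and evaluates raw command strings. A minimal sketch of the shape (ExampleCommandTest is illustrative; Shell.evaluate takes an org.springframework.shell.Input, which a lambda can supply):

import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.shell.Shell;

import static org.junit.jupiter.api.Assertions.assertTrue;

@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"})
public class ExampleCommandTest {

  @Autowired
  private Shell shell;

  @Test
  public void evaluatesACommand() {
    // The lambda implements Input.rawText(); evaluate returns the command's
    // result object on success, or the raised Throwable on failure.
    Object result = shell.evaluate(() -> "utils loadClass --class java.lang.String");
    assertTrue(ShellEvaluationResultUtil.isSuccess(result));
  }
}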
*/ @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestCompactionCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; + private String tableName; private String tablePath; @@ -106,8 +112,8 @@ public void testCompactionsAll() throws IOException { HoodieCLI.getTableMetaClient().reloadActiveTimeline(); - CommandResult cr = shell().executeCommand("compactions show all"); - System.out.println(cr.getResult().toString()); + Object result = shell.evaluate(() -> "compactions show all"); + System.out.println(result.toString()); TableHeader header = new TableHeader().addTableHeaderField("Compaction Instant Time").addTableHeaderField("State") .addTableHeaderField("Total FileIds to be Compacted"); @@ -121,7 +127,7 @@ public void testCompactionsAll() throws IOException { rows.add(new Comparable[] {instant, "REQUESTED", fileIds.get(instant)}); }); String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows); - assertEquals(expected, cr.getResult().toString()); + assertEquals(expected, result.toString()); } /** @@ -138,8 +144,8 @@ public void testCompactionShow() throws IOException { HoodieCLI.getTableMetaClient().reloadActiveTimeline(); - CommandResult cr = shell().executeCommand("compaction show --instant 001"); - System.out.println(cr.getResult().toString()); + Object result = shell.evaluate(() -> "compaction show --instant 001"); + System.out.println(result.toString()); } private void generateCompactionInstances() throws IOException { @@ -188,7 +194,7 @@ public void testCompactionsShowArchived() throws IOException { generateArchive(); - CommandResult cr = shell().executeCommand("compactions showarchived --startTs 001 --endTs 005"); + Object result = shell.evaluate(() -> "compactions showarchived --startTs 001 --endTs 005"); // generate result Map fileMap = new HashMap<>(); @@ -203,7 +209,7 @@ public void testCompactionsShowArchived() throws IOException { String expected = HoodiePrintHelper.print(header, fieldNameToConverterMap, "", false, -1, false, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -222,13 +228,13 @@ public void testCompactionShowArchived() throws IOException { generateArchive(); - CommandResult cr = shell().executeCommand("compaction showarchived --instant " + instance); + Object result = shell.evaluate(() -> "compaction showarchived --instant " + instance); // generate expected String expected = CompactionCommand.printCompaction(plan, "", false, -1, false, null); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } } diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestDiffCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestDiffCommand.java index ed5e873bc0c98..c12ad676d41c7 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestDiffCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestDiffCommand.java @@ -25,6 +25,7 @@ import org.apache.hudi.cli.TableHeader; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; +import 
org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.common.fs.FSUtils; import org.apache.hudi.common.model.HoodieAvroPayload; import org.apache.hudi.common.model.HoodieCommitMetadata; @@ -43,7 +44,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.util.ArrayList; import java.util.Collections; @@ -63,8 +66,11 @@ * Test Cases for {@link DiffCommand}. */ @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestDiffCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; private String tableName; private String tablePath; @@ -109,11 +115,11 @@ public void testDiffFile() throws Exception { HoodieTableMetaClient.reload(metaClient); - CommandResult cr = shell().executeCommand(String.format("diff file --fileId %s", fileId1)); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> String.format("diff file --fileId %s", fileId1)); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); String expected = generateExpectDataWithExtraMetadata(commits, fileId1, HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestFileSystemViewCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestFileSystemViewCommand.java index b39ac278f05f6..ddc420a087633 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestFileSystemViewCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestFileSystemViewCommand.java @@ -24,6 +24,7 @@ import org.apache.hudi.cli.TableHeader; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.common.fs.FSUtils; import org.apache.hudi.common.model.FileSlice; import org.apache.hudi.common.model.HoodieFileGroup; @@ -36,7 +37,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.IOException; import java.nio.file.Files; @@ -57,8 +60,12 @@ * Test class for {@link FileSystemViewCommand}. 
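ShellEvaluationResultUtil itself is added elsewhere in this PR and does not appear in this excerpt; judging from how the tests use it, a plausible shape (an assumption, not the confirmed implementation) is a check that the evaluation did not yield a Throwable:

package org.apache.hudi.cli.testutils;

public class ShellEvaluationResultUtil {

  private ShellEvaluationResultUtil() {
  }

  // Spring Shell 2 hands back the thrown exception object as the evaluation
  // result instead of propagating it, so "success" means "not a Throwable".
  public static boolean isSuccess(Object shellEvaluationResult) {
    return !(shellEvaluationResult instanceof Throwable);
  }
}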
*/ @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestFileSystemViewCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; + private String nonpartitionedTablePath; private String partitionedTablePath; private String partitionPath; @@ -161,8 +168,8 @@ private void createPartitionedTable() throws IOException { @Test public void testShowCommits() { // Test default show fsview all - CommandResult cr = shell().executeCommand("show fsview all --pathRegex */*/*"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "show fsview all --pathRegex */*/*"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); // Get all file groups Stream fileGroups = partitionedFsView.getAllFileGroups(partitionPath); @@ -199,7 +206,7 @@ public void testShowCommits() { .addTableHeaderField(HoodieTableHeaderFields.HEADER_DELTA_FILES); String expected = HoodiePrintHelper.print(header, fieldNameToConverterMap, "", false, -1, false, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -209,8 +216,8 @@ public void testShowCommits() { @Test public void testShowCommitsWithSpecifiedValues() { // Test command with options, baseFileOnly and maxInstant is 2 - CommandResult cr = shell().executeCommand("show fsview all --pathRegex */*/* --baseFileOnly true --maxInstant 2"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "show fsview all --pathRegex */*/* --baseFileOnly true --maxInstant 2"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); List rows = new ArrayList<>(); Stream fileGroups = partitionedFsView.getAllFileGroups(partitionPath); @@ -242,7 +249,7 @@ public void testShowCommitsWithSpecifiedValues() { String expected = HoodiePrintHelper.print(header, fieldNameToConverterMap, "", false, -1, false, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -317,21 +324,21 @@ public void testShowLatestFileSlices() throws IOException { // Test show with partition path '2016/03/15' new TableCommand().connect(partitionedTablePath, null, false, 0, 0, 0); - CommandResult partitionedTableCR = shell().executeCommand("show fsview latest --partitionPath " + partitionPath); - assertTrue(partitionedTableCR.isSuccess()); + Object partitionedTable = shell.evaluate(() -> "show fsview latest --partitionPath " + partitionPath); + assertTrue(ShellEvaluationResultUtil.isSuccess(partitionedTable)); Stream partitionedFileSlice = partitionedFsView.getLatestFileSlices(partitionPath); List partitionedRows = fileSlicesToCRList(partitionedFileSlice, partitionPath); String partitionedExpected = HoodiePrintHelper.print(header, fieldNameToConverterMap, "", false, -1, false, partitionedRows); partitionedExpected = removeNonWordAndStripSpace(partitionedExpected); - String partitionedResults = removeNonWordAndStripSpace(partitionedTableCR.getResult().toString()); + String partitionedResults = removeNonWordAndStripSpace(partitionedTable.toString()); assertEquals(partitionedExpected, partitionedResults); // Test show for non-partitioned table new TableCommand().connect(nonpartitionedTablePath, null, false, 0, 0, 0); - CommandResult 
nonpartitionedTableCR = shell().executeCommand("show fsview latest"); - assertTrue(nonpartitionedTableCR.isSuccess()); + Object nonpartitionedTable = shell.evaluate(() -> "show fsview latest"); + assertTrue(ShellEvaluationResultUtil.isSuccess(nonpartitionedTable)); Stream nonpartitionedFileSlice = nonpartitionedFsView.getLatestFileSlices(""); @@ -339,7 +346,7 @@ public void testShowLatestFileSlices() throws IOException { String nonpartitionedExpected = HoodiePrintHelper.print(header, fieldNameToConverterMap, "", false, -1, false, nonpartitionedRows); nonpartitionedExpected = removeNonWordAndStripSpace(nonpartitionedExpected); - String nonpartitionedResults = removeNonWordAndStripSpace(nonpartitionedTableCR.getResult().toString()); + String nonpartitionedResults = removeNonWordAndStripSpace(nonpartitionedTable.toString()); assertEquals(nonpartitionedExpected, nonpartitionedResults); } } diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestHoodieLogFileCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestHoodieLogFileCommand.java index f92d5fc57915b..e93ad0c8cad4e 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestHoodieLogFileCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestHoodieLogFileCommand.java @@ -25,6 +25,7 @@ import org.apache.hudi.cli.TableHeader; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.common.config.HoodieCommonConfig; import org.apache.hudi.common.fs.FSUtils; import org.apache.hudi.common.model.HoodieLogFile; @@ -51,7 +52,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.IOException; import java.net.URISyntaxException; @@ -74,8 +77,12 @@ * Test Cases for {@link HoodieLogFileCommand}. 
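The assertions throughout compare rendered tables only after passing both sides through the harness's removeNonWordAndStripSpace, so column padding and border characters cannot cause spurious mismatches. Assuming that helper does what its name says (its source is outside this excerpt), it reduces to something like:

class OutputNormalizer { // illustrative holder class, not part of the patch
  // Keep only word characters: \W matches whitespace, punctuation, and the
  // +/-/| characters that draw the CLI's table borders.
  static String removeNonWordAndStripSpace(String rendered) {
    return rendered.replaceAll("\\W+", "");
  }
}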
*/ @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestHoodieLogFileCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; + private String partitionPath; private HoodieAvroDataBlock dataBlock; private String tablePath; @@ -123,8 +130,8 @@ public void cleanUp() throws IOException { */ @Test public void testShowLogFileCommits() throws JsonProcessingException { - CommandResult cr = shell().executeCommand("show logfile metadata --logFilePathPattern " + partitionPath + "/*"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "show logfile metadata --logFilePathPattern " + partitionPath + "/*"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); TableHeader header = new TableHeader().addTableHeaderField(HoodieTableHeaderFields.HEADER_INSTANT_TIME) .addTableHeaderField(HoodieTableHeaderFields.HEADER_RECORD_COUNT) @@ -142,7 +149,7 @@ public void testShowLogFileCommits() throws JsonProcessingException { String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -151,15 +158,15 @@ public void testShowLogFileCommits() throws JsonProcessingException { */ @Test public void testShowLogFileRecords() throws IOException, URISyntaxException { - CommandResult cr = shell().executeCommand("show logfile records --logFilePathPattern " + partitionPath + "/*"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "show logfile records --logFilePathPattern " + partitionPath + "/*"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); // construct expect result, get 10 records. List records = SchemaTestUtil.generateTestRecords(0, 10); String[][] rows = records.stream().map(r -> new String[] {r.toString()}).toArray(String[][]::new); String expected = HoodiePrintHelper.print(new String[] {HoodieTableHeaderFields.HEADER_RECORDS}, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -196,9 +203,9 @@ public void testShowLogFileRecordsWithMerge() throws IOException, InterruptedExc } } - CommandResult cr = shell().executeCommand("show logfile records --logFilePathPattern " - + partitionPath + "/* --mergeRecords true"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "show logfile records --logFilePathPattern " + + partitionPath + "/* --mergeRecords true"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); // get expected result of 10 records. 
List logFilePaths = Arrays.stream(fs.globStatus(new Path(partitionPath + "/*"))) @@ -237,7 +244,7 @@ public void testShowLogFileRecordsWithMerge() throws IOException, InterruptedExc String expected = HoodiePrintHelper.print(new String[] {HoodieTableHeaderFields.HEADER_RECORDS}, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } } diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestRepairsCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestRepairsCommand.java index 92d3fc52964b6..29377c21ea880 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestRepairsCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestRepairsCommand.java @@ -23,6 +23,7 @@ import org.apache.hudi.cli.HoodieTableHeaderFields; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.client.SparkRDDWriteClient; import org.apache.hudi.client.WriteStatus; import org.apache.hudi.common.fs.FSUtils; @@ -50,7 +51,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.FileInputStream; import java.io.IOException; @@ -83,8 +86,12 @@ * Test class for {@link RepairsCommand}. */ @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestRepairsCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; + private String tablePath; private FileSystem fs; @@ -122,8 +129,8 @@ public void testAddPartitionMetaWithDryRun() throws IOException { assertTrue(fs.mkdirs(new Path(partition3))); // default is dry run. - CommandResult cr = shell().executeCommand("repair addpartitionmeta"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "repair addpartitionmeta"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); // expected all 'No'. 
String[][] rows = FSUtils.getAllPartitionFoldersThreeLevelsDown(fs, tablePath) @@ -133,7 +140,7 @@ public void testAddPartitionMetaWithDryRun() throws IOException { String expected = HoodiePrintHelper.print(new String[] {HoodieTableHeaderFields.HEADER_PARTITION_PATH, HoodieTableHeaderFields.HEADER_METADATA_PRESENT, HoodieTableHeaderFields.HEADER_ACTION}, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -153,8 +160,8 @@ public void testAddPartitionMetaWithRealRun() throws IOException { assertTrue(fs.mkdirs(new Path(partition2))); assertTrue(fs.mkdirs(new Path(partition3))); - CommandResult cr = shell().executeCommand("repair addpartitionmeta --dryrun false"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "repair addpartitionmeta --dryrun false"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); List paths = FSUtils.getAllPartitionFoldersThreeLevelsDown(fs, tablePath); // after dry run, the action will be 'Repaired' @@ -164,10 +171,10 @@ public void testAddPartitionMetaWithRealRun() throws IOException { String expected = HoodiePrintHelper.print(new String[] {HoodieTableHeaderFields.HEADER_PARTITION_PATH, HoodieTableHeaderFields.HEADER_METADATA_PRESENT, HoodieTableHeaderFields.HEADER_ACTION}, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); - cr = shell().executeCommand("repair addpartitionmeta"); + result = shell.evaluate(() -> "repair addpartitionmeta"); // after real run, Metadata is present now. 
rows = paths.stream() @@ -176,7 +183,7 @@ public void testAddPartitionMetaWithRealRun() throws IOException { expected = HoodiePrintHelper.print(new String[] {HoodieTableHeaderFields.HEADER_PARTITION_PATH, HoodieTableHeaderFields.HEADER_METADATA_PRESENT, HoodieTableHeaderFields.HEADER_ACTION}, rows); expected = removeNonWordAndStripSpace(expected); - got = removeNonWordAndStripSpace(cr.getResult().toString()); + got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -188,8 +195,8 @@ public void testOverwriteHoodieProperties() throws IOException { URL newProps = this.getClass().getClassLoader().getResource("table-config.properties"); assertNotNull(newProps, "New property file must exist"); - CommandResult cr = shell().executeCommand("repair overwrite-hoodie-props --new-props-file " + newProps.getPath()); - assertTrue(cr.isSuccess()); + Object cmdResult = shell.evaluate(() -> "repair overwrite-hoodie-props --new-props-file " + newProps.getPath()); + assertTrue(ShellEvaluationResultUtil.isSuccess(cmdResult)); Map oldProps = HoodieCLI.getTableMetaClient().getTableConfig().propsMap(); @@ -217,7 +224,7 @@ public void testOverwriteHoodieProperties() throws IOException { String expect = HoodiePrintHelper.print(new String[] {HoodieTableHeaderFields.HEADER_HOODIE_PROPERTY, HoodieTableHeaderFields.HEADER_OLD_VALUE, HoodieTableHeaderFields.HEADER_NEW_VALUE}, rows); expect = removeNonWordAndStripSpace(expect); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(cmdResult.toString()); assertEquals(expect, got); } @@ -244,8 +251,8 @@ public void testRemoveCorruptedPendingCleanAction() throws IOException { // first, there are four instants assertEquals(4, metaClient.getActiveTimeline().filterInflightsAndRequested().getInstants().count()); - CommandResult cr = shell().executeCommand("repair corrupted clean files"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "repair corrupted clean files"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); // reload meta client metaClient = HoodieTableMetaClient.reload(metaClient); diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestRollbacksCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestRollbacksCommand.java index cf4faf2e16488..a4144937621a6 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestRollbacksCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestRollbacksCommand.java @@ -24,6 +24,7 @@ import org.apache.hudi.cli.HoodieTableHeaderFields; import org.apache.hudi.cli.TableHeader; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.client.BaseHoodieWriteClient; import org.apache.hudi.client.SparkRDDWriteClient; import org.apache.hudi.common.config.HoodieMetadataConfig; @@ -43,7 +44,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.IOException; import java.util.ArrayList; @@ -64,8 +67,12 @@ * Test class for {@link org.apache.hudi.cli.commands.RollbacksCommand}. 
*/ @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestRollbacksCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; + @BeforeEach public void init() throws Exception { String tableName = tableName(); @@ -116,8 +123,8 @@ public void init() throws Exception { */ @Test public void testShowRollbacks() { - CommandResult cr = shell().executeCommand("show rollbacks"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "show rollbacks"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); // get rollback instants HoodieActiveTimeline activeTimeline = new RollbacksCommand.RollbackTimeline(HoodieCLI.getTableMetaClient()); @@ -151,7 +158,7 @@ public void testShowRollbacks() { .addTableHeaderField(HoodieTableHeaderFields.HEADER_TOTAL_PARTITIONS); String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -166,8 +173,8 @@ public void testShowRollback() throws IOException { HoodieInstant instant = rollback.findFirst().orElse(null); assertNotNull(instant, "The instant can not be null."); - CommandResult cr = shell().executeCommand("show rollback --instant " + instant.getTimestamp()); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "show rollback --instant " + instant.getTimestamp()); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); List rows = new ArrayList<>(); // get metadata of instant @@ -194,7 +201,7 @@ public void testShowRollback() throws IOException { .addTableHeaderField(HoodieTableHeaderFields.HEADER_SUCCEEDED); String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } } diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestSavepointsCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestSavepointsCommand.java index 436af1d976f5e..e4c8a4b1a41a4 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestSavepointsCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestSavepointsCommand.java @@ -22,6 +22,7 @@ import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.HoodieTableHeaderFields; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.common.model.HoodieTableType; import org.apache.hudi.common.table.timeline.HoodieTimeline; import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion; @@ -30,7 +31,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.IOException; import java.util.Comparator; @@ -43,8 +46,12 @@ * Test class for {@link org.apache.hudi.cli.commands.SavepointsCommand}. 
*/ @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestSavepointsCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; + private String tablePath; @BeforeEach @@ -69,15 +76,15 @@ public void testShowSavepoints() throws IOException { HoodieTestDataGenerator.createSavepointFile(tablePath, instantTime, hadoopConf()); } - CommandResult cr = shell().executeCommand("savepoints show"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "savepoints show"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); // generate expect result String[][] rows = Stream.of("100", "101", "102", "103").sorted(Comparator.reverseOrder()) .map(instant -> new String[] {instant}).toArray(String[][]::new); String expected = HoodiePrintHelper.print(new String[] {HoodieTableHeaderFields.HEADER_SAVEPOINT_TIME}, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -101,8 +108,8 @@ public void testRefreshMetaClient() throws IOException { HoodieCLI.getTableMetaClient().getActiveTimeline().getSavePointTimeline().filterCompletedInstants(); assertEquals(0, timeline.countInstants(), "there should have no instant"); - CommandResult cr = shell().executeCommand("savepoints refresh"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "savepoints refresh"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); timeline = HoodieCLI.getTableMetaClient().getActiveTimeline().getSavePointTimeline().filterCompletedInstants(); diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestSparkEnvCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestSparkEnvCommand.java index f0a8c1e6efc1d..09f5bd0576a68 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestSparkEnvCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestSparkEnvCommand.java @@ -21,9 +21,12 @@ import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -32,29 +35,33 @@ * Test Cases for {@link SparkEnvCommand}. */ @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestSparkEnvCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; + /** * Test Cases for set and get spark env. 
*/ @Test public void testSetAndGetSparkEnv() { // First, be empty - CommandResult cr = shell().executeCommand("show envs all"); + Object cmdResult = shell.evaluate(() -> "show envs all"); String nullResult = HoodiePrintHelper.print(new String[] {"key", "value"}, new String[0][2]); nullResult = removeNonWordAndStripSpace(nullResult); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(cmdResult.toString()); assertEquals(nullResult, got); // Set SPARK_HOME - cr = shell().executeCommand("set --conf SPARK_HOME=/usr/etc/spark"); - assertTrue(cr.isSuccess()); + cmdResult = shell.evaluate(() -> "set --conf SPARK_HOME=/usr/etc/spark"); + assertTrue(ShellEvaluationResultUtil.isSuccess(cmdResult)); //Get - cr = shell().executeCommand("show env --key SPARK_HOME"); + cmdResult = shell.evaluate(() -> "show env --key SPARK_HOME"); String result = HoodiePrintHelper.print(new String[] {"key", "value"}, new String[][] {new String[] {"SPARK_HOME", "/usr/etc/spark"}}); result = removeNonWordAndStripSpace(result); - got = removeNonWordAndStripSpace(cr.getResult().toString()); + got = removeNonWordAndStripSpace(cmdResult.toString()); assertEquals(result, got); } } diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestStatsCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestStatsCommand.java index 3fa2d19cc3947..dfdb37b3bb00a 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestStatsCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestStatsCommand.java @@ -24,6 +24,7 @@ import org.apache.hudi.cli.TableHeader; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.common.model.HoodieTableType; import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion; import org.apache.hudi.common.testutils.HoodieTestDataGenerator; @@ -36,7 +37,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.IOException; import java.text.DecimalFormat; @@ -53,8 +56,12 @@ * Test class of {@link org.apache.hudi.cli.commands.StatsCommand}. 
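 * <p>As elsewhere in this diff, the expected table is rendered with
 * {@code HoodiePrintHelper.print(...)} and both sides are normalized via
 * {@code removeNonWordAndStripSpace(...)} before comparison; the spring-shell 2.x result is a
 * plain {@code Object}, so {@code result.toString()} stands in for the old
 * {@code CommandResult#getResult()}.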
*/ @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestStatsCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; + private String tablePath; @BeforeEach @@ -87,8 +94,8 @@ public void testWriteAmplificationStats() throws Exception { Option.of(v[0]), Option.of(v[1])); } - CommandResult cr = shell().executeCommand("stats wa"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "stats wa"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); // generate expect List rows = new ArrayList<>(); @@ -107,7 +114,7 @@ public void testWriteAmplificationStats() throws Exception { .addTableHeaderField(HoodieTableHeaderFields.HEADER_WRITE_AMPLIFICATION_FACTOR); String expected = HoodiePrintHelper.print(header, new HashMap<>(), "", false, -1, false, rows); expected = removeNonWordAndStripSpace(expected); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); } @@ -142,8 +149,8 @@ public void testFileSizeStats() throws Exception { .withBaseFilesInPartition(partition2, data2[1], data2[2]) .withBaseFilesInPartition(partition3, data2[3]); - CommandResult cr = shell().executeCommand("stats filesizes"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "stats filesizes"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); Histogram globalHistogram = new Histogram(new UniformReservoir(StatsCommand.MAX_FILES)); HashMap commitHistoMap = new HashMap<>(); @@ -177,7 +184,7 @@ public void testFileSizeStats() throws Exception { String expect = HoodiePrintHelper.print(header, new StatsCommand().getFieldNameToConverterMap(), "", false, -1, false, rows); expect = removeNonWordAndStripSpace(expect); - String got = removeNonWordAndStripSpace(cr.getResult().toString()); + String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expect, got); } } diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestTableCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestTableCommand.java index 08cdb7dc47f09..c1c44f6251889 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestTableCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestTableCommand.java @@ -22,6 +22,7 @@ import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness; import org.apache.hudi.cli.testutils.HoodieTestCommitMetadataGenerator; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.common.fs.ConsistencyGuardConfig; import org.apache.hudi.common.model.HoodieCommitMetadata; import org.apache.hudi.common.model.HoodieTableType; @@ -38,7 +39,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.File; import java.io.FileInputStream; @@ -60,8 +63,12 @@ * Test Cases for {@link TableCommand}. 
*/ @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestTableCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; + private String tableName; private String tablePath; private String metaPath; @@ -83,9 +90,8 @@ public void init() { * Method to create a table for connect or desc. */ private boolean prepareTable() { - CommandResult cr = shell().executeCommand( - "create --path " + tablePath + " --tableName " + tableName); - return cr.isSuccess(); + Object result = shell.evaluate(() -> "create --path " + tablePath + " --tableName " + tableName); + return ShellEvaluationResultUtil.isSuccess(result); } /** @@ -97,10 +103,9 @@ public void testConnectTable() { assertTrue(prepareTable()); // Test connect with specified values - CommandResult cr = shell().executeCommand( - "connect --path " + tablePath + " --initialCheckIntervalMs 3000 " + Object result = shell.evaluate(() -> "connect --path " + tablePath + " --initialCheckIntervalMs 3000 " + "--maxWaitIntervalMs 40000 --maxCheckIntervalMs 8"); - assertTrue(cr.isSuccess()); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); // Check specified values ConsistencyGuardConfig conf = HoodieCLI.consistencyGuardConfig; @@ -136,11 +141,10 @@ public void testDefaultCreate() { @Test public void testCreateWithSpecifiedValues() { // Test create with specified values - CommandResult cr = shell().executeCommand( - "create --path " + tablePath + " --tableName " + tableName + Object result = shell.evaluate(() -> "create --path " + tablePath + " --tableName " + tableName + " --tableType MERGE_ON_READ --archiveLogFolder archive"); - assertTrue(cr.isSuccess()); - assertEquals("Metadata for table " + tableName + " loaded", cr.getResult().toString()); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); + assertEquals("Metadata for table " + tableName + " loaded", result.toString()); HoodieTableMetaClient client = HoodieCLI.getTableMetaClient(); assertEquals(metaPath + Path.SEPARATOR + "archive", client.getArchivePath()); assertEquals(tablePath, client.getBasePath()); @@ -157,13 +161,13 @@ public void testDescTable() { assertTrue(prepareTable()); // Test desc table - CommandResult cr = shell().executeCommand("desc"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "desc"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); // check table's basePath metaPath and type - assertTrue(cr.getResult().toString().contains(tablePath)); - assertTrue(cr.getResult().toString().contains(metaPath)); - assertTrue(cr.getResult().toString().contains("COPY_ON_WRITE")); + assertTrue(result.toString().contains(tablePath)); + assertTrue(result.toString().contains(metaPath)); + assertTrue(result.toString().contains("COPY_ON_WRITE")); } /** @@ -201,8 +205,8 @@ private void testRefreshCommand(String command) throws IOException { HoodieCLI.getTableMetaClient().getActiveTimeline().getCommitTimeline().filterCompletedInstants(); assertEquals(0, timeline.countInstants(), "there should have no instant"); - CommandResult cr = shell().executeCommand(command); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> command); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); timeline = HoodieCLI.getTableMetaClient().getActiveTimeline().getCommitTimeline().filterCompletedInstants(); @@ -234,10 +238,10 @@ public void testFetchTableSchema() throws Exception { generateData(schemaStr); - CommandResult cr = 
shell().executeCommand("fetch table schema"); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> "fetch table schema"); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); - String actualSchemaStr = cr.getResult().toString().substring(cr.getResult().toString().indexOf("{")); + String actualSchemaStr = result.toString().substring(result.toString().indexOf("{")); Schema actualSchema = new Schema.Parser().parse(actualSchemaStr); Schema expectedSchema = new Schema.Parser().parse(schemaStr); @@ -245,8 +249,8 @@ public void testFetchTableSchema() throws Exception { assertEquals(actualSchema, expectedSchema); File file = File.createTempFile("temp", null); - cr = shell().executeCommand("fetch table schema --outputFilePath " + file.getAbsolutePath()); - assertTrue(cr.isSuccess()); + result = shell.evaluate(() -> "fetch table schema --outputFilePath " + file.getAbsolutePath()); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); actualSchemaStr = getFileContent(file.getAbsolutePath()); actualSchema = new Schema.Parser().parse(actualSchemaStr); diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestTempViewCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestTempViewCommand.java index f1651d7c57048..b6f17fa3364e7 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestTempViewCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestTempViewCommand.java @@ -20,6 +20,7 @@ import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness; +import org.apache.hudi.cli.testutils.MockCommandLineInput; import org.apache.hudi.cli.utils.SparkTempViewProvider; import org.apache.hudi.cli.utils.TempViewProvider; import org.apache.hudi.exception.HoodieException; @@ -28,7 +29,9 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.util.ArrayList; import java.util.Arrays; @@ -39,8 +42,11 @@ import static org.junit.jupiter.api.Assertions.assertTrue; @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestTempViewCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; private TempViewProvider tempViewProvider; private final String tableName = tableName(); @@ -63,26 +69,28 @@ public void cleanUpTempView() { @Test public void testQueryWithException() { - CommandResult cr = shell().executeCommand(String.format("temp query --sql 'select * from %s'", "table_non_exist")); - assertEquals(TempViewCommand.QUERY_FAIL, cr.getResult().toString()); + Object result = shell.evaluate((MockCommandLineInput) () -> + String.format("temp query --sql 'select * from %s'", "table_non_exist")); + assertEquals(TempViewCommand.QUERY_FAIL, result.toString()); } @Test public void testQuery() { - CommandResult cr = shell().executeCommand(String.format("temp query --sql 'select * from %s'", tableName)); - assertEquals(TempViewCommand.QUERY_SUCCESS, cr.getResult().toString()); + Object result = shell.evaluate((MockCommandLineInput) () -> + String.format("temp query --sql 'select * from %s'", tableName)); + assertEquals(TempViewCommand.QUERY_SUCCESS, result.toString()); } @Test public void testShowAll() { - 
CommandResult cr = shell().executeCommand("temps show"); - assertEquals(TempViewCommand.SHOW_SUCCESS, cr.getResult().toString()); + Object result = shell.evaluate(() -> "temps show"); + assertEquals(TempViewCommand.SHOW_SUCCESS, result.toString()); } @Test public void testDelete() { - CommandResult cr = shell().executeCommand(String.format("temp delete --view %s", tableName)); - assertTrue(cr.getResult().toString().endsWith("successfully!")); + Object result = shell.evaluate(() -> String.format("temp delete --view %s", tableName)); + assertTrue(result.toString().endsWith("successfully!")); // after delete, we can not access table yet. assertThrows(HoodieException.class, () -> HoodieCLI.getTempViewProvider().runQuery("select * from " + tableName)); diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestUtilsCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestUtilsCommand.java index e364814976d26..f7b82d7a3dc6d 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestUtilsCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestUtilsCommand.java @@ -19,11 +19,14 @@ package org.apache.hudi.cli.commands; import org.apache.hudi.cli.functional.CLIFunctionalTestHarness; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.table.HoodieTable; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -34,19 +37,23 @@ * Test class for {@link org.apache.hudi.cli.commands.UtilsCommand}. */ @Tag("functional") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class TestUtilsCommand extends CLIFunctionalTestHarness { + @Autowired + private Shell shell; + /** * Test case for success load class. 
*/ @Test public void testLoadClass() { String name = HoodieTable.class.getName(); - CommandResult cr = shell().executeCommand(String.format("utils loadClass --class %s", name)); + Object result = shell.evaluate(() -> String.format("utils loadClass --class %s", name)); assertAll("Command runs success", - () -> assertTrue(cr.isSuccess()), - () -> assertNotNull(cr.getResult().toString()), - () -> assertTrue(cr.getResult().toString().startsWith("file:"))); + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result)), + () -> assertNotNull(result.toString()), + () -> assertTrue(result.toString().startsWith("file:"))); } /** @@ -55,12 +62,12 @@ public void testLoadClass() { @Test public void testLoadClassNotFound() { String name = "test.class.NotFound"; - CommandResult cr = shell().executeCommand(String.format("utils loadClass --class %s", name)); + Object result = shell.evaluate(() -> String.format("utils loadClass --class %s", name)); assertAll("Command runs success", - () -> assertTrue(cr.isSuccess()), - () -> assertNotNull(cr.getResult().toString()), - () -> assertEquals(cr.getResult().toString(), String.format("Class %s not found!", name))); + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result)), + () -> assertNotNull(result.toString()), + () -> assertEquals(result.toString(), String.format("Class %s not found!", name))); } /** @@ -69,11 +76,11 @@ public void testLoadClassNotFound() { @Test public void testLoadClassNull() { String name = ""; - CommandResult cr = shell().executeCommand(String.format("utils loadClass --class %s", name)); + Object result = shell.evaluate(() -> String.format("utils loadClass --class %s", name)); assertAll("Command runs success", - () -> assertTrue(cr.isSuccess()), - () -> assertNotNull(cr.getResult().toString()), - () -> assertEquals("Class to be loaded can not be null!", cr.getResult().toString())); + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result)), + () -> assertNotNull(result.toString()), + () -> assertEquals("Class to be loaded can not be null!", result.toString())); } } diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/functional/CLIFunctionalTestHarness.java b/hudi-cli/src/test/java/org/apache/hudi/cli/functional/CLIFunctionalTestHarness.java index 7a12a6692a2bc..0bc422c3b8e6c 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/functional/CLIFunctionalTestHarness.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/functional/CLIFunctionalTestHarness.java @@ -35,8 +35,6 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.io.TempDir; -import org.springframework.shell.Bootstrap; -import org.springframework.shell.core.JLineShellComponent; import java.nio.file.Paths; @@ -49,7 +47,7 @@ public class CLIFunctionalTestHarness implements SparkProvider { private static transient SparkSession spark; private static transient SQLContext sqlContext; private static transient JavaSparkContext jsc; - private static transient JLineShellComponent shell; + /** * An indicator of the initialization status. 
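 * <p>(No JLine shell is bootstrapped by the harness any more: with spring-shell 2.x each test
 * class is annotated with {@code @SpringBootTest} and receives the {@code Shell} bean through
 * {@code @Autowired} injection.)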
*/ @@ -81,10 +79,6 @@ public HoodieSparkEngineContext context() { return context; } - public JLineShellComponent shell() { - return shell; - } - public String tableName() { return tableName("_test_table"); } @@ -103,7 +97,7 @@ public Configuration hadoopConf() { @BeforeEach public synchronized void runBeforeEach() { - initialized = spark != null && shell != null; + initialized = spark != null; if (!initialized) { SparkConf sparkConf = conf(); SparkRDDWriteClient.registerClasses(sparkConf); @@ -112,7 +106,6 @@ public synchronized void runBeforeEach() { sqlContext = spark.sqlContext(); jsc = new JavaSparkContext(spark.sparkContext()); context = new HoodieSparkEngineContext(jsc); - shell = new Bootstrap().getJLineShellComponent(); timelineService = HoodieClientTestUtils.initTimelineService( context, basePath(), incrementTimelineServicePortToUse()); timelineServicePort = timelineService.getServerPort(); @@ -125,10 +118,6 @@ public static synchronized void cleanUpAfterAll() { spark.close(); spark = null; } - if (shell != null) { - shell.stop(); - shell = null; - } if (timelineService != null) { timelineService.close(); } diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/functional/CLIFunctionalTestSuite.java b/hudi-cli/src/test/java/org/apache/hudi/cli/functional/CLIFunctionalTestSuite.java deleted file mode 100644 index e21a4e8fba2a0..0000000000000 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/functional/CLIFunctionalTestSuite.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.hudi.cli.functional; - -import org.junit.platform.runner.JUnitPlatform; -import org.junit.platform.suite.api.IncludeTags; -import org.junit.platform.suite.api.SelectPackages; -import org.junit.runner.RunWith; - -@RunWith(JUnitPlatform.class) -@SelectPackages("org.apache.hudi.cli.commands") -@IncludeTags("functional") -public class CLIFunctionalTestSuite { -} diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java index fb615f546b44a..f22ce1bbaf523 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java @@ -23,15 +23,17 @@ import org.apache.hudi.cli.HoodiePrintHelper; import org.apache.hudi.cli.commands.TableCommand; import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase; -import org.apache.hudi.functional.TestBootstrap; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.common.model.HoodieTableType; import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion; - +import org.apache.hudi.functional.TestBootstrap; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.IOException; import java.time.Instant; @@ -44,8 +46,11 @@ /** * Test class of {@link org.apache.hudi.cli.commands.BootstrapCommand}. */ +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class ITTestBootstrapCommand extends HoodieCLIIntegrationTestBase { + @Autowired + private Shell shell; private static final int NUM_OF_RECORDS = 100; private static final String PARTITION_FIELD = "datestr"; private static final String RECORD_KEY_FIELD = "_row_key"; @@ -81,8 +86,8 @@ public void testBootstrapRunCommand() throws IOException { String cmdStr = String.format( "bootstrap run --targetPath %s --tableName %s --tableType %s --srcPath %s --rowKeyField %s --partitionPathField %s --sparkMaster %s", tablePath, tableName, HoodieTableType.COPY_ON_WRITE.name(), sourcePath, RECORD_KEY_FIELD, PARTITION_FIELD, "local"); - CommandResult cr = getShell().executeCommand(cmdStr); - assertTrue(cr.isSuccess()); + Object resultForBootstrapRun = shell.evaluate(() -> cmdStr); + assertTrue(ShellEvaluationResultUtil.isSuccess(resultForBootstrapRun)); // Connect & check Hudi table exist new TableCommand().connect(tablePath, TimelineLayoutVersion.VERSION_1, false, 2000, 300000, 7); @@ -90,8 +95,8 @@ public void testBootstrapRunCommand() throws IOException { assertEquals(1, metaClient.getActiveTimeline().getCommitsTimeline().countInstants(), "Should have 1 commit."); // test "bootstrap index showpartitions" - CommandResult crForIndexedPartitions = getShell().executeCommand("bootstrap index showpartitions"); - assertTrue(crForIndexedPartitions.isSuccess()); + Object resultForIndexedPartitions = shell.evaluate(() -> "bootstrap index showpartitions"); + assertTrue(ShellEvaluationResultUtil.isSuccess(resultForIndexedPartitions)); String[] header = new String[] {"Indexed partitions"}; String[][] rows = new String[partitions.size()][1]; @@ -100,15 +105,15 @@ 
public void testBootstrapRunCommand() throws IOException { } String expect = HoodiePrintHelper.print(header, rows); expect = removeNonWordAndStripSpace(expect); - String got = removeNonWordAndStripSpace(crForIndexedPartitions.getResult().toString()); + String got = removeNonWordAndStripSpace(resultForIndexedPartitions.toString()); assertEquals(expect, got); // test "bootstrap index showMapping" - CommandResult crForIndexedMapping = getShell().executeCommand("bootstrap index showmapping"); - assertTrue(crForIndexedMapping.isSuccess()); + Object resultForIndexedMapping = shell.evaluate(() -> "bootstrap index showmapping"); + assertTrue(ShellEvaluationResultUtil.isSuccess(resultForIndexedMapping)); - CommandResult crForIndexedMappingWithPartition = getShell().executeCommand(String.format( - "bootstrap index showmapping --partitionPath %s=%s", PARTITION_FIELD, partitions.get(0))); - assertTrue(crForIndexedMappingWithPartition.isSuccess()); + Object resultForIndexedMappingWithPartition = shell.evaluate(() -> String.format( + "bootstrap index showmapping --partitionPath %s=%s", PARTITION_FIELD, partitions.get(0))); + assertTrue(ShellEvaluationResultUtil.isSuccess(resultForIndexedMappingWithPartition)); } } diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestClusteringCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestClusteringCommand.java index f0f08f87c11a6..f81133aca0066 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestClusteringCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestClusteringCommand.java @@ -21,6 +21,7 @@ import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.commands.TableCommand; import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.client.SparkRDDWriteClient; import org.apache.hudi.client.WriteStatus; import org.apache.hudi.client.common.HoodieSparkEngineContext; @@ -40,7 +41,9 @@ import org.apache.spark.api.java.JavaSparkContext; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.IOException; import java.nio.file.Paths; @@ -57,8 +60,12 @@ * A command use SparkLauncher need load jars under lib which generate during mvn package. * Use integration test instead of unit test. 
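 * <p>The {@code spring.shell.interactive.enabled=false} and
 * {@code spring.shell.command.script.enabled=false} properties on {@code @SpringBootTest}
 * evidently keep the interactive prompt and the built-in {@code script} command from starting
 * with the application context, so the test drives the CLI programmatically through
 * {@code Shell#evaluate} alone.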
*/ +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class ITTestClusteringCommand extends HoodieCLIIntegrationTestBase { + @Autowired + private Shell shell; + @BeforeEach public void init() throws IOException { tableName = "test_table_" + ITTestClusteringCommand.class.getName(); @@ -81,11 +88,11 @@ public void testScheduleClustering() throws IOException { // generate commits generateCommits(); - CommandResult cr = scheduleClustering(); + Object result = scheduleClustering(); assertAll("Command run failed", - () -> assertTrue(cr.isSuccess()), + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result)), () -> assertTrue( - cr.getResult().toString().startsWith("Succeeded to schedule clustering for"))); + result.toString().startsWith("Succeeded to schedule clustering for"))); // there is 1 requested clustering HoodieActiveTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline(); @@ -100,8 +107,8 @@ public void testClustering() throws IOException { // generate commits generateCommits(); - CommandResult cr1 = scheduleClustering(); - assertTrue(cr1.isSuccess()); + Object result1 = scheduleClustering(); + assertTrue(ShellEvaluationResultUtil.isSuccess(result1)); // get clustering instance HoodieActiveTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline(); @@ -110,14 +117,14 @@ public void testClustering() throws IOException { assertTrue(instanceOpt.isPresent(), "Must have pending clustering."); final String instance = instanceOpt.get(); - CommandResult cr2 = getShell().executeCommand( - String.format("clustering run --parallelism %s --clusteringInstant %s --sparkMaster %s", + Object result2 = shell.evaluate(() -> + String.format("clustering run --parallelism %s --clusteringInstant %s --sparkMaster %s", 2, instance, "local")); assertAll("Command run failed", - () -> assertTrue(cr2.isSuccess()), + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result2)), () -> assertTrue( - cr2.getResult().toString().startsWith("Succeeded to run clustering for "))); + result2.toString().startsWith("Succeeded to run clustering for "))); // assert clustering complete assertTrue(HoodieCLI.getTableMetaClient().getActiveTimeline().reload() @@ -139,13 +146,13 @@ public void testClusteringScheduleAndExecute() throws IOException { // generate commits generateCommits(); - CommandResult cr2 = getShell().executeCommand( - String.format("clustering scheduleAndExecute --parallelism %s --sparkMaster %s", 2, "local")); + Object result = shell.evaluate(() -> + String.format("clustering scheduleAndExecute --parallelism %s --sparkMaster %s", 2, "local")); assertAll("Command run failed", - () -> assertTrue(cr2.isSuccess()), + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result)), () -> assertTrue( - cr2.getResult().toString().startsWith("Succeeded to run clustering for scheduleAndExecute"))); + result.toString().startsWith("Succeeded to run clustering for scheduleAndExecute"))); // assert clustering complete assertTrue(HoodieCLI.getTableMetaClient().getActiveTimeline().reload() @@ -154,10 +161,10 @@ public void testClusteringScheduleAndExecute() throws IOException { "Completed clustering couldn't be 0"); } - private CommandResult scheduleClustering() { + private Object scheduleClustering() { // generate requested clustering - return getShell().executeCommand( - String.format("clustering schedule --hoodieConfigs hoodie.clustering.inline.max.commits=1 --sparkMaster %s", "local")); + return shell.evaluate(() -> + 
String.format("clustering schedule --hoodieConfigs hoodie.clustering.inline.max.commits=1 --sparkMaster %s", "local")); } private void generateCommits() throws IOException { diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCommitsCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCommitsCommand.java index 4e1be39e480dd..3f32081e5e4a9 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCommitsCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCommitsCommand.java @@ -22,16 +22,18 @@ import org.apache.hudi.cli.commands.RollbacksCommand; import org.apache.hudi.cli.commands.TableCommand; import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.common.model.HoodieTableType; import org.apache.hudi.common.table.HoodieTableMetaClient; import org.apache.hudi.common.table.timeline.HoodieActiveTimeline; import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion; import org.apache.hudi.common.testutils.HoodieTestTable; - import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.IOException; import java.nio.file.Paths; @@ -53,8 +55,12 @@ * Use integration test instead of unit test. */ @Disabled("HUDI-4226") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class ITTestCommitsCommand extends HoodieCLIIntegrationTestBase { + @Autowired + private Shell shell; + @Override protected HoodieTableType getTableType() { return HoodieTableType.COPY_ON_WRITE; @@ -96,12 +102,12 @@ public void testRollbackCommit() throws Exception { .addCommit("102") .withBaseFilesInPartitions(partitionAndFileId); - CommandResult cr = getShell().executeCommand(String.format("commit rollback --commit %s --sparkMaster %s --sparkMemory %s", - "102", "local", "4G")); + Object result = shell.evaluate(() -> String.format("commit rollback --commit %s --sparkMaster %s --sparkMemory %s", + "102", "local", "4G")); assertAll("Command run failed", - () -> assertTrue(cr.isSuccess()), - () -> assertEquals("Commit 102 rolled back", cr.getResult().toString())); + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result)), + () -> assertEquals("Commit 102 rolled back", result.toString())); metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient()); @@ -112,11 +118,12 @@ public void testRollbackCommit() throws Exception { assertEquals(2, timeline.getCommitsTimeline().countInstants(), "There should have 2 instants."); // rollback complete commit - CommandResult cr2 = getShell().executeCommand(String.format("commit rollback --commit %s --sparkMaster %s --sparkMemory %s", - "101", "local", "4G")); + Object result2 = shell.evaluate(() -> String.format("commit rollback --commit %s --sparkMaster %s --sparkMemory %s", + "101", "local", "4G")); + assertAll("Command run failed", - () -> assertTrue(cr2.isSuccess()), - () -> assertEquals("Commit 101 rolled back", cr2.getResult().toString())); + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result2)), + () -> assertEquals("Commit 101 rolled back", result2.toString())); metaClient = 
HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient()); @@ -127,14 +134,13 @@ public void testRollbackCommit() throws Exception { assertEquals(1, timeline2.getCommitsTimeline().countInstants(), "There should have 1 instants."); // rollback with rollbackUsingMarkers==false - CommandResult cr3 = getShell().executeCommand( - String.format("commit rollback --commit %s --rollbackUsingMarkers false --sparkMaster %s --sparkMemory %s", - "100", "local", "4G")); + Object result3 = shell.evaluate(() -> + String.format("commit rollback --commit %s --rollbackUsingMarkers false --sparkMaster %s --sparkMemory %s", + "100", "local", "4G")); assertAll("Command run failed", - () -> assertTrue(cr3.isSuccess()), - () -> assertEquals("Commit 100 rolled back", cr3.getResult().toString())); - + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result3)), + () -> assertEquals("Commit 100 rolled back", result3.toString())); metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient()); HoodieActiveTimeline rollbackTimeline3 = new RollbacksCommand.RollbackTimeline(metaClient); diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCompactionCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCompactionCommand.java index 76db8e782f90c..21e961ee28df6 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCompactionCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCompactionCommand.java @@ -21,6 +21,7 @@ import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.commands.TableCommand; import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.client.CompactionAdminClient; import org.apache.hudi.client.SparkRDDWriteClient; import org.apache.hudi.client.TestCompactionAdminClient; @@ -49,7 +50,9 @@ import org.apache.spark.api.java.JavaSparkContext; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.BufferedWriter; import java.io.FileWriter; @@ -69,8 +72,11 @@ * A command use SparkLauncher need load jars under lib which generate during mvn package. * Use integration test instead of unit test. 
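 * <p>Passing a lambda to {@code shell.evaluate(...)} works because
 * {@code org.springframework.shell.Input} is a functional interface whose {@code rawText()}
 * supplies the command line; its default {@code words()} splits on plain whitespace, which is
 * presumably why commands carrying quoted arguments (see {@code TestTempViewCommand}) go through
 * the new {@code MockCommandLineInput} helper instead.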
*/ +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class ITTestCompactionCommand extends HoodieCLIIntegrationTestBase { + @Autowired + private Shell shell; @BeforeEach public void init() throws IOException { tableName = "test_table_" + ITTestCompactionCommand.class.getName(); @@ -93,13 +99,13 @@ public void testScheduleCompact() throws IOException { // generate commits generateCommits(); - CommandResult cr = getShell().executeCommand( - String.format("compaction schedule --hoodieConfigs hoodie.compact.inline.max.delta.commits=1 --sparkMaster %s", + Object result = shell.evaluate(() -> + String.format("compaction schedule --hoodieConfigs hoodie.compact.inline.max.delta.commits=1 --sparkMaster %s", "local")); assertAll("Command run failed", - () -> assertTrue(cr.isSuccess()), + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result)), () -> assertTrue( - cr.getResult().toString().startsWith("Attempted to schedule compaction for"))); + result.toString().startsWith("Attempted to schedule compaction for"))); // there is 1 requested compaction HoodieActiveTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline(); @@ -119,14 +125,14 @@ public void testCompact() throws IOException { String schemaPath = Paths.get(basePath, "compaction.schema").toString(); writeSchemaToTmpFile(schemaPath); - CommandResult cr2 = getShell().executeCommand( - String.format("compaction run --parallelism %s --schemaFilePath %s --sparkMaster %s", + Object result2 = shell.evaluate(() -> + String.format("compaction run --parallelism %s --schemaFilePath %s --sparkMaster %s", 2, schemaPath, "local")); assertAll("Command run failed", - () -> assertTrue(cr2.isSuccess()), + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result2)), () -> assertTrue( - cr2.getResult().toString().startsWith("Compaction successfully completed for"))); + result2.toString().startsWith("Compaction successfully completed for"))); // assert compaction complete assertTrue(HoodieCLI.getTableMetaClient().getActiveTimeline().reload() @@ -146,15 +152,15 @@ public void testCompactScheduleAndExecute() throws IOException { String schemaPath = Paths.get(basePath, "compaction.schema").toString(); writeSchemaToTmpFile(schemaPath); - CommandResult cr2 = getShell().executeCommand( - String.format("compaction scheduleAndExecute --parallelism %s --schemaFilePath %s --sparkMaster %s " - + "--hoodieConfigs hoodie.compact.inline.max.delta.commits=1", + Object result = shell.evaluate(() -> + String.format("compaction scheduleAndExecute --parallelism %s --schemaFilePath %s --sparkMaster %s " + + "--hoodieConfigs hoodie.compact.inline.max.delta.commits=1", 2, schemaPath, "local")); assertAll("Command run failed", - () -> assertTrue(cr2.isSuccess()), + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result)), () -> assertTrue( - cr2.getResult().toString().startsWith("Schedule and execute compaction successfully completed"))); + result.toString().startsWith("Schedule and execute compaction successfully completed"))); // assert compaction complete assertTrue(HoodieCLI.getTableMetaClient().getActiveTimeline().reload() @@ -173,14 +179,14 @@ public void testValidateCompaction() throws IOException { String instance = prepareScheduleCompaction(); - CommandResult cr = getShell().executeCommand( - String.format("compaction validate --instant %s --sparkMaster %s", instance, "local")); + Object result = shell.evaluate(() -> + String.format("compaction validate --instant %s 
--sparkMaster %s", instance, "local")); assertAll("Command run failed", - () -> assertTrue(cr.isSuccess()), + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result)), () -> assertTrue( // compaction requested should be valid - cr.getResult().toString().contains("COMPACTION PLAN VALID"))); + result.toString().contains("COMPACTION PLAN VALID"))); } /** @@ -195,14 +201,14 @@ public void testUnscheduleCompaction() throws Exception { String instance = prepareScheduleCompaction(); - CommandResult cr = getShell().executeCommand( - String.format("compaction unschedule --instant %s --sparkMaster %s", instance, "local")); + Object result = shell.evaluate(() -> + String.format("compaction unschedule --instant %s --sparkMaster %s", instance, "local")); // Always has no file assertAll("Command run failed", - () -> assertTrue(cr.isSuccess()), + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result)), () -> assertEquals("No File renames needed to unschedule pending compaction. Operation successful.", - cr.getResult().toString())); + result.toString())); } /** @@ -219,14 +225,14 @@ public void testUnscheduleCompactFile() throws IOException { CompactionOperation op = CompactionOperation.convertFromAvroRecordInstance( CompactionUtils.getCompactionPlan(metaClient, "001").getOperations().stream().findFirst().get()); - CommandResult cr = getShell().executeCommand( - String.format("compaction unscheduleFileId --fileId %s --partitionPath %s --sparkMaster %s", + Object result = shell.evaluate(() -> + String.format("compaction unscheduleFileId --fileId %s --partitionPath %s --sparkMaster %s", op.getFileGroupId().getFileId(), op.getFileGroupId().getPartitionPath(), "local")); assertAll("Command run failed", - () -> assertTrue(cr.isSuccess()), - () -> assertTrue(removeNonWordAndStripSpace(cr.getResult().toString()).contains("true")), - () -> assertFalse(removeNonWordAndStripSpace(cr.getResult().toString()).contains("false"))); + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result)), + () -> assertTrue(removeNonWordAndStripSpace(result.toString()).contains("true")), + () -> assertFalse(removeNonWordAndStripSpace(result.toString()).contains("false"))); } /** @@ -256,25 +262,25 @@ public void testRepairCompaction() throws Exception { client.unscheduleCompactionPlan(compactionInstant, false, 1, false); - CommandResult cr = getShell().executeCommand( - String.format("compaction repair --instant %s --sparkMaster %s", compactionInstant, "local")); + Object result = shell.evaluate(() -> + String.format("compaction repair --instant %s --sparkMaster %s", compactionInstant, "local")); // All Executes is succeeded, result contains true and has no false // Expected: // ║ File Id │ Source File Path │ Destination File Path │ Rename Executed? │ Rename Succeeded? 
│ Error ║ // ║ * │ * │ * │ true │ true │ ║ assertAll("Command run failed", - () -> assertTrue(cr.isSuccess()), - () -> assertTrue(removeNonWordAndStripSpace(cr.getResult().toString()).contains("true")), - () -> assertFalse(removeNonWordAndStripSpace(cr.getResult().toString()).contains("false"))); + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result)), + () -> assertTrue(removeNonWordAndStripSpace(result.toString()).contains("true")), + () -> assertFalse(removeNonWordAndStripSpace(result.toString()).contains("false"))); } private String prepareScheduleCompaction() { // generate requested compaction - CommandResult cr = getShell().executeCommand( - String.format("compaction schedule --hoodieConfigs hoodie.compact.inline.max.delta.commits=1 --sparkMaster %s", + Object result = shell.evaluate(() -> + String.format("compaction schedule --hoodieConfigs hoodie.compact.inline.max.delta.commits=1 --sparkMaster %s", "local")); - assertTrue(cr.isSuccess()); + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); // get compaction instance HoodieActiveTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline(); diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java index 3e4a45306b9ed..a71697657a0d7 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java @@ -18,9 +18,13 @@ package org.apache.hudi.cli.integ; +import org.apache.avro.generic.GenericRecord; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.Path; import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.commands.TableCommand; import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.common.model.HoodieTableType; import org.apache.hudi.common.table.HoodieTableMetaClient; import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion; @@ -29,16 +33,14 @@ import org.apache.hudi.utilities.HDFSParquetImporter; import org.apache.hudi.utilities.functional.TestHDFSParquetImporter; import org.apache.hudi.utilities.functional.TestHDFSParquetImporter.HoodieTripModel; - -import org.apache.avro.generic.GenericRecord; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.Path; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.IOException; import java.nio.file.Files; @@ -55,8 +57,11 @@ * Test class for {@link org.apache.hudi.cli.commands.HDFSParquetImportCommand}. 
*/ @Disabled("Disable due to flakiness and feature deprecation.") +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class ITTestHDFSParquetImportCommand extends HoodieCLIIntegrationTestBase { + @Autowired + private Shell shell; private Path sourcePath; private Path targetPath; private String tableName; @@ -93,11 +98,12 @@ public void testConvertWithInsert() throws IOException { + "--schemaFilePath %s --format %s --sparkMemory %s --retry %s --sparkMaster %s", sourcePath.toString(), targetPath.toString(), tableName, HoodieTableType.COPY_ON_WRITE.name(), "_row_key", "timestamp", "1", schemaFile, "parquet", "2G", "1", "local"); - CommandResult cr = getShell().executeCommand(command); + + Object result = shell.evaluate(() -> command); assertAll("Command run success", - () -> assertTrue(cr.isSuccess()), - () -> assertEquals("Table imported to hoodie format", cr.getResult().toString())); + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result)), + () -> assertEquals("Table imported to hoodie format", result.toString())); // Check hudi table exist String metaPath = targetPath + Path.SEPARATOR + HoodieTableMetaClient.METAFOLDER_NAME; @@ -139,11 +145,11 @@ public void testConvertWithUpsert() throws IOException, ParseException { + "--schemaFilePath %s --format %s --sparkMemory %s --retry %s --sparkMaster %s --upsert %s", upsertFolder.toString(), targetPath.toString(), tableName, HoodieTableType.COPY_ON_WRITE.name(), "_row_key", "timestamp", "1", schemaFile, "parquet", "2G", "1", "local", "true"); - CommandResult cr = getShell().executeCommand(command); + Object result = shell.evaluate(() -> command); assertAll("Command run success", - () -> assertTrue(cr.isSuccess()), - () -> assertEquals("Table imported to hoodie format", cr.getResult().toString())); + () -> assertTrue(ShellEvaluationResultUtil.isSuccess(result)), + () -> assertEquals("Table imported to hoodie format", result.toString())); // reload meta client metaClient = HoodieTableMetaClient.reload(metaClient); diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestMarkersCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestMarkersCommand.java index 35561ef09c371..5aacfd82de044 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestMarkersCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestMarkersCommand.java @@ -21,13 +21,16 @@ import org.apache.hadoop.fs.Path; import org.apache.hudi.cli.commands.TableCommand; import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.common.model.HoodieTableType; import org.apache.hudi.common.model.IOType; import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion; import org.apache.hudi.common.testutils.FileCreateUtils; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.IOException; @@ -40,8 +43,11 @@ * A command use SparkLauncher need load jars under lib which generate during mvn package. * Use integration test instead of unit test. 
*/ +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class ITTestMarkersCommand extends HoodieCLIIntegrationTestBase { + @Autowired + private Shell shell; private String tablePath; @BeforeEach @@ -68,9 +74,10 @@ public void testDeleteMarker() throws IOException { assertEquals(2, FileCreateUtils.getTotalMarkerFileCount(tablePath, "partA", instantTime1, IOType.APPEND)); - CommandResult cr = getShell().executeCommand( - String.format("marker delete --commit %s --sparkMaster %s", instantTime1, "local")); - assertTrue(cr.isSuccess()); + Object result = shell.evaluate(() -> + String.format("marker delete --commit %s --sparkMaster %s", instantTime1, "local")); + + assertTrue(ShellEvaluationResultUtil.isSuccess(result)); assertEquals(0, FileCreateUtils.getTotalMarkerFileCount(tablePath, "partA", instantTime1, IOType.APPEND)); } diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestRepairsCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestRepairsCommand.java index 52b8aed8de00d..5938a8ffe243e 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestRepairsCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestRepairsCommand.java @@ -18,11 +18,15 @@ package org.apache.hudi.cli.integ; +import org.apache.avro.Schema; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.Path; import org.apache.hudi.avro.HoodieAvroUtils; import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.commands.RepairsCommand; import org.apache.hudi.cli.commands.TableCommand; import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.common.model.HoodieBaseFile; import org.apache.hudi.common.model.HoodieFileFormat; import org.apache.hudi.common.model.HoodieRecord; @@ -33,15 +37,13 @@ import org.apache.hudi.common.testutils.HoodieTestDataGenerator; import org.apache.hudi.common.testutils.SchemaTestUtil; import org.apache.hudi.testutils.HoodieSparkWriteableTestTable; - -import org.apache.avro.Schema; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.Path; import org.apache.spark.sql.Dataset; import org.junit.jupiter.api.BeforeEach; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.EnumSource; -import org.springframework.shell.core.CommandResult; import java.io.IOException; import java.nio.file.Paths; @@ -58,8 +60,12 @@ * A command use SparkLauncher need load jars under lib which generate during mvn package. * Use integration test instead of unit test. 
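 * <p>Note the pattern below of first building the command into a local such as {@code cmdStr}:
 * a lambda handed to {@code Shell#evaluate} may only capture effectively final locals, e.g.
 * {@code Object resultForCmd = shell.evaluate(() -> cmdStr);}.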
*/ +@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"}) public class ITTestRepairsCommand extends HoodieCLIIntegrationTestBase { + @Autowired + private Shell shell; + private String duplicatedPartitionPath; private String duplicatedPartitionPathWithUpdates; private String duplicatedPartitionPathWithUpserts; @@ -155,9 +161,9 @@ public void testDeduplicateWithInserts(HoodieTableType tableType) throws IOExcep String partitionPath = HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH; String cmdStr = String.format("repair deduplicate --duplicatedPartitionPath %s --repairedOutputPath %s --sparkMaster %s", partitionPath, repairedOutputPath, "local"); - CommandResult cr = getShell().executeCommand(cmdStr); - assertTrue(cr.isSuccess()); - assertEquals(RepairsCommand.DEDUPLICATE_RETURN_PREFIX + repairedOutputPath, cr.getResult().toString()); + Object resultForCmd = shell.evaluate(() -> cmdStr); + assertTrue(ShellEvaluationResultUtil.isSuccess(resultForCmd)); + assertEquals(RepairsCommand.DEDUPLICATE_RETURN_PREFIX + repairedOutputPath, resultForCmd.toString()); // After deduplicate, there are 200 records FileStatus[] fileStatus = fs.listStatus(new Path(repairedOutputPath)); @@ -185,9 +191,9 @@ public void testDeduplicateWithUpdates(HoodieTableType tableType) throws IOExcep String partitionPath = HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH; String cmdStr = String.format("repair deduplicate --duplicatedPartitionPath %s --repairedOutputPath %s --sparkMaster %s --dedupeType %s", partitionPath, repairedOutputPath, "local", "update_type"); - CommandResult cr = getShell().executeCommand(cmdStr); - assertTrue(cr.isSuccess()); - assertEquals(RepairsCommand.DEDUPLICATE_RETURN_PREFIX + repairedOutputPath, cr.getResult().toString()); + Object resultForCmd = shell.evaluate(() -> cmdStr); + assertTrue(ShellEvaluationResultUtil.isSuccess(resultForCmd)); + assertEquals(RepairsCommand.DEDUPLICATE_RETURN_PREFIX + repairedOutputPath, resultForCmd.toString()); // After deduplicate, there are 100 records FileStatus[] fileStatus = fs.listStatus(new Path(repairedOutputPath)); @@ -215,9 +221,9 @@ public void testDeduplicateWithUpserts(HoodieTableType tableType) throws IOExcep String partitionPath = HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH; String cmdStr = String.format("repair deduplicate --duplicatedPartitionPath %s --repairedOutputPath %s --sparkMaster %s --dedupeType %s", partitionPath, repairedOutputPath, "local", "upsert_type"); - CommandResult cr = getShell().executeCommand(cmdStr); - assertTrue(cr.isSuccess()); - assertEquals(RepairsCommand.DEDUPLICATE_RETURN_PREFIX + repairedOutputPath, cr.getResult().toString()); + Object resultForCmd = shell.evaluate(() -> cmdStr); + assertTrue(ShellEvaluationResultUtil.isSuccess(resultForCmd)); + assertEquals(RepairsCommand.DEDUPLICATE_RETURN_PREFIX + repairedOutputPath, resultForCmd.toString()); // After deduplicate, there are 100 records FileStatus[] fileStatus = fs.listStatus(new Path(repairedOutputPath)); @@ -249,9 +255,9 @@ public void testDeduplicateWithReal(HoodieTableType tableType) throws IOExceptio String partitionPath = HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH; String cmdStr = String.format("repair deduplicate --duplicatedPartitionPath %s --repairedOutputPath %s" + " --sparkMaster %s --dryrun %s", partitionPath, repairedOutputPath, "local", false); - CommandResult cr = getShell().executeCommand(cmdStr); - assertTrue(cr.isSuccess()); - 
assertEquals(RepairsCommand.DEDUPLICATE_RETURN_PREFIX + partitionPath, cr.getResult().toString()); + Object resultForCmd = shell.evaluate(() -> cmdStr); + assertTrue(ShellEvaluationResultUtil.isSuccess(resultForCmd)); + assertEquals(RepairsCommand.DEDUPLICATE_RETURN_PREFIX + partitionPath, resultForCmd.toString()); // After deduplicate, there are 200 records under partition path FileStatus[] fileStatus = fs.listStatus(new Path(Paths.get(tablePath, duplicatedPartitionPath).toString())); diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestSavepointsCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestSavepointsCommand.java index 07a573a8cbc6b..9bc368e952248 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestSavepointsCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestSavepointsCommand.java @@ -22,6 +22,7 @@ import org.apache.hudi.cli.HoodieCLI; import org.apache.hudi.cli.commands.TableCommand; import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase; +import org.apache.hudi.cli.testutils.ShellEvaluationResultUtil; import org.apache.hudi.client.common.HoodieSparkEngineContext; import org.apache.hudi.common.config.HoodieMetadataConfig; import org.apache.hudi.common.model.HoodieTableType; @@ -36,7 +37,9 @@ import org.apache.hudi.metadata.SparkHoodieBackedTableMetadataWriter; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.springframework.shell.core.CommandResult; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.shell.Shell; import java.io.IOException; @@ -51,8 +54,11 @@ * A command use SparkLauncher need load jars under lib which generate during mvn package. * Use integration test instead of unit test. 
diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/HoodieCLIIntegrationTestHarness.java b/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/HoodieCLIIntegrationTestHarness.java
index e24ea6582af3e..d49ac6b3289a6 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/HoodieCLIIntegrationTestHarness.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/HoodieCLIIntegrationTestHarness.java
@@ -19,32 +19,14 @@
 package org.apache.hudi.cli.testutils;
 
 import org.apache.hudi.testutils.HoodieClientTestHarness;
-
-import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.BeforeEach;
-import org.springframework.shell.Bootstrap;
-import org.springframework.shell.core.JLineShellComponent;
 
 /**
  * Class to start Bootstrap and JLineShellComponent.
  */
 public class HoodieCLIIntegrationTestHarness extends HoodieClientTestHarness {
 
-  private static JLineShellComponent shell;
-
-  @BeforeAll
-  public static void startup() {
-    Bootstrap bootstrap = new Bootstrap();
-    shell = bootstrap.getJLineShellComponent();
-  }
-
-  @AfterAll
-  public static void shutdown() {
-    shell.stop();
-  }
-
   @BeforeEach
   public void setup() throws Exception {
     initPath();
@@ -55,10 +37,6 @@ public void teardown() throws Exception {
     System.gc();
   }
 
-  protected static JLineShellComponent getShell() {
-    return shell;
-  }
-
   /**
   * Helper to prepare string for matching.
   * @param str Input string.
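
With the static JLineShellComponent bootstrap removed from the harness, each integration test now boots a Spring context and injects the Shell directly. A minimal sketch of the resulting test shape; the class name and command below are hypothetical, and the base class is assumed to supply the Spring Boot configuration as it does for the ITTest* classes above:

package org.apache.hudi.cli.testutils;

import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.shell.Shell;

import static org.junit.jupiter.api.Assertions.assertFalse;

// Hypothetical example, not part of this patch: mirrors the shape of the migrated tests.
@SpringBootTest(properties = {"spring.shell.interactive.enabled=false", "spring.shell.command.script.enabled=false"})
public class ExampleShellIT extends HoodieCLIIntegrationTestBase {

  @Autowired
  private Shell shell;

  @Test
  public void runsASingleCommand() {
    // Under the Spring Shell 2.x convention, a Throwable result signals failure.
    Object result = shell.evaluate(() -> "version");
    assertFalse(result instanceof Throwable);
  }
}
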
String.format("Savepoint \"%s\" deleted.", savepoint1), cr.getResult().toString())); + String.format("Savepoint \"%s\" deleted.", savepoint1),result.toString())); // reload timeline timeline = timeline.reload(); diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/HoodieCLIIntegrationTestHarness.java b/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/HoodieCLIIntegrationTestHarness.java index e24ea6582af3e..d49ac6b3289a6 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/HoodieCLIIntegrationTestHarness.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/HoodieCLIIntegrationTestHarness.java @@ -19,32 +19,14 @@ package org.apache.hudi.cli.testutils; import org.apache.hudi.testutils.HoodieClientTestHarness; - -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; -import org.springframework.shell.Bootstrap; -import org.springframework.shell.core.JLineShellComponent; /** * Class to start Bootstrap and JLineShellComponent. */ public class HoodieCLIIntegrationTestHarness extends HoodieClientTestHarness { - private static JLineShellComponent shell; - - @BeforeAll - public static void startup() { - Bootstrap bootstrap = new Bootstrap(); - shell = bootstrap.getJLineShellComponent(); - } - - @AfterAll - public static void shutdown() { - shell.stop(); - } - @BeforeEach public void setup() throws Exception { initPath(); @@ -55,10 +37,6 @@ public void teardown() throws Exception { System.gc(); } - protected static JLineShellComponent getShell() { - return shell; - } - /** * Helper to prepare string for matching. * @param str Input string. diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/MockCommandLineInput.java b/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/MockCommandLineInput.java new file mode 100644 index 0000000000000..1d803fc8103bd --- /dev/null +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/MockCommandLineInput.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/HoodieHistoryFileNameProvider.java b/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/ShellEvaluationResultUtil.java
similarity index 58%
rename from hudi-cli/src/main/java/org/apache/hudi/cli/HoodieHistoryFileNameProvider.java
rename to hudi-cli/src/test/java/org/apache/hudi/cli/testutils/ShellEvaluationResultUtil.java
index 95f983416a50d..d1832a82691cb 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/HoodieHistoryFileNameProvider.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/ShellEvaluationResultUtil.java
@@ -16,28 +16,21 @@
  * limitations under the License.
  */
 
-package org.apache.hudi.cli;
+package org.apache.hudi.cli.testutils;
 
-import org.springframework.core.Ordered;
-import org.springframework.core.annotation.Order;
-import org.springframework.shell.plugin.support.DefaultHistoryFileNameProvider;
-import org.springframework.stereotype.Component;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
-/**
- * CLI history file provider.
- */
-@Component
-@Order(Ordered.HIGHEST_PRECEDENCE)
-public class HoodieHistoryFileNameProvider extends DefaultHistoryFileNameProvider {
-
-  @Override
-  public String getHistoryFileName() {
-    return "hoodie-cmd.log";
-  }
+public class ShellEvaluationResultUtil {
+  private static final Logger LOGGER = LogManager.getLogger(ShellEvaluationResultUtil.class);
+  private ShellEvaluationResultUtil() {}
 
-  @Override
-  public String getProviderName() {
-    return "Hoodie file name provider";
+  public static boolean isSuccess(Object shellEvaluationResult) {
+    boolean hasError = shellEvaluationResult instanceof Throwable;
+    if (hasError) {
+      Throwable throwable = (Throwable) shellEvaluationResult;
+      LOGGER.error(throwable.toString());
+    }
+    return !hasError;
   }
-
 }
diff --git a/hudi-examples/hudi-examples-flink/pom.xml b/hudi-examples/hudi-examples-flink/pom.xml
index 662f15621d705..3c2cc0fd7c2f9 100644
--- a/hudi-examples/hudi-examples-flink/pom.xml
+++ b/hudi-examples/hudi-examples-flink/pom.xml
@@ -263,6 +263,12 @@
       <scope>test</scope>
     </dependency>
 
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-launcher</artifactId>
+      <version>${junit.platform.version}</version>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter-api</artifactId>
diff --git a/hudi-examples/hudi-examples-spark/pom.xml b/hudi-examples/hudi-examples-spark/pom.xml
index 3c59acdfb7555..41a1091a621b8 100644
--- a/hudi-examples/hudi-examples-spark/pom.xml
+++ b/hudi-examples/hudi-examples-spark/pom.xml
@@ -244,6 +244,12 @@
         </exclusion>
       </exclusions>
     </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-launcher</artifactId>
+      <version>${junit.platform.version}</version>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter-api</artifactId>
diff --git a/hudi-integ-test/src/test/java/org/apache/hudi/integ/ITTestHoodieDemo.java b/hudi-integ-test/src/test/java/org/apache/hudi/integ/ITTestHoodieDemo.java
index 148fc862a1a1b..32b4122abe21c 100644
--- a/hudi-integ-test/src/test/java/org/apache/hudi/integ/ITTestHoodieDemo.java
+++ b/hudi-integ-test/src/test/java/org/apache/hudi/integ/ITTestHoodieDemo.java
@@ -508,7 +508,7 @@ private void testIncrementalSparkSQLQuery() throws Exception {
   }
 
   private void scheduleAndRunCompaction() throws Exception {
-    executeCommandStringInDocker(ADHOC_1_CONTAINER, HUDI_CLI_TOOL + " --cmdfile " + COMPACTION_COMMANDS, true);
-    executeCommandStringInDocker(ADHOC_1_CONTAINER, HUDI_CLI_TOOL + " --cmdfile " + COMPACTION_BOOTSTRAP_COMMANDS, true);
+    executeCommandStringInDocker(ADHOC_1_CONTAINER, HUDI_CLI_TOOL + " script --file " + COMPACTION_COMMANDS, true);
+    executeCommandStringInDocker(ADHOC_1_CONTAINER, HUDI_CLI_TOOL + " script --file " + COMPACTION_BOOTSTRAP_COMMANDS, true);
   }
 }
diff --git a/hudi-integ-test/src/test/java/org/apache/hudi/integ/command/ITTestHoodieSyncCommand.java b/hudi-integ-test/src/test/java/org/apache/hudi/integ/command/ITTestHoodieSyncCommand.java
index e6a4b6146273c..0b415f37cdb8d 100644
--- a/hudi-integ-test/src/test/java/org/apache/hudi/integ/command/ITTestHoodieSyncCommand.java
+++ b/hudi-integ-test/src/test/java/org/apache/hudi/integ/command/ITTestHoodieSyncCommand.java
@@ -49,11 +49,11 @@ public void testValidateSync() throws Exception {
         hiveTableName, HoodieTableType.COPY_ON_WRITE.name(), PartitionType.SINGLE_KEY_PARTITIONED, "append", hiveTableName);
 
     TestExecStartResultCallback result =
-        executeCommandStringInDocker(ADHOC_1_CONTAINER, HUDI_CLI_TOOL + " --cmdfile " + SYNC_VALIDATE_COMMANDS, true);
+        executeCommandStringInDocker(ADHOC_1_CONTAINER, HUDI_CLI_TOOL + " script --file " + SYNC_VALIDATE_COMMANDS, true);
 
     String expected = String.format("Count difference now is (count(%s) - count(%s) == %d. Catch up count is %d",
         hiveTableName, hiveTableName2, 100, 200);
-    assertTrue(result.getStderr().toString().contains(expected));
+    assertTrue(result.getStdout().toString().contains(expected));
 
     dropHiveTables(hiveTableName, HoodieTableType.COPY_ON_WRITE.name());
     dropHiveTables(hiveTableName2, HoodieTableType.COPY_ON_WRITE.name());
diff --git a/pom.xml b/pom.xml
index 6cc39820938b6..5daef106da0ad 100644
--- a/pom.xml
+++ b/pom.xml
@@ -202,6 +202,8 @@
     <zookeeper.version>3.5.7</zookeeper.version>
     <dynamodb-local.port>8000</dynamodb-local.port>
     <dynamodb-local.endpoint>http://localhost:${dynamodb-local.port}</dynamodb-local.endpoint>
+    <springboot.version>2.7.3</springboot.version>
+    <spring.shell.version>2.1.1</spring.shell.version>
   </properties>
 
   <build>
@@ -526,6 +528,7 @@
            <exclude>**/target/**</exclude>
            <exclude>**/generated-sources/**</exclude>
            <exclude>.github/**</exclude>
+           <exclude>**/banner.txt</exclude>
            <exclude>**/*.iml</exclude>
            <exclude>.mvn/**</exclude>
          </excludes>
@@ -1493,6 +1496,31 @@
         </exclusions>
       </dependency>
 
+      <dependency>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-starter-test</artifactId>
+        <version>${springboot.version}</version>
+        <scope>test</scope>
+        <exclusions>
+          <exclusion>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-logging</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+
+      <dependency>
+        <groupId>org.springframework.shell</groupId>
+        <artifactId>spring-shell-starter</artifactId>
+        <version>${spring.shell.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+
     </dependencies>
   </dependencyManagement>
 