diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 8a2d7f0de076a..f10e243bd523a 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -21,12 +21,68 @@ trigger:
pool:
vmImage: 'ubuntu-18.04'
+parameters:
+ - name: job1Modules
+ type: object
+ default:
+ - 'hudi-common'
+ - 'hudi-flink-datasource/hudi-flink'
+ - 'hudi-flink-datasource/hudi-flink1.13.x'
+ - 'hudi-flink-datasource/hudi-flink1.14.x'
+ - name: job2Modules
+ type: object
+ default:
+ - 'hudi-client/hudi-spark-client'
+ - name: job3Modules
+ type: object
+ default:
+ - 'hudi-cli'
+ - 'hudi-client/hudi-client-common'
+ - 'hudi-client/hudi-flink-client'
+ - 'hudi-client/hudi-java-client'
+ - 'hudi-sync/hudi-adb-sync'
+ - 'hudi-sync/hudi-datahub-sync'
+ - 'hudi-sync/hudi-hive-sync'
+ - 'hudi-sync/hudi-sync-common'
+ - 'hudi-utilities'
+ - name: job4Modules
+ type: object
+ default:
+ - '!hudi-cli'
+ - '!hudi-client'
+ - '!hudi-client/hudi-client-common'
+ - '!hudi-client/hudi-flink-client'
+ - '!hudi-client/hudi-java-client'
+ - '!hudi-client/hudi-spark-client'
+ - '!hudi-common'
+ - '!hudi-examples'
+ - '!hudi-examples/hudi-examples-common'
+ - '!hudi-examples/hudi-examples-flink'
+ - '!hudi-examples/hudi-examples-java'
+ - '!hudi-examples/hudi-examples-spark'
+ - '!hudi-flink-datasource'
+ - '!hudi-flink-datasource/hudi-flink'
+ - '!hudi-flink-datasource/hudi-flink1.13.x'
+ - '!hudi-flink-datasource/hudi-flink1.14.x'
+ - '!hudi-sync'
+ - '!hudi-sync/hudi-adb-sync'
+ - '!hudi-sync/hudi-datahub-sync'
+ - '!hudi-sync/hudi-hive-sync'
+ - '!hudi-sync/hudi-sync-common'
+ - '!hudi-utilities'
+
variables:
- MAVEN_OPTS: '-Dcheckstyle.skip=true -Drat.skip=true -Djacoco.skip=true'
+ BUILD_PROFILES: '-Dscala-2.11 -Dspark2 -Dflink1.14'
+ PLUGIN_OPTS: '-Dcheckstyle.skip=true -Drat.skip=true -Djacoco.skip=true'
+ MVN_OPTS_INSTALL: '-T 2.5C -DskipTests $(BUILD_PROFILES) $(PLUGIN_OPTS)'
+ MVN_OPTS_TEST: '-fae $(BUILD_PROFILES) $(PLUGIN_OPTS)'
SPARK_VERSION: '2.4.4'
HADOOP_VERSION: '2.7'
SPARK_ARCHIVE: spark-$(SPARK_VERSION)-bin-hadoop$(HADOOP_VERSION)
- EXCLUDE_TESTED_MODULES: '!hudi-examples/hudi-examples-common,!hudi-examples/hudi-examples-flink,!hudi-examples/hudi-examples-java,!hudi-examples/hudi-examples-spark,!hudi-common,!hudi-flink-datasource/hudi-flink,!hudi-client/hudi-spark-client,!hudi-client/hudi-client-common,!hudi-client/hudi-flink-client,!hudi-client/hudi-java-client,!hudi-cli,!hudi-utilities,!hudi-sync/hudi-hive-sync'
+ JOB1_MODULES: ${{ join(',',parameters.job1Modules) }}
+ JOB2_MODULES: ${{ join(',',parameters.job2Modules) }}
+ JOB3_MODULES: ${{ join(',',parameters.job3Modules) }}
+ JOB4_MODULES: ${{ join(',',parameters.job4Modules) }}
stages:
- stage: test
@@ -40,28 +96,27 @@ stages:
inputs:
mavenPomFile: 'pom.xml'
goals: 'clean install'
- options: -T 2.5C -DskipTests
+ options: $(MVN_OPTS_INSTALL)
publishJUnitResults: false
jdkVersionOption: '1.8'
- mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
- task: Maven@3
displayName: UT common flink client/spark-client
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
- options: -Punit-tests -pl hudi-common,hudi-flink-datasource/hudi-flink,hudi-client/hudi-spark-client
+ options: $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB1_MODULES),hudi-client/hudi-spark-client
publishJUnitResults: false
jdkVersionOption: '1.8'
- mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
+ mavenOptions: '-Xmx4g'
- task: Maven@3
displayName: FT common flink
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
- options: -Pfunctional-tests -pl hudi-common,hudi-flink-datasource/hudi-flink
+ options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB1_MODULES)
publishJUnitResults: false
jdkVersionOption: '1.8'
- mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
+ mavenOptions: '-Xmx4g'
- job: UT_FT_2
displayName: FT client/spark-client
timeoutInMinutes: '120'
@@ -71,21 +126,20 @@ stages:
inputs:
mavenPomFile: 'pom.xml'
goals: 'clean install'
- options: -T 2.5C -DskipTests
+ options: $(MVN_OPTS_INSTALL)
publishJUnitResults: false
jdkVersionOption: '1.8'
- mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
- task: Maven@3
displayName: FT client/spark-client
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
- options: -Pfunctional-tests -pl hudi-client/hudi-spark-client
+ options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB2_MODULES)
publishJUnitResults: false
jdkVersionOption: '1.8'
- mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
+ mavenOptions: '-Xmx4g'
- job: UT_FT_3
- displayName: UT FT clients & cli & utilities & sync/hive-sync
+ displayName: UT FT clients & cli & utilities & sync
timeoutInMinutes: '120'
steps:
- task: Maven@3
@@ -93,28 +147,27 @@ stages:
inputs:
mavenPomFile: 'pom.xml'
goals: 'clean install'
- options: -T 2.5C -DskipTests
+ options: $(MVN_OPTS_INSTALL)
publishJUnitResults: false
jdkVersionOption: '1.8'
- mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
- task: Maven@3
- displayName: UT clients & cli & utilities & sync/hive-sync
+ displayName: UT clients & cli & utilities & sync
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
- options: -Punit-tests -pl hudi-client/hudi-client-common,hudi-client/hudi-flink-client,hudi-client/hudi-java-client,hudi-cli,hudi-utilities,hudi-sync/hudi-hive-sync
+ options: $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB3_MODULES)
publishJUnitResults: false
jdkVersionOption: '1.8'
- mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
+ mavenOptions: '-Xmx4g'
- task: Maven@3
- displayName: FT clients & cli & utilities & sync/hive-sync
+ displayName: FT clients & cli & utilities & sync
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
- options: -Pfunctional-tests -pl hudi-client/hudi-client-common,hudi-client/hudi-flink-client,hudi-client/hudi-java-client,hudi-cli,hudi-utilities,hudi-sync/hudi-hive-sync
+ options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB3_MODULES)
publishJUnitResults: false
jdkVersionOption: '1.8'
- mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
+ mavenOptions: '-Xmx4g'
- job: UT_FT_4
displayName: UT FT other modules
timeoutInMinutes: '120'
@@ -124,28 +177,27 @@ stages:
inputs:
mavenPomFile: 'pom.xml'
goals: 'clean install'
- options: -T 2.5C -DskipTests
+ options: $(MVN_OPTS_INSTALL)
publishJUnitResults: false
jdkVersionOption: '1.8'
- mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
- task: Maven@3
displayName: UT other modules
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
- options: -Punit-tests -pl $(EXCLUDE_TESTED_MODULES)
+ options: $(MVN_OPTS_TEST) -Punit-tests -pl $(JOB4_MODULES)
publishJUnitResults: false
jdkVersionOption: '1.8'
- mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
+ mavenOptions: '-Xmx4g'
- task: Maven@3
displayName: FT other modules
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
- options: -Pfunctional-tests -pl $(EXCLUDE_TESTED_MODULES)
+ options: $(MVN_OPTS_TEST) -Pfunctional-tests -pl $(JOB4_MODULES)
publishJUnitResults: false
jdkVersionOption: '1.8'
- mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
+ mavenOptions: '-Xmx4g'
- job: IT
displayName: IT modules
timeoutInMinutes: '120'
@@ -155,19 +207,18 @@ stages:
inputs:
mavenPomFile: 'pom.xml'
goals: 'clean install'
- options: -T 2.5C -Pintegration-tests -DskipTests
+ options: $(MVN_OPTS_INSTALL) -Pintegration-tests
publishJUnitResults: false
jdkVersionOption: '1.8'
- mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
- task: Maven@3
displayName: UT integ-test
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
- options: -Pintegration-tests -DskipUTs=false -DskipITs=true -pl hudi-integ-test test
+ options: $(MVN_OPTS_TEST) -Pintegration-tests -DskipUTs=false -DskipITs=true -pl hudi-integ-test
publishJUnitResults: false
jdkVersionOption: '1.8'
- mavenOptions: '-Xmx4g $(MAVEN_OPTS)'
+ mavenOptions: '-Xmx4g'
- task: AzureCLI@2
displayName: Prepare for IT
inputs:
@@ -180,5 +231,6 @@ stages:
tar -xvf $(Pipeline.Workspace)/$(SPARK_ARCHIVE).tgz -C $(Pipeline.Workspace)/
mkdir /tmp/spark-events/
- script: |
- mvn $(MAVEN_OPTS) -Pintegration-tests verify
+ export SPARK_HOME=$(Pipeline.Workspace)/$(SPARK_ARCHIVE)
+ mvn $(MVN_OPTS_TEST) -Pintegration-tests verify
displayName: IT
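
Editor's note on the pipeline refactor above: the new `parameters` lists are flattened into single comma-separated module strings by the compile-time template expression `${{ join(',', parameters.jobNModules) }}`, so each Maven `-pl` flag receives one argument. A minimal Java sketch of the same flattening (module names copied from `job1Modules`; the class name is illustrative):

```java
import java.util.Arrays;
import java.util.List;

public class JoinModulesSketch {
  public static void main(String[] args) {
    // Mirrors ${{ join(',', parameters.job1Modules) }} from the YAML above.
    List<String> job1Modules = Arrays.asList(
        "hudi-common",
        "hudi-flink-datasource/hudi-flink",
        "hudi-flink-datasource/hudi-flink1.13.x",
        "hudi-flink-datasource/hudi-flink1.14.x");
    String job1ModulesJoined = String.join(",", job1Modules);
    // -pl hudi-common,hudi-flink-datasource/hudi-flink,...
    System.out.println("-pl " + job1ModulesJoined);
  }
}
```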
diff --git a/hudi-aws/pom.xml b/hudi-aws/pom.xml
index 2429e47943812..dc9653a62f916 100644
--- a/hudi-aws/pom.xml
+++ b/hudi-aws/pom.xml
@@ -144,11 +144,41 @@
      <scope>test</scope>
    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.vintage</groupId>
+      <artifactId>junit-vintage-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-junit-jupiter</artifactId>
      <scope>test</scope>
    </dependency>
+
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-runner</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-suite-api</artifactId>
+      <scope>test</scope>
+    </dependency>
diff --git a/hudi-cli/pom.xml b/hudi-cli/pom.xml
index 5c68ef7416449..e3111f3fb9a0c 100644
--- a/hudi-cli/pom.xml
+++ b/hudi-cli/pom.xml
@@ -205,6 +205,24 @@
      <artifactId>log4j</artifactId>
    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+
    <dependency>
      <groupId>org.apache.parquet</groupId>
      <artifactId>parquet-avro</artifactId>
@@ -259,31 +277,71 @@
      <artifactId>hadoop-hdfs</artifactId>
    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet.jsp</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+        <exclusion>
+          <artifactId>netty</artifactId>
+          <groupId>io.netty</groupId>
+        </exclusion>
+        <exclusion>
+          <artifactId>netty-all</artifactId>
+          <groupId>io.netty</groupId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter-api</artifactId>
      <scope>test</scope>
    </dependency>
-
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter-engine</artifactId>
      <scope>test</scope>
    </dependency>
-
    <dependency>
      <groupId>org.junit.vintage</groupId>
      <artifactId>junit-vintage-engine</artifactId>
      <scope>test</scope>
    </dependency>
-
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter-params</artifactId>
      <scope>test</scope>
    </dependency>
-
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-junit-jupiter</artifactId>
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
index 43fe168587ac1..ae4f9c660053d 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
@@ -148,7 +148,7 @@ public static void main(String[] args) throws Exception {
}
configs = new ArrayList<>();
if (args.length > 9) {
- configs.addAll(Arrays.asList(args).subList(8, args.length));
+ configs.addAll(Arrays.asList(args).subList(9, args.length));
}
returnCode = compact(jsc, args[3], args[4], null, Integer.parseInt(args[5]), args[6],
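
Editor's note: the change above fixes an off-by-one. With the guard `args.length > 9`, the first nine entries (`args[0]`..`args[8]`) are fixed positional arguments, so the trailing free-form configs begin at index 9; `subList(8, ...)` wrongly re-captured the fixed argument at index 8. A minimal sketch (argument values are hypothetical):

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class SubListSketch {
  public static void main(String[] args) {
    // Indices 0..8 are fixed positional arguments; configs start at index 9.
    String[] cliArgs = {"a0", "a1", "a2", "a3", "a4", "a5", "a6", "a7", "a8",
        "hoodie.some.key=v1", "hoodie.other.key=v2"};
    List<String> configs = new ArrayList<>();
    if (cliArgs.length > 9) {
      // subList(8, ...) would also pick up the fixed argument "a8".
      configs.addAll(Arrays.asList(cliArgs).subList(9, cliArgs.length));
    }
    System.out.println(configs); // [hoodie.some.key=v1, hoodie.other.key=v2]
  }
}
```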
diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/SparkUtilTest.java b/hudi-cli/src/test/java/org/apache/hudi/cli/TestSparkUtil.java
similarity index 95%
rename from hudi-cli/src/test/java/org/apache/hudi/cli/testutils/SparkUtilTest.java
rename to hudi-cli/src/test/java/org/apache/hudi/cli/TestSparkUtil.java
index a470ee1c2a37d..4dcd15156baf1 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/SparkUtilTest.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/TestSparkUtil.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hudi.cli.testutils;
+package org.apache.hudi.cli;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.cli.utils.SparkUtil;
@@ -30,7 +30,7 @@
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
-public class SparkUtilTest {
+public class TestSparkUtil {
@Test
public void testInitSparkLauncher() throws URISyntaxException {
diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java
index 6b3e3045507f1..fb615f546b44a 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java
@@ -22,7 +22,7 @@
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.HoodiePrintHelper;
import org.apache.hudi.cli.commands.TableCommand;
-import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
+import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.functional.TestBootstrap;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
@@ -44,7 +44,7 @@
/**
* Test class of {@link org.apache.hudi.cli.commands.BootstrapCommand}.
*/
-public class ITTestBootstrapCommand extends AbstractShellIntegrationTest {
+public class ITTestBootstrapCommand extends HoodieCLIIntegrationTestBase {
private static final int NUM_OF_RECORDS = 100;
private static final String PARTITION_FIELD = "datestr";
diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestClusteringCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestClusteringCommand.java
index 97d3d91fb63ad..f0f08f87c11a6 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestClusteringCommand.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestClusteringCommand.java
@@ -20,7 +20,7 @@
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.commands.TableCommand;
-import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
+import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.client.SparkRDDWriteClient;
import org.apache.hudi.client.WriteStatus;
import org.apache.hudi.client.common.HoodieSparkEngineContext;
@@ -57,7 +57,7 @@
* A command use SparkLauncher need load jars under lib which generate during mvn package.
* Use integration test instead of unit test.
*/
-public class ITTestClusteringCommand extends AbstractShellIntegrationTest {
+public class ITTestClusteringCommand extends HoodieCLIIntegrationTestBase {
@BeforeEach
public void init() throws IOException {
@@ -105,9 +105,10 @@ public void testClustering() throws IOException {
// get clustering instance
HoodieActiveTimeline timeline = HoodieCLI.getTableMetaClient().getActiveTimeline();
- Option<String> instance =
+ Option<String> instanceOpt =
timeline.filterPendingReplaceTimeline().firstInstant().map(HoodieInstant::getTimestamp);
- assertTrue(instance.isPresent(), "Must have pending clustering.");
+ assertTrue(instanceOpt.isPresent(), "Must have pending clustering.");
+ final String instance = instanceOpt.get();
CommandResult cr2 = getShell().executeCommand(
String.format("clustering run --parallelism %s --clusteringInstant %s --sparkMaster %s",
diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCommitsCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCommitsCommand.java
index fd533be09b6be..5345df9528bcf 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCommitsCommand.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCommitsCommand.java
@@ -21,7 +21,7 @@
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.commands.RollbacksCommand;
import org.apache.hudi.cli.commands.TableCommand;
-import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
+import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
@@ -29,6 +29,7 @@
import org.apache.hudi.common.testutils.HoodieTestTable;
import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.springframework.shell.core.CommandResult;
@@ -41,6 +42,7 @@
import static org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS;
import static org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH;
import static org.apache.hudi.common.testutils.HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH;
+import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -50,7 +52,13 @@
* A command use SparkLauncher need load jars under lib which generate during mvn package.
* Use integration test instead of unit test.
*/
-public class ITTestCommitsCommand extends AbstractShellIntegrationTest {
+@Disabled("HUDI-4226")
+public class ITTestCommitsCommand extends HoodieCLIIntegrationTestBase {
+
+ @Override
+ protected HoodieTableType getTableType() {
+ return HoodieTableType.COPY_ON_WRITE;
+ }
@BeforeEach
public void init() throws IOException {
@@ -79,19 +87,21 @@ public void testRollbackCommit() throws Exception {
put(DEFAULT_THIRD_PARTITION_PATH, "file-3");
}
};
- final String rollbackCommit = "102";
HoodieTestTable.of(metaClient)
.withPartitionMetaFiles(DEFAULT_PARTITION_PATHS)
.addCommit("100")
.withBaseFilesInPartitions(partitionAndFileId)
.addCommit("101")
.withBaseFilesInPartitions(partitionAndFileId)
- .addCommit(rollbackCommit)
+ .addCommit("102")
.withBaseFilesInPartitions(partitionAndFileId);
CommandResult cr = getShell().executeCommand(String.format("commit rollback --commit %s --sparkMaster %s --sparkMemory %s",
- rollbackCommit, "local", "4G"));
- assertTrue(cr.isSuccess());
+ "102", "local", "4G"));
+
+ assertAll("Command run failed",
+ () -> assertTrue(cr.isSuccess()),
+ () -> assertEquals("Commit 102 rolled back", cr.getResult().toString()));
metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient());
@@ -103,15 +113,17 @@ public void testRollbackCommit() throws Exception {
// rollback complete commit
CommandResult cr2 = getShell().executeCommand(String.format("commit rollback --commit %s --sparkMaster %s --sparkMemory %s",
- "101", "local", "4G"));
- assertTrue(cr2.isSuccess());
+ "101", "local", "4G"));
+ assertAll("Command run failed",
+ () -> assertTrue(cr2.isSuccess()),
+ () -> assertEquals("Commit 101 rolled back", cr2.getResult().toString()));
metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient());
HoodieActiveTimeline rollbackTimeline2 = new RollbacksCommand.RollbackTimeline(metaClient);
- assertEquals(1, rollbackTimeline2.getRollbackTimeline().countInstants(), "There should have 2 rollback instant.");
+ assertEquals(2, rollbackTimeline2.getRollbackTimeline().countInstants(), "There should be 2 rollback instants.");
HoodieActiveTimeline timeline2 = metaClient.reloadActiveTimeline();
- assertEquals(2, timeline2.getCommitsTimeline().countInstants(), "There should have 1 instants.");
+ assertEquals(1, timeline2.getCommitsTimeline().countInstants(), "There should be 1 commit instant.");
}
}
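
Editor's note: the switch to `assertAll` above is worth calling out. JUnit 5 evaluates every grouped executable before failing, so a false `isSuccess()` no longer masks a mismatched result message. A minimal standalone sketch:

```java
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

class AssertAllSketch {
  // Both lambdas run even if the first fails, and all failures are
  // reported together under the "Command run failed" heading.
  void verify(boolean success, String result) {
    assertAll("Command run failed",
        () -> assertTrue(success),
        () -> assertEquals("Commit 102 rolled back", result));
  }
}
```

The corrected expected counts also follow from the scenario: after rolling back commits 102 and 101 out of three commits, two rollback instants exist and one commit remains.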
diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCompactionCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCompactionCommand.java
index 267cee70f2893..76db8e782f90c 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCompactionCommand.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestCompactionCommand.java
@@ -20,7 +20,7 @@
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.commands.TableCommand;
-import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
+import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.client.CompactionAdminClient;
import org.apache.hudi.client.SparkRDDWriteClient;
import org.apache.hudi.client.TestCompactionAdminClient;
@@ -69,7 +69,7 @@
* A command use SparkLauncher need load jars under lib which generate during mvn package.
* Use integration test instead of unit test.
*/
-public class ITTestCompactionCommand extends AbstractShellIntegrationTest {
+public class ITTestCompactionCommand extends HoodieCLIIntegrationTestBase {
@BeforeEach
public void init() throws IOException {
@@ -147,7 +147,8 @@ public void testCompactScheduleAndExecute() throws IOException {
writeSchemaToTmpFile(schemaPath);
CommandResult cr2 = getShell().executeCommand(
- String.format("compaction scheduleAndExecute --parallelism %s --schemaFilePath %s --sparkMaster %s",
+ String.format("compaction scheduleAndExecute --parallelism %s --schemaFilePath %s --sparkMaster %s "
+ + "--hoodieConfigs hoodie.compact.inline.max.delta.commits=1",
2, schemaPath, "local"));
assertAll("Command run failed",
diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java
index 8cdc4c891084d..3e4a45306b9ed 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java
@@ -20,7 +20,7 @@
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.commands.TableCommand;
-import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
+import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
@@ -55,7 +55,7 @@
* Test class for {@link org.apache.hudi.cli.commands.HDFSParquetImportCommand}.
*/
@Disabled("Disable due to flakiness and feature deprecation.")
-public class ITTestHDFSParquetImportCommand extends AbstractShellIntegrationTest {
+public class ITTestHDFSParquetImportCommand extends HoodieCLIIntegrationTestBase {
private Path sourcePath;
private Path targetPath;
diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestMarkersCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestMarkersCommand.java
index 221a29f5250d2..35561ef09c371 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestMarkersCommand.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestMarkersCommand.java
@@ -20,7 +20,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hudi.cli.commands.TableCommand;
-import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
+import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.model.IOType;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
@@ -40,7 +40,7 @@
* A command use SparkLauncher need load jars under lib which generate during mvn package.
* Use integration test instead of unit test.
*/
-public class ITTestMarkersCommand extends AbstractShellIntegrationTest {
+public class ITTestMarkersCommand extends HoodieCLIIntegrationTestBase {
private String tablePath;
diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestRepairsCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestRepairsCommand.java
index f26519a3572de..d7d6872bc1b4b 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestRepairsCommand.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestRepairsCommand.java
@@ -22,7 +22,7 @@
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.commands.RepairsCommand;
import org.apache.hudi.cli.commands.TableCommand;
-import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
+import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.common.model.HoodieBaseFile;
import org.apache.hudi.common.model.HoodieFileFormat;
import org.apache.hudi.common.model.HoodieRecord;
@@ -57,7 +57,7 @@
* A command use SparkLauncher need load jars under lib which generate during mvn package.
* Use integration test instead of unit test.
*/
-public class ITTestRepairsCommand extends AbstractShellIntegrationTest {
+public class ITTestRepairsCommand extends HoodieCLIIntegrationTestBase {
private String duplicatedPartitionPath;
private String duplicatedPartitionPathWithUpdates;
diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestSavepointsCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestSavepointsCommand.java
index 7de1c2d014260..07a573a8cbc6b 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestSavepointsCommand.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestSavepointsCommand.java
@@ -21,7 +21,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hudi.cli.HoodieCLI;
import org.apache.hudi.cli.commands.TableCommand;
-import org.apache.hudi.cli.testutils.AbstractShellIntegrationTest;
+import org.apache.hudi.cli.testutils.HoodieCLIIntegrationTestBase;
import org.apache.hudi.client.common.HoodieSparkEngineContext;
import org.apache.hudi.common.config.HoodieMetadataConfig;
import org.apache.hudi.common.model.HoodieTableType;
@@ -51,7 +51,7 @@
* A command use SparkLauncher need load jars under lib which generate during mvn package.
* Use integration test instead of unit test.
*/
-public class ITTestSavepointsCommand extends AbstractShellIntegrationTest {
+public class ITTestSavepointsCommand extends HoodieCLIIntegrationTestBase {
private String tablePath;
@@ -139,11 +139,7 @@ public void testRollbackToSavepointWithMetadataTableEnable() throws IOException
HoodieTestDataGenerator.createSavepointFile(tablePath, savepoint, jsc.hadoopConfiguration());
// re-bootstrap metadata table
- // delete first
- String basePath = metaClient.getBasePath();
- Path metadataTableBasePath = new Path(HoodieTableMetadata.getMetadataTableBasePath(basePath));
- metaClient.getFs().delete(metadataTableBasePath, true);
-
+ Path metadataTableBasePath = new Path(HoodieTableMetadata.getMetadataTableBasePath(HoodieCLI.basePath));
// then bootstrap metadata table at instant 104
HoodieWriteConfig writeConfig = HoodieWriteConfig.newBuilder().withPath(HoodieCLI.basePath)
.withMetadataConfig(HoodieMetadataConfig.newBuilder().enable(true).build()).build();
diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/AbstractShellIntegrationTest.java b/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/HoodieCLIIntegrationTestBase.java
similarity index 93%
rename from hudi-cli/src/test/java/org/apache/hudi/cli/testutils/AbstractShellIntegrationTest.java
rename to hudi-cli/src/test/java/org/apache/hudi/cli/testutils/HoodieCLIIntegrationTestBase.java
index 67449dc980917..86b618d502297 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/AbstractShellIntegrationTest.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/HoodieCLIIntegrationTestBase.java
@@ -25,7 +25,7 @@
/**
* Class to initial resources for shell.
*/
-public abstract class AbstractShellIntegrationTest extends AbstractShellBaseIntegrationTest {
+public class HoodieCLIIntegrationTestBase extends HoodieCLIIntegrationTestHarness {
@Override
@BeforeEach
diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/AbstractShellBaseIntegrationTest.java b/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/HoodieCLIIntegrationTestHarness.java
similarity index 96%
rename from hudi-cli/src/test/java/org/apache/hudi/cli/testutils/AbstractShellBaseIntegrationTest.java
rename to hudi-cli/src/test/java/org/apache/hudi/cli/testutils/HoodieCLIIntegrationTestHarness.java
index e016564439696..e24ea6582af3e 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/AbstractShellBaseIntegrationTest.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/HoodieCLIIntegrationTestHarness.java
@@ -30,7 +30,7 @@
/**
* Class to start Bootstrap and JLineShellComponent.
*/
-public class AbstractShellBaseIntegrationTest extends HoodieClientTestHarness {
+public class HoodieCLIIntegrationTestHarness extends HoodieClientTestHarness {
private static JLineShellComponent shell;
diff --git a/hudi-examples/hudi-examples-common/pom.xml b/hudi-examples/hudi-examples-common/pom.xml
index 4a99d975dd571..b78621d0ac4b8 100644
--- a/hudi-examples/hudi-examples-common/pom.xml
+++ b/hudi-examples/hudi-examples-common/pom.xml
@@ -105,5 +105,33 @@
      <groupId>org.apache.parquet</groupId>
      <artifactId>parquet-avro</artifactId>
    </dependency>
+
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.vintage</groupId>
+      <artifactId>junit-vintage-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
diff --git a/hudi-examples/hudi-examples-java/pom.xml b/hudi-examples/hudi-examples-java/pom.xml
index 965cdef972194..cfd803dffaac4 100644
--- a/hudi-examples/hudi-examples-java/pom.xml
+++ b/hudi-examples/hudi-examples-java/pom.xml
@@ -125,5 +125,33 @@
      <artifactId>hudi-java-client</artifactId>
      <version>${project.version}</version>
    </dependency>
+
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.vintage</groupId>
+      <artifactId>junit-vintage-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
diff --git a/hudi-flink-datasource/hudi-flink/pom.xml b/hudi-flink-datasource/hudi-flink/pom.xml
index 04d45358b5b5e..7b5fded8cbace 100644
--- a/hudi-flink-datasource/hudi-flink/pom.xml
+++ b/hudi-flink-datasource/hudi-flink/pom.xml
@@ -292,6 +292,26 @@
      <artifactId>junit-jupiter-params</artifactId>
      <scope>test</scope>
    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-runner</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-suite-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-commons</artifactId>
+      <scope>test</scope>
+    </dependency>
    <dependency>
      <groupId>org.apache.hudi</groupId>
diff --git a/hudi-flink-datasource/hudi-flink1.13.x/pom.xml b/hudi-flink-datasource/hudi-flink1.13.x/pom.xml
index 68a4d89829d78..ff60a89490444 100644
--- a/hudi-flink-datasource/hudi-flink1.13.x/pom.xml
+++ b/hudi-flink-datasource/hudi-flink1.13.x/pom.xml
@@ -58,6 +58,50 @@
      <scope>test</scope>
      <type>test-jar</type>
    </dependency>
+
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.vintage</groupId>
+      <artifactId>junit-vintage-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-runner</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-suite-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-commons</artifactId>
+      <scope>test</scope>
+    </dependency>
@@ -87,4 +131,4 @@
-</project>
\ No newline at end of file
+</project>
diff --git a/hudi-flink-datasource/hudi-flink1.14.x/pom.xml b/hudi-flink-datasource/hudi-flink1.14.x/pom.xml
index 186d8bd3c2da8..ed7f1b9a1edba 100644
--- a/hudi-flink-datasource/hudi-flink1.14.x/pom.xml
+++ b/hudi-flink-datasource/hudi-flink1.14.x/pom.xml
@@ -70,6 +70,50 @@
      <scope>test</scope>
      <type>test-jar</type>
    </dependency>
+
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.vintage</groupId>
+      <artifactId>junit-vintage-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-runner</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-suite-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-commons</artifactId>
+      <scope>test</scope>
+    </dependency>
@@ -99,4 +143,4 @@
-</project>
\ No newline at end of file
+</project>
diff --git a/hudi-gcp/pom.xml b/hudi-gcp/pom.xml
index da4046b1611e3..8c4f54f047f2a 100644
--- a/hudi-gcp/pom.xml
+++ b/hudi-gcp/pom.xml
@@ -83,6 +83,42 @@ See https://github.com/GoogleCloudPlatform/cloud-opensource-java/wiki/The-Google
      <artifactId>junit-jupiter-api</artifactId>
      <scope>test</scope>
    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.vintage</groupId>
+      <artifactId>junit-vintage-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-runner</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-suite-api</artifactId>
+      <scope>test</scope>
+    </dependency>
diff --git a/hudi-integ-test/pom.xml b/hudi-integ-test/pom.xml
index 17e05e3c3dec0..3c19e5ef261b3 100644
--- a/hudi-integ-test/pom.xml
+++ b/hudi-integ-test/pom.xml
@@ -88,6 +88,13 @@
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-avro_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+      <scope>test</scope>
+    </dependency>
+
    <dependency>
      <groupId>org.apache.hudi</groupId>
@@ -162,6 +169,24 @@
      <artifactId>log4j</artifactId>
    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <scope>test</scope>
+    </dependency>
+
    <dependency>
      <groupId>org.apache.hudi</groupId>
@@ -284,6 +309,7 @@
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <classifier>tests</classifier>
+      <scope>test</scope>
      <exclusions>
        <exclusion>
          <groupId>org.mortbay.jetty</groupId>
@@ -366,36 +392,47 @@
      <scope>test</scope>
    </dependency>
+
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter-api</artifactId>
      <scope>test</scope>
    </dependency>
-
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter-engine</artifactId>
      <scope>test</scope>
    </dependency>
-
    <dependency>
      <groupId>org.junit.vintage</groupId>
      <artifactId>junit-vintage-engine</artifactId>
      <scope>test</scope>
    </dependency>
-
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter-params</artifactId>
      <scope>test</scope>
    </dependency>
-
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-junit-jupiter</artifactId>
      <scope>test</scope>
    </dependency>
-
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-runner</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-suite-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-commons</artifactId>
+      <scope>test</scope>
+    </dependency>
    <dependency>
      <groupId>org.scalatest</groupId>
      <artifactId>scalatest_${scala.binary.version}</artifactId>
diff --git a/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/TestDFSHoodieTestSuiteWriterAdapter.java b/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/TestDFSHoodieTestSuiteWriterAdapter.java
index e6eb036e9b9f5..2b69a319a53e4 100644
--- a/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/TestDFSHoodieTestSuiteWriterAdapter.java
+++ b/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/TestDFSHoodieTestSuiteWriterAdapter.java
@@ -65,7 +65,7 @@ public class TestDFSHoodieTestSuiteWriterAdapter extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
- UtilitiesTestBase.initClass();
+ UtilitiesTestBase.initTestServices(false, false);
}
@AfterAll
diff --git a/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/TestFileDeltaInputWriter.java b/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/TestFileDeltaInputWriter.java
index 8e175c5bffcf0..f3cda10a620b5 100644
--- a/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/TestFileDeltaInputWriter.java
+++ b/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/TestFileDeltaInputWriter.java
@@ -58,7 +58,7 @@ public class TestFileDeltaInputWriter extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
- UtilitiesTestBase.initClass();
+ UtilitiesTestBase.initTestServices(false, false);
}
@AfterAll
diff --git a/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/job/TestHoodieTestSuiteJob.java b/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/job/TestHoodieTestSuiteJob.java
index 7fae555068b21..fd7e8ff472f00 100644
--- a/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/job/TestHoodieTestSuiteJob.java
+++ b/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/job/TestHoodieTestSuiteJob.java
@@ -90,7 +90,7 @@ public static Stream<Arguments> configParams() {
@BeforeAll
public static void initClass() throws Exception {
- UtilitiesTestBase.initClass();
+ UtilitiesTestBase.initTestServices(true, true);
// prepare the configs.
UtilitiesTestBase.Helpers.copyToDFSFromAbsolutePath(System.getProperty("user.dir") + "/.."
+ BASE_PROPERTIES_DOCKER_DEMO_RELATIVE_PATH, dfs, dfsBasePath + "/base.properties");
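
Editor's note: the renamed `initTestServices(boolean, boolean)` lets each test class start only the heavyweight services it needs; reader/writer tests pass `(false, false)` while the end-to-end suite job and delta-streamer tests pass `(true, true)`. A hypothetical sketch of the shape implied by these call sites (the flag names are guesses; the real signature lives in `UtilitiesTestBase`):

```java
// Hypothetical shape inferred from the call sites in this patch; the real
// flag names live in UtilitiesTestBase and may differ.
public class UtilitiesTestBaseSketch {
  public static void initTestServices(boolean needsDfs, boolean needsKafka) throws Exception {
    if (needsDfs) {
      // spin up the mini DFS cluster only for tests that read/write through DFS
    }
    if (needsKafka) {
      // start Kafka-style test services only for the end-to-end suite job
    }
  }
}
```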
diff --git a/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/reader/TestDFSAvroDeltaInputReader.java b/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/reader/TestDFSAvroDeltaInputReader.java
index fa8f4ac41d53f..9f9439f376880 100644
--- a/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/reader/TestDFSAvroDeltaInputReader.java
+++ b/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/reader/TestDFSAvroDeltaInputReader.java
@@ -43,7 +43,7 @@ public class TestDFSAvroDeltaInputReader extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
- UtilitiesTestBase.initClass();
+ UtilitiesTestBase.initTestServices(false, false);
}
@AfterAll
diff --git a/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/reader/TestDFSHoodieDatasetInputReader.java b/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/reader/TestDFSHoodieDatasetInputReader.java
index a5b6072029f06..80f6e2548ce3b 100644
--- a/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/reader/TestDFSHoodieDatasetInputReader.java
+++ b/hudi-integ-test/src/test/java/org/apache/hudi/integ/testsuite/reader/TestDFSHoodieDatasetInputReader.java
@@ -51,7 +51,7 @@ public class TestDFSHoodieDatasetInputReader extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
- UtilitiesTestBase.initClass();
+ UtilitiesTestBase.initTestServices(false, false);
}
@AfterAll
diff --git a/hudi-spark-datasource/hudi-spark-common/pom.xml b/hudi-spark-datasource/hudi-spark-common/pom.xml
index e55ff5a7bfa72..99bfab41bf7d2 100644
--- a/hudi-spark-datasource/hudi-spark-common/pom.xml
+++ b/hudi-spark-datasource/hudi-spark-common/pom.xml
@@ -242,6 +242,43 @@
      <artifactId>junit-jupiter-api</artifactId>
      <scope>test</scope>
    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.vintage</groupId>
+      <artifactId>junit-vintage-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-runner</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-suite-api</artifactId>
+      <scope>test</scope>
+    </dependency>
diff --git a/hudi-spark-datasource/hudi-spark-common/src/test/java/org/apache/hudi/internal/HoodieBulkInsertInternalWriterTestBase.java b/hudi-spark-datasource/hudi-spark-common/src/test/java/org/apache/hudi/internal/HoodieBulkInsertInternalWriterTestBase.java
index 54eaadd1e37b9..b9f77bccfd56d 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/test/java/org/apache/hudi/internal/HoodieBulkInsertInternalWriterTestBase.java
+++ b/hudi-spark-datasource/hudi-spark-common/src/test/java/org/apache/hudi/internal/HoodieBulkInsertInternalWriterTestBase.java
@@ -29,7 +29,6 @@
import org.apache.hudi.keygen.SimpleKeyGenerator;
import org.apache.hudi.testutils.HoodieClientTestHarness;
import org.apache.hudi.testutils.SparkDatasetTestUtils;
-
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.junit.jupiter.api.AfterEach;
@@ -62,6 +61,7 @@ public void setUp() throws Exception {
initFileSystem();
initTestDataGenerator();
initMetaClient();
+ initTimelineService();
}
@AfterEach
@@ -87,11 +87,11 @@ protected HoodieWriteConfig getWriteConfig(boolean populateMetaFields, String hi
protected void assertWriteStatuses(List<HoodieInternalWriteStatus> writeStatuses, int batches, int size,
Option<List<String>> fileAbsPaths, Option<List<String>> fileNames) {
- assertWriteStatuses(writeStatuses, batches, size, false, fileAbsPaths, fileNames);
+ assertWriteStatuses(writeStatuses, batches, size, false, fileAbsPaths, fileNames, false);
}
protected void assertWriteStatuses(List<HoodieInternalWriteStatus> writeStatuses, int batches, int size, boolean areRecordsSorted,
- Option<List<String>> fileAbsPaths, Option<List<String>> fileNames) {
+ Option<List<String>> fileAbsPaths, Option<List<String>> fileNames, boolean isHiveStylePartitioning) {
if (areRecordsSorted) {
assertEquals(batches, writeStatuses.size());
} else {
@@ -114,7 +114,8 @@ protected void assertWriteStatuses(List<HoodieInternalWriteStatus> writeStatuses
int counter = 0;
for (HoodieInternalWriteStatus writeStatus : writeStatuses) {
// verify write status
- assertEquals(HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter % 3], writeStatus.getPartitionPath());
+ String actualPartitionPathFormat = isHiveStylePartitioning ? SparkDatasetTestUtils.PARTITION_PATH_FIELD_NAME + "=%s" : "%s";
+ assertEquals(String.format(actualPartitionPathFormat, HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter % 3]), writeStatus.getPartitionPath());
if (areRecordsSorted) {
assertEquals(writeStatus.getTotalRecords(), size);
} else {
@@ -142,7 +143,7 @@ protected void assertWriteStatuses(List<HoodieInternalWriteStatus> writeStatuses
assertEquals(sizeMap.get(HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter % 3]), writeStat.getNumWrites());
}
assertEquals(fileId, writeStat.getFileId());
- assertEquals(HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter++ % 3], writeStat.getPartitionPath());
+ assertEquals(String.format(actualPartitionPathFormat, HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter++ % 3]), writeStat.getPartitionPath());
assertEquals(0, writeStat.getNumDeletes());
assertEquals(0, writeStat.getNumUpdateWrites());
assertEquals(0, writeStat.getTotalWriteErrors());
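
Editor's note on the new `isHiveStylePartitioning` flag: hive-style partitioning prefixes each partition directory with the field name. A minimal sketch of the two layouts (the literal "partition" stands in for `SparkDatasetTestUtils.PARTITION_PATH_FIELD_NAME`, whose actual value may differ):

```java
public class PartitionPathSketch {
  public static void main(String[] args) {
    String partitionValue = "2016/03/15";
    // Plain layout: the raw partition value is the relative path.
    String plainPath = String.format("%s", partitionValue);
    // Hive-style layout: "<fieldName>=<value>", matching the format string
    // PARTITION_PATH_FIELD_NAME + "=%s" used in the assertion above.
    String hiveStylePath = String.format("%s=%s", "partition", partitionValue);
    System.out.println(plainPath + " vs " + hiveStylePath); // 2016/03/15 vs partition=2016/03/15
  }
}
```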
diff --git a/hudi-spark-datasource/hudi-spark-common/src/test/resources/log4j-surefire-quiet.properties b/hudi-spark-datasource/hudi-spark-common/src/test/resources/log4j-surefire-quiet.properties
new file mode 100644
index 0000000000000..ca0a50c84270c
--- /dev/null
+++ b/hudi-spark-datasource/hudi-spark-common/src/test/resources/log4j-surefire-quiet.properties
@@ -0,0 +1,30 @@
+###
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+###
+log4j.rootLogger=WARN, CONSOLE
+log4j.logger.org.apache.hudi=DEBUG
+log4j.logger.org.apache.hadoop.hbase=ERROR
+
+# CONSOLE is set to be a ConsoleAppender.
+log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
+# CONSOLE uses PatternLayout.
+log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
+log4j.appender.CONSOLE.layout.ConversionPattern=[%-5p] %d %c %x - %m%n
+log4j.appender.CONSOLE.filter.a=org.apache.log4j.varia.LevelRangeFilter
+log4j.appender.CONSOLE.filter.a.AcceptOnMatch=true
+log4j.appender.CONSOLE.filter.a.LevelMin=WARN
+log4j.appender.CONSOLE.filter.a.LevelMax=FATAL
diff --git a/hudi-spark-datasource/hudi-spark-common/src/test/resources/log4j-surefire.properties b/hudi-spark-datasource/hudi-spark-common/src/test/resources/log4j-surefire.properties
new file mode 100644
index 0000000000000..14bbb089724c8
--- /dev/null
+++ b/hudi-spark-datasource/hudi-spark-common/src/test/resources/log4j-surefire.properties
@@ -0,0 +1,31 @@
+###
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+###
+log4j.rootLogger=WARN, CONSOLE
+log4j.logger.org.apache=INFO
+log4j.logger.org.apache.hudi=DEBUG
+log4j.logger.org.apache.hadoop.hbase=ERROR
+
+# CONSOLE is set to be a ConsoleAppender.
+log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
+# CONSOLE uses PatternLayout.
+log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
+log4j.appender.CONSOLE.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
+log4j.appender.CONSOLE.filter.a=org.apache.log4j.varia.LevelRangeFilter
+log4j.appender.CONSOLE.filter.a.AcceptOnMatch=true
+log4j.appender.CONSOLE.filter.a.LevelMin=WARN
+log4j.appender.CONSOLE.filter.a.LevelMax=FATAL
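
Editor's note: one subtlety in both new properties files is that the `LevelRangeFilter` on the console appender only accepts events in [WARN, FATAL], so the DEBUG level granted to `org.apache.hudi` is still capped at WARN on this appender. A programmatic log4j 1.x equivalent, as a sketch:

```java
import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.varia.LevelRangeFilter;

public class QuietConsoleSketch {
  public static void main(String[] args) {
    // Equivalent of the log4j.appender.CONSOLE.filter.a.* lines above.
    LevelRangeFilter filter = new LevelRangeFilter();
    filter.setLevelMin(Level.WARN);
    filter.setLevelMax(Level.FATAL);
    filter.setAcceptOnMatch(true);

    ConsoleAppender console = new ConsoleAppender(
        new PatternLayout("%-4r [%t] %-5p %c %x - %m%n"));
    console.addFilter(filter);
    Logger.getRootLogger().addAppender(console);
  }
}
```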
diff --git a/hudi-spark-datasource/hudi-spark2-common/pom.xml b/hudi-spark-datasource/hudi-spark2-common/pom.xml
index 1cbdf7d1d8e1a..756264968a10d 100644
--- a/hudi-spark-datasource/hudi-spark2-common/pom.xml
+++ b/hudi-spark-datasource/hudi-spark2-common/pom.xml
@@ -32,4 +32,47 @@
8
+
+  <dependencies>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.vintage</groupId>
+      <artifactId>junit-vintage-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-runner</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-suite-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
diff --git a/hudi-spark-datasource/hudi-spark2/pom.xml b/hudi-spark-datasource/hudi-spark2/pom.xml
index 3d7f61c290f46..0b810947d8799 100644
--- a/hudi-spark-datasource/hudi-spark2/pom.xml
+++ b/hudi-spark-datasource/hudi-spark2/pom.xml
@@ -242,11 +242,71 @@
      <artifactId>junit-jupiter-api</artifactId>
      <scope>test</scope>
    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.vintage</groupId>
+      <artifactId>junit-vintage-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter-params</artifactId>
      <scope>test</scope>
    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-runner</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-suite-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.parquet</groupId>
+      <artifactId>parquet-avro</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet.jsp</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>*</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
diff --git a/hudi-spark-datasource/hudi-spark2/src/test/java/org/apache/hudi/internal/TestHoodieBulkInsertDataInternalWriter.java b/hudi-spark-datasource/hudi-spark2/src/test/java/org/apache/hudi/internal/TestHoodieBulkInsertDataInternalWriter.java
index f31a344714c27..8e87755c294d1 100644
--- a/hudi-spark-datasource/hudi-spark2/src/test/java/org/apache/hudi/internal/TestHoodieBulkInsertDataInternalWriter.java
+++ b/hudi-spark-datasource/hudi-spark2/src/test/java/org/apache/hudi/internal/TestHoodieBulkInsertDataInternalWriter.java
@@ -18,18 +18,14 @@
package org.apache.hudi.internal;
-import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.table.HoodieSparkTable;
import org.apache.hudi.table.HoodieTable;
-import org.apache.hudi.testutils.SparkDatasetTestUtils;
-
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.catalyst.InternalRow;
-import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
@@ -104,7 +100,7 @@ public void testDataInternalWriter(boolean sorted, boolean populateMetaFields) t
Option<List<String>> fileNames = Option.of(new ArrayList<>());
// verify write statuses
- assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames);
+ assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames, false);
// verify rows
Dataset<Row> result = sqlContext.read().parquet(fileAbsPaths.get().toArray(new String[0]));
@@ -146,14 +142,11 @@ public void testDataInternalWriterHiveStylePartitioning() throws Exception {
Option<List<String>> fileNames = Option.of(new ArrayList<>());
// verify write statuses
- assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames);
+ assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames, true);
// verify rows
Dataset<Row> result = sqlContext.read().parquet(fileAbsPaths.get().toArray(new String[0]));
assertOutput(totalInputRows, result, instantTime, fileNames, populateMetaFields);
-
- result.collectAsList().forEach(entry -> Assertions.assertTrue(entry.getAs(HoodieRecord.PARTITION_PATH_METADATA_FIELD).toString()
- .contains(SparkDatasetTestUtils.PARTITION_PATH_FIELD_NAME + "=")));
}
}
@@ -202,7 +195,7 @@ public void testGlobalFailure() throws Exception {
Option<List<String>> fileAbsPaths = Option.of(new ArrayList<>());
Option<List<String>> fileNames = Option.of(new ArrayList<>());
// verify write statuses
- assertWriteStatuses(commitMetadata.getWriteStatuses(), 1, size / 2, false, fileAbsPaths, fileNames);
+ assertWriteStatuses(commitMetadata.getWriteStatuses(), 1, size / 2, false, fileAbsPaths, fileNames, false);
// verify rows
Dataset<Row> result = sqlContext.read().parquet(fileAbsPaths.get().toArray(new String[0]));
diff --git a/hudi-spark-datasource/hudi-spark2/src/test/java/org/apache/hudi/internal/TestHoodieDataSourceInternalWriter.java b/hudi-spark-datasource/hudi-spark2/src/test/java/org/apache/hudi/internal/TestHoodieDataSourceInternalWriter.java
index 04162562e0c81..b26f3ec9a06cb 100644
--- a/hudi-spark-datasource/hudi-spark2/src/test/java/org/apache/hudi/internal/TestHoodieDataSourceInternalWriter.java
+++ b/hudi-spark-datasource/hudi-spark2/src/test/java/org/apache/hudi/internal/TestHoodieDataSourceInternalWriter.java
@@ -24,7 +24,6 @@
import org.apache.hudi.common.util.Option;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.testutils.HoodieClientTestUtils;
-
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.catalyst.InternalRow;
diff --git a/hudi-spark-datasource/hudi-spark3/src/test/java/org/apache/hudi/spark3/internal/TestHoodieBulkInsertDataInternalWriter.java b/hudi-spark-datasource/hudi-spark3/src/test/java/org/apache/hudi/spark3/internal/TestHoodieBulkInsertDataInternalWriter.java
index a3d0e32372025..96b06937504f1 100644
--- a/hudi-spark-datasource/hudi-spark3/src/test/java/org/apache/hudi/spark3/internal/TestHoodieBulkInsertDataInternalWriter.java
+++ b/hudi-spark-datasource/hudi-spark3/src/test/java/org/apache/hudi/spark3/internal/TestHoodieBulkInsertDataInternalWriter.java
@@ -102,7 +102,7 @@ public void testDataInternalWriter(boolean sorted, boolean populateMetaFields) t
Option<List<String>> fileNames = Option.of(new ArrayList<>());
// verify write statuses
- assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames);
+ assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames, false);
// verify rows
Dataset<Row> result = sqlContext.read().parquet(fileAbsPaths.get().toArray(new String[0]));
diff --git a/hudi-sync/hudi-adb-sync/pom.xml b/hudi-sync/hudi-adb-sync/pom.xml
index 0dd8783b67133..0a01ffd61a6d6 100644
--- a/hudi-sync/hudi-adb-sync/pom.xml
+++ b/hudi-sync/hudi-adb-sync/pom.xml
@@ -123,6 +123,42 @@
      <artifactId>junit-jupiter-api</artifactId>
      <scope>test</scope>
    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.vintage</groupId>
+      <artifactId>junit-vintage-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-runner</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-suite-api</artifactId>
+      <scope>test</scope>
+    </dependency>
diff --git a/hudi-sync/hudi-datahub-sync/pom.xml b/hudi-sync/hudi-datahub-sync/pom.xml
index aecc5dc7808f4..9a4588eda974e 100644
--- a/hudi-sync/hudi-datahub-sync/pom.xml
+++ b/hudi-sync/hudi-datahub-sync/pom.xml
@@ -115,6 +115,24 @@
      <scope>test</scope>
    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-runner</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-suite-api</artifactId>
+      <scope>test</scope>
+    </dependency>
diff --git a/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/HoodieDeltaStreamerTestBase.java b/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/HoodieDeltaStreamerTestBase.java
index 1a1cf39dbfef6..cc6f90790c732 100644
--- a/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/HoodieDeltaStreamerTestBase.java
+++ b/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/HoodieDeltaStreamerTestBase.java
@@ -97,7 +97,7 @@ public class HoodieDeltaStreamerTestBase extends UtilitiesTestBase {
@BeforeAll
public static void initClass() throws Exception {
- UtilitiesTestBase.initClass(true);
+ UtilitiesTestBase.initTestServices(true, true);
PARQUET_SOURCE_ROOT = dfsBasePath + "/parquetFiles";
ORC_SOURCE_ROOT = dfsBasePath + "/orcFiles";
JSON_KAFKA_SOURCE_ROOT = dfsBasePath + "/jsonKafkaFiles";
diff --git a/hudi-utilities/src/test/java/org/apache/hudi/utilities/sources/TestHoodieIncrSource.java b/hudi-utilities/src/test/java/org/apache/hudi/utilities/sources/TestHoodieIncrSource.java
index 1f15cc3093e7a..fa5cba446f928 100644
--- a/hudi-utilities/src/test/java/org/apache/hudi/utilities/sources/TestHoodieIncrSource.java
+++ b/hudi-utilities/src/test/java/org/apache/hudi/utilities/sources/TestHoodieIncrSource.java
@@ -23,13 +23,14 @@
import org.apache.hudi.common.config.HoodieMetadataConfig;
import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.common.model.HoodieRecord;
+import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.config.HoodieCompactionConfig;
import org.apache.hudi.config.HoodieWriteConfig;
-import org.apache.hudi.testutils.HoodieClientTestHarness;
+import org.apache.hudi.testutils.SparkClientFunctionalTestHarness;
import org.apache.hudi.utilities.schema.SchemaProvider;
import org.apache.hudi.utilities.sources.helpers.IncrSourceHelper;
@@ -37,7 +38,6 @@
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
-import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -50,28 +50,27 @@
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
-public class TestHoodieIncrSource extends HoodieClientTestHarness {
+public class TestHoodieIncrSource extends SparkClientFunctionalTestHarness {
+
+ private HoodieTestDataGenerator dataGen;
+ private HoodieTableMetaClient metaClient;
@BeforeEach
public void setUp() throws IOException {
- initResources();
- }
-
- @AfterEach
- public void tearDown() throws IOException {
- cleanupResources();
+ dataGen = new HoodieTestDataGenerator();
+ metaClient = getHoodieMetaClient(hadoopConf(), basePath());
}
@Test
public void testHoodieIncrSource() throws IOException {
- HoodieWriteConfig writeConfig = getConfigBuilder(basePath)
+ HoodieWriteConfig writeConfig = getConfigBuilder(basePath(), metaClient)
.withCompactionConfig(HoodieCompactionConfig.newBuilder()
.archiveCommitsWith(2, 3).retainCommits(1).build())
.withMetadataConfig(HoodieMetadataConfig.newBuilder()
.withMaxNumDeltaCommitsBeforeCompaction(1).build())
.build();
- SparkRDDWriteClient writeClient = new SparkRDDWriteClient(context, writeConfig);
+ SparkRDDWriteClient writeClient = getHoodieWriteClient(writeConfig);
Pair<String, List<HoodieRecord>> inserts = writeRecords(writeClient, true, null, "100");
Pair<String, List<HoodieRecord>> inserts2 = writeRecords(writeClient, true, null, "200");
Pair<String, List<HoodieRecord>> inserts3 = writeRecords(writeClient, true, null, "300");
@@ -97,15 +96,16 @@ public void testHoodieIncrSource() throws IOException {
// insert new batch and ensure the checkpoint moves
readAndAssert(IncrSourceHelper.MissingCheckpointStrategy.READ_LATEST, Option.of(inserts5.getKey()), 100, inserts6.getKey());
+ writeClient.close();
}
private void readAndAssert(IncrSourceHelper.MissingCheckpointStrategy missingCheckpointStrategy, Option<String> checkpointToPull, int expectedCount, String expectedCheckpoint) {
Properties properties = new Properties();
- properties.setProperty("hoodie.deltastreamer.source.hoodieincr.path", basePath);
+ properties.setProperty("hoodie.deltastreamer.source.hoodieincr.path", basePath());
properties.setProperty("hoodie.deltastreamer.source.hoodieincr.missing.checkpoint.strategy", missingCheckpointStrategy.name());
TypedProperties typedProperties = new TypedProperties(properties);
- HoodieIncrSource incrSource = new HoodieIncrSource(typedProperties, jsc, sparkSession, new TestSchemaProvider(HoodieTestDataGenerator.AVRO_SCHEMA));
+ HoodieIncrSource incrSource = new HoodieIncrSource(typedProperties, jsc(), spark(), new DummySchemaProvider(HoodieTestDataGenerator.AVRO_SCHEMA));
// read everything until latest
Pair