67 changes: 56 additions & 11 deletions .github/workflows/bot.yml
@@ -22,7 +22,7 @@ on:
- master
- 'release-*'
env:
MVN_ARGS: -e -ntp -B -V -Pwarn-log -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.shade=warn -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.dependency=warn
MVN_ARGS: -e -ntp -B -V -Dgpg.skip -Djacoco.skip -Pwarn-log -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.shade=warn -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.dependency=warn
SPARK_COMMON_MODULES: hudi-spark-datasource/hudi-spark,hudi-spark-datasource/hudi-spark-common

jobs:
@@ -51,22 +51,18 @@ jobs:
strategy:
matrix:
include:
- scalaProfile: "scala-2.11"
- scalaProfile: "scala-2.11 -Dscala.binary.version=2.11"
sparkProfile: "spark2.4"
sparkModules: "hudi-spark-datasource/hudi-spark2"

- scalaProfile: "scala-2.12"
sparkProfile: "spark2.4"
sparkModules: "hudi-spark-datasource/hudi-spark2"
sparkProfile: "spark3.0"
sparkModules: "hudi-spark-datasource/hudi-spark3.0.x"

- scalaProfile: "scala-2.12"
sparkProfile: "spark3.1"
sparkModules: "hudi-spark-datasource/hudi-spark3.1.x"

- scalaProfile: "scala-2.12"
sparkProfile: "spark3.0"
sparkModules: "hudi-spark-datasource/hudi-spark3.0.x"

- scalaProfile: "scala-2.12"
sparkProfile: "spark3.2"
sparkModules: "hudi-spark-datasource/hudi-spark3.2.x"
@@ -88,7 +84,7 @@ jobs:
SCALA_PROFILE: ${{ matrix.scalaProfile }}
SPARK_PROFILE: ${{ matrix.sparkProfile }}
run:
mvn clean install -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl hudi-examples/hudi-examples-spark,hudi-spark-datasource/hudi-spark -am -DskipTests=true $MVN_ARGS
mvn clean install -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS
- name: Quickstart Test
env:
SCALA_PROFILE: ${{ matrix.scalaProfile }}
@@ -100,15 +96,15 @@ jobs:
SCALA_PROFILE: ${{ matrix.scalaProfile }}
SPARK_PROFILE: ${{ matrix.sparkProfile }}
SPARK_MODULES: ${{ matrix.sparkModules }}
if: ${{ !endsWith(env.SPARK_PROFILE, '2.4') }} # skip test spark 2.4 as it's covered by Azure CI
if: ${{ !endsWith(env.SPARK_PROFILE, '3.2') }} # skip test spark 3.2 as it's covered by Azure CI
run:
mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
- name: FT - Spark
env:
SCALA_PROFILE: ${{ matrix.scalaProfile }}
SPARK_PROFILE: ${{ matrix.sparkProfile }}
SPARK_MODULES: ${{ matrix.sparkModules }}
if: ${{ !endsWith(env.SPARK_PROFILE, '2.4') }} # skip test spark 2.4 as it's covered by Azure CI
if: ${{ !endsWith(env.SPARK_PROFILE, '3.2') }} # skip test spark 3.2 as it's covered by Azure CI
run:
mvn test -Pfunctional-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS

@@ -142,6 +138,14 @@ jobs:
FLINK_PROFILE: ${{ matrix.flinkProfile }}
run:
mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-examples/hudi-examples-flink $MVN_ARGS
- name: Integration Test
env:
SCALA_PROFILE: 'scala-2.12'
FLINK_PROFILE: ${{ matrix.flinkProfile }}
if: ${{ endsWith(env.FLINK_PROFILE, '1.17') }}
run: |
mvn clean install -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-flink-datasource/hudi-flink -am -Davro.version=1.10.0 -DskipTests=true $MVN_ARGS
mvn verify -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-flink-datasource/hudi-flink $MVN_ARGS

validate-bundles:
runs-on: ubuntu-latest
@@ -269,3 +273,44 @@ jobs:
if: ${{ endsWith(env.SPARK_PROFILE, '3.3') }} # Only Spark 3.3 supports Java 17 as of now
run: |
./packaging/bundle-validation/ci_run.sh $HUDI_VERSION openjdk17 $STAGING_REPO_NUM

integration-tests:
runs-on: ubuntu-latest
strategy:
matrix:
include:
- sparkProfile: 'spark2.4'
sparkArchive: 'spark-2.4.4/spark-2.4.4-bin-hadoop2.7.tgz'
steps:
- uses: actions/checkout@v2
- name: Set up JDK 8
uses: actions/setup-java@v2
with:
java-version: '8'
distribution: 'adopt'
architecture: x64
- name: Build Project
env:
SPARK_PROFILE: ${{ matrix.sparkProfile }}
SCALA_PROFILE: '-Dscala-2.11 -Dscala.binary.version=2.11'
run:
mvn clean install $SCALA_PROFILE -D"$SPARK_PROFILE" -Pintegration-tests -DskipTests=true $MVN_ARGS
- name: 'UT integ-test'
env:
SPARK_PROFILE: ${{ matrix.sparkProfile }}
SCALA_PROFILE: '-Dscala-2.11 -Dscala.binary.version=2.11'
run:
mvn test $SCALA_PROFILE -D"$SPARK_PROFILE" -Pintegration-tests -DskipUTs=false -DskipITs=true -pl hudi-integ-test $MVN_ARGS
- name: 'IT'
env:
SPARK_PROFILE: ${{ matrix.sparkProfile }}
SPARK_ARCHIVE: ${{ matrix.sparkArchive }}
SCALA_PROFILE: '-Dscala-2.11 -Dscala.binary.version=2.11'
run: |
echo "Downloading $SPARK_ARCHIVE"
curl https://archive.apache.org/dist/spark/$SPARK_ARCHIVE --create-dirs -o $GITHUB_WORKSPACE/$SPARK_ARCHIVE
tar -xvf $GITHUB_WORKSPACE/$SPARK_ARCHIVE -C $GITHUB_WORKSPACE/
mkdir /tmp/spark-events/
SPARK_ARCHIVE_BASENAME=$(basename $SPARK_ARCHIVE)
export SPARK_HOME=$GITHUB_WORKSPACE/${SPARK_ARCHIVE_BASENAME%.*}
mvn verify $SCALA_PROFILE -D"$SPARK_PROFILE" -Pintegration-tests -pl !hudi-flink-datasource/hudi-flink $MVN_ARGS
51 changes: 7 additions & 44 deletions azure-pipelines-20230430.yml
@@ -46,8 +46,9 @@ parameters:
default:
- 'hudi-spark-datasource'
- 'hudi-spark-datasource/hudi-spark'
- 'hudi-spark-datasource/hudi-spark2'
- 'hudi-spark-datasource/hudi-spark2-common'
- 'hudi-spark-datasource/hudi-spark3.2.x'
- 'hudi-spark-datasource/hudi-spark3.2plus-common'
- 'hudi-spark-datasource/hudi-spark3-common'
- 'hudi-spark-datasource/hudi-spark-common'
- name: job4UTModules
type: object
@@ -68,8 +69,9 @@ parameters:
- '!hudi-flink-datasource/hudi-flink1.17.x'
- '!hudi-spark-datasource'
- '!hudi-spark-datasource/hudi-spark'
- '!hudi-spark-datasource/hudi-spark2'
- '!hudi-spark-datasource/hudi-spark2-common'
- '!hudi-spark-datasource/hudi-spark3.2.x'
- '!hudi-spark-datasource/hudi-spark3.2plus-common'
- '!hudi-spark-datasource/hudi-spark3-common'
- '!hudi-spark-datasource/hudi-spark-common'
- name: job4FTModules
type: object
@@ -90,13 +92,10 @@ parameters:
- '!hudi-flink-datasource/hudi-flink1.17.x'

variables:
BUILD_PROFILES: '-Dscala-2.11 -Dspark2.4 -Dflink1.17'
BUILD_PROFILES: '-Dscala-2.12 -Dspark3.2 -Dflink1.17'
PLUGIN_OPTS: '-Dcheckstyle.skip=true -Drat.skip=true -Djacoco.skip=true -ntp -B -V -Pwarn-log -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.shade=warn -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.dependency=warn'
MVN_OPTS_INSTALL: '-Phudi-platform-service -DskipTests $(BUILD_PROFILES) $(PLUGIN_OPTS)'
MVN_OPTS_TEST: '-fae -Pwarn-log $(BUILD_PROFILES) $(PLUGIN_OPTS)'
SPARK_VERSION: '2.4.4'
HADOOP_VERSION: '2.7'
SPARK_ARCHIVE: spark-$(SPARK_VERSION)-bin-hadoop$(HADOOP_VERSION)
JOB1_MODULES: ${{ join(',',parameters.job1Modules) }}
JOB2_MODULES: ${{ join(',',parameters.job2Modules) }}
JOB3_MODULES: ${{ join(',',parameters.job3UTModules) }}
@@ -220,39 +219,3 @@ stages:
- script: |
grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr | head -n 100
displayName: Top 100 long-running testcases
- job: IT
Member Author: IT will still run with spark2.4 as per the docker demo setup, and has been moved to GH Actions.
displayName: IT modules
timeoutInMinutes: '150'
steps:
- task: Maven@4
displayName: maven install
inputs:
mavenPomFile: 'pom.xml'
goals: 'clean install'
options: $(MVN_OPTS_INSTALL) -Pintegration-tests
publishJUnitResults: false
jdkVersionOption: '1.8'
- task: Maven@4
displayName: UT integ-test
inputs:
mavenPomFile: 'pom.xml'
goals: 'test'
options: $(MVN_OPTS_TEST) -Pintegration-tests -DskipUTs=false -DskipITs=true -pl hudi-integ-test
publishJUnitResults: false
jdkVersionOption: '1.8'
mavenOptions: '-Xmx4g'
- task: AzureCLI@2
displayName: Prepare for IT
inputs:
azureSubscription: apachehudici-service-connection
scriptType: bash
scriptLocation: inlineScript
inlineScript: |
echo 'Downloading $(SPARK_ARCHIVE)'
az storage blob download -c ci-caches -n $(SPARK_ARCHIVE).tgz -f $(Pipeline.Workspace)/$(SPARK_ARCHIVE).tgz --account-name apachehudici
tar -xvf $(Pipeline.Workspace)/$(SPARK_ARCHIVE).tgz -C $(Pipeline.Workspace)/
mkdir /tmp/spark-events/
- script: |
export SPARK_HOME=$(Pipeline.Workspace)/$(SPARK_ARCHIVE)
mvn $(MVN_OPTS_TEST) -Pintegration-tests verify
displayName: IT
10 changes: 10 additions & 0 deletions hudi-cli/pom.xml
@@ -246,6 +246,16 @@
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client-runtime</artifactId>
</exclusion>
</exclusions>
</dependency>
Comment on lines 246 to 259 (Member Author): removing these won't affect using cli/spark/utilities bundles as spark will be provided.
<dependency>
<groupId>org.apache.spark</groupId>
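To illustrate the review note above: the exclusions are safe because Spark itself, and with it the Hadoop client jars, is expected to be provided by the runtime rather than packaged into the Hudi bundles. The following is a minimal sketch of how a consumer of the bundles would typically declare Spark; it is not taken from this PR, and the artifact id and version are placeholders.

<!-- Illustrative consumer pom fragment: with provided scope, neither spark-core nor its
     transitive hadoop-client-api / hadoop-client-runtime jars are packaged with the
     application; they are resolved from the Spark installation at run time. -->
<dependency>
  <groupId>org.apache.spark</groupId>
  <artifactId>spark-core_2.12</artifactId>
  <version>3.2.3</version>
  <scope>provided</scope>
</dependency>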
Changes to the HFile reader/writer test:
@@ -39,6 +39,8 @@
import org.apache.hudi.common.util.Option;
import org.apache.hudi.config.HoodieIndexConfig;
import org.apache.hudi.config.HoodieWriteConfig;

import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
@@ -198,10 +200,10 @@ public void testWriteReadHFileWithMetaFields(boolean populateMetaFields, boolean
}
}

@Disabled("Disable the test with evolved schema for HFile since it's not supported")
@ParameterizedTest
@Override
@Test
public void testWriteReadWithEvolvedSchema() throws Exception {
// Disable the test with evolved schema for HFile since it's not supported
public void testWriteReadWithEvolvedSchema(String evolvedSchemaPath) throws Exception {
Member: should we just remove it for now? there's already a tracking jira.

Member Author: it's disabled with a message. should be a good reminder there.
// TODO(HUDI-3683): fix the schema evolution for HFile
}

Changes to the reader/writer test base class (TestHoodieReaderWriterBase):
@@ -20,8 +20,8 @@
package org.apache.hudi.io.storage;

import org.apache.hudi.common.bloom.BloomFilter;
import org.apache.hudi.common.model.HoodieKey;
import org.apache.hudi.common.model.HoodieAvroIndexedRecord;
import org.apache.hudi.common.model.HoodieKey;
import org.apache.hudi.common.model.HoodieRecord;

import org.apache.avro.Schema;
@@ -34,6 +34,8 @@
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

import java.io.File;
import java.io.IOException;
@@ -143,20 +145,19 @@ public void testWriteReadComplexRecord() throws Exception {
verifyComplexRecords(createReader(conf).getRecordIterator());
}

@Test
public void testWriteReadWithEvolvedSchema() throws Exception {
@ParameterizedTest
@ValueSource(strings = {
"/exampleEvolvedSchema.avsc",
"/exampleEvolvedSchemaChangeOrder.avsc",
"/exampleEvolvedSchemaColumnRequire.avsc",
"/exampleEvolvedSchemaColumnType.avsc",
"/exampleEvolvedSchemaDeleteColumn.avsc"
})
public void testWriteReadWithEvolvedSchema(String evolvedSchemaPath) throws Exception {
writeFileWithSimpleSchema();

Configuration conf = new Configuration();
HoodieAvroFileReader hoodieReader = createReader(conf);
String[] schemaList = new String[] {
"/exampleEvolvedSchema.avsc", "/exampleEvolvedSchemaChangeOrder.avsc",
"/exampleEvolvedSchemaColumnRequire.avsc", "/exampleEvolvedSchemaColumnType.avsc",
"/exampleEvolvedSchemaDeleteColumn.avsc"};

for (String evolvedSchemaPath : schemaList) {
verifyReaderWithSchema(evolvedSchemaPath, hoodieReader);
}
verifyReaderWithSchema(evolvedSchemaPath, hoodieReader);
}

@Test
@@ -182,7 +183,7 @@ protected void writeFileWithSimpleSchema() throws Exception {
writer.close();
}

protected void writeFileWithSchemaWithMeta() throws Exception {
private void writeFileWithSchemaWithMeta() throws Exception {
Schema avroSchema = getSchemaFromResource(TestHoodieReaderWriterBase.class, "/exampleSchemaWithMetaFields.avsc");
HoodieAvroFileWriter writer = createWriter(avroSchema, true);
for (int i = 0; i < NUM_RECORDS; i++) {
@@ -209,7 +210,7 @@ protected void verifySimpleRecords(Iterator<HoodieRecord<IndexedRecord>> iterato
}
}

protected void verifyComplexRecords(Iterator<HoodieRecord<IndexedRecord>> iterator) {
private void verifyComplexRecords(Iterator<HoodieRecord<IndexedRecord>> iterator) {
int index = 0;
while (iterator.hasNext()) {
GenericRecord record = (GenericRecord) iterator.next().getData();
@@ -259,13 +260,15 @@ private void verifyRecord(String schemaPath, GenericRecord record, int index) {
String numStr = String.format("%02d", index);
assertEquals("key" + numStr, record.get("_row_key").toString());
assertEquals(Integer.toString(index), record.get("time").toString());
if ("/exampleEvolvedSchemaColumnType.avsc".equals(schemaPath)) {
if (schemaPath.equals("/exampleEvolvedSchemaColumnType.avsc")) {
assertEquals(Integer.toString(index), record.get("number").toString());
} else if ("/exampleEvolvedSchemaDeleteColumn.avsc".equals(schemaPath)) {
assertNull(record.get("number"));
assertNull(record.getSchema().getField("added_field"));
} else if (schemaPath.equals("/exampleEvolvedSchemaDeleteColumn.avsc")) {
assertNull(record.getSchema().getField("number"));
assertNull(record.getSchema().getField("added_field"));
} else {
assertEquals(index, record.get("number"));
assertNull(record.get("added_field"));
}
assertNull(record.get("added_field"));
}
}
10 changes: 10 additions & 0 deletions hudi-client/hudi-spark-client/pom.xml
@@ -59,6 +59,16 @@
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client-runtime</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
Changes to TestSparkHoodieHBaseIndex:
@@ -114,6 +114,7 @@ public class TestSparkHoodieHBaseIndex extends SparkClientFunctionalTestHarness
@BeforeAll
public static void init() throws Exception {
// Initialize HbaseMiniCluster
System.setProperty("zookeeper.4lw.commands.whitelist", "*");
Member: why do we need these configs in this PR?

Member Author: this is caused by the spark version upgrade, which changed the zookeeper version.

Member Author: trying to run in CI without it; locally it seems fine.

Member Author: "zookeeper.4lw.commands.whitelist" set to "*" is required to start the test service properly with the new zookeeper version, so it has been added back.
hbaseConfig = HBaseConfiguration.create();
hbaseConfig.set(ZOOKEEPER_ZNODE_PARENT, "/hudi-hbase-test");
