9 changes: 9 additions & 0 deletions .github/workflows/bot.yml
@@ -117,6 +117,9 @@ jobs:
if: ${{ !endsWith(env.SPARK_PROFILE, '3.2') }} # skip test spark 3.2 as it's covered by Azure CI
run:
mvn test -Pfunctional-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
- name: Test runtime breakdown
run:
grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr

test-hudi-hadoop-mr-and-hudi-java-client:
runs-on: ubuntu-latest
@@ -151,6 +154,9 @@ jobs:
FLINK_PROFILE: ${{ matrix.flinkProfile }}
run:
mvn test -Punit-tests -fae -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -D"FLINK_PROFILE" -pl hudi-hadoop-mr,hudi-client/hudi-java-client $MVN_ARGS
- name: Test runtime breakdown
run:
grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr

test-spark-java17:
runs-on: ubuntu-latest
@@ -208,6 +214,9 @@ jobs:
if: ${{ !endsWith(env.SPARK_PROFILE, '3.2') }} # skip test spark 3.2 as it's covered by Azure CI
run:
mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
- name: Test runtime breakdown
run:
grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr

test-flink:
runs-on: ubuntu-latest
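For context on the `Test runtime breakdown` steps added above and below: each Surefire report contains one `<testcase .../>` element per test, and splitting those lines on double quotes puts the attribute values into fixed awk fields. A minimal sketch of the same pipeline against a hypothetical report line (class and test names are made up, and the attribute order shown is the typical one):

```sh
# Hypothetical Surefire report line:
#   <testcase name="testUpsert" classname="org.apache.hudi.TestExample" time="12.345"/>
# Splitting on '"' gives $2 = name, $4 = classname, $6 = time, so the step
# prints "<time> <classname> <name>" for every test case, sorted numerically
# by runtime in descending order.
grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml \
  | awk -F'"' '{ print $6, $4, $2 }' \
  | sort -nr
# Hypothetical output:
#   12.345 org.apache.hudi.TestExample testUpsert
#   0.042 org.apache.hudi.TestExample testDelete
```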
12 changes: 12 additions & 0 deletions azure-pipelines-20230430.yml
@@ -143,6 +143,9 @@ stages:
- script: |
grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr | head -n 100
displayName: Top 100 long-running testcases
- script: |
grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr
displayName: Test runtime breakdown
- job: UT_FT_2
displayName: FT client/spark-client
timeoutInMinutes: '150'
@@ -167,6 +170,9 @@
- script: |
grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr | head -n 100
displayName: Top 100 long-running testcases
- script: |
grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr
displayName: Test runtime breakdown
- job: UT_FT_3
displayName: UT spark-datasource
timeoutInMinutes: '240'
@@ -191,6 +197,9 @@
- script: |
grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr | head -n 100
displayName: Top 100 long-running testcases
- script: |
grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr
displayName: Test runtime breakdown
- job: UT_FT_4
displayName: UT FT other modules
timeoutInMinutes: '240'
@@ -224,3 +233,6 @@ stages:
- script: |
grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr | head -n 100
displayName: Top 100 long-running testcases
- script: |
grep "testcase" */target/surefire-reports/*.xml */*/target/surefire-reports/*.xml | awk -F'"' ' { print $6,$4,$2 } ' | sort -nr
displayName: Test runtime breakdown
6 changes: 0 additions & 6 deletions hudi-aws/pom.xml
@@ -186,12 +186,6 @@
<version>${aws.sdk.httpcore.version}</version>
</dependency>

<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>sts</artifactId>
<version>${aws.sdk.version}</version>
</dependency>

<!-- Test -->
<dependency>
<groupId>org.apache.hudi</groupId>
@@ -36,9 +36,6 @@ public static AwsCredentialsProvider getAwsCredentialsProvider(Properties props)

private static AwsCredentialsProvider getAwsCredentialsProviderChain(Properties props) {
List<AwsCredentialsProvider> providers = new ArrayList<>();
if (HoodieConfigAWSAssumedRoleCredentialsProvider.validConf(props)) {
providers.add(new HoodieConfigAWSAssumedRoleCredentialsProvider(props));
}
HoodieConfigAWSCredentialsProvider hoodieConfigAWSCredentialsProvider = new HoodieConfigAWSCredentialsProvider(props);
if (hoodieConfigAWSCredentialsProvider.resolveCredentials() != null) {
providers.add(hoodieConfigAWSCredentialsProvider);

This file was deleted.

@@ -81,7 +81,6 @@
import static org.apache.hudi.sync.common.HoodieSyncConfig.META_SYNC_DATABASE_NAME;
import static org.apache.hudi.sync.common.HoodieSyncConfig.META_SYNC_PARTITION_FIELDS;
import static org.apache.hudi.sync.common.util.TableUtils.tableId;
import org.apache.hudi.aws.credentials.HoodieAWSCredentialsProviderFactory;

/**
* This class implements all the AWS APIs to enable syncing of a Hudi Table with the
@@ -108,9 +107,7 @@ public class AWSGlueCatalogSyncClient extends HoodieSyncClient {

public AWSGlueCatalogSyncClient(HiveSyncConfig config) {
super(config);
this.awsGlue = GlueAsyncClient.builder()
.credentialsProvider(HoodieAWSCredentialsProviderFactory.getAwsCredentialsProvider(config.getProps()))
.build();
this.awsGlue = GlueAsyncClient.builder().build();
this.databaseName = config.getStringOrDefault(META_SYNC_DATABASE_NAME);
this.skipTableArchive = config.getBooleanOrDefault(GlueCatalogSyncClientConfig.GLUE_SKIP_TABLE_ARCHIVE);
this.enableMetadataTable = Boolean.toString(config.getBoolean(GLUE_METADATA_FILE_LISTING)).toUpperCase();
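With the explicit credentials provider dropped in the hunk above, `GlueAsyncClient.builder().build()` resolves credentials through the AWS SDK's default provider chain instead of Hudi's `hoodie.aws.*` properties. A minimal sketch of one way to supply credentials that the default chain picks up, assuming the standard SDK environment variables (values are placeholders):

```sh
# Standard AWS SDK v2 environment variables read by the default credentials
# provider chain (placeholder values):
export AWS_ACCESS_KEY_ID="AKIAXXXXXXXXXXXXXXXX"
export AWS_SECRET_ACCESS_KEY="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
export AWS_SESSION_TOKEN="xxxxxxxx"   # only needed for temporary credentials
# The chain also falls back to Java system properties, the shared
# ~/.aws/credentials profile, and instance/container IAM roles.
```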
@@ -46,7 +46,7 @@
@ConfigClassProperty(name = "Amazon Web Services Configs",
groupName = ConfigGroups.Names.AWS,
description = "Amazon Web Services configurations to access resources like Amazon DynamoDB (for locks),"
+ " Amazon CloudWatch (metrics) and Amazon Glue (metadata).")
+ " Amazon CloudWatch (metrics).")
public class HoodieAWSConfig extends HoodieConfig {
public static final ConfigProperty<String> AWS_ACCESS_KEY = ConfigProperty
.key("hoodie.aws.access.key")
@@ -69,13 +69,6 @@ public class HoodieAWSConfig extends HoodieConfig {
.sinceVersion("0.10.0")
.withDocumentation("AWS session token");

public static final ConfigProperty<String> AWS_ASSUME_ROLE_ARN = ConfigProperty
.key("hoodie.aws.role.arn")
.noDefaultValue()
.markAdvanced()
.sinceVersion("0.13.2")
.withDocumentation("AWS Role ARN to assume");

private HoodieAWSConfig() {
super();
}
@@ -96,10 +89,6 @@ public String getAWSSessionToken() {
return getString(AWS_SESSION_TOKEN);
}

public String getAWSAssumeRoleARN() {
return getString(AWS_ASSUME_ROLE_ARN);
}

public static class Builder {

private final HoodieAWSConfig awsConfig = new HoodieAWSConfig();
@@ -131,11 +120,6 @@ public HoodieAWSConfig.Builder withSessionToken(String sessionToken) {
return this;
}

public HoodieAWSConfig.Builder withAssumeRoleARN(String assumeRoleARN) {
awsConfig.setValue(AWS_ASSUME_ROLE_ARN, assumeRoleARN);
return this;
}

public Builder withDynamoDBTable(String dynamoDbTableName) {
awsConfig.setValue(DYNAMODB_LOCK_TABLE_NAME, dynamoDbTableName);
return this;
@@ -39,20 +39,4 @@ public void testGetAWSCredentials() {
assertEquals("random-secret-key", credentials.secretAccessKey());
assertEquals("random-session-token", credentials.sessionToken());
}

@Test
public void testGetAWSCredentialsWithInvalidAssumeRole() {
// This test is to ensure that the AWS credentials provider factory fallbacks to default credentials
// when the assume role ARN is invalid.
HoodieConfig cfg = new HoodieConfig();
cfg.setValue(HoodieAWSConfig.AWS_ACCESS_KEY, "random-access-key");
cfg.setValue(HoodieAWSConfig.AWS_SECRET_KEY, "random-secret-key");
cfg.setValue(HoodieAWSConfig.AWS_SESSION_TOKEN, "random-session-token");
cfg.setValue(HoodieAWSConfig.AWS_ASSUME_ROLE_ARN, "invalid-role-arn");
AwsSessionCredentials credentials = (AwsSessionCredentials) org.apache.hudi.aws.credentials.HoodieAWSCredentialsProviderFactory.getAwsCredentialsProvider(cfg.getProps()).resolveCredentials();
assertEquals("random-access-key", credentials.accessKeyId());
assertEquals("random-secret-key", credentials.secretAccessKey());
assertEquals("random-session-token", credentials.sessionToken());
}

}