
Commit a22b130

[FLINK-17375] Refactor travis_watchdog.sh into separate ci/ and azure-pipelines/ scripts.
The guiding principle in this refactoring was to put everything generic (independent of the concrete CI system, such as Travis or Azure) into tools/ci/* and everything specific to a CI system (currently Azure) into tools/azure-pipelines/*.
1 parent 75cfab4 · commit a22b130
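For orientation, the split this commit moves toward, restricted to paths that actually appear in the diff below, looks roughly like this (a sketch, not a complete listing):

    tools/
    ├── ci/                          # generic, CI-system-independent scripts
    │   ├── compile.sh               # referenced from build-python-wheels.yml
    │   └── log4j.properties         # referenced from run-nightly-tests.sh
    └── azure-pipelines/             # Azure-Pipelines-specific scripts and templates
        ├── build-apache-repo.yml
        ├── build-python-wheels.yml
        ├── create_build_artifact.sh # referenced from build-python-wheels.yml
        └── azure_controller.sh      # deleted by this commit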

21 files changed: +523 −641 lines

.github/PULL_REQUEST_TEMPLATE.md

+1 −1

@@ -12,7 +12,7 @@
 
 - Fill out the template below to describe the changes contributed by the pull request. That will give reviewers the context they need to do the review.
 
-- Make sure that the change passes the automated tests, i.e., `mvn clean verify` passes. You can set up Travis CI to do that following [this guide](https://flink.apache.org/contributing/contribute-code.html#open-a-pull-request).
+- Make sure that the change passes the automated tests, i.e., `mvn clean verify` passes. You can set up Azure Pipelines CI to do that following [this guide](https://cwiki.apache.org/confluence/display/FLINK/Azure+Pipelines#AzurePipelines-Tutorial:SettingupAzurePipelinesforaforkoftheFlinkrepository).
 
 - Each pull request should address only one issue, not mix up code from multiple issues.

azure-pipelines.yml

+2 −1

@@ -48,10 +48,11 @@ resources:
 # to understand why the secrets are handled like this
 variables:
   MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository
+  E2E_CACHE_FOLDER: $(Pipeline.Workspace)/e2e_cache
   MAVEN_OPTS: '-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
   CACHE_KEY: maven | $(Agent.OS) | **/pom.xml, !**/target/**
   CACHE_FALLBACK_KEY: maven | $(Agent.OS)
-  CACHE_FLINK_DIR: $(Pipeline.Workspace)/flink_cache
+  FLINK_ARTIFACT_DIR: $(Pipeline.Workspace)/flink_artifact
   SECRET_S3_BUCKET: $[variables.IT_CASE_S3_BUCKET]
   SECRET_S3_ACCESS_KEY: $[variables.IT_CASE_S3_ACCESS_KEY]
   SECRET_S3_SECRET_KEY: $[variables.IT_CASE_S3_SECRET_KEY]

flink-end-to-end-tests/run-nightly-tests.sh

+3 −3

@@ -45,7 +45,7 @@ if [ ! -z "$TF_BUILD" ] ; then
     echo "COMPRESSING build artifacts."
     COMPRESSED_ARCHIVE=${BUILD_BUILDNUMBER}.tgz
     mkdir compressed-archive-dir
-    tar -zcvf compressed-archive-dir/${COMPRESSED_ARCHIVE} $ARTIFACTS_DIR
+    tar -zcvf compressed-archive-dir/${COMPRESSED_ARCHIVE} -C $ARTIFACTS_DIR .
     echo "##vso[task.setvariable variable=ARTIFACT_DIR]$(pwd)/compressed-archive-dir"
 }
 on_exit compress_logs
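The -C flag changes what ends up inside the archive: the old invocation stored entries under the full $ARTIFACTS_DIR path prefix, while the new one changes into that directory first and stores relative paths, so the archive unpacks cleanly wherever it is extracted. A small standalone illustration (the paths are made up for this example):

    # Illustration only, not part of the commit.
    ARTIFACTS_DIR=/tmp/artifacts-demo
    mkdir -p "$ARTIFACTS_DIR" && touch "$ARTIFACTS_DIR/mvn.log"
    tar -zcf with-prefix.tgz "$ARTIFACTS_DIR"       # entries: tmp/artifacts-demo/mvn.log
    tar -zcf relative.tgz -C "$ARTIFACTS_DIR" .     # entries: ./mvn.log
    tar -tzf with-prefix.tgz && tar -tzf relative.tgz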
@@ -235,15 +235,15 @@ printf "Running Java end-to-end tests\n"
 printf "==============================================================================\n"
 
 
-LOG4J_PROPERTIES=${END_TO_END_DIR}/../tools/ci/log4j-ci.properties
+LOG4J_PROPERTIES=${END_TO_END_DIR}/../tools/ci/log4j.properties
 
 MVN_LOGGING_OPTIONS="-Dlog.dir=${ARTIFACTS_DIR} -DlogBackupDir=${ARTIFACTS_DIR} -Dlog4j.configurationFile=file://$LOG4J_PROPERTIES"
 MVN_COMMON_OPTIONS="-Dflink.forkCount=2 -Dflink.forkCountTestPackage=2 -Dfast -Pskip-webui-build"
 e2e_modules=$(find flink-end-to-end-tests -mindepth 2 -maxdepth 5 -name 'pom.xml' -printf '%h\n' | sort -u | tr '\n' ',')
 e2e_modules="${e2e_modules},$(find flink-walkthroughs -mindepth 2 -maxdepth 2 -name 'pom.xml' -printf '%h\n' | sort -u | tr '\n' ',')"
 
 PROFILE="$PROFILE -Pe2e-travis1 -Pe2e-travis2 -Pe2e-travis3 -Pe2e-travis4 -Pe2e-travis5 -Pe2e-travis6"
-run_mvn ${MVN_COMMON_OPTIONS} ${MVN_LOGGING_OPTIONS} ${PROFILE} verify -pl ${e2e_modules} -DdistDir=$(readlink -e build-target)
+run_mvn ${MVN_COMMON_OPTIONS} ${MVN_LOGGING_OPTIONS} ${PROFILE} verify -pl ${e2e_modules} -DdistDir=$(readlink -e build-target) -Dcache-dir=$E2E_CACHE_FOLDER -Dcache-ttl=P1M
 
 EXIT_CODE=$?
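The two extra properties wire the end-to-end run up to the E2E_CACHE_FOLDER variable added to the pipeline definitions above; -Dcache-ttl=P1M is an ISO-8601 period (one month), presumably the retention period for cached downloads. A hedged sketch of reusing the same cache outside Azure (the export and local path are assumptions, not part of this diff):

    # Assumed local equivalent: keep a persistent e2e download cache between runs.
    export E2E_CACHE_FOLDER="$HOME/.cache/flink-e2e"
    mkdir -p "$E2E_CACHE_FOLDER"
    ./flink-end-to-end-tests/run-nightly-tests.sh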

flink-end-to-end-tests/run-pre-commit-tests.sh

-64
This file was deleted.

flink-yarn-tests/src/test/java/org/apache/flink/yarn/YarnTestBase.java

+5 −5

@@ -449,7 +449,7 @@ public boolean accept(File dir, String name) {
                 }
 
                 if (!whitelistedFound) {
-                    // logging in FATAL to see the actual message in TRAVIS tests.
+                    // logging in FATAL to see the actual message in CI tests.
                     Marker fatal = MarkerFactory.getMarker("FATAL");
                     LOG.error(fatal, "Prohibited String '{}' in '{}:{}'", aProhibited, f.getAbsolutePath(), lineFromFile);
@@ -1048,10 +1048,10 @@ public static void teardown() throws Exception {
             hdfsSiteXML.delete();
         }
 
-        // When we are on travis, we copy the temp files of JUnit (containing the MiniYARNCluster log files)
+        // When we are on CI, we copy the temp files of JUnit (containing the MiniYARNCluster log files)
         // to <flinkRoot>/target/flink-yarn-tests-*.
         // The files from there are picked up by the tools/ci/* scripts to upload them.
-        if (isOnTravis()) {
+        if (isOnCI()) {
             File target = new File("../target" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY));
             if (!target.mkdirs()) {
                 LOG.warn("Error creating dirs to {}", target);
@@ -1067,8 +1067,8 @@ public static void teardown() throws Exception {
 
     }
 
-    public static boolean isOnTravis() {
-        return System.getenv("TRAVIS") != null && System.getenv("TRAVIS").equals("true");
+    public static boolean isOnCI() {
+        return System.getenv("IS_CI") != null && System.getenv("IS_CI").equals("true");
     }
 
     protected void waitApplicationFinishedElseKillIt(
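isOnCI() now keys off a generic IS_CI environment variable instead of the TRAVIS variable that Travis CI sets automatically, which means some CI-side script has to export it explicitly; that script is not part of this excerpt. A minimal sketch of what such a setup could look like (hypothetical, for illustration):

    # Hypothetical CI-side setup, not shown in this commit: export IS_CI so that
    # YarnTestBase.isOnCI() returns true and the MiniYARNCluster logs are copied
    # to <flinkRoot>/target for upload.
    export IS_CI=true
    mvn verify -pl flink-yarn-tests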

tools/azure-pipelines/azure_controller.sh

-198
This file was deleted.

tools/azure-pipelines/build-apache-repo.yml

+2 −1

@@ -42,10 +42,11 @@ resources:
 
 variables:
   MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository
+  E2E_CACHE_FOLDER: $(Pipeline.Workspace)/e2e_cache
   MAVEN_OPTS: '-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
   CACHE_KEY: maven | $(Agent.OS) | **/pom.xml, !**/target/**
   CACHE_FALLBACK_KEY: maven | $(Agent.OS)
-  CACHE_FLINK_DIR: $(Pipeline.Workspace)/flink_cache
+  FLINK_ARTIFACT_DIR: $(Pipeline.Workspace)/flink_artifact
   SECRET_S3_BUCKET: $[variables.IT_CASE_S3_BUCKET]
   SECRET_S3_ACCESS_KEY: $[variables.IT_CASE_S3_ACCESS_KEY]
   SECRET_S3_SECRET_KEY: $[variables.IT_CASE_S3_SECRET_KEY]

tools/azure-pipelines/build-python-wheels.yml

+7 −5

@@ -22,8 +22,10 @@ jobs:
       clean: all
     steps:
       # Compile
-      - script: STAGE=compile ${{parameters.environment}} ./tools/azure-pipelines/azure_controller.sh compile
-        displayName: Build
+      - script: |
+          ${{parameters.environment}} ./tools/ci/compile.sh
+          ./tools/azure-pipelines/create_build_artifact.sh
+        displayName: Compile
 
       - script: |
           VERSION=$(mvn --file pom.xml org.apache.maven.plugins:maven-help-plugin:3.1.0:evaluate -Dexpression=project.version -q -DforceStdout)
@@ -38,8 +40,8 @@ jobs:
       # upload artifacts for building wheels
       - task: PublishPipelineArtifact@1
         inputs:
-          targetPath: $(Pipeline.Workspace)/flink.tar.gz
-          artifactName: FlinkCompileCacheDir-${{parameters.stage_name}}
+          path: $(FLINK_ARTIFACT_DIR)
+          artifact: FlinkCompileArtifact-${{parameters.stage_name}}
 
   - job: build_wheels
     dependsOn: compile_${{parameters.stage_name}}
@@ -58,7 +60,7 @@ jobs:
       - task: DownloadPipelineArtifact@2
         inputs:
           path: $(Pipeline.Workspace)
-          artifact: FlinkCompileCacheDir-${{parameters.stage_name}}
+          artifact: FlinkCompileArtifact-${{parameters.stage_name}}
       - script: |
           tar zxf $(Pipeline.Workspace)/flink.tar.gz -C $(Pipeline.Workspace)
           mkdir -p flink-dist/target/flink-$(VERSION)-bin
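The compile job now publishes $(FLINK_ARTIFACT_DIR) as a pipeline artifact, while the wheel-building job still extracts $(Pipeline.Workspace)/flink.tar.gz, so create_build_artifact.sh presumably packs the compiled working tree into a flink.tar.gz inside that directory. A rough sketch of that assumption (the actual script is not included in this excerpt):

    # Assumed outline of tools/azure-pipelines/create_build_artifact.sh, inferred
    # from the publish/download steps above; the real contents may differ.
    mkdir -p "$FLINK_ARTIFACT_DIR"
    tar -czf "$FLINK_ARTIFACT_DIR/flink.tar.gz" .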
