diff --git a/.github/workflows/intermittent-test-check.yml b/.github/workflows/intermittent-test-check.yml
index 6ad316e797fa..296b0e0c50b1 100644
--- a/.github/workflows/intermittent-test-check.yml
+++ b/.github/workflows/intermittent-test-check.yml
@@ -65,6 +65,7 @@ env:
   RATIS_REPO: ${{ github.event.inputs.ratis-repo }}
   RATIS_VERSION: ${{ github.event.inputs.ratis-ref }}
   JAVA_VERSION: ${{ github.event.inputs.java-version }}
+  OUTPUT_DIR: flaky-test-check-ci-results
   # Surefire 3.0.0-M4 is used because newer versions do not reliably kill the fork on timeout
   # SUREFIRE-1722, SUREFIRE-1815
   SUREFIRE_VERSION: 3.0.0-M4
@@ -131,9 +132,9 @@ jobs:
       - name: Find modules
         id: modules
         run: |
-          grep -e 'surefire:${{ env.SUREFIRE_VERSION }}:test' -e 'Running org.apache' target/unit/output.log | grep -B1 'Running org.apache'
-          modules=$(grep -e 'surefire:${{ env.SUREFIRE_VERSION }}:test' -e 'Running org.apache' target/unit/output.log | grep -B1 'Running org.apache' \
-            | grep surefire | cut -f2 -d'@' | awk '{ print $1 }' | sed 's/^/:/' | xargs | sed -e 's/ /,/g')
+          grep --text -e 'surefire:${{ env.SUREFIRE_VERSION }}:test' -e 'Running org.apache' ${{ env.OUTPUT_DIR }}/output.log | grep --text -B1 'Running org.apache'
+          modules=$(grep --text -e 'surefire:${{ env.SUREFIRE_VERSION }}:test' -e 'Running org.apache' ${{ env.OUTPUT_DIR }}/output.log | grep --text -B1 'Running org.apache' \
+            | grep --text surefire | cut -f2 -d'@' | awk '{ print $1 }' | sed 's/^/:/' | xargs | sed -e 's/ /,/g')
           echo "modules=$modules" >> $GITHUB_OUTPUT
         if: ${{ !cancelled() }}
   build:
@@ -275,14 +276,14 @@ jobs:
         env:
           DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
       - name: Summary of failures
-        run: hadoop-ozone/dev-support/checks/_summary.sh target/unit/summary.txt
+        run: hadoop-ozone/dev-support/checks/_summary.sh ${{ env.OUTPUT_DIR }}/summary.txt
         if: ${{ !cancelled() }}
       - name: Archive build results
         uses: actions/upload-artifact@v4
         if: ${{ failure() }}
         with:
           name: result-${{ github.run_number }}-${{ github.run_id }}-split-${{ matrix.split }}
-          path: target/unit
+          path: ${{ env.OUTPUT_DIR }}
   count-failures:
     if: ${{ failure() }}
     needs: run-test
@@ -292,12 +293,12 @@ jobs:
         uses: actions/download-artifact@v4
       - name: Count failures
         run: |
-          failures=$(find . -name 'summary.txt' | grep -v 'iteration' | xargs grep -v 'exit code: 0' | wc -l)
+          failures=$(find . -name 'summary.txt' | grep --text -v 'iteration' | xargs grep --text -v 'exit code: 0' | wc -l)
           echo "Total failures: $failures"
           if [[ $failures -gt 0 ]]; then
             echo ""
             echo "Failed runs:"
-            grep 'exit code: 1' */summary.txt | grep -o 'split.*teration [0-9]*' | sed -e 's/.summary.txt:/ /' -e 's/-/ /' | sort -g -k2 -k4
+            grep --text 'exit code: 1' */summary.txt | grep --text -o 'split.*teration [0-9]*' | sed -e 's/.summary.txt:/ /' -e 's/-/ /' | sort -g -k2 -k4
             echo ""
             exit 1
           fi
diff --git a/hadoop-ozone/dev-support/checks/junit.sh b/hadoop-ozone/dev-support/checks/junit.sh
index 7c25293c5d74..ce93510f1f3e 100755
--- a/hadoop-ozone/dev-support/checks/junit.sh
+++ b/hadoop-ozone/dev-support/checks/junit.sh
@@ -68,8 +68,8 @@ for i in $(seq 1 ${ITERATIONS}); do
     mkdir -p "${REPORT_DIR}"
   fi

-  mvn ${MAVEN_OPTIONS} -Dmaven-surefire-plugin.argLineAccessArgs="${OZONE_MODULE_ACCESS_ARGS}" "$@" verify \
-    | tee "${REPORT_DIR}/output.log"
+  mvn ${MAVEN_OPTIONS} -Dmaven-surefire-plugin.argLineAccessArgs="${OZONE_MODULE_ACCESS_ARGS}" "$@" clean verify \
+    | tee "${REPORT_DIR}/output.log"
   irc=$?

   # shellcheck source=hadoop-ozone/dev-support/checks/_mvn_unit_report.sh
diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/snapshot/TestSnapshotCache.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/snapshot/TestSnapshotCache.java
index 195bcd105a92..947d14e1b8b2 100644
--- a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/snapshot/TestSnapshotCache.java
+++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/snapshot/TestSnapshotCache.java
@@ -384,6 +384,7 @@ void testSnapshotOperationsNotBlockedDuringCompaction() throws IOException, Inte
     final Semaphore compactionLock = new Semaphore(1);
     final AtomicBoolean table1Compacting = new AtomicBoolean(false);
     final AtomicBoolean table1CompactedFinish = new AtomicBoolean(false);
+    final AtomicBoolean table2CompactedFinish = new AtomicBoolean(false);
     org.apache.hadoop.hdds.utils.db.DBStore store1 = snapshot1.get().getMetadataManager().getStore();
     doAnswer(invocation -> {
       table1Compacting.set(true);
@@ -392,6 +393,10 @@ void testSnapshotOperationsNotBlockedDuringCompaction() throws IOException, Inte
       table1CompactedFinish.set(true);
       return null;
     }).when(store1).compactTable("table1");
+    doAnswer(invocation -> {
+      table2CompactedFinish.set(true);
+      return null;
+    }).when(store1).compactTable("table2");
     compactionLock.acquire();

     final UUID dbKey2 = UUID.randomUUID();
@@ -411,6 +416,7 @@ void testSnapshotOperationsNotBlockedDuringCompaction() throws IOException, Inte
     assertFalse(table1CompactedFinish.get());
     compactionLock.release();
     GenericTestUtils.waitFor(() -> table1CompactedFinish.get(), 50, 3000);
+    GenericTestUtils.waitFor(() -> table2CompactedFinish.get(), 50, 3000);
     verify(store1, times(1)).compactTable("table1");
     verify(store1, times(1)).compactTable("table2");