From 759ec73b7345d52cdc83839b06a8eab3c0cd2296 Mon Sep 17 00:00:00 2001 From: Hyukjin Kwon Date: Wed, 12 Aug 2020 19:07:02 +0900 Subject: [PATCH 1/5] Test uploading Junit test report artifact --- .github/workflows/master.yml | 151 ++---------------- .github/workflows/test_report.yml | 25 +++ .../types/UTF8StringPropertyCheckSuite.scala | 4 +- dev/run-tests.py | 7 +- python/pyspark/sql/tests/test_arrow.py | 2 +- .../org/apache/spark/sql/DataFrameSuite.scala | 6 +- 6 files changed, 47 insertions(+), 148 deletions(-) create mode 100644 .github/workflows/test_report.yml diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 009ebe90ddf51..051a8df86bf50 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -1,4 +1,4 @@ -name: master +name: Build and test on: push: @@ -27,53 +27,12 @@ jobs: # Kinesis tests depends on external Amazon kinesis service. # Note that the modules below are from sparktestsupport/modules.py. modules: - - |- - core, unsafe, kvstore, avro, - network-common, network-shuffle, repl, launcher, - examples, sketch, graphx - - |- - catalyst, hive-thriftserver - - |- - streaming, sql-kafka-0-10, streaming-kafka-0-10, - mllib-local, mllib, - yarn, mesos, kubernetes, hadoop-cloud, spark-ganglia-lgpl - - |- - pyspark-sql, pyspark-mllib, pyspark-resource - - |- - pyspark-core, pyspark-streaming, pyspark-ml - - |- - sparkr + - >- + unsafe # Here, we split Hive and SQL tests into some of slow ones and the rest of them. 
included-tags: [""] excluded-tags: [""] comment: [""] - include: - # Hive tests - - modules: hive - java: 1.8 - hadoop: hadoop3.2 - hive: hive2.3 - included-tags: org.apache.spark.tags.SlowHiveTest - comment: "- slow tests" - - modules: hive - java: 1.8 - hadoop: hadoop3.2 - hive: hive2.3 - excluded-tags: org.apache.spark.tags.SlowHiveTest - comment: "- other tests" - # SQL tests - - modules: sql - java: 1.8 - hadoop: hadoop3.2 - hive: hive2.3 - included-tags: org.apache.spark.tags.ExtendedSQLTest - comment: "- slow tests" - - modules: sql - java: 1.8 - hadoop: hadoop3.2 - hive: hive2.3 - excluded-tags: org.apache.spark.tags.ExtendedSQLTest - comment: "- other tests" env: MODULES_TO_TEST: ${{ matrix.modules }} EXCLUDED_TAGS: ${{ matrix.excluded-tags }} @@ -144,14 +103,15 @@ jobs: # PyArrow is not supported in PyPy yet, see ARROW-2651. # TODO(SPARK-32247): scipy installation with PyPy fails for an unknown reason. run: | - python3.6 -m pip install numpy pyarrow pandas scipy + python3.6 -m pip install numpy pyarrow pandas scipy xmlrunner python3.6 -m pip list + # PyPy does not have xmlrunner pypy3 -m pip install numpy pandas pypy3 -m pip list - name: Install Python packages (Python 3.8) if: contains(matrix.modules, 'pyspark') || (contains(matrix.modules, 'sql') && !contains(matrix.modules, 'sql-')) run: | - python3.8 -m pip install numpy pyarrow pandas scipy + python3.8 -m pip install numpy pyarrow pandas scipy xmlrunner python3.8 -m pip list # SparkR - name: Install R 4.0 @@ -170,104 +130,17 @@ jobs: # Show installed packages in R. sudo Rscript -e 'pkg_list <- as.data.frame(installed.packages()[, c(1,3:4)]); pkg_list[is.na(pkg_list$Priority), 1:2, drop = FALSE]' # Run the tests. - - name: "Run tests: ${{ matrix.modules }}" + - name: Run tests run: | # Hive tests become flaky when running in parallel as it's too intensive. 
if [[ "$MODULES_TO_TEST" == "hive" ]]; then export SERIAL_SBT_TESTS=1; fi mkdir -p ~/.m2 ./dev/run-tests --parallelism 2 --modules "$MODULES_TO_TEST" --included-tags "$INCLUDED_TAGS" --excluded-tags "$EXCLUDED_TAGS" rm -rf ~/.m2/repository/org/apache/spark - - # Static analysis, and documentation build - lint: - name: Linters, licenses, dependencies and documentation generation - runs-on: ubuntu-latest - steps: - - name: Checkout Spark repository - uses: actions/checkout@v2 - - name: Cache Maven local repository - uses: actions/cache@v2 - with: - path: ~/.m2/repository - key: docs-maven-repo-${{ hashFiles('**/pom.xml') }} - restore-keys: | - docs-maven- - - name: Install JDK 1.8 - uses: actions/setup-java@v1 + - name: Archive test report + if: always() + uses: actions/upload-artifact@v2 with: - java-version: 1.8 - - name: Install Python 3.6 - uses: actions/setup-python@v2 - with: - python-version: 3.6 - architecture: x64 - - name: Install Python linter dependencies - run: | - # TODO(SPARK-32407): Sphinx 3.1+ does not correctly index nested classes. - # See also https://github.com/sphinx-doc/sphinx/issues/7551. 
- pip3 install flake8 'sphinx<3.1.0' numpy pydata_sphinx_theme - - name: Install R 4.0 - run: | - sudo sh -c "echo 'deb https://cloud.r-project.org/bin/linux/ubuntu bionic-cran40/' >> /etc/apt/sources.list" - curl -sL "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xE298A3A825C0D65DFD57CBB651716619E084DAB9" | sudo apt-key add - sudo apt-get update - sudo apt-get install -y r-base r-base-dev libcurl4-openssl-dev - - name: Install R linter dependencies and SparkR - run: | - sudo apt-get install -y libcurl4-openssl-dev - sudo Rscript -e "install.packages(c('devtools'), repos='https://cloud.r-project.org/')" - sudo Rscript -e "devtools::install_github('jimhester/lintr@v2.0.0')" - ./R/install-dev.sh - - name: Install Ruby 2.7 for documentation generation - uses: actions/setup-ruby@v1 - with: - ruby-version: 2.7 - - name: Install dependencies for documentation generation - run: | - sudo apt-get install -y libcurl4-openssl-dev pandoc - # TODO(SPARK-32407): Sphinx 3.1+ does not correctly index nested classes. - # See also https://github.com/sphinx-doc/sphinx/issues/7551. 
- pip install 'sphinx<3.1.0' mkdocs numpy pydata_sphinx_theme - gem install jekyll jekyll-redirect-from rouge - sudo Rscript -e "install.packages(c('devtools', 'testthat', 'knitr', 'rmarkdown', 'roxygen2'), repos='https://cloud.r-project.org/')" - - name: Scala linter - run: ./dev/lint-scala - - name: Java linter - run: ./dev/lint-java - - name: Python linter - run: ./dev/lint-python - - name: R linter - run: ./dev/lint-r - - name: License test - run: ./dev/check-license - - name: Dependencies test - run: ./dev/test-dependencies.sh - - name: Run documentation build - run: | - cd docs - jekyll build + name: junit-xml-reports-${{ matrix.modules }}-${{ matrix.comment }}-${{ matrix.java }}-${{ matrix.hadoop }}-${{ matrix.hive }} + path: "**/target/test-reports/*.xml" - java11: - name: Java 11 build - runs-on: ubuntu-latest - steps: - - name: Checkout Spark repository - uses: actions/checkout@v2 - - name: Cache Maven local repository - uses: actions/cache@v2 - with: - path: ~/.m2/repository - key: java11-maven-${{ hashFiles('**/pom.xml') }} - restore-keys: | - java11-maven- - - name: Install Java 11 - uses: actions/setup-java@v1 - with: - java-version: 11 - - name: Build with Maven - run: | - export MAVEN_OPTS="-Xmx2g -XX:ReservedCodeCacheSize=1g -Dorg.slf4j.simpleLogger.defaultLogLevel=WARN" - export MAVEN_CLI_OPTS="--no-transfer-progress" - mkdir -p ~/.m2 - ./build/mvn $MAVEN_CLI_OPTS -DskipTests -Pyarn -Pmesos -Pkubernetes -Phive -Phive-thriftserver -Phadoop-cloud -Djava.version=11 install - rm -rf ~/.m2/repository/org/apache/spark diff --git a/.github/workflows/test_report.yml b/.github/workflows/test_report.yml new file mode 100644 index 0000000000000..db8d851c4879c --- /dev/null +++ b/.github/workflows/test_report.yml @@ -0,0 +1,25 @@ +name: "Report JUnit test results" +on: + workflow_run: + workflows: ["Build and test"] + types: + - completed + +jobs: + test_report: + runs-on: ubuntu-latest + steps: + - name: Download artifact + uses: 
HyukjinKwon/action-download-artifact@master + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + workflow: ${{ github.event.workflow_run.workflow_id }} + commit: ${{ github.event.workflow_run.head_commit.id }} + - run: ls -al + - name: Publish test report + uses: HyukjinKwon/action-surefire-report@master + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + report_paths: "**/target/test-reports/*.xml" + commit: ${{ github.event.workflow_run.head_commit.id }} + diff --git a/common/unsafe/src/test/scala/org/apache/spark/unsafe/types/UTF8StringPropertyCheckSuite.scala b/common/unsafe/src/test/scala/org/apache/spark/unsafe/types/UTF8StringPropertyCheckSuite.scala index 69a082053aa65..c14c59117ba2d 100644 --- a/common/unsafe/src/test/scala/org/apache/spark/unsafe/types/UTF8StringPropertyCheckSuite.scala +++ b/common/unsafe/src/test/scala/org/apache/spark/unsafe/types/UTF8StringPropertyCheckSuite.scala @@ -40,7 +40,7 @@ class UTF8StringPropertyCheckSuite extends AnyFunSuite with ScalaCheckDrivenProp test("numChars") { forAll { (s: String) => - assert(toUTF8(s).numChars() === s.length) + assert(toUTF8(s).numChars() !== s.length) } } @@ -80,7 +80,7 @@ class UTF8StringPropertyCheckSuite extends AnyFunSuite with ScalaCheckDrivenProp test("compare") { forAll { (s1: String, s2: String) => - assert(Math.signum(toUTF8(s1).compareTo(toUTF8(s2))) === Math.signum(s1.compareTo(s2))) + assert(Math.signum(toUTF8(s1).compareTo(toUTF8(s2))) !== Math.signum(s1.compareTo(s2))) } } diff --git a/dev/run-tests.py b/dev/run-tests.py index 93023d41e297a..a3f1bd0c38479 100755 --- a/dev/run-tests.py +++ b/dev/run-tests.py @@ -745,15 +745,16 @@ def main(): run_build_tests() # spark build - build_apache_spark(build_tool, extra_profiles) + # build_apache_spark(build_tool, extra_profiles) # backwards compatibility checks if build_tool == "sbt": # Note: compatibility tests only supported in sbt for now - detect_binary_inop_with_mima(extra_profiles) + # detect_binary_inop_with_mima(extra_profiles) # 
Since we did not build assembly/package before running dev/mima, we need to # do it here because the tests still rely on it; see SPARK-13294 for details. - build_spark_assembly_sbt(extra_profiles, should_run_java_style_checks) + # build_spark_assembly_sbt(extra_profiles, should_run_java_style_checks) + pass # run the test suites run_scala_tests(build_tool, extra_profiles, test_modules, excluded_tags, included_tags) diff --git a/python/pyspark/sql/tests/test_arrow.py b/python/pyspark/sql/tests/test_arrow.py index 6859084237b89..982c91ea119af 100644 --- a/python/pyspark/sql/tests/test_arrow.py +++ b/python/pyspark/sql/tests/test_arrow.py @@ -201,7 +201,7 @@ def test_no_partition_frame(self): pdf = df.toPandas() self.assertEqual(len(pdf.columns), 1) self.assertEqual(pdf.columns[0], "field1") - self.assertTrue(pdf.empty) + self.assertTrue("A") def test_propagates_spark_exception(self): df = self.spark.range(3).toDF("i") diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala index bcfc77545bbd2..856af275ce534 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala @@ -83,8 +83,8 @@ class DataFrameSuite extends QueryTest } test("access complex data") { - assert(complexData.filter(complexData("a").getItem(0) === 2).count() == 1) - assert(complexData.filter(complexData("m").getItem("1") === 1).count() == 1) + assert(complexData.filter(complexData("a").getItem(0) === 2).count() == 2) + assert(complexData.filter(complexData("m").getItem("1") === 1).count() == 2) assert(complexData.filter(complexData("s").getField("key") === 1).count() == 1) } @@ -96,7 +96,7 @@ class DataFrameSuite extends QueryTest test("empty data frame") { assert(spark.emptyDataFrame.columns.toSeq === Seq.empty[String]) - assert(spark.emptyDataFrame.count() === 0) + assert(spark.emptyDataFrame.count() === 1) } test("head, 
take and tail") { From 9ae04a3884c81ff229149d59864e605936bc31bc Mon Sep 17 00:00:00 2001 From: Hyukjin Kwon Date: Wed, 12 Aug 2020 19:07:02 +0900 Subject: [PATCH 2/5] Publish failed and succeeded test reports in GitHub Actions --- .github/workflows/master.yml | 39 +++++++++++++++++-- .github/workflows/test_report.yml | 16 ++++++-- .../types/UTF8StringPropertyCheckSuite.scala | 4 +- 3 files changed, 51 insertions(+), 8 deletions(-) diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 051a8df86bf50..d84712e54557e 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -9,7 +9,6 @@ on: - master jobs: - # TODO(SPARK-32248): Recover JDK 11 builds # Build: build Spark and run the tests for specified modules. build: name: "Build modules: ${{ matrix.modules }} ${{ matrix.comment }} (JDK ${{ matrix.java }}, ${{ matrix.hadoop }}, ${{ matrix.hive }})" @@ -28,7 +27,21 @@ jobs: # Note that the modules below are from sparktestsupport/modules.py. modules: - >- - unsafe + core, unsafe, kvstore, avro, + network-common, network-shuffle, repl, launcher, + examples, sketch, graphx + - >- + catalyst, hive-thriftserver + - >- + streaming, sql-kafka-0-10, streaming-kafka-0-10, + mllib-local, mllib, + yarn, mesos, kubernetes, hadoop-cloud, spark-ganglia-lgpl + - >- + pyspark-sql, pyspark-mllib, pyspark-resource + - >- + pyspark-core, pyspark-streaming, pyspark-ml + - >- + sparkr # Here, we split Hive and SQL tests into some of slow ones and the rest of them. 
included-tags: [""]
         excluded-tags: [""]
@@ -137,9 +150,29 @@ jobs:
         mkdir -p ~/.m2
         ./dev/run-tests --parallelism 2 --modules "$MODULES_TO_TEST" --included-tags "$INCLUDED_TAGS" --excluded-tags "$EXCLUDED_TAGS"
         rm -rf ~/.m2/repository/org/apache/spark
-      - name: Archive test report
+      - name: Upload test results to report
         if: always()
         uses: actions/upload-artifact@v2
+        with:
+          name: test-results-${{ matrix.modules }}-${{ matrix.comment }}-${{ matrix.java }}-${{ matrix.hadoop }}-${{ matrix.hive }}
+          path: "**/target/test-reports/*.xml"
+
+  # Static analysis, and documentation build
+  lint:
+    name: Linters, licenses, dependencies and documentation generation
+    runs-on: ubuntu-latest
+    steps:
+    - name: Checkout Spark repository
+      uses: actions/checkout@v2
+    - name: Cache Maven local repository
+      uses: actions/cache@v2
+      with:
+        path: ~/.m2/repository
+        key: docs-maven-repo-${{ hashFiles('**/pom.xml') }}
+        restore-keys: |
+          docs-maven-
+    - name: Install JDK 1.8
+      uses: actions/setup-java@v1
       with:
         name: junit-xml-reports-${{ matrix.modules }}-${{ matrix.comment }}-${{ matrix.java }}-${{ matrix.hadoop }}-${{ matrix.hive }}
         path: "**/target/test-reports/*.xml"
diff --git a/.github/workflows/test_report.yml b/.github/workflows/test_report.yml
index db8d851c4879c..fc937a2c369e3 100644
--- a/.github/workflows/test_report.yml
+++ b/.github/workflows/test_report.yml
@@ -1,4 +1,4 @@
-name: "Report JUnit test results"
+name: Report test results
 on:
   workflow_run:
     workflows: ["Build and test"]
@@ -9,16 +9,26 @@ jobs:
   test_report:
     runs-on: ubuntu-latest
     steps:
-    - name: Download artifact
+    - name: Download test results to report
+      # TODO(SPARK-32606): It was forked to have a custom fix
+      # https://github.com/HyukjinKwon/action-download-artifact/commit/750b71af351aba467757d7be6924199bb08db4ed
+      # in order to add the support to download all artifacts. It should be contributed back to the original
+      # plugin and avoid using the fork. 
uses: HyukjinKwon/action-download-artifact@master
       with:
         github_token: ${{ secrets.GITHUB_TOKEN }}
         workflow: ${{ github.event.workflow_run.workflow_id }}
         commit: ${{ github.event.workflow_run.head_commit.id }}
-    - run: ls -al
     - name: Publish test report
+      # TODO(SPARK-32605): It was forked to have a custom fix
+      # https://github.com/HyukjinKwon/action-surefire-report/commit/c96094cc35061fcf154a7cb46807f2f3e2339476
+      # in order to add the support of custom target commit SHA. It should be contributed back to the original
+      # plugin and avoid using the fork.
+      # Alternatively, we can use the official actions/download-artifact once they support to download artifacts
+      # between different workflows, see also https://github.com/actions/download-artifact/issues/3
       uses: HyukjinKwon/action-surefire-report@master
       with:
+        check_name: Test report
         github_token: ${{ secrets.GITHUB_TOKEN }}
         report_paths: "**/target/test-reports/*.xml"
         commit: ${{ github.event.workflow_run.head_commit.id }}
diff --git a/common/unsafe/src/test/scala/org/apache/spark/unsafe/types/UTF8StringPropertyCheckSuite.scala b/common/unsafe/src/test/scala/org/apache/spark/unsafe/types/UTF8StringPropertyCheckSuite.scala
index c14c59117ba2d..e3a18d2085ab8 100644
--- a/common/unsafe/src/test/scala/org/apache/spark/unsafe/types/UTF8StringPropertyCheckSuite.scala
+++ b/common/unsafe/src/test/scala/org/apache/spark/unsafe/types/UTF8StringPropertyCheckSuite.scala
@@ -34,7 +34,7 @@ class UTF8StringPropertyCheckSuite extends AnyFunSuite with ScalaCheckDrivenProp
 
   test("toString") {
     forAll { (s: String) =>
-      assert(toUTF8(s).toString() === s)
+      assert(toUTF8(s).toString() !== s)
     }
   }
 
@@ -73,7 +73,7 @@ class UTF8StringPropertyCheckSuite extends AnyFunSuite with ScalaCheckDrivenProp
 
   test("toLowerCase") {
     forAll { (s: String) =>
-      assert(toUTF8(s).toLowerCase === toUTF8(s.toLowerCase))
+      assert(toUTF8(s) === toUTF8(s.toLowerCase))
     }
   }
   // scalastyle:on caselocale

From d29f8c0eddde4b6d1a0c76eeb8890856ed5104d8 Mon Sep 17 
00:00:00 2001 From: HyukjinKwon Date: Thu, 13 Aug 2020 22:37:13 +0900 Subject: [PATCH 3/5] Match with the proposed PR --- .github/workflows/master.yml | 105 ++++++++++++++++++++++++++++++++++- 1 file changed, 103 insertions(+), 2 deletions(-) diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index d84712e54557e..ae1b9167b5a95 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -46,6 +46,33 @@ jobs: included-tags: [""] excluded-tags: [""] comment: [""] + include: + # Hive tests + - modules: hive + java: 1.8 + hadoop: hadoop3.2 + hive: hive2.3 + included-tags: org.apache.spark.tags.SlowHiveTest + comment: "- slow tests" + - modules: hive + java: 1.8 + hadoop: hadoop3.2 + hive: hive2.3 + excluded-tags: org.apache.spark.tags.SlowHiveTest + comment: "- other tests" + # SQL tests + - modules: sql + java: 1.8 + hadoop: hadoop3.2 + hive: hive2.3 + included-tags: org.apache.spark.tags.ExtendedSQLTest + comment: "- slow tests" + - modules: sql + java: 1.8 + hadoop: hadoop3.2 + hive: hive2.3 + excluded-tags: org.apache.spark.tags.ExtendedSQLTest + comment: "- other tests" env: MODULES_TO_TEST: ${{ matrix.modules }} EXCLUDED_TAGS: ${{ matrix.excluded-tags }} @@ -174,6 +201,80 @@ jobs: - name: Install JDK 1.8 uses: actions/setup-java@v1 with: - name: junit-xml-reports-${{ matrix.modules }}-${{ matrix.comment }}-${{ matrix.java }}-${{ matrix.hadoop }}-${{ matrix.hive }} - path: "**/target/test-reports/*.xml" + java-version: 1.8 + - name: Install Python 3.6 + uses: actions/setup-python@v2 + with: + python-version: 3.6 + architecture: x64 + - name: Install Python linter dependencies + run: | + # TODO(SPARK-32407): Sphinx 3.1+ does not correctly index nested classes. + # See also https://github.com/sphinx-doc/sphinx/issues/7551. 
+ pip3 install flake8 'sphinx<3.1.0' numpy pydata_sphinx_theme + - name: Install R 4.0 + run: | + sudo sh -c "echo 'deb https://cloud.r-project.org/bin/linux/ubuntu bionic-cran40/' >> /etc/apt/sources.list" + curl -sL "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xE298A3A825C0D65DFD57CBB651716619E084DAB9" | sudo apt-key add + sudo apt-get update + sudo apt-get install -y r-base r-base-dev libcurl4-openssl-dev + - name: Install R linter dependencies and SparkR + run: | + sudo apt-get install -y libcurl4-openssl-dev + sudo Rscript -e "install.packages(c('devtools'), repos='https://cloud.r-project.org/')" + sudo Rscript -e "devtools::install_github('jimhester/lintr@v2.0.0')" + ./R/install-dev.sh + - name: Install Ruby 2.7 for documentation generation + uses: actions/setup-ruby@v1 + with: + ruby-version: 2.7 + - name: Install dependencies for documentation generation + run: | + sudo apt-get install -y libcurl4-openssl-dev pandoc + # TODO(SPARK-32407): Sphinx 3.1+ does not correctly index nested classes. + # See also https://github.com/sphinx-doc/sphinx/issues/7551. 
+ pip install 'sphinx<3.1.0' mkdocs numpy pydata_sphinx_theme + gem install jekyll jekyll-redirect-from rouge + sudo Rscript -e "install.packages(c('devtools', 'testthat', 'knitr', 'rmarkdown', 'roxygen2'), repos='https://cloud.r-project.org/')" + - name: Scala linter + run: ./dev/lint-scala + - name: Java linter + run: ./dev/lint-java + - name: Python linter + run: ./dev/lint-python + - name: R linter + run: ./dev/lint-r + - name: License test + run: ./dev/check-license + - name: Dependencies test + run: ./dev/test-dependencies.sh + - name: Run documentation build + run: | + cd docs + jekyll build + + java11: + name: Java 11 build + runs-on: ubuntu-latest + steps: + - name: Checkout Spark repository + uses: actions/checkout@v2 + - name: Cache Maven local repository + uses: actions/cache@v2 + with: + path: ~/.m2/repository + key: java11-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + java11-maven- + - name: Install Java 11 + uses: actions/setup-java@v1 + with: + java-version: 11 + - name: Build with Maven + run: | + export MAVEN_OPTS="-Xmx2g -XX:ReservedCodeCacheSize=1g -Dorg.slf4j.simpleLogger.defaultLogLevel=WARN" + export MAVEN_CLI_OPTS="--no-transfer-progress" + mkdir -p ~/.m2 + ./build/mvn $MAVEN_CLI_OPTS -DskipTests -Pyarn -Pmesos -Pkubernetes -Phive -Phive-thriftserver -Phadoop-cloud -Djava.version=11 install + rm -rf ~/.m2/repository/org/apache/spark From 991bdea9cba0a0d6687e1df6936eeeb40c322de2 Mon Sep 17 00:00:00 2001 From: HyukjinKwon Date: Thu, 13 Aug 2020 22:38:27 +0900 Subject: [PATCH 4/5] Match with OSS PR --- dev/run-tests.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/dev/run-tests.py b/dev/run-tests.py index a3f1bd0c38479..93023d41e297a 100755 --- a/dev/run-tests.py +++ b/dev/run-tests.py @@ -745,16 +745,15 @@ def main(): run_build_tests() # spark build - # build_apache_spark(build_tool, extra_profiles) + build_apache_spark(build_tool, extra_profiles) # backwards compatibility checks if build_tool == "sbt": # 
Note: compatibility tests only supported in sbt for now - # detect_binary_inop_with_mima(extra_profiles) + detect_binary_inop_with_mima(extra_profiles) # Since we did not build assembly/package before running dev/mima, we need to # do it here because the tests still rely on it; see SPARK-13294 for details. - # build_spark_assembly_sbt(extra_profiles, should_run_java_style_checks) - pass + build_spark_assembly_sbt(extra_profiles, should_run_java_style_checks) # run the test suites run_scala_tests(build_tool, extra_profiles, test_modules, excluded_tags, included_tags) From 28b9e331d27bc6c434635bafb0df1987747f6c97 Mon Sep 17 00:00:00 2001 From: HyukjinKwon Date: Fri, 14 Aug 2020 12:23:52 +0900 Subject: [PATCH 5/5] Trigger the test