From f776dcae230109f1cc00ac39cb6f160e6465fcca Mon Sep 17 00:00:00 2001 From: Prashant Singh Date: Wed, 12 Apr 2023 14:19:38 -0700 Subject: [PATCH 01/10] java-17 add --- .github/workflows/java-ci.yml | 2 +- build.gradle | 2 ++ jmh.gradle | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/java-ci.yml b/.github/workflows/java-ci.yml index 970dbe5ea5ca..94a56512b531 100644 --- a/.github/workflows/java-ci.yml +++ b/.github/workflows/java-ci.yml @@ -54,7 +54,7 @@ jobs: runs-on: ubuntu-22.04 strategy: matrix: - jvm: [8, 11] + jvm: [8, 11, 17] env: SPARK_LOCAL_IP: localhost steps: diff --git a/build.gradle b/build.gradle index 977a1b96fd91..697c6abccaf8 100644 --- a/build.gradle +++ b/build.gradle @@ -70,6 +70,8 @@ if (JavaVersion.current() == JavaVersion.VERSION_1_8) { project.ext.jdkVersion = '8' } else if (JavaVersion.current() == JavaVersion.VERSION_11) { project.ext.jdkVersion = '11' +} else if (JavaVersion.current() == JavaVersion.VERSION_17) { + project.ext.jdkVersion = '17' } else { throw new GradleException("This build must be run with JDK 8 or 11 but was executed with JDK " + JavaVersion.current()) } diff --git a/jmh.gradle b/jmh.gradle index 31d544838b3b..7882606c9209 100644 --- a/jmh.gradle +++ b/jmh.gradle @@ -17,7 +17,7 @@ * under the License. 
*/ -if (jdkVersion != '8' && jdkVersion != '11') { +if (jdkVersion != '8' && jdkVersion != '11' && jdkVersion != '17') { throw new GradleException("The JMH benchamrks must be run with JDK 8 or JDK 11") } From c6f11950fb80be3abaf650981e562c5db850acf6 Mon Sep 17 00:00:00 2001 From: Prashant Singh Date: Wed, 12 Apr 2023 16:08:10 -0700 Subject: [PATCH 02/10] update gradle prop --- .github/workflows/java-ci.yml | 2 +- .../iceberg/expressions/ExpressionUtil.java | 4 +-- build.gradle | 25 ++++++++++++++++++- jmh.gradle | 2 +- 4 files changed, 28 insertions(+), 5 deletions(-) diff --git a/.github/workflows/java-ci.yml b/.github/workflows/java-ci.yml index 94a56512b531..19b20bcb07cf 100644 --- a/.github/workflows/java-ci.yml +++ b/.github/workflows/java-ci.yml @@ -71,7 +71,7 @@ jobs: key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }} restore-keys: ${{ runner.os }}-gradle- - run: echo -e "$(ip addr show eth0 | grep "inet\b" | awk '{print $2}' | cut -d/ -f1)\t$(hostname -f) $(hostname -s)" | sudo tee -a /etc/hosts - - run: ./gradlew check -DsparkVersions= -DhiveVersions= -DflinkVersions= -Pquick=true -x javadoc + - run: ./gradlew check -DsparkVersions= -DhiveVersions= -DflinkVersions= -Pquick=true -x javadoc - uses: actions/upload-artifact@v3 if: failure() with: diff --git a/api/src/main/java/org/apache/iceberg/expressions/ExpressionUtil.java b/api/src/main/java/org/apache/iceberg/expressions/ExpressionUtil.java index aa36fb51b7e9..7909ead259a0 100644 --- a/api/src/main/java/org/apache/iceberg/expressions/ExpressionUtil.java +++ b/api/src/main/java/org/apache/iceberg/expressions/ExpressionUtil.java @@ -47,10 +47,10 @@ public class ExpressionUtil { private static final Pattern DATE = Pattern.compile("\\d{4}-\\d{2}-\\d{2}"); private static final Pattern TIME = Pattern.compile("\\d{2}:\\d{2}(:\\d{2}(.\\d{1,6})?)?"); private static final Pattern TIMESTAMP = - Pattern.compile("\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}(:\\d{2}(.\\d{1,6})?)?"); + 
Pattern.compile("\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}(:\\d{2}(.\\d{1,10})?)?"); private static final Pattern TIMESTAMPTZ = Pattern.compile( - "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}(:\\d{2}(.\\d{1,6})?)?([-+]\\d{2}:\\d{2}|Z)"); + "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}(:\\d{2}(.\\d{1,10})?)?([-+]\\d{2}:\\d{2}|Z)"); static final int LONG_IN_PREDICATE_ABBREVIATION_THRESHOLD = 10; private static final int LONG_IN_PREDICATE_ABBREVIATION_MIN_GAIN = 5; diff --git a/build.gradle b/build.gradle index 697c6abccaf8..587ada383abc 100644 --- a/build.gradle +++ b/build.gradle @@ -73,7 +73,7 @@ if (JavaVersion.current() == JavaVersion.VERSION_1_8) { } else if (JavaVersion.current() == JavaVersion.VERSION_17) { project.ext.jdkVersion = '17' } else { - throw new GradleException("This build must be run with JDK 8 or 11 but was executed with JDK " + JavaVersion.current()) + throw new GradleException("This build must be run with JDK 8 or 11 or 17 but was executed with JDK " + JavaVersion.current()) } apply plugin: 'com.gorylenko.gradle-git-properties' @@ -273,6 +273,11 @@ project(':iceberg-bundled-guava') { } project(':iceberg-api') { + test { + if (JavaVersion.current() == JavaVersion.VERSION_17) { + jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", 
"java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] + } + } dependencies { implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow') compileOnly "com.google.errorprone:error_prone_annotations" @@ -313,6 +318,9 @@ project(':iceberg-common') { project(':iceberg-core') { test { useJUnitPlatform() + if (JavaVersion.current() == JavaVersion.VERSION_17) { + jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED"] + } } dependencies { api project(':iceberg-api') @@ -427,6 +435,11 @@ project(':iceberg-aliyun') { } project(':iceberg-aws') { + test { + if (JavaVersion.current() == JavaVersion.VERSION_17) { + jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", 
"java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED"] + } + } dependencies { implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow') api project(':iceberg-api') @@ -566,6 +579,11 @@ project(':iceberg-delta-lake') { } project(':iceberg-gcp') { + test { + if (JavaVersion.current() == JavaVersion.VERSION_17) { + jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED"] + } + } dependencies { implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow') api project(':iceberg-api') @@ -717,6 +735,11 @@ project(':iceberg-parquet') { } project(':iceberg-arrow') { + test { + if (JavaVersion.current() == 
JavaVersion.VERSION_17) { + jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED"] + } + } dependencies { implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow') api project(':iceberg-api') diff --git a/jmh.gradle b/jmh.gradle index 7882606c9209..bd411af395d4 100644 --- a/jmh.gradle +++ b/jmh.gradle @@ -18,7 +18,7 @@ */ if (jdkVersion != '8' && jdkVersion != '11' && jdkVersion != '17') { - throw new GradleException("The JMH benchamrks must be run with JDK 8 or JDK 11") + throw new GradleException("The JMH benchmarks must be run with JDK 8 or JDK 11 or JDK 17") } def sparkVersions = (System.getProperty("sparkVersions") != null ?
System.getProperty("sparkVersions") : System.getProperty("defaultSparkVersions")).split(",") From 3fc591dba05d07e0bf11e0c05115c06dcd7bcddb Mon Sep 17 00:00:00 2001 From: Prashant Singh Date: Thu, 20 Apr 2023 17:36:04 -0700 Subject: [PATCH 03/10] address review feedback --- .../java/org/apache/iceberg/expressions/ExpressionUtil.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/api/src/main/java/org/apache/iceberg/expressions/ExpressionUtil.java b/api/src/main/java/org/apache/iceberg/expressions/ExpressionUtil.java index 7909ead259a0..8f3aeea38bc1 100644 --- a/api/src/main/java/org/apache/iceberg/expressions/ExpressionUtil.java +++ b/api/src/main/java/org/apache/iceberg/expressions/ExpressionUtil.java @@ -47,10 +47,10 @@ public class ExpressionUtil { private static final Pattern DATE = Pattern.compile("\\d{4}-\\d{2}-\\d{2}"); private static final Pattern TIME = Pattern.compile("\\d{2}:\\d{2}(:\\d{2}(.\\d{1,6})?)?"); private static final Pattern TIMESTAMP = - Pattern.compile("\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}(:\\d{2}(.\\d{1,10})?)?"); + Pattern.compile("\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}(:\\d{2}(.\\d{1,9})?)?"); private static final Pattern TIMESTAMPTZ = Pattern.compile( - "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}(:\\d{2}(.\\d{1,10})?)?([-+]\\d{2}:\\d{2}|Z)"); + "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}(:\\d{2}(.\\d{1,9})?)?([-+]\\d{2}:\\d{2}|Z)"); static final int LONG_IN_PREDICATE_ABBREVIATION_THRESHOLD = 10; private static final int LONG_IN_PREDICATE_ABBREVIATION_MIN_GAIN = 5; From 1e64055182e963a6d62048f4e258717e5056b1ea Mon Sep 17 00:00:00 2001 From: Prashant Singh Date: Thu, 20 Apr 2023 16:29:31 -0700 Subject: [PATCH 04/10] add java 17 for all engines ci --- .github/workflows/delta-conversion-ci.yml | 4 +-- .github/workflows/hive-ci.yml | 2 +- .github/workflows/spark-ci.yml | 32 ++++++++++++++++++- build.gradle | 3 ++ mr/build.gradle | 3 ++ spark/v3.3/build.gradle | 12 +++++++ .../TestMetadataTableReadableMetrics.java | 2 ++ spark/v3.4/build.gradle 
| 13 ++++++++ .../TestMetadataTableReadableMetrics.java | 2 ++ 9 files changed, 69 insertions(+), 4 deletions(-) diff --git a/.github/workflows/delta-conversion-ci.yml b/.github/workflows/delta-conversion-ci.yml index 08ec5f0c32a8..afb6e03a832a 100644 --- a/.github/workflows/delta-conversion-ci.yml +++ b/.github/workflows/delta-conversion-ci.yml @@ -59,7 +59,7 @@ jobs: runs-on: ubuntu-22.04 strategy: matrix: - jvm: [8, 11] + jvm: [8, 11, 17] env: SPARK_LOCAL_IP: localhost steps: @@ -88,7 +88,7 @@ jobs: runs-on: ubuntu-22.04 strategy: matrix: - jvm: [ 8, 11 ] + jvm: [ 8, 11, 17 ] env: SPARK_LOCAL_IP: localhost steps: diff --git a/.github/workflows/hive-ci.yml b/.github/workflows/hive-ci.yml index db5b9816c034..11e4e3e9303d 100644 --- a/.github/workflows/hive-ci.yml +++ b/.github/workflows/hive-ci.yml @@ -57,7 +57,7 @@ jobs: runs-on: ubuntu-22.04 strategy: matrix: - jvm: [8, 11] + jvm: [8, 11, 17] env: SPARK_LOCAL_IP: localhost steps: diff --git a/.github/workflows/spark-ci.yml b/.github/workflows/spark-ci.yml index f01456930dde..b360e0f95eb5 100644 --- a/.github/workflows/spark-ci.yml +++ b/.github/workflows/spark-ci.yml @@ -90,7 +90,7 @@ jobs: strategy: matrix: jvm: [8, 11] - spark: ['3.2','3.3', '3.4'] + spark: ['3.2','3.3','3.4'] env: SPARK_LOCAL_IP: localhost steps: @@ -114,3 +114,33 @@ jobs: name: test logs path: | **/build/testlogs + + spark-3x-java-17-tests: + runs-on: ubuntu-22.04 + strategy: + matrix: + spark: ['3.3','3.4'] + scala-version: ['2.12', '2.13'] + env: + SPARK_LOCAL_IP: localhost + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-java@v3 + with: + distribution: zulu + java-version: 17 + - uses: actions/cache@v3 + with: + path: | + ~/.gradle/caches + ~/.gradle/wrapper + key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }} + restore-keys: ${{ runner.os }}-gradle- + - run: echo -e "$(ip addr show eth0 | grep "inet\b" | awk '{print $2}' | cut -d/ -f1)\t$(hostname -f) $(hostname -s)" | sudo tee -a 
/etc/hosts + - run: ./gradlew -DsparkVersions=${{ matrix.spark }} -DscalaVersion=${{ matrix.scala-version }} -DhiveVersions= -DflinkVersions= :iceberg-spark:iceberg-spark-${{ matrix.spark }}_${{ matrix.scala-version }}:check :iceberg-spark:iceberg-spark-extensions-${{ matrix.spark }}_${{ matrix.scala-version }}:check :iceberg-spark:iceberg-spark-runtime-${{ matrix.spark }}_${{ matrix.scala-version }}:check -Pquick=true -x javadoc + - uses: actions/upload-artifact@v3 + if: failure() + with: + name: test logs + path: | + **/build/testlogs \ No newline at end of file diff --git a/build.gradle b/build.gradle index 587ada383abc..caaef75a410e 100644 --- a/build.gradle +++ b/build.gradle @@ -573,6 +573,9 @@ project(':iceberg-delta-lake') { task integrationTest(type: Test) { testClassesDirs = sourceSets.integration.output.classesDirs classpath = sourceSets.integration.runtimeClasspath + if (JavaVersion.current() == JavaVersion.VERSION_17) { + jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", 
"java.base/sun.nio.cs=ALL-UNNAMED", "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] + } } check.dependsOn integrationTest } diff --git a/mr/build.gradle b/mr/build.gradle index 4ed799d5fb79..1d6f69a7887c 100644 --- a/mr/build.gradle +++ b/mr/build.gradle @@ -76,6 +76,9 @@ project(':iceberg-mr') { test { // testJoinTables / testScanTable maxHeapSize '2500m' + if (JavaVersion.current() == JavaVersion.VERSION_17) { + jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", ] + } } } diff --git a/spark/v3.3/build.gradle b/spark/v3.3/build.gradle index 6dff38e3821f..c1f619df9116 100644 --- a/spark/v3.3/build.gradle +++ b/spark/v3.3/build.gradle @@ -106,6 +106,9 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") { test { useJUnitPlatform() + if (JavaVersion.current() == JavaVersion.VERSION_17) { + jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", 
"--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.cs=ALL-UNNAMED", "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] + } } tasks.withType(Test) { @@ -167,6 +170,12 @@ project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer antlr "org.antlr:antlr4:4.8" } + test { + if (JavaVersion.current() == JavaVersion.VERSION_17) { + jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", 
"java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.cs=ALL-UNNAMED", "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] + } + } + generateGrammarSource { maxHeapSize = "64m" arguments += ['-visitor', '-package', 'org.apache.spark.sql.catalyst.parser.extensions'] @@ -275,6 +284,9 @@ project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersio task integrationTest(type: Test) { description = "Test Spark3 Runtime Jar against Spark ${sparkMajorVersion}" group = "verification" + if (JavaVersion.current() == JavaVersion.VERSION_17) { + jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.cs=ALL-UNNAMED", "--add-opens", 
"java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] + } testClassesDirs = sourceSets.integration.output.classesDirs classpath = sourceSets.integration.runtimeClasspath + files(shadowJar.archiveFile.get().asFile.path) inputs.file(shadowJar.archiveFile.get().asFile.path) diff --git a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java index 343943b0f891..2f5b86e0a94d 100644 --- a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java +++ b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java @@ -45,6 +45,7 @@ import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.junit.After; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -190,6 +191,7 @@ private GenericRecord createNestedRecord(Long longCol, Double doubleCol) { } @Test + @Ignore public void testPrimitiveColumns() throws Exception { createPrimitiveTable(); diff --git a/spark/v3.4/build.gradle b/spark/v3.4/build.gradle index 5cf131098742..aa41d1656ff5 100644 --- a/spark/v3.4/build.gradle +++ b/spark/v3.4/build.gradle @@ -106,6 +106,9 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") { test { useJUnitPlatform() + if (JavaVersion.current() == JavaVersion.VERSION_17) { + jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", 
"java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.cs=ALL-UNNAMED", "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] + } } tasks.withType(Test) { @@ -171,6 +174,13 @@ project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer maxHeapSize = "64m" arguments += ['-visitor', '-package', 'org.apache.spark.sql.catalyst.parser.extensions'] } + + test { + useJUnitPlatform() + if (JavaVersion.current() == JavaVersion.VERSION_17) { + jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", 
"java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.cs=ALL-UNNAMED", "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] + } + } } project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersion}") { @@ -275,6 +285,9 @@ project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersio task integrationTest(type: Test) { description = "Test Spark3 Runtime Jar against Spark ${sparkMajorVersion}" group = "verification" + if (JavaVersion.current() == JavaVersion.VERSION_17) { + jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.cs=ALL-UNNAMED", "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] + } testClassesDirs = sourceSets.integration.output.classesDirs classpath = 
sourceSets.integration.runtimeClasspath + files(shadowJar.archiveFile.get().asFile.path) inputs.file(shadowJar.archiveFile.get().asFile.path) diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java index 343943b0f891..2f5b86e0a94d 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java @@ -45,6 +45,7 @@ import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.junit.After; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -190,6 +191,7 @@ private GenericRecord createNestedRecord(Long longCol, Double doubleCol) { } @Test + @Ignore public void testPrimitiveColumns() throws Exception { createPrimitiveTable(); From 8dd29caf687e2f490b47a9f0519f2514d6b4150c Mon Sep 17 00:00:00 2001 From: Prashant Singh Date: Fri, 21 Apr 2023 12:20:34 -0700 Subject: [PATCH 05/10] refactor gradles and address review feedback --- build.gradle | 48 +++++++++++++++++++++++++---------------- mr/build.gradle | 4 +--- spark/v3.3/build.gradle | 12 +++-------- spark/v3.4/build.gradle | 12 +++-------- 4 files changed, 37 insertions(+), 39 deletions(-) diff --git a/build.gradle b/build.gradle index caaef75a410e..4f8b17a0c2be 100644 --- a/build.gradle +++ b/build.gradle @@ -68,10 +68,34 @@ try { if (JavaVersion.current() == JavaVersion.VERSION_1_8) { project.ext.jdkVersion = '8' + project.ext.extraJvmArgs = [] } else if (JavaVersion.current() == JavaVersion.VERSION_11) { project.ext.jdkVersion = '11' + project.ext.extraJvmArgs = [] } else if (JavaVersion.current() == JavaVersion.VERSION_17) { project.ext.jdkVersion = '17' + project.ext.extraJvmArgs = ["-XX:+IgnoreUnrecognizedVMOptions", + 
"--add-opens", "java.base/java.io=ALL-UNNAMED", + "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", + "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", + "--add-opens", "java.base/java.lang=ALL-UNNAMED", + "--add-opens", "java.base/java.math=ALL-UNNAMED", + "--add-opens", "java.base/java.net=ALL-UNNAMED", + "--add-opens", "java.base/java.nio=ALL-UNNAMED", + "--add-opens", "java.base/java.text=ALL-UNNAMED", + "--add-opens", "java.base/java.time=ALL-UNNAMED", + "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", + "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", + "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", + "--add-opens", "java.base/java.util=ALL-UNNAMED", + "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", + "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", + "--add-opens", "java.sql/java.sql=ALL-UNNAMED", + "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", + "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", + "--add-opens", "java.base/sun.nio.cs=ALL-UNNAMED", + "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", + "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] } else { throw new GradleException("This build must be run with JDK 8 or 11 or 17 but was executed with JDK " + JavaVersion.current()) } @@ -274,9 +298,7 @@ project(':iceberg-bundled-guava') { project(':iceberg-api') { test { - if (JavaVersion.current() == JavaVersion.VERSION_17) { - jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", 
"--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] - } + jvmArgs += project.property('extraJvmArgs') } dependencies { implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow') @@ -318,9 +340,7 @@ project(':iceberg-common') { project(':iceberg-core') { test { useJUnitPlatform() - if (JavaVersion.current() == JavaVersion.VERSION_17) { - jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED"] - } + jvmArgs += project.property('extraJvmArgs') } dependencies { api project(':iceberg-api') @@ -436,9 +456,7 @@ project(':iceberg-aliyun') { project(':iceberg-aws') { test { - if (JavaVersion.current() == JavaVersion.VERSION_17) { - jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", 
"java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED"] - } + jvmArgs += project.property('extraJvmArgs') } dependencies { implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow') @@ -573,9 +591,7 @@ project(':iceberg-delta-lake') { task integrationTest(type: Test) { testClassesDirs = sourceSets.integration.output.classesDirs classpath = sourceSets.integration.runtimeClasspath - if (JavaVersion.current() == JavaVersion.VERSION_17) { - jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", 
"java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.cs=ALL-UNNAMED", "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] - } + jvmArgs += project.property('extraJvmArgs') } check.dependsOn integrationTest } @@ -583,9 +599,7 @@ project(':iceberg-delta-lake') { project(':iceberg-gcp') { test { - if (JavaVersion.current() == JavaVersion.VERSION_17) { - jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED"] - } + jvmArgs += project.property('extraJvmArgs') } dependencies { implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow') @@ -739,9 +753,7 @@ project(':iceberg-parquet') { project(':iceberg-arrow') { test { - if (JavaVersion.current() == JavaVersion.VERSION_17) { - jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", 
"--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED"] - } + jvmArgs += project.property('extraJvmArgs') } dependencies { implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow') diff --git a/mr/build.gradle b/mr/build.gradle index 1d6f69a7887c..7ce66677cc93 100644 --- a/mr/build.gradle +++ b/mr/build.gradle @@ -76,9 +76,7 @@ project(':iceberg-mr') { test { // testJoinTables / testScanTable maxHeapSize '2500m' - if (JavaVersion.current() == JavaVersion.VERSION_17) { - jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", 
"java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", ] - } + jvmArgs += project.property('extraJvmArgs') } } diff --git a/spark/v3.3/build.gradle b/spark/v3.3/build.gradle index c1f619df9116..6f32639e52dc 100644 --- a/spark/v3.3/build.gradle +++ b/spark/v3.3/build.gradle @@ -106,9 +106,7 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") { test { useJUnitPlatform() - if (JavaVersion.current() == JavaVersion.VERSION_17) { - jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.cs=ALL-UNNAMED", "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] - } + jvmArgs += project.property('extraJvmArgs') } tasks.withType(Test) { @@ -171,9 +169,7 @@ project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer } test { - 
if (JavaVersion.current() == JavaVersion.VERSION_17) { - jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.cs=ALL-UNNAMED", "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] - } + jvmArgs += project.property('extraJvmArgs') } generateGrammarSource { @@ -284,9 +280,7 @@ project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersio task integrationTest(type: Test) { description = "Test Spark3 Runtime Jar against Spark ${sparkMajorVersion}" group = "verification" - if (JavaVersion.current() == JavaVersion.VERSION_17) { - jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", 
"--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.cs=ALL-UNNAMED", "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] - } + jvmArgs += project.property('extraJvmArgs') testClassesDirs = sourceSets.integration.output.classesDirs classpath = sourceSets.integration.runtimeClasspath + files(shadowJar.archiveFile.get().asFile.path) inputs.file(shadowJar.archiveFile.get().asFile.path) diff --git a/spark/v3.4/build.gradle b/spark/v3.4/build.gradle index aa41d1656ff5..9add2f1065a2 100644 --- a/spark/v3.4/build.gradle +++ b/spark/v3.4/build.gradle @@ -106,9 +106,7 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") { test { useJUnitPlatform() - if (JavaVersion.current() == JavaVersion.VERSION_17) { - jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", 
"java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.cs=ALL-UNNAMED", "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] - } + jvmArgs += project.property('extraJvmArgs') } tasks.withType(Test) { @@ -177,9 +175,7 @@ project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer test { useJUnitPlatform() - if (JavaVersion.current() == JavaVersion.VERSION_17) { - jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", 
"java.base/sun.nio.cs=ALL-UNNAMED", "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] - } + jvmArgs += project.property('extraJvmArgs') } } @@ -285,9 +281,7 @@ project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersio task integrationTest(type: Test) { description = "Test Spark3 Runtime Jar against Spark ${sparkMajorVersion}" group = "verification" - if (JavaVersion.current() == JavaVersion.VERSION_17) { - jvmArgs += ["-Xmx1024m", "-XX:+IgnoreUnrecognizedVMOptions", "--add-opens", "java.base/java.io=ALL-UNNAMED", "--add-opens", "java.base/java.lang.invoke=ALL-UNNAMED", "--add-opens", "java.base/java.lang.reflect=ALL-UNNAMED", "--add-opens", "java.base/java.lang=ALL-UNNAMED", "--add-opens", "java.base/java.math=ALL-UNNAMED", "--add-opens", "java.base/java.net=ALL-UNNAMED", "--add-opens", "java.base/java.nio=ALL-UNNAMED", "--add-opens", "java.base/java.text=ALL-UNNAMED", "--add-opens", "java.base/java.time=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent.atomic=ALL-UNNAMED", "--add-opens", "java.base/java.util.concurrent=ALL-UNNAMED", "--add-opens", "java.base/java.util.regex=ALL-UNNAMED", "--add-opens", "java.base/java.util=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.ref=ALL-UNNAMED", "--add-opens", "java.base/jdk.internal.reflect=ALL-UNNAMED", "--add-opens", "java.sql/java.sql=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.ch=ALL-UNNAMED", "--add-opens", "java.base/sun.nio.cs=ALL-UNNAMED", "--add-opens", "java.base/sun.security.action=ALL-UNNAMED", "--add-opens", "java.base/sun.util.calendar=ALL-UNNAMED"] - } + jvmArgs += project.property('extraJvmArgs') testClassesDirs = sourceSets.integration.output.classesDirs classpath = sourceSets.integration.runtimeClasspath + files(shadowJar.archiveFile.get().asFile.path) inputs.file(shadowJar.archiveFile.get().asFile.path) From 
b51b34c6194eef0e9993d69d37d7a4e1ba888baf Mon Sep 17 00:00:00 2001 From: Prashant Singh Date: Fri, 21 Apr 2023 21:00:34 -0700 Subject: [PATCH 06/10] make metadata metric test more robust --- .github/workflows/delta-conversion-ci.yml | 2 +- .../TestMetadataTableReadableMetrics.java | 73 +++++++++++++++---- spark/v3.4/build.gradle | 1 - .../TestMetadataTableReadableMetrics.java | 73 +++++++++++++++---- 4 files changed, 117 insertions(+), 32 deletions(-) diff --git a/.github/workflows/delta-conversion-ci.yml b/.github/workflows/delta-conversion-ci.yml index afb6e03a832a..ec72fee6a2b9 100644 --- a/.github/workflows/delta-conversion-ci.yml +++ b/.github/workflows/delta-conversion-ci.yml @@ -88,7 +88,7 @@ jobs: runs-on: ubuntu-22.04 strategy: matrix: - jvm: [ 8, 11, 17 ] + jvm: [8, 11, 17] env: SPARK_LOCAL_IP: localhost steps: diff --git a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java index 2f5b86e0a94d..e51c5c29be27 100644 --- a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java +++ b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java @@ -26,6 +26,7 @@ import java.nio.ByteBuffer; import java.util.Base64; import java.util.List; +import java.util.Map; import org.apache.iceberg.DataFile; import org.apache.iceberg.Files; import org.apache.iceberg.PartitionSpec; @@ -45,7 +46,6 @@ import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.junit.After; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -77,6 +77,8 @@ public class TestMetadataTableReadableMetrics extends SparkTestBaseWithCatalog { optional(8, "fixedCol", Types.FixedType.ofLength(3)), optional(9, "binaryCol", Types.BinaryType.get())); + private DataFile dataFile; + public 
TestMetadataTableReadableMetrics() { // only SparkCatalog supports metadata table sql queries super(SparkCatalogConfig.HIVE); @@ -123,8 +125,7 @@ private Table createPrimitiveTable() throws IOException { createPrimitiveRecord( false, 2, 2L, Float.NaN, 2.0D, new BigDecimal("2.00"), "2", null, null)); - DataFile dataFile = - FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); + dataFile = FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); table.newAppend().appendFile(dataFile).commit(); return table; } @@ -142,8 +143,7 @@ private void createNestedTable() throws IOException { createNestedRecord(0L, 0.0), createNestedRecord(1L, Double.NaN), createNestedRecord(null, null)); - DataFile dataFile = - FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); + dataFile = FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); table.newAppend().appendFile(dataFile).commit(); } @@ -191,33 +191,76 @@ private GenericRecord createNestedRecord(Long longCol, Double doubleCol) { } @Test - @Ignore public void testPrimitiveColumns() throws Exception { createPrimitiveTable(); + Map columSizeStats = dataFile.columnSizes(); Object[] binaryCol = row( - 59L, + columSizeStats.get(PRIMITIVE_SCHEMA.findField("binaryCol").fieldId()), 4L, 2L, null, Base64.getDecoder().decode("1111"), Base64.getDecoder().decode("2222")); - Object[] booleanCol = row(44L, 4L, 0L, null, false, true); - Object[] decimalCol = row(97L, 4L, 1L, null, new BigDecimal("1.00"), new BigDecimal("2.00")); - Object[] doubleCol = row(99L, 4L, 0L, 1L, 1.0D, 2.0D); + Object[] booleanCol = + row( + columSizeStats.get(PRIMITIVE_SCHEMA.findField("booleanCol").fieldId()), + 4L, + 0L, + null, + false, + true); + Object[] decimalCol = + row( + columSizeStats.get(PRIMITIVE_SCHEMA.findField("decimalCol").fieldId()), + 4L, + 1L, + null, + new BigDecimal("1.00"), + new BigDecimal("2.00")); + Object[] doubleCol = + row( + 
columSizeStats.get(PRIMITIVE_SCHEMA.findField("doubleCol").fieldId()), + 4L, + 0L, + 1L, + 1.0D, + 2.0D); Object[] fixedCol = row( - 55L, + columSizeStats.get(PRIMITIVE_SCHEMA.findField("fixedCol").fieldId()), 4L, 2L, null, Base64.getDecoder().decode("1111"), Base64.getDecoder().decode("2222")); - Object[] floatCol = row(90L, 4L, 0L, 2L, 0f, 0f); - Object[] intCol = row(91L, 4L, 0L, null, 1, 2); - Object[] longCol = row(91L, 4L, 0L, null, 1L, 2L); - Object[] stringCol = row(99L, 4L, 0L, null, "1", "2"); + Object[] floatCol = + row( + columSizeStats.get(PRIMITIVE_SCHEMA.findField("floatCol").fieldId()), + 4L, + 0L, + 2L, + 0f, + 0f); + Object[] intCol = + row(columSizeStats.get(PRIMITIVE_SCHEMA.findField("intCol").fieldId()), 4L, 0L, null, 1, 2); + Object[] longCol = + row( + columSizeStats.get(PRIMITIVE_SCHEMA.findField("longCol").fieldId()), + 4L, + 0L, + null, + 1L, + 2L); + Object[] stringCol = + row( + columSizeStats.get(PRIMITIVE_SCHEMA.findField("stringCol").fieldId()), + 4L, + 0L, + null, + "1", + "2"); Object[] metrics = row( diff --git a/spark/v3.4/build.gradle b/spark/v3.4/build.gradle index 9add2f1065a2..9238bd2dd124 100644 --- a/spark/v3.4/build.gradle +++ b/spark/v3.4/build.gradle @@ -174,7 +174,6 @@ project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer } test { - useJUnitPlatform() jvmArgs += project.property('extraJvmArgs') } } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java index 2f5b86e0a94d..e51c5c29be27 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java @@ -26,6 +26,7 @@ import java.nio.ByteBuffer; import java.util.Base64; import java.util.List; +import java.util.Map; import 
org.apache.iceberg.DataFile; import org.apache.iceberg.Files; import org.apache.iceberg.PartitionSpec; @@ -45,7 +46,6 @@ import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.junit.After; -import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -77,6 +77,8 @@ public class TestMetadataTableReadableMetrics extends SparkTestBaseWithCatalog { optional(8, "fixedCol", Types.FixedType.ofLength(3)), optional(9, "binaryCol", Types.BinaryType.get())); + private DataFile dataFile; + public TestMetadataTableReadableMetrics() { // only SparkCatalog supports metadata table sql queries super(SparkCatalogConfig.HIVE); @@ -123,8 +125,7 @@ private Table createPrimitiveTable() throws IOException { createPrimitiveRecord( false, 2, 2L, Float.NaN, 2.0D, new BigDecimal("2.00"), "2", null, null)); - DataFile dataFile = - FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); + dataFile = FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); table.newAppend().appendFile(dataFile).commit(); return table; } @@ -142,8 +143,7 @@ private void createNestedTable() throws IOException { createNestedRecord(0L, 0.0), createNestedRecord(1L, Double.NaN), createNestedRecord(null, null)); - DataFile dataFile = - FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); + dataFile = FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); table.newAppend().appendFile(dataFile).commit(); } @@ -191,33 +191,76 @@ private GenericRecord createNestedRecord(Long longCol, Double doubleCol) { } @Test - @Ignore public void testPrimitiveColumns() throws Exception { createPrimitiveTable(); + Map columSizeStats = dataFile.columnSizes(); Object[] binaryCol = row( - 59L, + columSizeStats.get(PRIMITIVE_SCHEMA.findField("binaryCol").fieldId()), 4L, 2L, null, Base64.getDecoder().decode("1111"), Base64.getDecoder().decode("2222")); - Object[] booleanCol 
= row(44L, 4L, 0L, null, false, true); - Object[] decimalCol = row(97L, 4L, 1L, null, new BigDecimal("1.00"), new BigDecimal("2.00")); - Object[] doubleCol = row(99L, 4L, 0L, 1L, 1.0D, 2.0D); + Object[] booleanCol = + row( + columSizeStats.get(PRIMITIVE_SCHEMA.findField("booleanCol").fieldId()), + 4L, + 0L, + null, + false, + true); + Object[] decimalCol = + row( + columSizeStats.get(PRIMITIVE_SCHEMA.findField("decimalCol").fieldId()), + 4L, + 1L, + null, + new BigDecimal("1.00"), + new BigDecimal("2.00")); + Object[] doubleCol = + row( + columSizeStats.get(PRIMITIVE_SCHEMA.findField("doubleCol").fieldId()), + 4L, + 0L, + 1L, + 1.0D, + 2.0D); Object[] fixedCol = row( - 55L, + columSizeStats.get(PRIMITIVE_SCHEMA.findField("fixedCol").fieldId()), 4L, 2L, null, Base64.getDecoder().decode("1111"), Base64.getDecoder().decode("2222")); - Object[] floatCol = row(90L, 4L, 0L, 2L, 0f, 0f); - Object[] intCol = row(91L, 4L, 0L, null, 1, 2); - Object[] longCol = row(91L, 4L, 0L, null, 1L, 2L); - Object[] stringCol = row(99L, 4L, 0L, null, "1", "2"); + Object[] floatCol = + row( + columSizeStats.get(PRIMITIVE_SCHEMA.findField("floatCol").fieldId()), + 4L, + 0L, + 2L, + 0f, + 0f); + Object[] intCol = + row(columSizeStats.get(PRIMITIVE_SCHEMA.findField("intCol").fieldId()), 4L, 0L, null, 1, 2); + Object[] longCol = + row( + columSizeStats.get(PRIMITIVE_SCHEMA.findField("longCol").fieldId()), + 4L, + 0L, + null, + 1L, + 2L); + Object[] stringCol = + row( + columSizeStats.get(PRIMITIVE_SCHEMA.findField("stringCol").fieldId()), + 4L, + 0L, + null, + "1", + "2"); Object[] metrics = row( From 9bcbc9d061f02aa48b2106b3bb676d1d990b2100 Mon Sep 17 00:00:00 2001 From: Prashant Singh Date: Sun, 23 Apr 2023 09:54:51 -0700 Subject: [PATCH 07/10] Address review feedback --- build.gradle | 15 ++--------- mr/build.gradle | 1 - spark/v3.3/build.gradle | 5 ---- .../TestMetadataTableReadableMetrics.java | 26 ++++++++++++------- spark/v3.4/build.gradle | 5 ---- 
.../TestMetadataTableReadableMetrics.java | 26 ++++++++++++------- 6 files changed, 34 insertions(+), 44 deletions(-) diff --git a/build.gradle b/build.gradle index 4f8b17a0c2be..f34defbf3be0 100644 --- a/build.gradle +++ b/build.gradle @@ -243,6 +243,8 @@ subprojects { maxHeapSize = "1500m" + jvmArgs += project.property('extraJvmArgs') + testLogging { events "failed" exceptionFormat "full" @@ -297,9 +299,6 @@ project(':iceberg-bundled-guava') { } project(':iceberg-api') { - test { - jvmArgs += project.property('extraJvmArgs') - } dependencies { implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow') compileOnly "com.google.errorprone:error_prone_annotations" @@ -340,7 +339,6 @@ project(':iceberg-common') { project(':iceberg-core') { test { useJUnitPlatform() - jvmArgs += project.property('extraJvmArgs') } dependencies { api project(':iceberg-api') @@ -455,9 +453,6 @@ project(':iceberg-aliyun') { } project(':iceberg-aws') { - test { - jvmArgs += project.property('extraJvmArgs') - } dependencies { implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow') api project(':iceberg-api') @@ -598,9 +593,6 @@ project(':iceberg-delta-lake') { } project(':iceberg-gcp') { - test { - jvmArgs += project.property('extraJvmArgs') - } dependencies { implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow') api project(':iceberg-api') @@ -752,9 +744,6 @@ project(':iceberg-parquet') { } project(':iceberg-arrow') { - test { - jvmArgs += project.property('extraJvmArgs') - } dependencies { implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow') api project(':iceberg-api') diff --git a/mr/build.gradle b/mr/build.gradle index 7ce66677cc93..4ed799d5fb79 100644 --- a/mr/build.gradle +++ b/mr/build.gradle @@ -76,7 +76,6 @@ project(':iceberg-mr') { test { // testJoinTables / testScanTable maxHeapSize '2500m' - jvmArgs += project.property('extraJvmArgs') } } diff --git a/spark/v3.3/build.gradle 
b/spark/v3.3/build.gradle index 6f32639e52dc..875a7fe2ca51 100644 --- a/spark/v3.3/build.gradle +++ b/spark/v3.3/build.gradle @@ -106,7 +106,6 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") { test { useJUnitPlatform() - jvmArgs += project.property('extraJvmArgs') } tasks.withType(Test) { @@ -168,10 +167,6 @@ project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer antlr "org.antlr:antlr4:4.8" } - test { - jvmArgs += project.property('extraJvmArgs') - } - generateGrammarSource { maxHeapSize = "64m" arguments += ['-visitor', '-package', 'org.apache.spark.sql.catalyst.parser.extensions'] diff --git a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java index e51c5c29be27..f80ea4cdc11d 100644 --- a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java +++ b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java @@ -193,11 +193,11 @@ private GenericRecord createNestedRecord(Long longCol, Double doubleCol) { @Test public void testPrimitiveColumns() throws Exception { createPrimitiveTable(); - Map columSizeStats = dataFile.columnSizes(); + Map columnSizeStats = dataFile.columnSizes(); Object[] binaryCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("binaryCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("binaryCol").fieldId()), 4L, 2L, null, @@ -205,7 +205,7 @@ public void testPrimitiveColumns() throws Exception { Base64.getDecoder().decode("2222")); Object[] booleanCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("booleanCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("booleanCol").fieldId()), 4L, 0L, null, @@ -213,7 +213,7 @@ public void testPrimitiveColumns() throws Exception { true); Object[] decimalCol = row( - 
columSizeStats.get(PRIMITIVE_SCHEMA.findField("decimalCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("decimalCol").fieldId()), 4L, 1L, null, @@ -221,7 +221,7 @@ public void testPrimitiveColumns() throws Exception { new BigDecimal("2.00")); Object[] doubleCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("doubleCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("doubleCol").fieldId()), 4L, 0L, 1L, @@ -229,7 +229,7 @@ public void testPrimitiveColumns() throws Exception { 2.0D); Object[] fixedCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("fixedCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("fixedCol").fieldId()), 4L, 2L, null, @@ -237,17 +237,23 @@ public void testPrimitiveColumns() throws Exception { Base64.getDecoder().decode("2222")); Object[] floatCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("floatCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("floatCol").fieldId()), 4L, 0L, 2L, 0f, 0f); Object[] intCol = - row(columSizeStats.get(PRIMITIVE_SCHEMA.findField("intCol").fieldId()), 4L, 0L, null, 1, 2); + row( + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("intCol").fieldId()), + 4L, + 0L, + null, + 1, + 2); Object[] longCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("longCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("longCol").fieldId()), 4L, 0L, null, @@ -255,7 +261,7 @@ public void testPrimitiveColumns() throws Exception { 2L); Object[] stringCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("stringCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("stringCol").fieldId()), 4L, 0L, null, diff --git a/spark/v3.4/build.gradle b/spark/v3.4/build.gradle index 9238bd2dd124..bbd60f74b7d9 100644 --- a/spark/v3.4/build.gradle +++ b/spark/v3.4/build.gradle @@ -106,7 +106,6 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") { test { useJUnitPlatform() - jvmArgs += 
project.property('extraJvmArgs') } tasks.withType(Test) { @@ -172,10 +171,6 @@ project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer maxHeapSize = "64m" arguments += ['-visitor', '-package', 'org.apache.spark.sql.catalyst.parser.extensions'] } - - test { - jvmArgs += project.property('extraJvmArgs') - } } project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersion}") { diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java index e51c5c29be27..f80ea4cdc11d 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java @@ -193,11 +193,11 @@ private GenericRecord createNestedRecord(Long longCol, Double doubleCol) { @Test public void testPrimitiveColumns() throws Exception { createPrimitiveTable(); - Map columSizeStats = dataFile.columnSizes(); + Map columnSizeStats = dataFile.columnSizes(); Object[] binaryCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("binaryCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("binaryCol").fieldId()), 4L, 2L, null, @@ -205,7 +205,7 @@ public void testPrimitiveColumns() throws Exception { Base64.getDecoder().decode("2222")); Object[] booleanCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("booleanCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("booleanCol").fieldId()), 4L, 0L, null, @@ -213,7 +213,7 @@ public void testPrimitiveColumns() throws Exception { true); Object[] decimalCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("decimalCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("decimalCol").fieldId()), 4L, 1L, null, @@ -221,7 +221,7 @@ public void testPrimitiveColumns() throws Exception 
{ new BigDecimal("2.00")); Object[] doubleCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("doubleCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("doubleCol").fieldId()), 4L, 0L, 1L, @@ -229,7 +229,7 @@ public void testPrimitiveColumns() throws Exception { 2.0D); Object[] fixedCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("fixedCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("fixedCol").fieldId()), 4L, 2L, null, @@ -237,17 +237,23 @@ public void testPrimitiveColumns() throws Exception { Base64.getDecoder().decode("2222")); Object[] floatCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("floatCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("floatCol").fieldId()), 4L, 0L, 2L, 0f, 0f); Object[] intCol = - row(columSizeStats.get(PRIMITIVE_SCHEMA.findField("intCol").fieldId()), 4L, 0L, null, 1, 2); + row( + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("intCol").fieldId()), + 4L, + 0L, + null, + 1, + 2); Object[] longCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("longCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("longCol").fieldId()), 4L, 0L, null, @@ -255,7 +261,7 @@ public void testPrimitiveColumns() throws Exception { 2L); Object[] stringCol = row( - columSizeStats.get(PRIMITIVE_SCHEMA.findField("stringCol").fieldId()), + columnSizeStats.get(PRIMITIVE_SCHEMA.findField("stringCol").fieldId()), 4L, 0L, null, From 1fd914fe6fc3bd08b6c8cc1fc09ddb57dd789e1f Mon Sep 17 00:00:00 2001 From: Prashant Singh Date: Sun, 23 Apr 2023 21:01:30 -0700 Subject: [PATCH 08/10] Add jvm flags for aws integ test --- build.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/build.gradle b/build.gradle index f34defbf3be0..cef0cfbe0ad1 100644 --- a/build.gradle +++ b/build.gradle @@ -518,6 +518,7 @@ project(':iceberg-aws') { task integrationTest(type: Test) { testClassesDirs = sourceSets.integration.output.classesDirs classpath = 
sourceSets.integration.runtimeClasspath + jvmArgs += project.property('extraJvmArgs') } } From 3d2fae1fd7ad94cc3d11bbe20bd17731cd6a8363 Mon Sep 17 00:00:00 2001 From: Prashant Singh Date: Mon, 24 Apr 2023 14:31:06 -0700 Subject: [PATCH 09/10] Address review feedback --- .../source/TestMetadataTableReadableMetrics.java | 11 ++++++----- .../source/TestMetadataTableReadableMetrics.java | 11 ++++++----- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java index f80ea4cdc11d..416d5eed5b65 100644 --- a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java +++ b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java @@ -77,8 +77,6 @@ public class TestMetadataTableReadableMetrics extends SparkTestBaseWithCatalog { optional(8, "fixedCol", Types.FixedType.ofLength(3)), optional(9, "binaryCol", Types.BinaryType.get())); - private DataFile dataFile; - public TestMetadataTableReadableMetrics() { // only SparkCatalog supports metadata table sql queries super(SparkCatalogConfig.HIVE); @@ -125,7 +123,8 @@ private Table createPrimitiveTable() throws IOException { createPrimitiveRecord( false, 2, 2L, Float.NaN, 2.0D, new BigDecimal("2.00"), "2", null, null)); - dataFile = FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); + DataFile dataFile = + FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); table.newAppend().appendFile(dataFile).commit(); return table; } @@ -143,7 +142,8 @@ private void createNestedTable() throws IOException { createNestedRecord(0L, 0.0), createNestedRecord(1L, Double.NaN), createNestedRecord(null, null)); - dataFile = FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); + DataFile 
dataFile = + FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); table.newAppend().appendFile(dataFile).commit(); } @@ -192,7 +192,8 @@ private GenericRecord createNestedRecord(Long longCol, Double doubleCol) { @Test public void testPrimitiveColumns() throws Exception { - createPrimitiveTable(); + Table table = createPrimitiveTable(); + DataFile dataFile = table.currentSnapshot().addedDataFiles(table.io()).iterator().next(); Map columnSizeStats = dataFile.columnSizes(); Object[] binaryCol = diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java index f80ea4cdc11d..416d5eed5b65 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTableReadableMetrics.java @@ -77,8 +77,6 @@ public class TestMetadataTableReadableMetrics extends SparkTestBaseWithCatalog { optional(8, "fixedCol", Types.FixedType.ofLength(3)), optional(9, "binaryCol", Types.BinaryType.get())); - private DataFile dataFile; - public TestMetadataTableReadableMetrics() { // only SparkCatalog supports metadata table sql queries super(SparkCatalogConfig.HIVE); @@ -125,7 +123,8 @@ private Table createPrimitiveTable() throws IOException { createPrimitiveRecord( false, 2, 2L, Float.NaN, 2.0D, new BigDecimal("2.00"), "2", null, null)); - dataFile = FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); + DataFile dataFile = + FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); table.newAppend().appendFile(dataFile).commit(); return table; } @@ -143,7 +142,8 @@ private void createNestedTable() throws IOException { createNestedRecord(0L, 0.0), createNestedRecord(1L, Double.NaN), createNestedRecord(null, null)); - dataFile = 
FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); + DataFile dataFile = + FileHelpers.writeDataFile(table, Files.localOutput(temp.newFile()), records); table.newAppend().appendFile(dataFile).commit(); } @@ -192,7 +192,8 @@ private GenericRecord createNestedRecord(Long longCol, Double doubleCol) { @Test public void testPrimitiveColumns() throws Exception { - createPrimitiveTable(); + Table table = createPrimitiveTable(); + DataFile dataFile = table.currentSnapshot().addedDataFiles(table.io()).iterator().next(); Map columnSizeStats = dataFile.columnSizes(); Object[] binaryCol = From 82fec3de678b92c05f064c49a8fcb1e73655a09a Mon Sep 17 00:00:00 2001 From: Prashant Singh Date: Tue, 25 Apr 2023 11:34:13 -0700 Subject: [PATCH 10/10] Add time regex --- .../org/apache/iceberg/expressions/ExpressionUtil.java | 2 +- .../apache/iceberg/expressions/TestExpressionUtil.java | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/api/src/main/java/org/apache/iceberg/expressions/ExpressionUtil.java b/api/src/main/java/org/apache/iceberg/expressions/ExpressionUtil.java index 8f3aeea38bc1..7eb61cc14e69 100644 --- a/api/src/main/java/org/apache/iceberg/expressions/ExpressionUtil.java +++ b/api/src/main/java/org/apache/iceberg/expressions/ExpressionUtil.java @@ -45,7 +45,7 @@ public class ExpressionUtil { private static final long THREE_DAYS_IN_HOURS = TimeUnit.DAYS.toHours(3); private static final long NINETY_DAYS_IN_HOURS = TimeUnit.DAYS.toHours(90); private static final Pattern DATE = Pattern.compile("\\d{4}-\\d{2}-\\d{2}"); - private static final Pattern TIME = Pattern.compile("\\d{2}:\\d{2}(:\\d{2}(.\\d{1,6})?)?"); + private static final Pattern TIME = Pattern.compile("\\d{2}:\\d{2}(:\\d{2}(.\\d{1,9})?)?"); private static final Pattern TIMESTAMP = Pattern.compile("\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}(:\\d{2}(.\\d{1,9})?)?"); private static final Pattern TIMESTAMPTZ = diff --git 
a/api/src/test/java/org/apache/iceberg/expressions/TestExpressionUtil.java b/api/src/test/java/org/apache/iceberg/expressions/TestExpressionUtil.java index 770a9df13a90..a64b3299c148 100644 --- a/api/src/test/java/org/apache/iceberg/expressions/TestExpressionUtil.java +++ b/api/src/test/java/org/apache/iceberg/expressions/TestExpressionUtil.java @@ -30,6 +30,7 @@ import org.apache.iceberg.Schema; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.types.Types; +import org.apache.iceberg.util.DateTimeUtil; import org.assertj.core.api.Assertions; import org.junit.Assert; import org.junit.Test; @@ -314,14 +315,17 @@ public void testSanitizeDate() { @Test public void testSanitizeTime() { + long micros = DateTimeUtil.microsFromTimestamptz(OffsetDateTime.now()) / 1000000; + String currentTime = DateTimeUtil.microsToIsoTime(micros); + assertEquals( Expressions.equal("test", "(time)"), - ExpressionUtil.sanitize(Expressions.equal("test", "23:49:51"))); + ExpressionUtil.sanitize(Expressions.equal("test", currentTime))); Assert.assertEquals( "Sanitized string should be identical except for descriptive literal", "test = (time)", - ExpressionUtil.toSanitizedString(Expressions.equal("test", "23:49:51"))); + ExpressionUtil.toSanitizedString(Expressions.equal("test", currentTime))); } @Test