diff --git a/.github/workflows/codeql-csharp-analysis.yml b/.github/workflows/codeql-csharp-analysis.yml index 48af1be55c1..f18428ce200 100644 --- a/.github/workflows/codeql-csharp-analysis.yml +++ b/.github/workflows/codeql-csharp-analysis.yml @@ -67,6 +67,7 @@ jobs: 3.1.x 5.0.x 6.0.x + 7.0.x # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL diff --git a/.github/workflows/maven4.yml b/.github/workflows/maven4.yml new file mode 100644 index 00000000000..5e29e019639 --- /dev/null +++ b/.github/workflows/maven4.yml @@ -0,0 +1,65 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +name: 'Maven 4' +on: + workflow_dispatch: + push: + branches: [ master ] + pull_request: + branches: [ master ] + paths: + - .github/workflows/maven4.yml + - lang/java/** + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + maven4: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Cache Local Maven Repository + uses: actions/cache@v3 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-maven- + + - name: Cache Maven 4 Build Cache + uses: actions/cache@v3 + with: + path: ~/.m2/build-cache + key: ${{ runner.os }}-maven-build-cache-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-maven-build-cache + + - name: Setup Java + uses: actions/setup-java@v3 + with: + distribution: 'adopt' + java-version: '11' + + - name: Setup Maven 4 + uses: stCarolas/setup-maven@v4.5 + with: + maven-version: 4.0.0-alpha-3 + + - name: Test + run: mvn clean package diff --git a/.github/workflows/test-lang-csharp.yml b/.github/workflows/test-lang-csharp.yml index b436f270c88..1b1d5af9527 100644 --- a/.github/workflows/test-lang-csharp.yml +++ b/.github/workflows/test-lang-csharp.yml @@ -36,21 +36,22 @@ jobs: test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Add libzstd shell: bash run: sudo apt-get install -y libzstd-dev - name: Install .NET SDKs - uses: actions/setup-dotnet@v1 + uses: actions/setup-dotnet@v3 with: dotnet-version: | 3.1.x 5.0.x 6.0.x + 7.0.x - - uses: actions/cache@v2 + - uses: actions/cache@v3 with: path: ~/.nuget/packages key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }} @@ -63,38 +64,26 @@ jobs: - name: Test run: ./build.sh test - # Build and test against .NET 7 - # .NET 7 is not released yet, however this is a good way to test if the project is ready for the release - # Once .NET 7 is officially released, this can be removed 
and 7.0.x can be used instead above - - name: Install .NET SDK 7.0 (pre-release) - uses: actions/setup-dotnet@v1 - with: - include-prerelease: true - dotnet-version: | - 7.0.x - - - name: Test .NET 7.0 (pre-release) - run: ./build.sh test - interop: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Add libzstd shell: bash run: sudo apt-get install -y libzstd-dev - name: Install .NET SDKs - uses: actions/setup-dotnet@v1 + uses: actions/setup-dotnet@v3 with: dotnet-version: | 3.1.x 5.0.x 6.0.x + 7.0.x - name: Cache Local Maven Repository - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: ~/.m2/repository key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} diff --git a/.github/workflows/test-lang-java.yml b/.github/workflows/test-lang-java.yml index c7049437a32..3f1917283a3 100644 --- a/.github/workflows/test-lang-java.yml +++ b/.github/workflows/test-lang-java.yml @@ -125,6 +125,7 @@ jobs: 3.1.x 5.0.x 6.0.x + 7.0.x - name: Install Java Avro for Interop Test working-directory: . diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml new file mode 100644 index 00000000000..c6000969d38 --- /dev/null +++ b/.mvn/extensions.xml @@ -0,0 +1,25 @@ + + + + + org.apache.maven.extensions + maven-build-cache-extension + 1.0.0 + + diff --git a/build.sh b/build.sh index 52ee9cd30c3..0bd1e1880c4 100755 --- a/build.sh +++ b/build.sh @@ -56,6 +56,9 @@ DOCKER_BUILD_XTRA_ARGS=${DOCKER_BUILD_XTRA_ARGS-} # Override the docker image name used. DOCKER_IMAGE_NAME=${DOCKER_IMAGE_NAME-} +# When building a docker container, these are the files that will be sent and available. 
+DOCKER_EXTRA_CONTEXT="lang/ruby/Gemfile lang/ruby/avro.gemspec lang/ruby/Manifest share/VERSION.txt" + usage() { echo "Usage: $0 {lint|test|dist|sign|clean|veryclean|docker [--args \"docker-args\"]|rat|githooks|docker-test}" exit 1 @@ -300,8 +303,9 @@ do echo "RUN getent group $GROUP_ID || groupadd -g $GROUP_ID $USER_NAME" echo "RUN getent passwd $USER_ID || useradd -g $GROUP_ID -u $USER_ID -k /root -m $USER_NAME" } > Dockerfile + # Include the ruby gemspec for preinstallation. # shellcheck disable=SC2086 - tar -cf- lang/ruby/Gemfile Dockerfile | docker build $DOCKER_BUILD_XTRA_ARGS -t "$DOCKER_IMAGE_NAME" - + tar -cf- Dockerfile $DOCKER_EXTRA_CONTEXT | docker build $DOCKER_BUILD_XTRA_ARGS -t "$DOCKER_IMAGE_NAME" - rm Dockerfile # By mapping the .m2 directory you can do an mvn install from # within the container and use the result on your normal @@ -336,7 +340,7 @@ do ;; docker-test) - tar -cf- share/docker/Dockerfile lang/ruby/Gemfile | + tar -cf- share/docker/Dockerfile $DOCKER_EXTRA_CONTEXT | docker build -t avro-test -f share/docker/Dockerfile - docker run --rm -v "${PWD}:/avro${DOCKER_MOUNT_FLAG}" --env "JAVA=${JAVA:-8}" avro-test /avro/share/docker/run-tests.sh ;; diff --git a/lang/c++/api/Reader.hh b/lang/c++/api/Reader.hh index ca6a719e31c..588a912648a 100644 --- a/lang/c++/api/Reader.hh +++ b/lang/c++/api/Reader.hh @@ -84,7 +84,7 @@ public: union { double d; uint64_t i; - } v; + } v = { 0 }; reader_.read(v.i); val = v.d; } diff --git a/lang/c++/api/buffer/Buffer.hh b/lang/c++/api/buffer/Buffer.hh index bc3baf12330..45c439d6d43 100644 --- a/lang/c++/api/buffer/Buffer.hh +++ b/lang/c++/api/buffer/Buffer.hh @@ -145,7 +145,7 @@ public: **/ size_type wroteTo(size_type size) { - int wrote = 0; + size_type wrote = 0; if (size) { if (size > freeSpace()) { throw std::length_error("Impossible to write more data than free space"); diff --git a/lang/c/tests/test_avro_commons_schema.c b/lang/c/tests/test_avro_commons_schema.c index c4679d89df9..e3751e9836a 100644 --- 
a/lang/c/tests/test_avro_commons_schema.c +++ b/lang/c/tests/test_avro_commons_schema.c @@ -104,6 +104,8 @@ static void read_data(const char *dirpath, avro_schema_t schema) { fprintf(stdout, "\nExit run test OK => %d records", records_read); remove("./copy.avro"); fflush(stdout); + avro_file_reader_close(reader); + avro_file_writer_close(writer); } static void run_tests(const char *dirpath) @@ -111,6 +113,7 @@ static void run_tests(const char *dirpath) fprintf(stdout, "\nRun test for path '%s'", dirpath); avro_schema_t schema = read_common_schema_test(dirpath); read_data(dirpath, schema); + avro_schema_decref(schema); } diff --git a/lang/csharp/README.md b/lang/csharp/README.md index 70fc90e86da..6f923fe0273 100644 --- a/lang/csharp/README.md +++ b/lang/csharp/README.md @@ -17,20 +17,20 @@ Install-Package Apache.Avro ## Project Target Frameworks -| Project | Published to nuget.org | Type | .NET Standard 2.0 | .NET Standard 2.1 | .NET Core 3.1 | .NET 5.0 | .NET 6.0 | -|:-------------------:|:--------------------------:|:----------:|:------------------:|:-----------------:|:-------------:|:---------:|:---------:| -| Avro.main | Apache.Avro | Library | ✔️ | ✔️ | | | | -| Avro.File.Snappy | Apache.Avro.File.Snappy | Library | ✔️ | ✔️ | | | | -| Avro.File.BZip2 | Apache.Avro.File.BZip2 | Library | ✔️ | ✔️ | | | | -| Avro.File.XZ | Apache.Avro.File.XZ | Library | ✔️ | ✔️ | | | | -| Avro.File.Zstandard | Apache.Avro.File.Zstandard | Library | ✔️ | ✔️ | | | | -| Avro.codegen | Apache.Avro.Tools | Exe | | | ✔️ |✔️ |✔️ | -| Avro.ipc | | Library | ✔️ | ✔️ | | | | -| Avro.ipc.test | | Unit Tests | | | ✔️ |✔️ |✔️ | -| Avro.msbuild | | Library | ✔️ | ✔️ | | | | -| Avro.perf | | Exe | | | ✔️ |✔️ |✔️ | -| Avro.test | | Unit Tests | | | ✔️ |✔️ |✔️ | -| Avro.benchmark | | Exe | | | ✔️ |✔️ |✔️ | +| Project | Published to nuget.org | Type | .NET Standard 2.0 | .NET Standard 2.1 | .NET Core 3.1 | .NET 5.0 | .NET 6.0 | .NET 7.0 | 
+|:-------------------:|:--------------------------:|:----------:|:------------------:|:-----------------:|:-------------:|:---------:|:---------:|:---------:| +| Avro.main | Apache.Avro | Library | ✔️ | ✔️ | | | | | +| Avro.File.Snappy | Apache.Avro.File.Snappy | Library | ✔️ | ✔️ | | | | | +| Avro.File.BZip2 | Apache.Avro.File.BZip2 | Library | ✔️ | ✔️ | | | | | +| Avro.File.XZ | Apache.Avro.File.XZ | Library | ✔️ | ✔️ | | | | | +| Avro.File.Zstandard | Apache.Avro.File.Zstandard | Library | ✔️ | ✔️ | | | | | +| Avro.codegen | Apache.Avro.Tools | Exe | | | ✔️ |✔️ |✔️ |✔️ | +| Avro.ipc | | Library | ✔️ | ✔️ | | | | | +| Avro.ipc.test | | Unit Tests | | | ✔️ |✔️ |✔️ |✔️ | +| Avro.msbuild | | Library | ✔️ | ✔️ | | | | | +| Avro.perf | | Exe | | | ✔️ |✔️ |✔️ |✔️ | +| Avro.test | | Unit Tests | | | ✔️ |✔️ |✔️ |✔️ | +| Avro.benchmark | | Exe | | | ✔️ |✔️ |✔️ |✔️ | ## Dependency package version strategy diff --git a/lang/csharp/build.sh b/lang/csharp/build.sh index c65e08d5df1..623ef03a353 100755 --- a/lang/csharp/build.sh +++ b/lang/csharp/build.sh @@ -42,7 +42,7 @@ do perf) pushd ./src/apache/perf/ - dotnet run --configuration Release --framework net6.0 + dotnet run --configuration Release --framework net7.0 ;; dist) @@ -77,7 +77,7 @@ do ;; interop-data-generate) - dotnet run --project src/apache/test/Avro.test.csproj --framework net6.0 ../../share/test/schemas/interop.avsc ../../build/interop/data + dotnet run --project src/apache/test/Avro.test.csproj --framework net7.0 ../../share/test/schemas/interop.avsc ../../build/interop/data ;; interop-data-test) diff --git a/lang/csharp/common.props b/lang/csharp/common.props index 72a79fda086..f7d03625598 100644 --- a/lang/csharp/common.props +++ b/lang/csharp/common.props @@ -37,9 +37,7 @@ - - netcoreapp3.1;net5.0;net6.0 - net7.0 + netcoreapp3.1;net5.0;net6.0;net7.0 netstandard2.0;netstandard2.1 @@ -61,6 +59,12 @@ + + + false + true + + false true diff --git a/lang/csharp/src/apache/benchmark/Avro.benchmark.csproj 
b/lang/csharp/src/apache/benchmark/Avro.benchmark.csproj index 5b38895a18c..b944de3c2d4 100644 --- a/lang/csharp/src/apache/benchmark/Avro.benchmark.csproj +++ b/lang/csharp/src/apache/benchmark/Avro.benchmark.csproj @@ -31,6 +31,12 @@ false + + + + $(NoWarn);CS8981 + + diff --git a/lang/csharp/src/apache/benchmark/Program.cs b/lang/csharp/src/apache/benchmark/Program.cs index 4381897e24e..5b63517623c 100644 --- a/lang/csharp/src/apache/benchmark/Program.cs +++ b/lang/csharp/src/apache/benchmark/Program.cs @@ -21,8 +21,8 @@ namespace Avro.Benchmark { public class Program { - // dotnet run -c Release -f net6.0 - // dotnet run -c Release -f net6.0 --runtimes netcoreapp3.1 net5.0 net6.0 + // dotnet run -c Release -f net7.0 + // dotnet run -c Release -f net7.0 --runtimes netcoreapp3.1 net5.0 net6.0 net7.0 public static void Main(string[] args) { BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args); diff --git a/lang/csharp/src/apache/main/IO/Encoder.cs b/lang/csharp/src/apache/main/IO/Encoder.cs index 84a2099a195..0c1712af430 100644 --- a/lang/csharp/src/apache/main/IO/Encoder.cs +++ b/lang/csharp/src/apache/main/IO/Encoder.cs @@ -187,5 +187,10 @@ public interface Encoder /// Position within data where the contents start. /// Number of bytes to write. void WriteFixed(byte[] data, int start, int len); + + /// + /// Flushes the encoder. + /// + void Flush(); } } diff --git a/lang/csharp/src/apache/main/IO/JsonEncoder.cs b/lang/csharp/src/apache/main/IO/JsonEncoder.cs index 48415d7cddd..c159a013e8c 100644 --- a/lang/csharp/src/apache/main/IO/JsonEncoder.cs +++ b/lang/csharp/src/apache/main/IO/JsonEncoder.cs @@ -28,7 +28,7 @@ namespace Avro.IO /// An for Avro's JSON data encoding. /// /// JsonEncoder buffers output, and data may not appear on the output until - /// is called. + /// is called. /// /// JsonEncoder is not thread-safe. 
/// diff --git a/lang/csharp/src/apache/test/IO/JsonCodecTests.cs b/lang/csharp/src/apache/test/IO/JsonCodecTests.cs index 145b8df7294..28aab10e70c 100644 --- a/lang/csharp/src/apache/test/IO/JsonCodecTests.cs +++ b/lang/csharp/src/apache/test/IO/JsonCodecTests.cs @@ -316,8 +316,8 @@ private byte[] fromJsonToAvro(string json, Schema schema) GenericDatumWriter writer = new GenericDatumWriter(schema); MemoryStream output = new MemoryStream(); - JsonDecoder decoder = new JsonDecoder(schema, json); - BinaryEncoder encoder = new BinaryEncoder(output); + Decoder decoder = new JsonDecoder(schema, json); + Encoder encoder = new BinaryEncoder(output); object datum = reader.Read(null, decoder); diff --git a/lang/csharp/versions.props b/lang/csharp/versions.props index 170e609db36..4acdaa9d759 100644 --- a/lang/csharp/versions.props +++ b/lang/csharp/versions.props @@ -26,14 +26,14 @@ !!! SHIPPED CLASS LIBRARIES SHOULD USE MINIMUMVERSIONs FOR SOME LIBRARIES. SEE BELOW !!! --> - 12.0.3 - 6.0.0 + 13.0.1 + 7.0.0 4.3.0 4.7.0 4.7.0 - 1.3.3 + 1.4.1 1.3.0 4.1.0 1.1.7 @@ -55,21 +55,19 @@ Please sort the packages alphabetically --> - 0.13.1 - 3.1.2 - 3.1.2 - 17.1.0 - 17.1.0 - 4.1.0 - 4.1.0 - 4.1.0 - - 6.0.0 - 7.0.0-preview* - 17.1.0 - 3.13.2 - 3.15.0 - 4.2.1 + 0.13.2 + 3.2.0 + 3.2.0 + 17.4.0 + 17.4.0 + 4.3.1 + 4.3.1 + 4.3.1 + 7.0.1 + 17.4.0 + 3.13.3 + 3.15.2 + 4.3.0 1.1.118 diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java index d33f8bbf018..597a21949dc 100644 --- a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java +++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java @@ -1137,6 +1137,69 @@ public int compare(Object o1, Object o2, Schema s) { return compare(o1, o2, s, false); } + protected int compareMaps(final Map m1, final Map m2) { + if (m1 == m2) { + return 0; + } + + if (m1.size() != m2.size()) { + return 1; + } + + /** + * Peek at
keys, assuming they're all the same type within a Map + */ + final Object key1 = m1.keySet().iterator().next(); + final Object key2 = m2.keySet().iterator().next(); + boolean utf8ToString = false; + boolean stringToUtf8 = false; + + if (key1 instanceof Utf8 && key2 instanceof String) { + utf8ToString = true; + } else if (key1 instanceof String && key2 instanceof Utf8) { + stringToUtf8 = true; + } + + try { + for (Map.Entry e : m1.entrySet()) { + final Object key = e.getKey(); + Object lookupKey = key; + if (utf8ToString) { + lookupKey = key.toString(); + } else if (stringToUtf8) { + lookupKey = new Utf8((String) lookupKey); + } + final Object value = e.getValue(); + if (value == null) { + if (!(m2.get(lookupKey) == null && m2.containsKey(lookupKey))) { + return 1; + } + } else { + final Object value2 = m2.get(lookupKey); + if (value instanceof Utf8 && value2 instanceof String) { + if (!value.toString().equals(value2)) { + return 1; + } + } else if (value instanceof String && value2 instanceof Utf8) { + if (!new Utf8((String) value).equals(value2)) { + return 1; + } + } else { + if (!value.equals(value2)) { + return 1; + } + } + } + } + } catch (ClassCastException unused) { + return 1; + } catch (NullPointerException unused) { + return 1; + } + + return 0; + } + /** * Comparison implementation. When equals is true, only checks for equality, not * for order. @@ -1173,7 +1236,7 @@ protected int compare(Object o1, Object o2, Schema s, boolean equals) { return e1.hasNext() ? 1 : (e2.hasNext() ? -1 : 0); case MAP: if (equals) - return o1.equals(o2) ? 
0 : 1; + return compareMaps((Map) o1, (Map) o2); throw new AvroRuntimeException("Can't compare maps!"); case UNION: int i1 = resolveUnion(s, o1); diff --git a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java index 21492d1b0ec..54e8c662a96 100644 --- a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java +++ b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java @@ -145,6 +145,64 @@ public void testEquals() { assertEquals(r1, r2); } + @Test + public void testMapKeyEqualsStringAndUtf8Compatibility() { + Field myMapField = new Field("my_map", Schema.createMap(Schema.create(Schema.Type.STRING)), null, null); + Schema schema = Schema.createRecord("my_record", "doc", "mytest", false); + schema.setFields(Arrays.asList(myMapField)); + GenericRecord r0 = new GenericData.Record(schema); + GenericRecord r1 = new GenericData.Record(schema); + + HashMap pair1 = new HashMap<>(); + pair1.put("keyOne", "valueOne"); + r0.put("my_map", pair1); + + HashMap pair2 = new HashMap<>(); + pair2.put(new Utf8("keyOne"), "valueOne"); + r1.put("my_map", pair2); + + assertEquals(r0, r1); + assertEquals(r1, r0); + } + + @Test + public void testMapValuesEqualsStringAndUtf8Compatibility() { + Field myMapField = new Field("my_map", Schema.createMap(Schema.create(Schema.Type.STRING)), null, null); + Schema schema = Schema.createRecord("my_record", "doc", "mytest", false); + schema.setFields(Arrays.asList(myMapField)); + GenericRecord r0 = new GenericData.Record(schema); + GenericRecord r1 = new GenericData.Record(schema); + + HashMap pair1 = new HashMap<>(); + pair1.put("keyOne", "valueOne"); + r0.put("my_map", pair1); + + HashMap pair2 = new HashMap<>(); + pair2.put("keyOne", new Utf8("valueOne")); + r1.put("my_map", pair2); + + assertEquals(r0, r1); + assertEquals(r1, r0); + } + + @Test + public void testArrayValuesEqualsStringAndUtf8Compatibility() { + 
Field myArrayField = new Field("my_array", Schema.createArray(Schema.create(Schema.Type.STRING)), null, null); + Schema schema = Schema.createRecord("my_record", "doc", "mytest", false); + schema.setFields(Arrays.asList(myArrayField)); + GenericRecord r0 = new GenericData.Record(schema); + GenericRecord r1 = new GenericData.Record(schema); + + List array1 = Arrays.asList("valueOne"); + r0.put("my_array", array1); + + List array2 = Arrays.asList(new Utf8("valueOne")); + r1.put("my_array", array2); + + assertEquals(r0, r1); + assertEquals(r1, r0); + } + private Schema recordSchema() { List fields = new ArrayList<>(); fields.add(new Field("anArray", Schema.createArray(Schema.create(Type.STRING)), null, null)); diff --git a/lang/java/avro/src/test/java/org/apache/avro/specific/TestSpecificData.java b/lang/java/avro/src/test/java/org/apache/avro/specific/TestSpecificData.java index 3fb17a7f30d..faa60bdc6e8 100644 --- a/lang/java/avro/src/test/java/org/apache/avro/specific/TestSpecificData.java +++ b/lang/java/avro/src/test/java/org/apache/avro/specific/TestSpecificData.java @@ -176,7 +176,7 @@ public void testNonStringable() throws Exception { } @Test - void classNameContainingReservedWords() { + public void classNameContainingReservedWords() { final Schema schema = Schema.createRecord("AnyName", null, "db.public.table", false); assertEquals("db.public$.table.AnyName", SpecificData.getClassName(schema)); diff --git a/lang/java/maven-plugin/pom.xml b/lang/java/maven-plugin/pom.xml index 4845222f7d2..0ebc7e90519 100644 --- a/lang/java/maven-plugin/pom.xml +++ b/lang/java/maven-plugin/pom.xml @@ -73,7 +73,7 @@ org.codehaus.plexus plexus-utils - 3.5.0 + 3.5.1 provided diff --git a/lang/java/pom.xml b/lang/java/pom.xml index 82b6ea4320a..2a36d94bd8e 100644 --- a/lang/java/pom.xml +++ b/lang/java/pom.xml @@ -37,18 +37,18 @@ ${project.parent.basedir} - 3.3.4 - 2.12.7.20221012 + 3.3.5 + 2.14.2 4.0.1 - 9.4.50.v20221201 + 9.4.51.v20230217 5.0.4 - 5.9.2 - 4.1.89.Final - 3.22.2 + 
5.9.3 + 4.1.93.Final + 3.23.2 0.16.0 1.7.36 - 1.2.24 - 1.1.9.0 + 1.2.25 + 1.1.10.0 2.3 3.3.9 1.10.13 @@ -58,8 +58,8 @@ 1.9 4.11.0 2.2 - 1.52.1 - 1.5.2-5 + 1.55.1 + 1.5.5-4 3.2.1 5.1.8 @@ -67,6 +67,7 @@ 3.1.0 3.0.3 7.0.12 + 2.7.9 @@ -97,7 +98,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.2.0 + 3.4.0 org.apache.maven.plugins @@ -156,7 +157,7 @@ org.apache.maven.plugins maven-surefire-plugin - 3.0.0-M7 + ${maven-surefire-plugin.version} @@ -233,6 +234,11 @@ spotless-maven-plugin ${spotless-maven-plugin.version} + + org.cyclonedx + cyclonedx-maven-plugin + ${cyclonedx-maven-plugin.version} + @@ -320,6 +326,18 @@ + + org.cyclonedx + cyclonedx-maven-plugin + + + package + + makeBom + + + + diff --git a/lang/py/avro/io.py b/lang/py/avro/io.py index 998dcd863a8..7b5576697eb 100644 --- a/lang/py/avro/io.py +++ b/lang/py/avro/io.py @@ -482,7 +482,7 @@ def write_decimal_bytes(self, datum: decimal.Decimal, scale: int) -> None: signed long is 8, 8 bytes are written. """ sign, digits, exp = datum.as_tuple() - if (-1 * exp) > scale: + if (-1 * int(exp)) > scale: raise avro.errors.AvroOutOfScaleException(scale, datum, exp) unscaled_datum = 0 @@ -508,7 +508,7 @@ def write_decimal_fixed(self, datum: decimal.Decimal, scale: int, size: int) -> Decimal in fixed are encoded as size of fixed bytes. 
""" sign, digits, exp = datum.as_tuple() - if (-1 * exp) > scale: + if (-1 * int(exp)) > scale: raise avro.errors.AvroOutOfScaleException(scale, datum, exp) unscaled_datum = 0 diff --git a/lang/py/avro/test/test_compatibility.py b/lang/py/avro/test/test_compatibility.py index 3c36b6f846d..e9af424e919 100644 --- a/lang/py/avro/test/test_compatibility.py +++ b/lang/py/avro/test/test_compatibility.py @@ -691,7 +691,7 @@ def test_schema_compatibility(self): (WITHOUT_NAMESPACE_RECORD, WITH_NAMESPACE_RECORD), ] - for (reader, writer) in compatible_reader_writer_test_cases: + for reader, writer in compatible_reader_writer_test_cases: self.assertTrue(self.are_compatible(reader, writer)) def test_schema_compatibility_fixed_size_mismatch(self): @@ -711,7 +711,7 @@ def test_schema_compatibility_fixed_size_mismatch(self): "/fields/1/type/size", ), ] - for (reader, writer, message, location) in incompatible_fixed_pairs: + for reader, writer, message, location in incompatible_fixed_pairs: result = ReaderWriterCompatibilityChecker().get_compatibility(reader, writer) self.assertIs(result.compatibility, SchemaCompatibilityType.incompatible) self.assertIn( @@ -737,7 +737,7 @@ def test_schema_compatibility_missing_enum_symbols(self): "/fields/0/type/symbols", ), ] - for (reader, writer, message, location) in incompatible_pairs: + for reader, writer, message, location in incompatible_pairs: result = ReaderWriterCompatibilityChecker().get_compatibility(reader, writer) self.assertIs(result.compatibility, SchemaCompatibilityType.incompatible) self.assertIn(message, result.messages) @@ -853,7 +853,7 @@ def test_schema_compatibility_missing_union_branch(self): ), ] - for (reader, writer, message, location) in incompatible_pairs: + for reader, writer, message, location in incompatible_pairs: result = ReaderWriterCompatibilityChecker().get_compatibility(reader, writer) self.assertIs(result.compatibility, SchemaCompatibilityType.incompatible) self.assertEqual(result.messages, message) @@ 
-872,7 +872,7 @@ def test_schema_compatibility_name_mismatch(self): ), ] - for (reader, writer, message, location) in incompatible_pairs: + for reader, writer, message, location in incompatible_pairs: result = ReaderWriterCompatibilityChecker().get_compatibility(reader, writer) self.assertIs(result.compatibility, SchemaCompatibilityType.incompatible) self.assertIn(message, result.messages) @@ -883,7 +883,7 @@ def test_schema_compatibility_reader_field_missing_default_value(self): (A_INT_RECORD1, EMPTY_RECORD1, "a", "/fields/0"), (A_INT_B_DINT_RECORD1, EMPTY_RECORD1, "a", "/fields/0"), ] - for (reader, writer, message, location) in incompatible_pairs: + for reader, writer, message, location in incompatible_pairs: result = ReaderWriterCompatibilityChecker().get_compatibility(reader, writer) self.assertIs(result.compatibility, SchemaCompatibilityType.incompatible) self.assertEqual(len(result.messages), 1) @@ -1063,7 +1063,7 @@ def test_schema_compatibility_type_mismatch(self): "/", ), ] - for (reader, writer, message, location) in incompatible_pairs: + for reader, writer, message, location in incompatible_pairs: result = ReaderWriterCompatibilityChecker().get_compatibility(reader, writer) self.assertIs(result.compatibility, SchemaCompatibilityType.incompatible) self.assertIn(message, result.messages) diff --git a/lang/py/avro/test/test_datafile_interop.py b/lang/py/avro/test/test_datafile_interop.py index d9e4c690daf..7dec16e7810 100644 --- a/lang/py/avro/test/test_datafile_interop.py +++ b/lang/py/avro/test/test_datafile_interop.py @@ -42,7 +42,6 @@ def test_interop(self) -> None: continue i = None with self.subTest(filename=filename), avro.datafile.DataFileReader(filename.open("rb"), avro.io.DatumReader()) as dfr: - user_metadata = dfr.get_meta("user_metadata") if user_metadata is not None: self.assertEqual(user_metadata, b"someByteArray") diff --git a/lang/py/avro/test/test_io.py b/lang/py/avro/test/test_io.py index 29dd82130ce..b77c17fb98b 100644 --- 
a/lang/py/avro/test/test_io.py +++ b/lang/py/avro/test/test_io.py @@ -450,7 +450,6 @@ def check_default_value(self) -> None: class TestIncompatibleSchemaReading(unittest.TestCase): def test_deserialization_fails(self) -> None: - reader_schema = avro.schema.parse( json.dumps( { @@ -505,7 +504,7 @@ def test_decimal_bytes_small_scale(self) -> None: """Avro should raise an AvroTypeException when attempting to write a decimal with a larger exponent than the schema's scale.""" datum = decimal.Decimal("3.1415") _, _, exp = datum.as_tuple() - scale = -1 * exp - 1 + scale = -1 * int(exp) - 1 schema = avro.schema.parse( json.dumps( { @@ -522,7 +521,7 @@ def test_decimal_fixed_small_scale(self) -> None: """Avro should raise an AvroTypeException when attempting to write a decimal with a larger exponent than the schema's scale.""" datum = decimal.Decimal("3.1415") _, _, exp = datum.as_tuple() - scale = -1 * exp - 1 + scale = -1 * int(exp) - 1 schema = avro.schema.parse( json.dumps( { diff --git a/lang/py/avro/test/test_schema.py b/lang/py/avro/test/test_schema.py index 8286567f9ff..c59ded8a73e 100644 --- a/lang/py/avro/test/test_schema.py +++ b/lang/py/avro/test/test_schema.py @@ -662,7 +662,7 @@ def parse_valid(self) -> None: try: warnings.filterwarnings(action="error", category=avro.errors.IgnoredLogicalType) self.test_schema.parse() - except (avro.errors.IgnoredLogicalType) as e: + except avro.errors.IgnoredLogicalType as e: self.assertIn(type(e), (type(w) for w in test_warnings)) self.assertIn(str(e), (str(w) for w in test_warnings)) except (avro.errors.AvroException, avro.errors.SchemaParseException): # pragma: no coverage diff --git a/lang/py/avro/tether/tether_task.py b/lang/py/avro/tether/tether_task.py index dc138d07d70..c521fa56b4c 100644 --- a/lang/py/avro/tether/tether_task.py +++ b/lang/py/avro/tether/tether_task.py @@ -300,7 +300,6 @@ def configure(self, taskType, inSchemaText, outSchemaText): self._red_fkeys = [f.name for f in self.midschema.fields if not 
(f.order == "ignore")] except Exception as e: - estr = traceback.format_exc() self.fail(estr) @@ -335,7 +334,6 @@ def input(self, data, count): self.map(inRecord, self.midCollector) elif self.taskType == TaskType.REDUCE: - # store the previous record prev = self.midRecord diff --git a/lang/rust/.gitignore b/lang/rust/.gitignore index 9ac07d70741..875c6ff7096 100644 --- a/lang/rust/.gitignore +++ b/lang/rust/.gitignore @@ -4,3 +4,5 @@ .idea/ *.iml precommit_venv/ +fleet.toml +**/.cargo/config.toml diff --git a/pom.xml b/pom.xml index 1ee27bccd6c..d4947e07e13 100644 --- a/pom.xml +++ b/pom.xml @@ -22,7 +22,7 @@ org.apache apache - 27 + 29 org.apache.avro @@ -48,23 +48,23 @@ 0.15 - 3.2.1 + 3.2.2 9.3 - 3.1.0 - 1.6.1 + 3.3.0 + 1.7.0 3.1.0 - 3.0.1 + 3.1.0 3.5.0 - 3.7.1 + 3.8.2 3.0.0 3.4.1 3.2.1 - 3.7.0 + 3.5.2 2.27.2 - 3.0.0 + 3.1.0 - 1659285393 + 10 @@ -345,17 +345,6 @@ - - - - - - Also allow the license url to be https. - - https://www.apache.org/licenses/LICENSE-2.0 - - - true false @@ -365,11 +354,13 @@ **/.gitattributes **/.gitignore **/.gitmodules - + doc/build/** doc/themes/docsy/** doc/examples/java-example/target/** doc/examples/mr-example/target/** + doc/node_modules/** + **/.hugo_build.lock **/*.log **/*.rej diff --git a/share/docker/Dockerfile b/share/docker/Dockerfile index 7b4da5272bc..bbc8bef3283 100644 --- a/share/docker/Dockerfile +++ b/share/docker/Dockerfile @@ -86,7 +86,7 @@ RUN apt-get -qqy install --no-install-recommends libzstd-dev \ # Install a maven release ------------------------------------------- # Inspired from https://github.com/apache/accumulo-docker/blob/master/Dockerfile#L53 -ENV MAVEN_VERSION 3.8.4 +ENV MAVEN_VERSION 3.8.6 ENV APACHE_DIST_URLS \ https://www.apache.org/dyn/closer.cgi?action=download&filename= \ # if the version is outdated (or we're grabbing the .asc file), we might have to pull from the dist/archive :/ @@ -179,12 +179,18 @@ RUN wget https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-p && dpkg -i 
packages-microsoft-prod.deb \ && rm packages-microsoft-prod.deb \ && apt-get update \ - && apt-get -qqy install --no-install-recommends dotnet-sdk-3.1 dotnet-sdk-5.0 dotnet-sdk-6.0 \ + && apt-get -qqy install --no-install-recommends dotnet-sdk-3.1 dotnet-sdk-5.0 dotnet-sdk-6.0 dotnet-sdk-7.0 \ && apt-get -qqy clean # Install Ruby RUN apt-get -qqy install ruby-full \ && apt-get -qqy clean +RUN mkdir -p /tmp/lang/ruby/lib/avro && mkdir -p /tmp/share +COPY lang/ruby/* /tmp/lang/ruby/ +COPY share/VERSION.txt /tmp/share/ +RUN gem install bundler --no-document && \ + apt-get install -qqy libyaml-dev && \ + cd /tmp/lang/ruby && bundle install # Install Rust RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.60.0