diff --git a/core/trino-main/pom.xml b/core/trino-main/pom.xml
index 522c87d7eee4..4335fb00f508 100644
--- a/core/trino-main/pom.xml
+++ b/core/trino-main/pom.xml
@@ -86,7 +86,7 @@
            <groupId>io.airlift</groupId>
-            <artifactId>aircompressor</artifactId>
+            <artifactId>aircompressor-v3</artifactId>
diff --git a/core/trino-main/src/main/java/io/trino/execution/buffer/CompressionCodec.java b/core/trino-main/src/main/java/io/trino/execution/buffer/CompressionCodec.java
index 4e8d88e57c83..a75218217fd1 100644
--- a/core/trino-main/src/main/java/io/trino/execution/buffer/CompressionCodec.java
+++ b/core/trino-main/src/main/java/io/trino/execution/buffer/CompressionCodec.java
@@ -13,7 +13,21 @@
*/
package io.trino.execution.buffer;
+import io.airlift.compress.v3.lz4.Lz4Compressor;
+import io.airlift.compress.v3.zstd.ZstdCompressor;
+
+import java.util.OptionalInt;
+
public enum CompressionCodec
{
- NONE, LZ4, ZSTD
+ NONE, LZ4, ZSTD;
+
+ public OptionalInt maxCompressedLength(int inputSize)
+ {
+ return switch (this) {
+ case NONE -> OptionalInt.empty();
+ case LZ4 -> OptionalInt.of(Lz4Compressor.create().maxCompressedLength(inputSize));
+ case ZSTD -> OptionalInt.of(ZstdCompressor.create().maxCompressedLength(inputSize));
+ };
+ }
}
diff --git a/core/trino-main/src/main/java/io/trino/execution/buffer/PageDeserializer.java b/core/trino-main/src/main/java/io/trino/execution/buffer/PageDeserializer.java
index 8a2bc1dbb7fc..48748c702bda 100644
--- a/core/trino-main/src/main/java/io/trino/execution/buffer/PageDeserializer.java
+++ b/core/trino-main/src/main/java/io/trino/execution/buffer/PageDeserializer.java
@@ -14,9 +14,8 @@
package io.trino.execution.buffer;
import com.google.common.base.VerifyException;
-import io.airlift.compress.Decompressor;
-import io.airlift.compress.lz4.Lz4Decompressor;
-import io.airlift.compress.lz4.Lz4RawCompressor;
+import io.airlift.compress.v3.Decompressor;
+import io.airlift.compress.v3.lz4.Lz4Decompressor;
import io.airlift.slice.Slice;
import io.airlift.slice.SliceInput;
import io.airlift.slice.Slices;
@@ -34,6 +33,7 @@
import java.io.UnsupportedEncodingException;
import java.security.GeneralSecurityException;
import java.util.Optional;
+import java.util.OptionalInt;
import static com.google.common.base.Preconditions.checkArgument;
import static io.airlift.slice.SizeOf.instanceSize;
@@ -63,7 +63,8 @@ public PageDeserializer(
BlockEncodingSerde blockEncodingSerde,
            Optional<Decompressor> decompressor,
            Optional<SecretKey> encryptionKey,
- int blockSizeInBytes)
+ int blockSizeInBytes,
+ OptionalInt maxCompressedBlockSizeInBytes)
{
this.blockEncodingSerde = requireNonNull(blockEncodingSerde, "blockEncodingSerde is null");
requireNonNull(encryptionKey, "encryptionKey is null");
@@ -71,7 +72,8 @@ public PageDeserializer(
input = new SerializedPageInput(
requireNonNull(decompressor, "decompressor is null"),
encryptionKey,
- blockSizeInBytes);
+ blockSizeInBytes,
+ maxCompressedBlockSizeInBytes);
}
public Page deserialize(Slice serializedPage)
@@ -101,7 +103,7 @@ private static class SerializedPageInput
private final ReadBuffer[] buffers;
-        private SerializedPageInput(Optional<Decompressor> decompressor, Optional<SecretKey> encryptionKey, int blockSizeInBytes)
+        private SerializedPageInput(Optional<Decompressor> decompressor, Optional<SecretKey> encryptionKey, int blockSizeInBytes, OptionalInt maxCompressedBlockSizeInBytes)
{
this.decompressor = requireNonNull(decompressor, "decompressor is null");
this.encryptionKey = requireNonNull(encryptionKey, "encryptionKey is null");
@@ -122,7 +124,7 @@ private SerializedPageInput(Optional decompressor, Optional compressor,
            Optional<SecretKey> encryptionKey,
- int blockSizeInBytes)
+ int blockSizeInBytes,
+ OptionalInt maxCompressedBlockSize)
{
this.blockEncodingSerde = requireNonNull(blockEncodingSerde, "blockEncodingSerde is null");
requireNonNull(encryptionKey, "encryptionKey is null");
@@ -75,7 +75,8 @@ public PageSerializer(
output = new SerializedPageOutput(
requireNonNull(compressor, "compressor is null"),
encryptionKey,
- blockSizeInBytes);
+ blockSizeInBytes,
+ maxCompressedBlockSize);
}
public Slice serialize(Page page)
@@ -95,7 +96,8 @@ private static class SerializedPageOutput
{
private static final int INSTANCE_SIZE = instanceSize(SerializedPageOutput.class);
// TODO: implement getRetainedSizeInBytes in Lz4Compressor
- private static final int COMPRESSOR_RETAINED_SIZE = toIntExact(instanceSize(Lz4Compressor.class) + sizeOfIntArray(Lz4RawCompressor.MAX_TABLE_SIZE));
+ // TODO: need a fix
+ private static final int COMPRESSOR_RETAINED_SIZE = toIntExact(instanceSize(Lz4Compressor.class));
private static final int ENCRYPTION_KEY_RETAINED_SIZE = toIntExact(instanceSize(SecretKeySpec.class) + sizeOfByteArray(256 / 8));
private static final double MINIMUM_COMPRESSION_RATIO = 0.8;
@@ -111,7 +113,8 @@ private static class SerializedPageOutput
private SerializedPageOutput(
                Optional<Compressor> compressor,
                Optional<SecretKey> encryptionKey,
- int blockSizeInBytes)
+ int blockSizeInBytes,
+ OptionalInt maxCompressedBlockSize)
{
this.compressor = requireNonNull(compressor, "compressor is null");
this.encryptionKey = requireNonNull(encryptionKey, "encryptionKey is null");
@@ -129,7 +132,7 @@ private SerializedPageOutput(
if (encryptionKey.isPresent()) {
int bufferSize = blockSizeInBytes;
if (compressor.isPresent()) {
- bufferSize = compressor.get().maxCompressedLength(blockSizeInBytes)
+ bufferSize = maxCompressedBlockSize.orElseThrow()
// to store compressed block size
+ Integer.BYTES;
}
diff --git a/core/trino-main/src/main/java/io/trino/execution/buffer/PagesSerdeFactory.java b/core/trino-main/src/main/java/io/trino/execution/buffer/PagesSerdeFactory.java
index ed897df6cd25..2fcf3b41e79a 100644
--- a/core/trino-main/src/main/java/io/trino/execution/buffer/PagesSerdeFactory.java
+++ b/core/trino-main/src/main/java/io/trino/execution/buffer/PagesSerdeFactory.java
@@ -13,24 +13,34 @@
*/
package io.trino.execution.buffer;
-import io.airlift.compress.Compressor;
-import io.airlift.compress.Decompressor;
-import io.airlift.compress.lz4.Lz4Compressor;
-import io.airlift.compress.lz4.Lz4Decompressor;
-import io.airlift.compress.zstd.ZstdCompressor;
-import io.airlift.compress.zstd.ZstdDecompressor;
+import io.airlift.compress.v3.Compressor;
+import io.airlift.compress.v3.Decompressor;
+import io.airlift.compress.v3.lz4.Lz4Compressor;
+import io.airlift.compress.v3.lz4.Lz4Decompressor;
+import io.airlift.compress.v3.zstd.ZstdCompressor;
+import io.airlift.compress.v3.zstd.ZstdDecompressor;
import io.trino.spi.block.BlockEncodingSerde;
import javax.crypto.SecretKey;
+import java.util.Map;
import java.util.Optional;
+import java.util.OptionalInt;
+import static io.trino.execution.buffer.CompressionCodec.LZ4;
+import static io.trino.execution.buffer.CompressionCodec.NONE;
+import static io.trino.execution.buffer.CompressionCodec.ZSTD;
import static java.util.Objects.requireNonNull;
public class PagesSerdeFactory
{
private static final int SERIALIZED_PAGE_DEFAULT_BLOCK_SIZE_IN_BYTES = 64 * 1024;
+    private static final Map<CompressionCodec, OptionalInt> MAX_COMPRESSED_LENGTH = Map.of(
+ NONE, NONE.maxCompressedLength(SERIALIZED_PAGE_DEFAULT_BLOCK_SIZE_IN_BYTES),
+ LZ4, LZ4.maxCompressedLength(SERIALIZED_PAGE_DEFAULT_BLOCK_SIZE_IN_BYTES),
+ ZSTD, ZSTD.maxCompressedLength(SERIALIZED_PAGE_DEFAULT_BLOCK_SIZE_IN_BYTES));
+
private final BlockEncodingSerde blockEncodingSerde;
private final CompressionCodec compressionCodec;
@@ -42,20 +52,30 @@ public PagesSerdeFactory(BlockEncodingSerde blockEncodingSerde, CompressionCodec
    public PageSerializer createSerializer(Optional<SecretKey> encryptionKey)
{
- return new PageSerializer(blockEncodingSerde, createCompressor(compressionCodec), encryptionKey, SERIALIZED_PAGE_DEFAULT_BLOCK_SIZE_IN_BYTES);
+ return new PageSerializer(
+ blockEncodingSerde,
+ createCompressor(compressionCodec),
+ encryptionKey,
+ SERIALIZED_PAGE_DEFAULT_BLOCK_SIZE_IN_BYTES,
+ MAX_COMPRESSED_LENGTH.get(compressionCodec));
}
    public PageDeserializer createDeserializer(Optional<SecretKey> encryptionKey)
{
- return new PageDeserializer(blockEncodingSerde, createDecompressor(compressionCodec), encryptionKey, SERIALIZED_PAGE_DEFAULT_BLOCK_SIZE_IN_BYTES);
+ return new PageDeserializer(
+ blockEncodingSerde,
+ createDecompressor(compressionCodec),
+ encryptionKey,
+ SERIALIZED_PAGE_DEFAULT_BLOCK_SIZE_IN_BYTES,
+ MAX_COMPRESSED_LENGTH.get(compressionCodec));
}
    public static Optional<Compressor> createCompressor(CompressionCodec compressionCodec)
{
return switch (compressionCodec) {
case NONE -> Optional.empty();
- case LZ4 -> Optional.of(new Lz4Compressor());
- case ZSTD -> Optional.of(new ZstdCompressor());
+ case LZ4 -> Optional.of(Lz4Compressor.create());
+ case ZSTD -> Optional.of(ZstdCompressor.create());
};
}
@@ -63,8 +83,8 @@ public static Optional createDecompressor(CompressionCodec compres
{
return switch (compressionCodec) {
case NONE -> Optional.empty();
- case LZ4 -> Optional.of(new Lz4Decompressor());
- case ZSTD -> Optional.of(new ZstdDecompressor());
+ case LZ4 -> Optional.of(Lz4Decompressor.create());
+ case ZSTD -> Optional.of(ZstdDecompressor.create());
};
}
}
diff --git a/core/trino-main/src/main/java/io/trino/server/Server.java b/core/trino-main/src/main/java/io/trino/server/Server.java
index 1a906f3997d2..9d63f54329ca 100644
--- a/core/trino-main/src/main/java/io/trino/server/Server.java
+++ b/core/trino-main/src/main/java/io/trino/server/Server.java
@@ -22,6 +22,9 @@
import com.google.inject.TypeLiteral;
import io.airlift.bootstrap.ApplicationConfigurationException;
import io.airlift.bootstrap.Bootstrap;
+import io.airlift.compress.v3.lz4.Lz4NativeCompressor;
+import io.airlift.compress.v3.snappy.SnappyNativeCompressor;
+import io.airlift.compress.v3.zstd.ZstdNativeCompressor;
import io.airlift.discovery.client.Announcer;
import io.airlift.discovery.client.DiscoveryModule;
import io.airlift.discovery.client.ServiceAnnouncement;
@@ -142,6 +145,10 @@ private void doStart(String trinoVersion)
Injector injector = app.initialize();
log.info("Trino version: %s", injector.getInstance(NodeVersion.class).getVersion());
+ log.info("Zstandard native compression: %s", formatEnabled(ZstdNativeCompressor.isEnabled()));
+ log.info("Lz4 native compression: %s", formatEnabled(Lz4NativeCompressor.isEnabled()));
+ log.info("Snappy native compression: %s", formatEnabled(SnappyNativeCompressor.isEnabled()));
+
logLocation(log, "Working directory", Paths.get("."));
logLocation(log, "Etc directory", Paths.get("etc"));
@@ -188,7 +195,6 @@ private void doStart(String trinoVersion)
injector.getInstance(Announcer.class).start();
injector.getInstance(StartupStatus.class).startupComplete();
-
log.info("Server startup completed in %s", Duration.nanosSince(startTime).convertToMostSuccinctTimeUnit());
log.info("======== SERVER STARTED ========");
}
@@ -290,4 +296,9 @@ private static void logLocation(Logger log, String name, Path path)
}
log.info("%s: %s", name, path);
}
+
+ private static String formatEnabled(boolean flag)
+ {
+ return flag ? "enabled" : "disabled";
+ }
}
diff --git a/core/trino-main/src/main/java/io/trino/server/protocol/PreparedStatementEncoder.java b/core/trino-main/src/main/java/io/trino/server/protocol/PreparedStatementEncoder.java
index 2d7cb3741c56..f081b246618b 100644
--- a/core/trino-main/src/main/java/io/trino/server/protocol/PreparedStatementEncoder.java
+++ b/core/trino-main/src/main/java/io/trino/server/protocol/PreparedStatementEncoder.java
@@ -14,8 +14,8 @@
package io.trino.server.protocol;
import com.google.inject.Inject;
-import io.airlift.compress.zstd.ZstdCompressor;
-import io.airlift.compress.zstd.ZstdDecompressor;
+import io.airlift.compress.v3.zstd.ZstdCompressor;
+import io.airlift.compress.v3.zstd.ZstdDecompressor;
import io.trino.server.ProtocolConfig;
import static com.google.common.io.BaseEncoding.base64Url;
@@ -43,7 +43,7 @@ public String encodePreparedStatementForHeader(String preparedStatement)
return preparedStatement;
}
- ZstdCompressor compressor = new ZstdCompressor();
+ ZstdCompressor compressor = ZstdCompressor.create();
byte[] inputBytes = preparedStatement.getBytes(UTF_8);
byte[] compressed = new byte[compressor.maxCompressedLength(inputBytes.length)];
int outputSize = compressor.compress(inputBytes, 0, inputBytes.length, compressed, 0, compressed.length);
@@ -63,9 +63,9 @@ public String decodePreparedStatementFromHeader(String headerValue)
String encoded = headerValue.substring(PREFIX.length());
byte[] compressed = base64Url().decode(encoded);
-
- byte[] preparedStatement = new byte[toIntExact(ZstdDecompressor.getDecompressedSize(compressed, 0, compressed.length))];
- new ZstdDecompressor().decompress(compressed, 0, compressed.length, preparedStatement, 0, preparedStatement.length);
+ ZstdDecompressor decompressor = ZstdDecompressor.create();
+ byte[] preparedStatement = new byte[toIntExact(decompressor.getDecompressedSize(compressed, 0, compressed.length))];
+ decompressor.decompress(compressed, 0, compressed.length, preparedStatement, 0, preparedStatement.length);
return new String(preparedStatement, UTF_8);
}
}
diff --git a/core/trino-main/src/main/java/io/trino/server/security/oauth2/ZstdCodec.java b/core/trino-main/src/main/java/io/trino/server/security/oauth2/ZstdCodec.java
index 8173946f5cb0..ee8fcb2a4459 100644
--- a/core/trino-main/src/main/java/io/trino/server/security/oauth2/ZstdCodec.java
+++ b/core/trino-main/src/main/java/io/trino/server/security/oauth2/ZstdCodec.java
@@ -13,51 +13,26 @@
*/
package io.trino.server.security.oauth2;
-import io.airlift.compress.zstd.ZstdCompressor;
-import io.airlift.compress.zstd.ZstdDecompressor;
-import io.airlift.compress.zstd.ZstdInputStream;
-import io.airlift.compress.zstd.ZstdOutputStream;
-import io.jsonwebtoken.CompressionCodec;
-import io.jsonwebtoken.CompressionException;
+import io.airlift.compress.v3.zstd.ZstdInputStream;
+import io.airlift.compress.v3.zstd.ZstdOutputStream;
+import io.jsonwebtoken.io.CompressionAlgorithm;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UncheckedIOException;
-import static java.lang.Math.toIntExact;
-import static java.util.Arrays.copyOfRange;
-
public class ZstdCodec
- implements CompressionCodec
+ implements CompressionAlgorithm
{
public static final String CODEC_NAME = "ZSTD";
@Override
- public String getAlgorithmName()
+ public String getId()
{
return CODEC_NAME;
}
- @Override
- public byte[] compress(byte[] bytes)
- throws CompressionException
- {
- ZstdCompressor compressor = new ZstdCompressor();
- byte[] compressed = new byte[compressor.maxCompressedLength(bytes.length)];
- int outputSize = compressor.compress(bytes, 0, bytes.length, compressed, 0, compressed.length);
- return copyOfRange(compressed, 0, outputSize);
- }
-
- @Override
- public byte[] decompress(byte[] bytes)
- throws CompressionException
- {
- byte[] output = new byte[toIntExact(ZstdDecompressor.getDecompressedSize(bytes, 0, bytes.length))];
- new ZstdDecompressor().decompress(bytes, 0, bytes.length, output, 0, output.length);
- return output;
- }
-
@Override
public OutputStream compress(OutputStream out)
{
@@ -74,10 +49,4 @@ public InputStream decompress(InputStream in)
{
return new ZstdInputStream(in);
}
-
- @Override
- public String getId()
- {
- return CODEC_NAME;
- }
}
diff --git a/core/trino-main/src/main/java/io/trino/util/ThreadLocalCompressorDecompressor.java b/core/trino-main/src/main/java/io/trino/util/ThreadLocalCompressorDecompressor.java
deleted file mode 100644
index 1c73ac299e54..000000000000
--- a/core/trino-main/src/main/java/io/trino/util/ThreadLocalCompressorDecompressor.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.util;
-
-import io.airlift.compress.Compressor;
-import io.airlift.compress.Decompressor;
-import io.airlift.compress.MalformedInputException;
-
-import java.nio.ByteBuffer;
-import java.util.function.Supplier;
-
-import static java.util.Objects.requireNonNull;
-
-public class ThreadLocalCompressorDecompressor
- implements Compressor, Decompressor
-{
-    private final ThreadLocal<Compressor> compressor;
-    private final ThreadLocal<Decompressor> decompressor;
-
-    public ThreadLocalCompressorDecompressor(Supplier<Compressor> compressorFactory, Supplier<Decompressor> decompressorFactory)
- {
- this.compressor = ThreadLocal.withInitial(requireNonNull(compressorFactory, "compressorFactory is null"));
- this.decompressor = ThreadLocal.withInitial(requireNonNull(decompressorFactory, "decompressorFactory is null"));
- }
-
- @Override
- public int maxCompressedLength(int uncompressedSize)
- {
- return compressor.get().maxCompressedLength(uncompressedSize);
- }
-
- @Override
- public int compress(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset, int maxOutputLength)
- {
- return compressor.get().compress(input, inputOffset, inputLength, output, outputOffset, maxOutputLength);
- }
-
- @Override
- public void compress(ByteBuffer input, ByteBuffer output)
- {
- compressor.get().compress(input, output);
- }
-
- @Override
- public int decompress(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset, int maxOutputLength)
- throws MalformedInputException
- {
- return decompressor.get().decompress(input, inputOffset, inputLength, output, outputOffset, maxOutputLength);
- }
-
- @Override
- public void decompress(ByteBuffer input, ByteBuffer output)
- throws MalformedInputException
- {
- decompressor.get().decompress(input, output);
- }
-}
diff --git a/core/trino-main/src/test/java/io/trino/execution/buffer/TestPagesSerde.java b/core/trino-main/src/test/java/io/trino/execution/buffer/TestPagesSerde.java
index 42628fb62443..59083c4c903d 100644
--- a/core/trino-main/src/test/java/io/trino/execution/buffer/TestPagesSerde.java
+++ b/core/trino-main/src/test/java/io/trino/execution/buffer/TestPagesSerde.java
@@ -146,8 +146,8 @@ private void testRoundTrip(List<Type> types, List<Page> pages, boolean encryptio
{
        Optional<SecretKey> encryptionKey = encryptionEnabled ? Optional.of(createRandomAesEncryptionKey()) : Optional.empty();
for (CompressionCodec compressionCodec : CompressionCodec.values()) {
- PageSerializer serializer = new PageSerializer(blockEncodingSerde, createCompressor(compressionCodec), encryptionKey, blockSizeInBytes);
- PageDeserializer deserializer = new PageDeserializer(blockEncodingSerde, createDecompressor(compressionCodec), encryptionKey, blockSizeInBytes);
+ PageSerializer serializer = new PageSerializer(blockEncodingSerde, createCompressor(compressionCodec), encryptionKey, blockSizeInBytes, compressionCodec.maxCompressedLength(blockSizeInBytes));
+ PageDeserializer deserializer = new PageDeserializer(blockEncodingSerde, createDecompressor(compressionCodec), encryptionKey, blockSizeInBytes, compressionCodec.maxCompressedLength(blockSizeInBytes));
for (Page page : pages) {
Slice serialized = serializer.serialize(page);
Page deserialized = deserializer.deserialize(serialized);
@@ -270,8 +270,8 @@ private void testDeserializationWithRollover(boolean encryptionEnabled, int numb
RolloverBlockSerde blockSerde = new RolloverBlockSerde();
        Optional<SecretKey> encryptionKey = encryptionEnabled ? Optional.of(createRandomAesEncryptionKey()) : Optional.empty();
for (CompressionCodec compressionCodec : CompressionCodec.values()) {
- PageSerializer serializer = new PageSerializer(blockSerde, createCompressor(compressionCodec), encryptionKey, blockSize);
- PageDeserializer deserializer = new PageDeserializer(blockSerde, createDecompressor(compressionCodec), encryptionKey, blockSize);
+ PageSerializer serializer = new PageSerializer(blockSerde, createCompressor(compressionCodec), encryptionKey, blockSize, compressionCodec.maxCompressedLength(blockSize));
+ PageDeserializer deserializer = new PageDeserializer(blockSerde, createDecompressor(compressionCodec), encryptionKey, blockSize, compressionCodec.maxCompressedLength(blockSize));
Page page = createTestPage(numberOfEntries);
Slice serialized = serializer.serialize(page);
diff --git a/lib/trino-hive-formats/pom.xml b/lib/trino-hive-formats/pom.xml
index 2670feb52b34..b6920b4f8286 100644
--- a/lib/trino-hive-formats/pom.xml
+++ b/lib/trino-hive-formats/pom.xml
@@ -45,7 +45,7 @@
            <groupId>io.airlift</groupId>
-            <artifactId>aircompressor</artifactId>
+            <artifactId>aircompressor-v3</artifactId>
true
diff --git a/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/Codec.java b/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/Codec.java
index ae57836822a5..2cb23f8e3e9e 100644
--- a/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/Codec.java
+++ b/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/Codec.java
@@ -13,7 +13,7 @@
*/
package io.trino.hive.formats.compression;
-import io.airlift.compress.hadoop.HadoopStreams;
+import io.airlift.compress.v3.hadoop.HadoopStreams;
import java.io.IOException;
import java.io.InputStream;
diff --git a/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/CompressionKind.java b/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/CompressionKind.java
index b064791561a5..a4cfc1853216 100644
--- a/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/CompressionKind.java
+++ b/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/CompressionKind.java
@@ -15,15 +15,15 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
-import io.airlift.compress.bzip2.BZip2HadoopStreams;
-import io.airlift.compress.deflate.JdkDeflateHadoopStreams;
-import io.airlift.compress.gzip.JdkGzipHadoopStreams;
-import io.airlift.compress.hadoop.HadoopStreams;
-import io.airlift.compress.lz4.Lz4HadoopStreams;
-import io.airlift.compress.lzo.LzoHadoopStreams;
-import io.airlift.compress.lzo.LzopHadoopStreams;
-import io.airlift.compress.snappy.SnappyHadoopStreams;
-import io.airlift.compress.zstd.ZstdHadoopStreams;
+import io.airlift.compress.v3.bzip2.BZip2HadoopStreams;
+import io.airlift.compress.v3.deflate.JdkDeflateHadoopStreams;
+import io.airlift.compress.v3.gzip.JdkGzipHadoopStreams;
+import io.airlift.compress.v3.hadoop.HadoopStreams;
+import io.airlift.compress.v3.lz4.Lz4HadoopStreams;
+import io.airlift.compress.v3.lzo.LzoHadoopStreams;
+import io.airlift.compress.v3.lzo.LzopHadoopStreams;
+import io.airlift.compress.v3.snappy.SnappyHadoopStreams;
+import io.airlift.compress.v3.zstd.ZstdHadoopStreams;
import java.util.Arrays;
import java.util.List;
diff --git a/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/MemoryCompressedSliceOutput.java b/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/MemoryCompressedSliceOutput.java
index acea99c8fb8c..6a30db379c52 100644
--- a/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/MemoryCompressedSliceOutput.java
+++ b/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/MemoryCompressedSliceOutput.java
@@ -13,7 +13,7 @@
*/
package io.trino.hive.formats.compression;
-import io.airlift.compress.hadoop.HadoopStreams;
+import io.airlift.compress.v3.hadoop.HadoopStreams;
import io.airlift.slice.Slice;
import io.trino.plugin.base.io.ChunkedSliceOutput;
diff --git a/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/ValueCompressor.java b/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/ValueCompressor.java
index b4ab8154fb9e..247d887df5b5 100644
--- a/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/ValueCompressor.java
+++ b/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/ValueCompressor.java
@@ -13,7 +13,7 @@
*/
package io.trino.hive.formats.compression;
-import io.airlift.compress.hadoop.HadoopStreams;
+import io.airlift.compress.v3.hadoop.HadoopStreams;
import io.airlift.slice.DynamicSliceOutput;
import io.airlift.slice.Slice;
diff --git a/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/ValueDecompressor.java b/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/ValueDecompressor.java
index 9ca81e49b875..4ec11269cacc 100644
--- a/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/ValueDecompressor.java
+++ b/lib/trino-hive-formats/src/main/java/io/trino/hive/formats/compression/ValueDecompressor.java
@@ -13,7 +13,7 @@
*/
package io.trino.hive.formats.compression;
-import io.airlift.compress.hadoop.HadoopStreams;
+import io.airlift.compress.v3.hadoop.HadoopStreams;
import io.airlift.slice.Slice;
import java.io.IOException;
diff --git a/lib/trino-hive-formats/src/test/java/com/hadoop/compression/lzo/LzopCodec.java b/lib/trino-hive-formats/src/test/java/com/hadoop/compression/lzo/LzopCodec.java
index 7989890f53ab..531adb0f7fd6 100644
--- a/lib/trino-hive-formats/src/test/java/com/hadoop/compression/lzo/LzopCodec.java
+++ b/lib/trino-hive-formats/src/test/java/com/hadoop/compression/lzo/LzopCodec.java
@@ -14,4 +14,4 @@
package com.hadoop.compression.lzo;
public class LzopCodec
- extends io.airlift.compress.lzo.LzopCodec {}
+ extends io.airlift.compress.v3.lzo.LzopCodec {}
diff --git a/lib/trino-hive-formats/src/test/java/org/apache/hadoop/io/compress/LzoCodec.java b/lib/trino-hive-formats/src/test/java/org/apache/hadoop/io/compress/LzoCodec.java
index fda3406e8580..f1b9c380c98f 100644
--- a/lib/trino-hive-formats/src/test/java/org/apache/hadoop/io/compress/LzoCodec.java
+++ b/lib/trino-hive-formats/src/test/java/org/apache/hadoop/io/compress/LzoCodec.java
@@ -14,5 +14,5 @@
package org.apache.hadoop.io.compress;
public class LzoCodec
- extends io.airlift.compress.lzo.LzoCodec
+ extends io.airlift.compress.v3.lzo.LzoCodec
{}
diff --git a/lib/trino-orc/pom.xml b/lib/trino-orc/pom.xml
index c6b680046a4e..8b919c81af7e 100644
--- a/lib/trino-orc/pom.xml
+++ b/lib/trino-orc/pom.xml
@@ -30,7 +30,7 @@
            <groupId>io.airlift</groupId>
-            <artifactId>aircompressor</artifactId>
+            <artifactId>aircompressor-v3</artifactId>
@@ -116,6 +116,12 @@
runtime
+
+        <dependency>
+            <groupId>io.airlift</groupId>
+            <artifactId>aircompressor</artifactId>
+            <scope>test</scope>
+        </dependency>
io.airlift
junit-extensions
diff --git a/lib/trino-orc/src/main/java/io/trino/orc/DeflateCompressor.java b/lib/trino-orc/src/main/java/io/trino/orc/DeflateCompressor.java
deleted file mode 100644
index 2186d91ca800..000000000000
--- a/lib/trino-orc/src/main/java/io/trino/orc/DeflateCompressor.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package io.trino.orc;
-
-import io.airlift.compress.Compressor;
-
-import java.nio.ByteBuffer;
-import java.util.zip.Deflater;
-
-import static java.util.zip.Deflater.FULL_FLUSH;
-
-public class DeflateCompressor
- implements Compressor
-{
- private static final int EXTRA_COMPRESSION_SPACE = 16;
- private static final int COMPRESSION_LEVEL = 4;
-
- @Override
- public int maxCompressedLength(int uncompressedSize)
- {
- // From Mark Adler's post http://stackoverflow.com/questions/1207877/java-size-of-compression-output-bytearray
- return uncompressedSize + ((uncompressedSize + 7) >> 3) + ((uncompressedSize + 63) >> 6) + 5 + EXTRA_COMPRESSION_SPACE;
- }
-
- @Override
- public int compress(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset, int maxOutputLength)
- {
- int maxCompressedLength = maxCompressedLength(inputLength);
- if (maxOutputLength < maxCompressedLength) {
- throw new IllegalArgumentException("Output buffer must be at least " + maxCompressedLength + " bytes");
- }
-
- Deflater deflater = new Deflater(COMPRESSION_LEVEL, true);
- try {
- deflater.setInput(input, inputOffset, inputLength);
- deflater.finish();
-
- int compressedDataLength = deflater.deflate(output, outputOffset, maxOutputLength, FULL_FLUSH);
- if (!deflater.finished()) {
- throw new IllegalStateException("maxCompressedLength formula is incorrect, because deflate produced more data");
- }
- return compressedDataLength;
- }
- finally {
- deflater.end();
- }
- }
-
- @Override
- public void compress(ByteBuffer input, ByteBuffer output)
- {
- throw new UnsupportedOperationException("Compression of byte buffer not supported for deflate");
- }
-}
diff --git a/lib/trino-orc/src/main/java/io/trino/orc/OrcLz4Decompressor.java b/lib/trino-orc/src/main/java/io/trino/orc/OrcLz4Decompressor.java
index 72a5b23ce293..db7246cc4c36 100644
--- a/lib/trino-orc/src/main/java/io/trino/orc/OrcLz4Decompressor.java
+++ b/lib/trino-orc/src/main/java/io/trino/orc/OrcLz4Decompressor.java
@@ -13,9 +13,9 @@
*/
package io.trino.orc;
-import io.airlift.compress.Decompressor;
-import io.airlift.compress.MalformedInputException;
-import io.airlift.compress.lz4.Lz4Decompressor;
+import io.airlift.compress.v3.Decompressor;
+import io.airlift.compress.v3.MalformedInputException;
+import io.airlift.compress.v3.lz4.Lz4Decompressor;
import static java.util.Objects.requireNonNull;
@@ -24,7 +24,7 @@ class OrcLz4Decompressor
{
private final OrcDataSourceId orcDataSourceId;
private final int maxBufferSize;
- private final Decompressor decompressor = new Lz4Decompressor();
+ private final Decompressor decompressor = Lz4Decompressor.create();
public OrcLz4Decompressor(OrcDataSourceId orcDataSourceId, int maxBufferSize)
{
diff --git a/lib/trino-orc/src/main/java/io/trino/orc/OrcOutputBuffer.java b/lib/trino-orc/src/main/java/io/trino/orc/OrcOutputBuffer.java
index 5350272e5f64..a4c7dabd7bce 100644
--- a/lib/trino-orc/src/main/java/io/trino/orc/OrcOutputBuffer.java
+++ b/lib/trino-orc/src/main/java/io/trino/orc/OrcOutputBuffer.java
@@ -14,10 +14,11 @@
package io.trino.orc;
import com.google.common.annotations.VisibleForTesting;
-import io.airlift.compress.Compressor;
-import io.airlift.compress.lz4.Lz4Compressor;
-import io.airlift.compress.snappy.SnappyCompressor;
-import io.airlift.compress.zstd.ZstdCompressor;
+import io.airlift.compress.v3.Compressor;
+import io.airlift.compress.v3.deflate.DeflateCompressor;
+import io.airlift.compress.v3.lz4.Lz4Compressor;
+import io.airlift.compress.v3.snappy.SnappyCompressor;
+import io.airlift.compress.v3.zstd.ZstdCompressor;
import io.airlift.slice.SizeOf;
import io.airlift.slice.Slice;
import io.airlift.slice.SliceOutput;
@@ -92,10 +93,10 @@ private static Compressor getCompressor(CompressionKind compression)
{
return switch (compression) {
case NONE -> null;
- case SNAPPY -> new SnappyCompressor();
+ case SNAPPY -> SnappyCompressor.create();
case ZLIB -> new DeflateCompressor();
- case LZ4 -> new Lz4Compressor();
- case ZSTD -> new ZstdCompressor();
+ case LZ4 -> Lz4Compressor.create();
+ case ZSTD -> ZstdCompressor.create();
};
}
diff --git a/lib/trino-orc/src/main/java/io/trino/orc/OrcSnappyDecompressor.java b/lib/trino-orc/src/main/java/io/trino/orc/OrcSnappyDecompressor.java
index ee6f65ea8431..a0714a118b62 100644
--- a/lib/trino-orc/src/main/java/io/trino/orc/OrcSnappyDecompressor.java
+++ b/lib/trino-orc/src/main/java/io/trino/orc/OrcSnappyDecompressor.java
@@ -13,8 +13,8 @@
*/
package io.trino.orc;
-import io.airlift.compress.MalformedInputException;
-import io.airlift.compress.snappy.SnappyDecompressor;
+import io.airlift.compress.v3.MalformedInputException;
+import io.airlift.compress.v3.snappy.SnappyDecompressor;
import static io.airlift.slice.SizeOf.SIZE_OF_LONG;
import static java.util.Objects.requireNonNull;
@@ -24,7 +24,7 @@ class OrcSnappyDecompressor
{
private final OrcDataSourceId orcDataSourceId;
private final int maxBufferSize;
- private final SnappyDecompressor decompressor = new SnappyDecompressor();
+ private final SnappyDecompressor decompressor = SnappyDecompressor.create();
public OrcSnappyDecompressor(OrcDataSourceId orcDataSourceId, int maxBufferSize)
{
@@ -37,7 +37,7 @@ public int decompress(byte[] input, int offset, int length, OutputBuffer output)
throws OrcCorruptionException
{
try {
- int uncompressedLength = SnappyDecompressor.getUncompressedLength(input, offset);
+ int uncompressedLength = decompressor.getUncompressedLength(input, offset);
if (uncompressedLength > maxBufferSize) {
throw new OrcCorruptionException(orcDataSourceId, "Snappy requires buffer (%s) larger than max size (%s)", uncompressedLength, maxBufferSize);
}
diff --git a/lib/trino-orc/src/main/java/io/trino/orc/OrcZstdDecompressor.java b/lib/trino-orc/src/main/java/io/trino/orc/OrcZstdDecompressor.java
index 83b5b2fe45e0..44ded52948d5 100644
--- a/lib/trino-orc/src/main/java/io/trino/orc/OrcZstdDecompressor.java
+++ b/lib/trino-orc/src/main/java/io/trino/orc/OrcZstdDecompressor.java
@@ -13,8 +13,8 @@
*/
package io.trino.orc;
-import io.airlift.compress.MalformedInputException;
-import io.airlift.compress.zstd.ZstdDecompressor;
+import io.airlift.compress.v3.MalformedInputException;
+import io.airlift.compress.v3.zstd.ZstdDecompressor;
import static java.lang.StrictMath.toIntExact;
import static java.util.Objects.requireNonNull;
@@ -24,7 +24,7 @@ class OrcZstdDecompressor
{
private final OrcDataSourceId orcDataSourceId;
private final int maxBufferSize;
- private final ZstdDecompressor decompressor = new ZstdDecompressor();
+ private final ZstdDecompressor decompressor = ZstdDecompressor.create();
public OrcZstdDecompressor(OrcDataSourceId orcDataSourceId, int maxBufferSize)
{
@@ -37,7 +37,7 @@ public int decompress(byte[] input, int offset, int length, OutputBuffer output)
throws OrcCorruptionException
{
try {
- long uncompressedLength = ZstdDecompressor.getDecompressedSize(input, offset, length);
+ long uncompressedLength = decompressor.getDecompressedSize(input, offset, length);
if (uncompressedLength > maxBufferSize) {
throw new OrcCorruptionException(orcDataSourceId, "Zstd requires buffer (%s) larger than max size (%s)", uncompressedLength, maxBufferSize);
}
diff --git a/lib/trino-parquet/pom.xml b/lib/trino-parquet/pom.xml
index bc15c73df533..2f5c7cb71172 100644
--- a/lib/trino-parquet/pom.xml
+++ b/lib/trino-parquet/pom.xml
@@ -26,7 +26,7 @@
       <groupId>io.airlift</groupId>
-      <artifactId>aircompressor</artifactId>
+      <artifactId>aircompressor-v3</artifactId>
diff --git a/lib/trino-parquet/src/main/java/io/trino/parquet/ParquetCompressionUtils.java b/lib/trino-parquet/src/main/java/io/trino/parquet/ParquetCompressionUtils.java
index 1c9069817ddf..784d0ce918b3 100644
--- a/lib/trino-parquet/src/main/java/io/trino/parquet/ParquetCompressionUtils.java
+++ b/lib/trino-parquet/src/main/java/io/trino/parquet/ParquetCompressionUtils.java
@@ -14,11 +14,11 @@
package io.trino.parquet;
import com.google.common.io.ByteStreams;
-import io.airlift.compress.Decompressor;
-import io.airlift.compress.lz4.Lz4Decompressor;
-import io.airlift.compress.lzo.LzoDecompressor;
-import io.airlift.compress.snappy.SnappyDecompressor;
-import io.airlift.compress.zstd.ZstdDecompressor;
+import io.airlift.compress.v3.Decompressor;
+import io.airlift.compress.v3.lz4.Lz4Decompressor;
+import io.airlift.compress.v3.lzo.LzoDecompressor;
+import io.airlift.compress.v3.snappy.SnappyDecompressor;
+import io.airlift.compress.v3.zstd.ZstdDecompressor;
import io.airlift.slice.Slice;
import org.apache.parquet.format.CompressionCodec;
@@ -65,7 +65,7 @@ private static Slice decompressSnappy(Slice input, int uncompressedSize)
// Snappy decompressor is more efficient if there's at least a long's worth of extra space
// in the output buffer
byte[] buffer = new byte[uncompressedSize + SIZE_OF_LONG];
- int actualUncompressedSize = decompress(new SnappyDecompressor(), input, 0, input.length(), buffer, 0);
+ int actualUncompressedSize = decompress(SnappyDecompressor.create(), input, 0, input.length(), buffer, 0);
if (actualUncompressedSize != uncompressedSize) {
throw new IllegalArgumentException(format("Invalid uncompressedSize for SNAPPY input. Expected %s, actual: %s", uncompressedSize, actualUncompressedSize));
}
@@ -75,7 +75,7 @@ private static Slice decompressSnappy(Slice input, int uncompressedSize)
private static Slice decompressZstd(Slice input, int uncompressedSize)
{
byte[] buffer = new byte[uncompressedSize];
- decompress(new ZstdDecompressor(), input, 0, input.length(), buffer, 0);
+ decompress(ZstdDecompressor.create(), input, 0, input.length(), buffer, 0);
return wrappedBuffer(buffer);
}
@@ -100,7 +100,7 @@ private static Slice decompressGzip(Slice input, int uncompressedSize)
private static Slice decompressLz4(Slice input, int uncompressedSize)
{
- return decompressFramed(new Lz4Decompressor(), input, uncompressedSize);
+ return decompressFramed(Lz4Decompressor.create(), input, uncompressedSize);
}
private static Slice decompressLZO(Slice input, int uncompressedSize)
diff --git a/lib/trino-parquet/src/main/java/io/trino/parquet/writer/ParquetCompressor.java b/lib/trino-parquet/src/main/java/io/trino/parquet/writer/ParquetCompressor.java
index e42179e0bde1..a716de82ec97 100644
--- a/lib/trino-parquet/src/main/java/io/trino/parquet/writer/ParquetCompressor.java
+++ b/lib/trino-parquet/src/main/java/io/trino/parquet/writer/ParquetCompressor.java
@@ -13,9 +13,10 @@
*/
package io.trino.parquet.writer;
-import io.airlift.compress.Compressor;
-import io.airlift.compress.snappy.SnappyCompressor;
-import io.airlift.compress.zstd.ZstdCompressor;
+import io.airlift.compress.v3.Compressor;
+import io.airlift.compress.v3.lz4.Lz4Compressor;
+import io.airlift.compress.v3.snappy.SnappyCompressor;
+import io.airlift.compress.v3.zstd.ZstdCompressor;
import io.airlift.slice.Slices;
import org.apache.parquet.format.CompressionCodec;
@@ -37,13 +38,14 @@ static ParquetCompressor getCompressor(CompressionCodec codec)
case GZIP:
return new GzipCompressor();
case SNAPPY:
- return new AirLiftCompressor(new SnappyCompressor());
+ return new AirLiftCompressor(SnappyCompressor.create());
case ZSTD:
- return new AirLiftCompressor(new ZstdCompressor());
+ return new AirLiftCompressor(ZstdCompressor.create());
+ case LZ4:
+ return new AirLiftCompressor(Lz4Compressor.create());
case UNCOMPRESSED:
return null;
case LZO:
- case LZ4:
case LZ4_RAW:
// TODO Support LZO and LZ4_RAW compression
// Note: LZ4 compression scheme has been deprecated by parquet-format in favor of LZ4_RAW
diff --git a/lib/trino-parquet/src/test/java/io/trino/parquet/reader/TestPageReader.java b/lib/trino-parquet/src/test/java/io/trino/parquet/reader/TestPageReader.java
index 9c98a150270e..102e2b4fc01b 100644
--- a/lib/trino-parquet/src/test/java/io/trino/parquet/reader/TestPageReader.java
+++ b/lib/trino-parquet/src/test/java/io/trino/parquet/reader/TestPageReader.java
@@ -15,8 +15,7 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
-import io.airlift.compress.snappy.SnappyCompressor;
-import io.airlift.compress.snappy.SnappyRawCompressor;
+import io.airlift.compress.v3.snappy.SnappyCompressor;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import io.trino.parquet.DataPage;
@@ -376,8 +375,9 @@ private static byte[] compress(CompressionCodec compressionCodec, byte[] bytes,
return Arrays.copyOfRange(bytes, offset, offset + length);
}
if (compressionCodec == SNAPPY) {
- byte[] out = new byte[SnappyRawCompressor.maxCompressedLength(length)];
- int compressedSize = new SnappyCompressor().compress(bytes, offset, length, out, 0, out.length);
+ SnappyCompressor compressor = SnappyCompressor.create();
+ byte[] out = new byte[compressor.maxCompressedLength(length)];
+ int compressedSize = compressor.compress(bytes, offset, length, out, 0, out.length);
return Arrays.copyOf(out, compressedSize);
}
throw new IllegalArgumentException("unsupported compression code " + compressionCodec);
diff --git a/plugin/trino-hive/pom.xml b/plugin/trino-hive/pom.xml
index 85e9f80e621a..99c8f1e7b18d 100644
--- a/plugin/trino-hive/pom.xml
+++ b/plugin/trino-hive/pom.xml
@@ -61,7 +61,7 @@
       <groupId>io.airlift</groupId>
-      <artifactId>aircompressor</artifactId>
+      <artifactId>aircompressor-v3</artifactId>
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/MetastoreUtil.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/MetastoreUtil.java
index a4487bfb1678..5280dd5bb578 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/MetastoreUtil.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/MetastoreUtil.java
@@ -20,8 +20,8 @@
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.primitives.Longs;
-import io.airlift.compress.Compressor;
-import io.airlift.compress.zstd.ZstdCompressor;
+import io.airlift.compress.v3.Compressor;
+import io.airlift.compress.v3.zstd.ZstdCompressor;
import io.airlift.slice.Slice;
import io.trino.hive.thrift.metastore.ResourceType;
import io.trino.hive.thrift.metastore.ResourceUri;
@@ -447,7 +447,7 @@ public static String metastoreFunctionName(String functionName, String signature
 public static List<ResourceUri> toResourceUris(byte[] input)
{
- Compressor compressor = new ZstdCompressor();
+ Compressor compressor = ZstdCompressor.create();
byte[] compressed = new byte[compressor.maxCompressedLength(input.length)];
int outputSize = compressor.compress(input, 0, input.length, compressed, 0, compressed.length);
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java
index 7776f5d5a957..c646aee968e5 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java
@@ -21,7 +21,7 @@
import com.google.common.io.ByteArrayDataOutput;
import com.google.common.io.ByteStreams;
import com.google.common.primitives.Shorts;
-import io.airlift.compress.zstd.ZstdDecompressor;
+import io.airlift.compress.v3.zstd.ZstdDecompressor;
import io.airlift.json.JsonCodec;
import io.trino.hive.thrift.metastore.BinaryColumnStatsData;
import io.trino.hive.thrift.metastore.BooleanColumnStatsData;
@@ -945,9 +945,10 @@ public static byte[] fromResourceUris(List<ResourceUri> resourceUris)
}
byte[] compressed = bytes.toByteArray();
- long size = ZstdDecompressor.getDecompressedSize(compressed, 0, compressed.length);
+ ZstdDecompressor decompressor = ZstdDecompressor.create();
+ long size = decompressor.getDecompressedSize(compressed, 0, compressed.length);
byte[] output = new byte[toIntExact(size)];
- new ZstdDecompressor().decompress(compressed, 0, compressed.length, output, 0, output.length);
+ decompressor.decompress(compressed, 0, compressed.length, output, 0, output.length);
return output;
}
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestGlueConverter.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestGlueConverter.java
index fb00bc051fe5..a61e42860c00 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestGlueConverter.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestGlueConverter.java
@@ -220,7 +220,7 @@ void testToGlueFunctionInput()
.build();
LanguageFunction actual = GlueConverter.fromGlueFunction(function);
- assertThat(input.resourceUris().size()).isEqualTo(4);
+ assertThat(input.resourceUris().size()).isEqualTo(3);
assertThat(actual).isEqualTo(expected);
// verify that the owner comes from the metastore
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/v1/TestGlueInputConverter.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/v1/TestGlueInputConverter.java
index e153115a8fee..89b4f4a8af66 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/v1/TestGlueInputConverter.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/v1/TestGlueInputConverter.java
@@ -100,7 +100,7 @@ public void testConvertFunction()
.withResourceUris(input.getResourceUris());
LanguageFunction actual = GlueToTrinoConverter.convertFunction(function);
- assertThat(input.getResourceUris().size()).isEqualTo(4);
+ assertThat(input.getResourceUris().size()).isEqualTo(3);
assertThat(actual).isEqualTo(expected);
// verify that the owner comes from the metastore
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
index 74bc1ac5c48f..bddbd8893c7d 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorTest.java
@@ -926,8 +926,8 @@ public void testCreatePartitionedTable()
" ('a_double', NULL, 1e0, 0.5e0, NULL, '1.0', '1.0'), " +
" ('a_short_decimal', NULL, 1e0, 0.5e0, NULL, '1.0', '1.0'), " +
" ('a_long_decimal', NULL, 1e0, 0.5e0, NULL, '11.0', '11.0'), " +
- " ('a_varchar', 234e0, 1e0, 0.5e0, NULL, NULL, NULL), " +
- " ('a_varbinary', 114e0, 1e0, 0.5e0, NULL, NULL, NULL), " +
+ " ('a_varchar', 213e0, 1e0, 0.5e0, NULL, NULL, NULL), " +
+ " ('a_varbinary', 103e0, 1e0, 0.5e0, NULL, NULL, NULL), " +
" ('a_date', NULL, 1e0, 0.5e0, NULL, '2021-07-24', '2021-07-24'), " +
" ('a_time', NULL, 1e0, 0.5e0, NULL, NULL, NULL), " +
" ('a_timestamp', NULL, 1e0, 0.5e0, NULL, '2021-07-24 03:43:57.987654', '2021-07-24 03:43:57.987654'), " +
@@ -936,7 +936,7 @@ public void testCreatePartitionedTable()
" ('a_row', NULL, NULL, NULL, NULL, NULL, NULL), " +
" ('an_array', NULL, NULL, NULL, NULL, NULL, NULL), " +
" ('a_map', NULL, NULL, NULL, NULL, NULL, NULL), " +
- " ('a quoted, field', 224e0, 1e0, 0.5e0, NULL, NULL, NULL), " +
+ " ('a quoted, field', 202e0, 1e0, 0.5e0, NULL, NULL, NULL), " +
" (NULL, NULL, NULL, NULL, 2e0, NULL, NULL)");
}
case AVRO -> {
@@ -3298,7 +3298,7 @@ public void testTruncateTextTransform()
assertThat(query("SHOW STATS FOR test_truncate_text_transform"))
.skippingTypesCheck()
.matches("VALUES " +
- " ('d', " + (format == PARQUET ? "550e0" : "NULL") + ", 7e0, " + (format == AVRO ? "0.1e0" : "0.125e0") + ", NULL, NULL, NULL), " +
+ " ('d', " + (format == PARQUET ? "507e0" : "NULL") + ", 7e0, " + (format == AVRO ? "0.1e0" : "0.125e0") + ", NULL, NULL, NULL), " +
" ('b', NULL, 8e0, 0e0, NULL, " + (format == AVRO ? "NULL, NULL" : "'1', '101'") + "), " +
" (NULL, NULL, NULL, NULL, 8e0, NULL, NULL)");
@@ -3588,7 +3588,7 @@ public void testApplyFilterWithNonEmptyConstraintPredicate()
" ('b', NULL, 7e0, 0e0, NULL, '1', '7'), " +
" (NULL, NULL, NULL, NULL, 7e0, NULL, NULL)";
case PARQUET -> "VALUES " +
- " ('d', 364e0, 7e0, 0e0, NULL, NULL, NULL), " +
+ " ('d', 342e0, 7e0, 0e0, NULL, NULL, NULL), " +
" ('b', NULL, 7e0, 0e0, NULL, '1', '7'), " +
" (NULL, NULL, NULL, NULL, 7e0, NULL, NULL)";
case AVRO -> "VALUES " +
@@ -3645,7 +3645,7 @@ public void testVoidTransform()
assertThat(query("SHOW STATS FOR test_void_transform"))
.skippingTypesCheck()
.matches("VALUES " +
- " ('d', " + (format == PARQUET ? "205e0" : "NULL") + ", 5e0, 0.2857142857142857, NULL, NULL, NULL), " +
+ " ('d', " + (format == PARQUET ? "194e0" : "NULL") + ", 5e0, 0.2857142857142857, NULL, NULL, NULL), " +
" ('b', NULL, 7e0, 0e0, NULL, '1', '7'), " +
" (NULL, NULL, NULL, NULL, 7e0, NULL, NULL)");
}
@@ -3810,8 +3810,8 @@ public void testBasicAnalyze()
" (NULL, NULL, NULL, NULL, 5e0, NULL, NULL)")
: ("VALUES " +
" ('regionkey', NULL, NULL, 0e0, NULL, '0', '4'), " +
- " ('name', " + (format == PARQUET ? "234e0" : "NULL") + ", NULL, 0e0, NULL, NULL, NULL), " +
- " ('comment', " + (format == PARQUET ? "639e0" : "NULL") + ", NULL, 0e0, NULL, NULL, NULL), " +
+ " ('name', " + (format == PARQUET ? "224e0" : "NULL") + ", NULL, 0e0, NULL, NULL, NULL), " +
+ " ('comment', " + (format == PARQUET ? "626e0" : "NULL") + ", NULL, 0e0, NULL, NULL, NULL), " +
" (NULL, NULL, NULL, NULL, 5e0, NULL, NULL)");
String statsWithNdv = format == AVRO
@@ -3822,8 +3822,8 @@ public void testBasicAnalyze()
" (NULL, NULL, NULL, NULL, 5e0, NULL, NULL)")
: ("VALUES " +
" ('regionkey', NULL, 5e0, 0e0, NULL, '0', '4'), " +
- " ('name', " + (format == PARQUET ? "234e0" : "NULL") + ", 5e0, 0e0, NULL, NULL, NULL), " +
- " ('comment', " + (format == PARQUET ? "639e0" : "NULL") + ", 5e0, 0e0, NULL, NULL, NULL), " +
+ " ('name', " + (format == PARQUET ? "224e0" : "NULL") + ", 5e0, 0e0, NULL, NULL, NULL), " +
+ " ('comment', " + (format == PARQUET ? "626e0" : "NULL") + ", 5e0, 0e0, NULL, NULL, NULL), " +
" (NULL, NULL, NULL, NULL, 5e0, NULL, NULL)");
assertThat(query(defaultSession, "SHOW STATS FOR " + tableName)).skippingTypesCheck().matches(statsWithNdv);
@@ -4310,8 +4310,8 @@ public void testCreateNestedPartitionedTable()
" ('dbl', NULL, 1e0, 0e0, NULL, '1.0', '1.0'), " +
" ('mp', NULL, NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " +
" ('dec', NULL, 1e0, 0e0, NULL, '1.0', '1.0'), " +
- " ('vc', " + (format == PARQUET ? "116e0" : "NULL") + ", 1e0, 0e0, NULL, NULL, NULL), " +
- " ('vb', " + (format == PARQUET ? "77e0" : "NULL") + ", 1e0, 0e0, NULL, NULL, NULL), " +
+ " ('vc', " + (format == PARQUET ? "105e0" : "NULL") + ", 1e0, 0e0, NULL, NULL, NULL), " +
+ " ('vb', " + (format == PARQUET ? "71e0" : "NULL") + ", 1e0, 0e0, NULL, NULL, NULL), " +
" ('ts', NULL, 1e0, 0e0, NULL, '2021-07-24 02:43:57.348000', " + (format == ORC ? "'2021-07-24 02:43:57.348999'" : "'2021-07-24 02:43:57.348000'") + "), " +
" ('tstz', NULL, 1e0, 0e0, NULL, '2021-07-24 02:43:57.348 UTC', '2021-07-24 02:43:57.348 UTC'), " +
" ('str', NULL, NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " +
@@ -4373,7 +4373,7 @@ public void testCreateNestedPartitionedTable()
" ('dbl', NULL, 1e0, 0e0, NULL, '1.0', '1.0'), " +
" ('mp', NULL, NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " +
" ('dec', NULL, 1e0, 0e0, NULL, '1.0', '1.0'), " +
- " ('vc', " + (format == PARQUET ? "116e0" : "NULL") + ", 1e0, 0e0, NULL, NULL, NULL), " +
+ " ('vc', " + (format == PARQUET ? "105e0" : "NULL") + ", 1e0, 0e0, NULL, NULL, NULL), " +
" ('str', NULL, NULL, " + (format == ORC ? "0e0" : "NULL") + ", NULL, NULL, NULL), " +
" (NULL, NULL, NULL, NULL, 1e0, NULL, NULL)");
}
@@ -4699,8 +4699,8 @@ public void testAllAvailableTypes()
" ('a_double', NULL, 1e0, 0.5e0, NULL, '1.0', '1.0'), " +
" ('a_short_decimal', NULL, 1e0, 0.5e0, NULL, '1.0', '1.0'), " +
" ('a_long_decimal', NULL, 1e0, 0.5e0, NULL, '11.0', '11.0'), " +
- " ('a_varchar', " + (format == PARQUET ? "234e0" : "NULL") + ", 1e0, 0.5e0, NULL, NULL, NULL), " +
- " ('a_varbinary', " + (format == PARQUET ? "114e0" : "NULL") + ", 1e0, 0.5e0, NULL, NULL, NULL), " +
+ " ('a_varchar', " + (format == PARQUET ? "213e0" : "NULL") + ", 1e0, 0.5e0, NULL, NULL, NULL), " +
+ " ('a_varbinary', " + (format == PARQUET ? "103e0" : "NULL") + ", 1e0, 0.5e0, NULL, NULL, NULL), " +
" ('a_date', NULL, 1e0, 0.5e0, NULL, '2021-07-24', '2021-07-24'), " +
" ('a_time', NULL, 1e0, 0.5e0, NULL, NULL, NULL), " +
" ('a_timestamp', NULL, 1e0, 0.5e0, NULL, " + (format == ORC ? "'2021-07-24 03:43:57.987000', '2021-07-24 03:43:57.987999'" : "'2021-07-24 03:43:57.987654', '2021-07-24 03:43:57.987654'") + "), " +
@@ -4753,8 +4753,8 @@ public void testAllAvailableTypes()
" ('a_double', NULL, 1e0, 0.5e0, NULL, '1.0', '1.0'), " +
" ('a_short_decimal', NULL, 1e0, 0.5e0, NULL, '1.0', '1.0'), " +
" ('a_long_decimal', NULL, 1e0, 0.5e0, NULL, '11.0', '11.0'), " +
- " ('a_varchar', " + (format == PARQUET ? "234e0" : "NULL") + ", 1e0, 0.5e0, NULL, NULL, NULL), " +
- " ('a_varbinary', " + (format == PARQUET ? "114e0" : "NULL") + ", 1e0, 0.5e0, NULL, NULL, NULL), " +
+ " ('a_varchar', " + (format == PARQUET ? "213e0" : "NULL") + ", 1e0, 0.5e0, NULL, NULL, NULL), " +
+ " ('a_varbinary', " + (format == PARQUET ? "103e0" : "NULL") + ", 1e0, 0.5e0, NULL, NULL, NULL), " +
" ('a_date', NULL, 1e0, 0.5e0, NULL, '2021-07-24', '2021-07-24'), " +
" ('a_time', NULL, 1e0, 0.5e0, NULL, NULL, NULL), " +
" ('a_timestamp', NULL, 1e0, 0.5e0, NULL, " + (format == ORC ? "'2021-07-24 03:43:57.987000', '2021-07-24 03:43:57.987999'" : "'2021-07-24 03:43:57.987654', '2021-07-24 03:43:57.987654'") + "), " +
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergStatistics.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergStatistics.java
index 6b87934499f4..76789ca1cfc5 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergStatistics.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergStatistics.java
@@ -64,8 +64,8 @@ public void testAnalyze(boolean collectOnStatsOnWrites)
VALUES
('nationkey', null, 25, 0, null, '0', '24'),
('regionkey', null, 5, 0, null, '0', '4'),
- ('comment', 2178.0, 25, 0, null, null, null),
- ('name', 594.0, 25, 0, null, null, null),
+ ('comment', 2162.0, 25, 0, null, null, null),
+ ('name', 583.0, 25, 0, null, null, null),
(null, null, null, null, 25, null, null)""";
if (collectOnStatsOnWrites) {
@@ -78,8 +78,8 @@ public void testAnalyze(boolean collectOnStatsOnWrites)
VALUES
('nationkey', null, null, 0, null, '0', '24'),
('regionkey', null, null, 0, null, '0', '4'),
- ('comment', 2178.0, null, 0, null, null, null),
- ('name', 594.0, null, 0, null, null, null),
+ ('comment', 2162.0, null, 0, null, null, null),
+ ('name', 583.0, null, 0, null, null, null),
(null, null, null, null, 25, null, null)""");
}
@@ -96,8 +96,8 @@ public void testAnalyze(boolean collectOnStatsOnWrites)
VALUES
('nationkey', null, 25, 0, null, '0', '24'),
('regionkey', null, 5, 0, null, '0', '4'),
- ('comment', 4357.0, 25, 0, null, null, null),
- ('name', 1188.0, 25, 0, null, null, null),
+ ('comment', 4325.0, 25, 0, null, null, null),
+ ('name', 1166.0, 25, 0, null, null, null),
(null, null, null, null, 50, null, null)""";
assertUpdate("ANALYZE " + tableName);
assertQuery("SHOW STATS FOR " + tableName, goodStatsAfterFirstInsert);
@@ -108,8 +108,8 @@ public void testAnalyze(boolean collectOnStatsOnWrites)
VALUES
('nationkey', null, 50, 0, null, '0', '49'),
('regionkey', null, 10, 0, null, '0', '9'),
- ('comment', 6517.0, 50, 0, null, null, null),
- ('name', 1800.0, 50, 0, null, null, null),
+ ('comment', 6463.0, 50, 0, null, null, null),
+ ('name', 1768.0, 50, 0, null, null, null),
(null, null, null, null, 75, null, null)""";
if (collectOnStatsOnWrites) {
@@ -123,8 +123,8 @@ public void testAnalyze(boolean collectOnStatsOnWrites)
VALUES
('nationkey', null, 25, 0, null, '0', '49'),
('regionkey', null, 5, 0, null, '0', '9'),
- ('comment', 6517.0, 25, 0, null, null, null),
- ('name', 1800.0, 25, 0, null, null, null),
+ ('comment', 6463.0, 25, 0, null, null, null),
+ ('name', 1768.0, 25, 0, null, null, null),
(null, null, null, null, 75, null, null)""");
}
@@ -191,8 +191,8 @@ public void testAnalyzePartitioned(boolean collectOnStatsOnWrites)
VALUES
('nationkey', null, 25, 0, null, '0', '24'),
('regionkey', null, 5, 0, null, '0', '4'),
- ('comment', 3558.0, 25, 0, null, null, null),
- ('name', 1231.0, 25, 0, null, null, null),
+ ('comment', 3507.0, 25, 0, null, null, null),
+ ('name', 1182.0, 25, 0, null, null, null),
(null, null, null, null, 25, null, null)""";
if (collectOnStatsOnWrites) {
@@ -205,8 +205,8 @@ public void testAnalyzePartitioned(boolean collectOnStatsOnWrites)
VALUES
('nationkey', null, null, 0, null, '0', '24'),
('regionkey', null, null, 0, null, '0', '4'),
- ('comment', 3558.0, null, 0, null, null, null),
- ('name', 1231.0, null, 0, null, null, null),
+ ('comment', 3507.0, null, 0, null, null, null),
+ ('name', 1182.0, null, 0, null, null, null),
(null, null, null, null, 25, null, null)""");
}
@@ -223,8 +223,8 @@ public void testAnalyzePartitioned(boolean collectOnStatsOnWrites)
VALUES
('nationkey', null, 25, 0, null, '0', '24'),
('regionkey', null, 5, 0, null, '0', '4'),
- ('comment', 7117.0, 25, 0, null, null, null),
- ('name', 2462.0, 25, 0, null, null, null),
+ ('comment', 7014.0, 25, 0, null, null, null),
+ ('name', 2365.0, 25, 0, null, null, null),
(null, null, null, null, 50, null, null)""");
// insert modified rows
@@ -233,8 +233,8 @@ public void testAnalyzePartitioned(boolean collectOnStatsOnWrites)
VALUES
('nationkey', null, 50, 0, null, '0', '49'),
('regionkey', null, 10, 0, null, '0', '9'),
- ('comment', 10659.0, 50, 0, null, null, null),
- ('name', 3715.0, 50, 0, null, null, null),
+ ('comment', 10493.999999999998, 50, 0, null, null, null),
+ ('name', 3564.0000000000005, 50, 0, null, null, null),
(null, null, null, null, 75, null, null)""";
if (collectOnStatsOnWrites) {
@@ -248,8 +248,8 @@ public void testAnalyzePartitioned(boolean collectOnStatsOnWrites)
VALUES
('nationkey', null, 25, 0, null, '0', '49'),
('regionkey', null, 5, 0, null, '0', '9'),
- ('comment', 10659.0, 25, 0, null, null, null),
- ('name', 3715.0, 25, 0, null, null, null),
+ ('comment', 10493.999999999998, 25, 0, null, null, null),
+ ('name', 3564.0000000000005, 25, 0, null, null, null),
(null, null, null, null, 75, null, null)""");
}
@@ -300,8 +300,8 @@ public void testAnalyzeEmpty()
VALUES
('nationkey', null, 25, 0, null, '0', '24'),
('regionkey', null, 5, 0, null, '0', '4'),
- ('comment', 2178.0, 25, 0, null, null, null),
- ('name', 594.0, 25, 0, null, null, null),
+ ('comment', 2162.0, 25, 0, null, null, null),
+ ('name', 583.0, 25, 0, null, null, null),
(null, null, null, null, 25, null, null)""");
assertUpdate("DROP TABLE " + tableName);
@@ -329,7 +329,7 @@ public void testCollectStatisticsOnWrite(boolean collectOnStatsOnCreateTable, bo
('comment', %s, 7, 0, null, null, null),
('name', %s, 7, 0, null, null, null),
(null, null, null, null, 7, null, null)"""
- .formatted(partitioned ? "1328.0" : "954.9999999999999", partitioned ? "501.99999999999994" : "280.0")
+ .formatted(partitioned ? "1301.0" : "936.0", partitioned ? "469.0" : "270.0")
: """
VALUES
('nationkey', null, null, 0, null, '0', '9'),
@@ -337,7 +337,7 @@ public void testCollectStatisticsOnWrite(boolean collectOnStatsOnCreateTable, bo
('comment', %s, null, 0, null, null, null),
('name', %s, null, 0, null, null, null),
(null, null, null, null, 7, null, null)"""
- .formatted(partitioned ? "1328.0" : "954.9999999999999", partitioned ? "501.99999999999994" : "280.0"));
+ .formatted(partitioned ? "1301.0" : "936.0", partitioned ? "469.0" : "270.0"));
assertUpdate(withStatsOnWrite(getSession(), true), "INSERT INTO " + tableName + " SELECT * FROM tpch.sf1.nation WHERE nationkey >= 12 OR regionkey >= 3", 18);
assertQuery(
@@ -350,7 +350,7 @@ public void testCollectStatisticsOnWrite(boolean collectOnStatsOnCreateTable, bo
('comment', %s, 25, 0, null, null, null),
('name', %s, 25, 0, null, null, null),
(null, null, null, null, 25, null, null)"""
- .formatted(partitioned ? "4141.0" : "2659.0", partitioned ? "1533.0" : "745.0")
+ .formatted(partitioned ? "4058.0" : "2627.0", partitioned ? "1447.0" : "726.0")
: """
VALUES
('nationkey', null, null, 0, null, '0', '24'),
@@ -358,7 +358,7 @@ public void testCollectStatisticsOnWrite(boolean collectOnStatsOnCreateTable, bo
('comment', %s, null, 0, null, null, null),
('name', %s, null, 0, null, null, null),
(null, null, null, null, 25, null, null)"""
- .formatted(partitioned ? "4141.0" : "2659.0", partitioned ? "1533.0" : "745.0"));
+ .formatted(partitioned ? "4058.0" : "2627.0", partitioned ? "1447.0" : "726.0"));
assertUpdate("DROP TABLE " + tableName);
}
@@ -395,7 +395,7 @@ public void testCollectStatisticsOnWriteToEmptyTable(boolean collectOnStatsOnCre
('comment', %f, 25, 0, null, null, null),
('name', %f, 25, 0, null, null, null),
(null, null, null, null, 25, null, null)"""
- .formatted(partitioned ? 3558.0 : 2178.0, partitioned ? 1231.0 : 594.0));
 .formatted(partitioned ? 3507.0 : 2162.0, partitioned ? 1182.0 : 583.0));
assertUpdate("DROP TABLE " + tableName);
}
@@ -521,8 +521,8 @@ public void testAnalyzeSomeColumns()
VALUES
('nationkey', null, 25, 0, null, '0', '24'),
('regionkey', null, 5, 0, null, '0', '4'),
- ('comment', 2178.0, null, 0, null, null, null),
- ('name', 594.0, null, 0, null, null, null),
+ ('comment', 2162.0, null, 0, null, null, null),
+ ('name', 583.0, null, 0, null, null, null),
(null, null, null, null, 25, null, null)""");
// insert modified rows
@@ -536,8 +536,8 @@ public void testAnalyzeSomeColumns()
VALUES
('nationkey', null, 50, 0, null, '0', '49'),
('regionkey', null, 10, 0, null, '0', '9'),
- ('comment', 4471.0, null, 0, null, null, null),
- ('name', 1215.0, null, 0, null, null, null),
+ ('comment', 4441.0, null, 0, null, null, null),
+ ('name', 1193.0, null, 0, null, null, null),
(null, null, null, null, 50, null, null)""");
// drop stats
@@ -551,8 +551,8 @@ public void testAnalyzeSomeColumns()
VALUES
('nationkey', null, 50, 0, null, '0', '49'),
('regionkey', null, 10, 0, null, '0', '9'),
- ('comment', 4471.0, 50, 0, null, null, null),
- ('name', 1215.0, 50, 0, null, null, null),
+ ('comment', 4441.0, 50, 0, null, null, null),
+ ('name', 1193.0, 50, 0, null, null, null),
(null, null, null, null, 50, null, null)""");
// insert modified rows
@@ -565,8 +565,8 @@ public void testAnalyzeSomeColumns()
VALUES
('nationkey', null, 50, 0, null, '0', '74'),
('regionkey', null, 10, 0, null, '0', '14'),
- ('comment', 6746.999999999999, 50, 0, null, null, null),
- ('name', 1836.0, 50, 0, null, null, null),
+ ('comment', 6701.0, 50, 0, null, null, null),
+ ('name', 1803.0, 50, 0, null, null, null),
(null, null, null, null, 75, null, null)""");
// reanalyze with a subset of columns
@@ -577,8 +577,8 @@ public void testAnalyzeSomeColumns()
VALUES
('nationkey', null, 75, 0, null, '0', '74'),
('regionkey', null, 15, 0, null, '0', '14'),
- ('comment', 6746.999999999999, 50, 0, null, null, null), -- result of previous analyze
- ('name', 1836.0, 50, 0, null, null, null), -- result of previous analyze
+ ('comment', 6701.0, 50, 0, null, null, null), -- result of previous analyze
+ ('name', 1803.0, 50, 0, null, null, null), -- result of previous analyze
(null, null, null, null, 75, null, null)""");
// analyze all columns
@@ -589,8 +589,8 @@ public void testAnalyzeSomeColumns()
VALUES
('nationkey', null, 75, 0, null, '0', '74'),
('regionkey', null, 15, 0, null, '0', '14'),
- ('comment', 6746.999999999999, 75, 0, null, null, null),
- ('name', 1836.0, 75, 0, null, null, null),
+ ('comment', 6701.0, 75, 0, null, null, null),
+ ('name', 1803.0, 75, 0, null, null, null),
(null, null, null, null, 75, null, null)""");
assertUpdate("DROP TABLE " + tableName);
@@ -633,15 +633,15 @@ public void testDropExtendedStats()
VALUES
('nationkey', null, null, 0, null, '0', '24'),
('regionkey', null, null, 0, null, '0', '4'),
- ('comment', 2178.0, null, 0, null, null, null),
- ('name', 594.0, null, 0, null, null, null),
+ ('comment', 2162.0, null, 0, null, null, null),
+ ('name', 583.0, null, 0, null, null, null),
(null, null, null, null, 25, null, null)""";
String extendedStats = """
VALUES
('nationkey', null, 25, 0, null, '0', '24'),
('regionkey', null, 5, 0, null, '0', '4'),
- ('comment', 2178.0, 25, 0, null, null, null),
- ('name', 594.0, 25, 0, null, null, null),
+ ('comment', 2162.0, 25, 0, null, null, null),
+ ('name', 583.0, 25, 0, null, null, null),
(null, null, null, null, 25, null, null)""";
assertQuery("SHOW STATS FOR " + tableName, extendedStats);
@@ -671,8 +671,8 @@ public void testDropMissingStats()
VALUES
('nationkey', null, null, 0, null, '0', '24'),
('regionkey', null, null, 0, null, '0', '4'),
- ('comment', 2178.0, null, 0, null, null, null),
- ('name', 594.0, null, 0, null, null, null),
+ ('comment', 2162.0, null, 0, null, null, null),
+ ('name', 583.0, null, 0, null, null, null),
(null, null, null, null, 25, null, null)""");
assertUpdate("DROP TABLE " + tableName);
@@ -741,8 +741,8 @@ public void testAnalyzeAndRollbackToSnapshot()
VALUES
('nationkey', null, 25, 0, null, '0', '24'),
('regionkey', null, 5, 0, null, '0', '4'),
- ('comment', 2475.0, 25, 0, null, null, null),
- ('name', 726.0, 25, 0, null, null, null),
+ ('comment', 2448.0, 25, 0, null, null, null),
+ ('name', 704.0, 25, 0, null, null, null),
(null, null, null, null, 26, null, null)""");
assertUpdate(format("CALL system.rollback_to_snapshot('%s', '%s', %s)", schema, tableName, createSnapshot));
@@ -753,8 +753,8 @@ public void testAnalyzeAndRollbackToSnapshot()
VALUES
('nationkey', null, 25, 0, null, '0', '24'),
('regionkey', null, 5, 0, null, '0', '4'),
- ('comment', 2178.0, 25, 0, null, null, null),
- ('name', 594.0, 25, 0, null, null, null),
+ ('comment', 2162.0, 25, 0, null, null, null),
+ ('name', 583.0, 25, 0, null, null, null),
(null, null, null, null, 25, null, null)""");
assertUpdate("DROP TABLE " + tableName);
@@ -779,8 +779,8 @@ public void testAnalyzeAndDeleteOrphanFiles()
VALUES
('nationkey', null, 25, 0, null, '0', '24'),
('regionkey', null, 5, 0, null, '0', '4'),
- ('comment', 2178.0, 25, 0, null, null, null),
- ('name', 594.0, 25, 0, null, null, null),
+ ('comment', 2162.0, 25, 0, null, null, null),
+ ('name', 583.0, 25, 0, null, null, null),
(null, null, null, null, 25, null, null)""");
assertUpdate("DROP TABLE " + tableName);
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergV2.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergV2.java
index 17561a900c4c..ae310c3e1200 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergV2.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/TestIcebergV2.java
@@ -928,7 +928,7 @@ public void testFilesTable()
(0,
'PARQUET',
25L,
- JSON '{"1":141,"2":220,"3":99,"4":807}',
+ JSON '{"1":137,"2":216,"3":91,"4":801}',
JSON '{"1":25,"2":25,"3":25,"4":25}',
jSON '{"1":0,"2":0,"3":0,"4":0}',
jSON '{}',
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestIcebergS3AndGlueMetastoreTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestIcebergS3AndGlueMetastoreTest.java
index 6211bf406326..32fe42719a27 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestIcebergS3AndGlueMetastoreTest.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/glue/TestIcebergS3AndGlueMetastoreTest.java
@@ -136,7 +136,7 @@ private void testAnalyzeWithProvidedTableLocation(boolean partitioned, LocationP
('col_str', %s, 4.0, 0.0, null, null, null),
('col_int', null, 4.0, 0.0, null, 1, 4),
(null, null, null, null, 4.0, null, null)"""
- .formatted(partitioned ? "475.0" : "264.0");
+ .formatted(partitioned ? "432.0" : "243.0");
// Check extended statistics collection on write
assertQuery("SHOW STATS FOR " + tableName, expectedStatistics);
diff --git a/pom.xml b/pom.xml
index 12b9265d9651..58798fe3636e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -674,12 +674,19 @@
4.7.6
+
io.airlift
aircompressor
0.27
+
+ io.airlift
+ aircompressor-v3
+ 3.0
+
+
io.airlift
bytecode
diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveRedirectionToIceberg.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveRedirectionToIceberg.java
index c3ea7cd86325..3d7c7e55b009 100644
--- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveRedirectionToIceberg.java
+++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveRedirectionToIceberg.java
@@ -316,9 +316,9 @@ public void testShowStats()
assertThat(onTrino().executeQuery("SHOW STATS FOR " + hiveTableName))
.containsOnly(
row("nationkey", null, 25d, 0d, null, "0", "24"),
- row("name", 1231d, 25d, 0d, null, null, null),
+ row("name", 1182d, 25d, 0d, null, null, null),
row("regionkey", null, 5d, 0d, null, "0", "4"),
- row("comment", 3558d, 25d, 0d, null, null, null),
+ row("comment", 3507d, 25d, 0d, null, null, null),
row(null, null, null, null, 25d, null, null));
onTrino().executeQuery("DROP TABLE " + icebergTableName);
diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/iceberg/TestIcebergPartitionEvolution.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/iceberg/TestIcebergPartitionEvolution.java
index a5893d762efd..646c259b1d2d 100644
--- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/iceberg/TestIcebergPartitionEvolution.java
+++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/iceberg/TestIcebergPartitionEvolution.java
@@ -68,9 +68,9 @@ public void testDroppedPartitionField(boolean dropFirst)
assertThat(onTrino().executeQuery("SHOW STATS FOR test_dropped_partition_field"))
.containsOnly(
- row("a", 664.0, 3.0, 1. / 6, null, null, null),
- row("b", 666.0, 3.0, 1. / 6, null, null, null),
- row("c", 639.0, 4.0, 0., null, null, null),
+ row("a", 599.0, 3.0, 1. / 6, null, null, null),
+ row("b", 602.0, 3.0, 1. / 6, null, null, null),
+ row("c", 585.0, 4.0, 0., null, null, null),
row(null, null, null, null, 6., null, null));
assertThat(onTrino().executeQuery("SELECT column_name, data_type FROM information_schema.columns WHERE table_name = 'test_dropped_partition_field$partitions'"))