diff --git a/plugin/trino-pinot/pom.xml b/plugin/trino-pinot/pom.xml
index 18766206ee8b..67851fb0ceb0 100755
--- a/plugin/trino-pinot/pom.xml
+++ b/plugin/trino-pinot/pom.xml
@@ -13,7 +13,7 @@
Trino - Pinot connector
-        <dep.pinot.version>1.2.0</dep.pinot.version>
+        <dep.pinot.version>1.3.0</dep.pinot.version>
true
${air.test.jvm.additional-arguments.default}
@@ -22,10 +22,15 @@
+        <dependency>
+            <groupId>net.openhft</groupId>
+            <artifactId>chronicle-core</artifactId>
+            <version>2.27ea1</version>
+        </dependency>
         <dependency>
             <groupId>net.openhft</groupId>
             <artifactId>posix</artifactId>
-            <version>2.26ea1</version>
+            <version>2.27ea0</version>
@@ -301,6 +306,10 @@
             <exclusion>
                 <groupId>org.apache.logging.log4j</groupId>
                 <artifactId>log4j-1.2-api</artifactId>
             </exclusion>
+            <exclusion>
+                <groupId>org.apache.logging.log4j</groupId>
+                <artifactId>log4j-core</artifactId>
+            </exclusion>
             <exclusion>
                 <groupId>org.apache.logging.log4j</groupId>
                 <artifactId>log4j-slf4j-impl</artifactId>
             </exclusion>
@@ -374,7 +383,6 @@
         <dependency>
             <groupId>net.openhft</groupId>
             <artifactId>chronicle-core</artifactId>
-            <version>2.26ea1</version>
             <scope>runtime</scope>
         </dependency>
@@ -391,9 +399,8 @@
         <dependency>
-            <groupId>org.apache.httpcomponents</groupId>
-            <artifactId>httpcore</artifactId>
-            <version>4.4.16</version>
+            <groupId>org.apache.httpcomponents.core5</groupId>
+            <artifactId>httpcore5</artifactId>
             <scope>runtime</scope>
         </dependency>
diff --git a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/client/PinotDataFetcher.java b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/client/PinotDataFetcher.java
index 24d8c3c4a200..bf75a2b35f44 100644
--- a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/client/PinotDataFetcher.java
+++ b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/client/PinotDataFetcher.java
@@ -13,29 +13,24 @@
*/
package io.trino.plugin.pinot.client;
+import com.google.common.collect.ImmutableList;
import io.trino.plugin.pinot.PinotException;
import io.trino.plugin.pinot.PinotSplit;
import org.apache.pinot.common.datatable.DataTable;
import java.util.List;
-import java.util.Map;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicLong;
-import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.trino.plugin.pinot.PinotErrorCode.PINOT_EXCEPTION;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
-import static org.apache.pinot.common.datatable.DataTable.EXCEPTION_METADATA_KEY;
public interface PinotDataFetcher
{
default void checkExceptions(DataTable dataTable, PinotSplit split, String query)
{
-        List<String> exceptions = dataTable.getMetadata().entrySet().stream()
-                .filter(metadataEntry -> metadataEntry.getKey().startsWith(EXCEPTION_METADATA_KEY))
-                .map(Map.Entry::getValue)
-                .collect(toImmutableList());
+        List<String> exceptions = ImmutableList.copyOf(dataTable.getExceptions().values());
if (!exceptions.isEmpty()) {
throw new PinotException(PINOT_EXCEPTION, Optional.of(query), format("Encountered %d pinot exceptions for split %s: %s", exceptions.size(), split, exceptions));
}
diff --git a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/query/PinotTransformFunctionTypeResolver.java b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/query/PinotTransformFunctionTypeResolver.java
index 577598ede017..3f89ec1a110f 100644
--- a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/query/PinotTransformFunctionTypeResolver.java
+++ b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/query/PinotTransformFunctionTypeResolver.java
@@ -36,7 +36,7 @@ private PinotTransformFunctionTypeResolver() {}
{
         Map<String, TransformFunctionType> builder = new HashMap<>();
for (TransformFunctionType transformFunctionType : TransformFunctionType.values()) {
- for (String alias : transformFunctionType.getAlternativeNames()) {
+ for (String alias : transformFunctionType.getNames()) {
TransformFunctionType previousValue = builder.put(canonicalize(alias), transformFunctionType);
checkState(previousValue == null || previousValue == transformFunctionType, "Duplicate key with different values for alias '%s', transform function type '%s' and previous value '%s'", canonicalize(alias), transformFunctionType, previousValue);
}
@@ -53,7 +53,7 @@ public static Optional<TransformFunctionType> getTransformFunctionType(FunctionContext functionContext)
if (transformFunctionType != null) {
return Optional.of(transformFunctionType);
}
- if (FunctionRegistry.containsFunction(canonicalizedFunctionName)) {
+ if (FunctionRegistry.contains(canonicalizedFunctionName)) {
return Optional.of(SCALAR);
}
return Optional.empty();
diff --git a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestPinotConnectorSmokeTest.java b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestPinotConnectorSmokeTest.java
index 051b23845def..d2a906797efe 100644
--- a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestPinotConnectorSmokeTest.java
+++ b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestPinotConnectorSmokeTest.java
@@ -39,9 +39,8 @@
import org.apache.avro.generic.GenericRecordBuilder;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
-import org.apache.pinot.common.utils.TarGzCompressionUtils;
+import org.apache.pinot.common.utils.TarCompressionUtils;
import org.apache.pinot.segment.local.recordtransformer.CompositeTransformer;
-import org.apache.pinot.segment.local.recordtransformer.RecordTransformer;
import org.apache.pinot.segment.local.segment.creator.RecordReaderSegmentCreationDataSource;
import org.apache.pinot.segment.local.segment.creator.TransformPipeline;
import org.apache.pinot.segment.local.segment.creator.impl.SegmentIndexCreationDriverImpl;
@@ -53,7 +52,7 @@
import org.apache.pinot.spi.data.DateTimeFormatSpec;
import org.apache.pinot.spi.data.readers.GenericRow;
import org.apache.pinot.spi.data.readers.RecordReader;
-import org.apache.pinot.spi.recordenricher.RecordEnricherPipeline;
+import org.apache.pinot.spi.recordtransformer.RecordTransformer;
import org.apache.pinot.spi.utils.builder.TableNameBuilder;
import org.junit.jupiter.api.Test;
@@ -648,11 +647,11 @@ record = null;
return record;
};
SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
- driver.init(segmentGeneratorConfig, dataSource, new RecordEnricherPipeline(), new TransformPipeline(recordTransformer, null));
+ driver.init(segmentGeneratorConfig, dataSource, new TransformPipeline(recordTransformer, null));
driver.build();
File segmentOutputDirectory = driver.getOutputDirectory();
File tgzPath = new File(String.join(File.separator, outputDirectory, segmentOutputDirectory.getName() + ".tar.gz"));
- TarGzCompressionUtils.createTarGzFile(segmentOutputDirectory, tgzPath);
+ TarCompressionUtils.createCompressedTarFile(segmentOutputDirectory, tgzPath);
return Paths.get(tgzPath.getAbsolutePath());
}
catch (Exception e) {
diff --git a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestingPinotCluster.java b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestingPinotCluster.java
index c0fab7c3e7e2..08e46ca1f59f 100644
--- a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestingPinotCluster.java
+++ b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestingPinotCluster.java
@@ -25,10 +25,10 @@
import io.airlift.http.client.jetty.JettyHttpClient;
import io.airlift.json.JsonCodec;
import io.trino.plugin.pinot.auth.password.PinotPasswordAuthenticationProvider;
-import org.apache.http.Header;
-import org.apache.http.NameValuePair;
-import org.apache.http.message.BasicHeader;
-import org.apache.http.message.BasicNameValuePair;
+import org.apache.hc.core5.http.Header;
+import org.apache.hc.core5.http.NameValuePair;
+import org.apache.hc.core5.http.message.BasicHeader;
+import org.apache.hc.core5.http.message.BasicNameValuePair;
import org.apache.pinot.common.exception.HttpErrorStatusException;
import org.apache.pinot.common.utils.FileUploadDownloadClient;
import org.apache.pinot.common.utils.SimpleHttpResponse;