19 changes: 13 additions & 6 deletions plugin/trino-pinot/pom.xml
@@ -13,7 +13,7 @@
     <description>Trino - Pinot connector</description>
 
     <properties>
-        <dep.pinot.version>1.2.0</dep.pinot.version>
+        <dep.pinot.version>1.3.0</dep.pinot.version>
         <air.compiler.fail-warnings>true</air.compiler.fail-warnings>
         <!-- additional JVM flags required by chronicle-hft which is used by pinot-segment spi -->
         <air.test.jvm.additional-arguments>${air.test.jvm.additional-arguments.default}
@@ -22,10 +22,15 @@
 
     <dependencyManagement>
         <dependencies>
+            <dependency>
+                <groupId>net.openhft</groupId>
+                <artifactId>chronicle-core</artifactId>
+                <version>2.27ea1</version>
+            </dependency>
             <dependency>
                 <groupId>net.openhft</groupId>
                 <artifactId>posix</artifactId>
-                <version>2.26ea1</version>
+                <version>2.27ea0</version>
             </dependency>
 
             <dependency>
@@ -301,6 +306,10 @@
                     <groupId>org.apache.logging.log4j</groupId>
                     <artifactId>log4j-1.2-api</artifactId>
                 </exclusion>
+                <exclusion>
+                    <groupId>org.apache.logging.log4j</groupId>
+                    <artifactId>log4j-core</artifactId>
+                </exclusion>
                 <exclusion>
                     <groupId>org.apache.logging.log4j</groupId>
                     <artifactId>log4j-slf4j-impl</artifactId>
@@ -374,7 +383,6 @@
         <dependency>
             <groupId>net.openhft</groupId>
             <artifactId>chronicle-core</artifactId>
-            <version>2.26ea1</version>
             <scope>runtime</scope>
         </dependency>
 
@@ -391,9 +399,8 @@
         </dependency>
 
         <dependency>
-            <groupId>org.apache.httpcomponents</groupId>
-            <artifactId>httpcore</artifactId>
-            <version>4.4.16</version>
+            <groupId>org.apache.httpcomponents.core5</groupId>
+            <artifactId>httpcore5</artifactId>
             <scope>runtime</scope>
         </dependency>
 
@@ -13,29 +13,24 @@
  */
 package io.trino.plugin.pinot.client;
 
+import com.google.common.collect.ImmutableList;
 import io.trino.plugin.pinot.PinotException;
 import io.trino.plugin.pinot.PinotSplit;
 import org.apache.pinot.common.datatable.DataTable;
 
 import java.util.List;
-import java.util.Map;
 import java.util.Optional;
 import java.util.concurrent.atomic.AtomicLong;
 
-import static com.google.common.collect.ImmutableList.toImmutableList;
 import static io.trino.plugin.pinot.PinotErrorCode.PINOT_EXCEPTION;
 import static java.lang.String.format;
 import static java.util.Objects.requireNonNull;
-import static org.apache.pinot.common.datatable.DataTable.EXCEPTION_METADATA_KEY;
 
 public interface PinotDataFetcher
 {
     default void checkExceptions(DataTable dataTable, PinotSplit split, String query)
     {
-        List<String> exceptions = dataTable.getMetadata().entrySet().stream()
-                .filter(metadataEntry -> metadataEntry.getKey().startsWith(EXCEPTION_METADATA_KEY))
-                .map(Map.Entry::getValue)
-                .collect(toImmutableList());
+        List<String> exceptions = ImmutableList.copyOf(dataTable.getExceptions().values());
         if (!exceptions.isEmpty()) {
             throw new PinotException(PINOT_EXCEPTION, Optional.of(query), format("Encountered %d pinot exceptions for split %s: %s", exceptions.size(), split, exceptions));
         }
@@ -36,7 +36,7 @@ private PinotTransformFunctionTypeResolver() {}
     {
         Map<String, TransformFunctionType> builder = new HashMap<>();
         for (TransformFunctionType transformFunctionType : TransformFunctionType.values()) {
-            for (String alias : transformFunctionType.getAlternativeNames()) {
+            for (String alias : transformFunctionType.getNames()) {
                 TransformFunctionType previousValue = builder.put(canonicalize(alias), transformFunctionType);
                 checkState(previousValue == null || previousValue == transformFunctionType, "Duplicate key with different values for alias '%s', transform function type '%s' and previous value '%s'", canonicalize(alias), transformFunctionType, previousValue);
             }
@@ -53,7 +53,7 @@ public static Optional<TransformFunctionType> getTransformFunctionType(FunctionC
         if (transformFunctionType != null) {
             return Optional.of(transformFunctionType);
         }
-        if (FunctionRegistry.containsFunction(canonicalizedFunctionName)) {
+        if (FunctionRegistry.contains(canonicalizedFunctionName)) {
             return Optional.of(SCALAR);
         }
         return Optional.empty();
@@ -39,9 +39,8 @@
 import org.apache.avro.generic.GenericRecordBuilder;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.common.serialization.StringSerializer;
-import org.apache.pinot.common.utils.TarGzCompressionUtils;
+import org.apache.pinot.common.utils.TarCompressionUtils;
 import org.apache.pinot.segment.local.recordtransformer.CompositeTransformer;
-import org.apache.pinot.segment.local.recordtransformer.RecordTransformer;
 import org.apache.pinot.segment.local.segment.creator.RecordReaderSegmentCreationDataSource;
 import org.apache.pinot.segment.local.segment.creator.TransformPipeline;
 import org.apache.pinot.segment.local.segment.creator.impl.SegmentIndexCreationDriverImpl;
@@ -53,7 +52,7 @@
 import org.apache.pinot.spi.data.DateTimeFormatSpec;
 import org.apache.pinot.spi.data.readers.GenericRow;
 import org.apache.pinot.spi.data.readers.RecordReader;
-import org.apache.pinot.spi.recordenricher.RecordEnricherPipeline;
+import org.apache.pinot.spi.recordtransformer.RecordTransformer;
 import org.apache.pinot.spi.utils.builder.TableNameBuilder;
 import org.junit.jupiter.api.Test;
 
@@ -648,11 +647,11 @@ record = null;
                 return record;
             };
             SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
-            driver.init(segmentGeneratorConfig, dataSource, new RecordEnricherPipeline(), new TransformPipeline(recordTransformer, null));
+            driver.init(segmentGeneratorConfig, dataSource, new TransformPipeline(recordTransformer, null));
             driver.build();
             File segmentOutputDirectory = driver.getOutputDirectory();
             File tgzPath = new File(String.join(File.separator, outputDirectory, segmentOutputDirectory.getName() + ".tar.gz"));
-            TarGzCompressionUtils.createTarGzFile(segmentOutputDirectory, tgzPath);
+            TarCompressionUtils.createCompressedTarFile(segmentOutputDirectory, tgzPath);
             return Paths.get(tgzPath.getAbsolutePath());
         }
         catch (Exception e) {
@@ -25,10 +25,10 @@
 import io.airlift.http.client.jetty.JettyHttpClient;
 import io.airlift.json.JsonCodec;
 import io.trino.plugin.pinot.auth.password.PinotPasswordAuthenticationProvider;
-import org.apache.http.Header;
-import org.apache.http.NameValuePair;
-import org.apache.http.message.BasicHeader;
-import org.apache.http.message.BasicNameValuePair;
+import org.apache.hc.core5.http.Header;
+import org.apache.hc.core5.http.NameValuePair;
+import org.apache.hc.core5.http.message.BasicHeader;
+import org.apache.hc.core5.http.message.BasicNameValuePair;
 import org.apache.pinot.common.exception.HttpErrorStatusException;
 import org.apache.pinot.common.utils.FileUploadDownloadClient;
 import org.apache.pinot.common.utils.SimpleHttpResponse;