@@ -48,7 +48,7 @@ public TestAccumuloClient()

Connector connector = AccumuloQueryRunner.getAccumuloConnector();
config.setZooKeepers(connector.getInstance().getZooKeepers());
-zooKeeperMetadataManager = new ZooKeeperMetadataManager(config, createTestFunctionAndTypeManager());
+zooKeeperMetadataManager = new ZooKeeperMetadataManager(config, createTestFunctionAndTypeManager().getFunctionAndTypeResolver());
client = new AccumuloClient(connector, config, zooKeeperMetadataManager, new AccumuloTableManager(connector), new IndexLookup(connector, new ColumnCardinalityCache(connector, config)));
}
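
The hunk above is the pattern this PR applies everywhere: call sites that previously received the full FunctionAndTypeManager now receive the narrower FunctionAndTypeResolver obtained from getFunctionAndTypeResolver(). A minimal sketch of the before/after shape, assuming upstream Presto packages (MyComponent and createForTesting are hypothetical names, not part of this PR):

import com.facebook.presto.metadata.FunctionAndTypeManager;
import com.facebook.presto.sql.analyzer.FunctionAndTypeResolver;

import static com.facebook.presto.metadata.FunctionAndTypeManager.createTestFunctionAndTypeManager;

public class MyComponent
{
    // Depend on the narrow resolver interface instead of the full manager.
    private final FunctionAndTypeResolver functionAndTypeResolver;

    public MyComponent(FunctionAndTypeResolver functionAndTypeResolver)
    {
        this.functionAndTypeResolver = functionAndTypeResolver;
    }

    public static MyComponent createForTesting()
    {
        // Before this PR: new MyComponent(createTestFunctionAndTypeManager())
        FunctionAndTypeManager functionAndTypeManager = createTestFunctionAndTypeManager();
        return new MyComponent(functionAndTypeManager.getFunctionAndTypeResolver());
    }
}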

@@ -90,7 +90,7 @@ public void setUp()
// Initialize the Arrow allocator
allocator = new RootAllocator(Integer.MAX_VALUE);
logger.debug("Allocator initialized: %s", allocator.getName());
-arrowBlockBuilder = new ArrowBlockBuilder(FUNCTION_AND_TYPE_MANAGER);
+arrowBlockBuilder = new ArrowBlockBuilder(FUNCTION_AND_TYPE_MANAGER.getFunctionAndTypeResolver());
}

@AfterClass
@@ -102,7 +102,7 @@ public TestJdbcComputePushdown()
DeterminismEvaluator determinismEvaluator = new RowExpressionDeterminismEvaluator(functionAndTypeManager);

this.jdbcComputePushdown = new JdbcComputePushdown(
-functionAndTypeManager,
+functionAndTypeManager.getFunctionAndTypeResolver(),
functionResolution,
determinismEvaluator,
(ConnectorSession session) -> new RowExpressionOptimizer(METADATA),
@@ -94,7 +94,7 @@ private JsonCodec<DeltaTableHandle> getJsonCodec()
binder.install(new HandleJsonModule());
configBinder(binder).bindConfig(FeaturesConfig.class);
FunctionAndTypeManager functionAndTypeManager = createTestFunctionAndTypeManager();
-binder.bind(TypeManager.class).toInstance(functionAndTypeManager);
+binder.bind(TypeManager.class).toInstance(functionAndTypeManager.getFunctionAndTypeResolver());
jsonBinder(binder).addDeserializerBinding(Type.class).to(TypeDeserializer.class);
newSetBinder(binder, Type.class);
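
The TypeManager binding just above works because FunctionAndTypeResolver implements the common TypeManager interface, so the module's TypeDeserializer can resolve types through the resolver alone. A hedged sketch of that substitution (parseTypeSignature and the package locations are assumptions from upstream Presto, not shown in this diff):

import com.facebook.presto.common.type.Type;
import com.facebook.presto.common.type.TypeManager;

import static com.facebook.presto.common.type.TypeSignature.parseTypeSignature;
import static com.facebook.presto.metadata.FunctionAndTypeManager.createTestFunctionAndTypeManager;

public final class ResolverAsTypeManager
{
    private ResolverAsTypeManager() {}

    public static void main(String[] args)
    {
        // The resolver satisfies the TypeManager contract, which is all
        // that Type deserialization needs to turn "bigint" into a Type.
        TypeManager typeManager = createTestFunctionAndTypeManager().getFunctionAndTypeResolver();
        Type type = typeManager.getType(parseTypeSignature("bigint"));
        System.out.println(type.getDisplayName());
    }
}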

@@ -40,7 +40,7 @@ private void testProject(String sqlExpression, String expectedDruidExpression, S
{
RowExpression pushDownExpression = getRowExpression(sqlExpression, sessionHolder);
String actualDruidExpression = pushDownExpression.accept(new DruidProjectExpressionConverter(
-functionAndTypeManager,
+functionAndTypeResolver,
standardFunctionResolution),
testInput).getDefinition();
assertEquals(actualDruidExpression, expectedDruidExpression);
@@ -78,8 +78,8 @@ private void testFilter(String sqlExpression, String expectedDruidExpression, Se
{
RowExpression pushDownExpression = getRowExpression(sqlExpression, sessionHolder);
String actualDruidExpression = pushDownExpression.accept(new DruidFilterExpressionConverter(
-functionAndTypeManager,
-functionAndTypeManager,
+functionAndTypeResolver,
+functionAndTypeResolver,
standardFunctionResolution,
sessionHolder.getConnectorSession()),
testInputFunction).getDefinition();
@@ -91,8 +91,8 @@ private void testFilterUnsupported(String sqlExpression, SessionHolder sessionHo
try {
RowExpression pushDownExpression = getRowExpression(sqlExpression, sessionHolder);
String actualDruidExpression = pushDownExpression.accept(new DruidFilterExpressionConverter(
-functionAndTypeManager,
-functionAndTypeManager,
+functionAndTypeResolver,
+functionAndTypeResolver,
standardFunctionResolution,
sessionHolder.getConnectorSession()),
testInputFunction).getDefinition();
@@ -142,8 +142,8 @@ private void assertPlanMatch(PlanNode actual, PlanMatchPattern expected, TypePro

private PlanNode getOptimizedPlan(PlanBuilder planBuilder, PlanNode originalPlan)
{
-DruidQueryGenerator druidQueryGenerator = new DruidQueryGenerator(functionAndTypeManager, functionAndTypeManager, standardFunctionResolution);
-DruidPlanOptimizer optimizer = new DruidPlanOptimizer(druidQueryGenerator, functionAndTypeManager, new RowExpressionDeterminismEvaluator(functionAndTypeManager), functionAndTypeManager, standardFunctionResolution);
+DruidQueryGenerator druidQueryGenerator = new DruidQueryGenerator(functionAndTypeResolver, functionAndTypeResolver, standardFunctionResolution);
+DruidPlanOptimizer optimizer = new DruidPlanOptimizer(druidQueryGenerator, functionAndTypeResolver, new RowExpressionDeterminismEvaluator(functionAndTypeManager), functionAndTypeResolver, standardFunctionResolution);
return optimizer.optimize(originalPlan, defaultSessionHolder.getConnectorSession(), new VariableAllocator(), planBuilder.getIdAllocator());
}

@@ -36,6 +36,7 @@
import com.facebook.presto.spi.relation.RowExpression;
import com.facebook.presto.spi.relation.VariableReferenceExpression;
import com.facebook.presto.sql.ExpressionUtils;
+import com.facebook.presto.sql.analyzer.FunctionAndTypeResolver;
import com.facebook.presto.sql.parser.ParsingOptions;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.planner.TypeProvider;
@@ -74,7 +75,8 @@
public class TestDruidQueryBase
{
protected static final FunctionAndTypeManager functionAndTypeManager = createTestFunctionAndTypeManager();
-protected static final StandardFunctionResolution standardFunctionResolution = new FunctionResolution(functionAndTypeManager.getFunctionAndTypeResolver());
+protected static final FunctionAndTypeResolver functionAndTypeResolver = functionAndTypeManager.getFunctionAndTypeResolver();
+protected static final StandardFunctionResolution standardFunctionResolution = new FunctionResolution(functionAndTypeResolver);

protected static ConnectorId druidConnectorId = new ConnectorId("id");
protected static DruidTableHandle realtimeOnlyTable = new DruidTableHandle("schema", "realtimeOnly", Optional.empty());
@@ -39,7 +39,7 @@ private void testDQL(
Map<String, String> outputVariables)
{
PlanNode planNode = planBuilderConsumer.apply(createPlanBuilder(sessionHolder));
-DruidQueryGenerator.DruidQueryGeneratorResult druidQueryGeneratorResult = new DruidQueryGenerator(functionAndTypeManager, functionAndTypeManager, standardFunctionResolution).generate(planNode, sessionHolder.getConnectorSession()).get();
+DruidQueryGenerator.DruidQueryGeneratorResult druidQueryGeneratorResult = new DruidQueryGenerator(functionAndTypeResolver, functionAndTypeResolver, standardFunctionResolution).generate(planNode, sessionHolder.getConnectorSession()).get();
if (expectedDQL.contains("__expressions__")) {
String expressions = planNode.getOutputVariables().stream().map(v -> outputVariables.get(v.getName())).filter(v -> v != null).collect(Collectors.joining(", "));
expectedDQL = expectedDQL.replace("__expressions__", expressions);
@@ -86,15 +86,15 @@ public void testSimpleSelectWithFilterLimit()
{
testDQL(
planBuilder -> limit(
-planBuilder,
-30L,
-project(
-planBuilder,
-filter(
-planBuilder,
-tableScan(planBuilder, druidTable, regionId, city, fare, secondsSinceEpoch),
-getRowExpression("secondssinceepoch > 20", defaultSessionHolder)),
-ImmutableList.of("city", "secondssinceepoch"))),
+planBuilder,
+30L,
+project(
+planBuilder,
+filter(
+planBuilder,
+tableScan(planBuilder, druidTable, regionId, city, fare, secondsSinceEpoch),
+getRowExpression("secondssinceepoch > 20", defaultSessionHolder)),
+ImmutableList.of("city", "secondssinceepoch"))),
"SELECT \"city\", \"secondsSinceEpoch\" FROM \"realtimeOnly\" WHERE (\"secondsSinceEpoch\" > 20) LIMIT 30");
}

@@ -81,7 +81,7 @@
import static com.facebook.presto.hive.HiveFileSystemTestUtils.getSplitsCount;
import static com.facebook.presto.hive.HiveTestUtils.DO_NOTHING_DIRECTORY_LISTER;
import static com.facebook.presto.hive.HiveTestUtils.FILTER_STATS_CALCULATOR_SERVICE;
-import static com.facebook.presto.hive.HiveTestUtils.FUNCTION_AND_TYPE_MANAGER;
+import static com.facebook.presto.hive.HiveTestUtils.FUNCTION_AND_TYPE_RESOLVER;
import static com.facebook.presto.hive.HiveTestUtils.FUNCTION_RESOLUTION;
import static com.facebook.presto.hive.HiveTestUtils.ROW_EXPRESSION_SERVICE;
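
The new FUNCTION_AND_TYPE_RESOLVER static import implies HiveTestUtils now exposes the resolver as a constant alongside FUNCTION_AND_TYPE_MANAGER. A plausible one-line definition, not shown in this diff, so treat it as an assumption:

public static final FunctionAndTypeResolver FUNCTION_AND_TYPE_RESOLVER = FUNCTION_AND_TYPE_MANAGER.getFunctionAndTypeResolver();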
import static com.facebook.presto.hive.HiveTestUtils.getDefaultHiveAggregatedPageSourceFactories;
Expand Down Expand Up @@ -136,7 +136,7 @@ public S3SelectTestHelper(String host,

HiveCluster hiveCluster = new TestingHiveCluster(metastoreClientConfig, host, port);
executor = newCachedThreadPool(daemonThreadsNamed("hive-%s"));
-HivePartitionManager hivePartitionManager = new HivePartitionManager(FUNCTION_AND_TYPE_MANAGER, config);
+HivePartitionManager hivePartitionManager = new HivePartitionManager(FUNCTION_AND_TYPE_RESOLVER, config);

S3ConfigurationUpdater s3Config = new PrestoS3ConfigurationUpdater(new HiveS3Config()
.setS3AwsAccessKey(awsAccessKey)
@@ -159,7 +159,7 @@ public S3SelectTestHelper(String host,
hdfsEnvironment,
hivePartitionManager,
newDirectExecutorService(),
-FUNCTION_AND_TYPE_MANAGER,
+FUNCTION_AND_TYPE_RESOLVER,
locationService,
FUNCTION_RESOLUTION,
ROW_EXPRESSION_SERVICE,
@@ -185,7 +185,7 @@ public S3SelectTestHelper(String host,
hdfsEnvironment,
new CachingDirectoryLister(new HadoopDirectoryLister(), new HiveClientConfig()),
new BoundedExecutor(executor, config.getMaxSplitIteratorThreads()),
-new HiveCoercionPolicy(FUNCTION_AND_TYPE_MANAGER),
+new HiveCoercionPolicy(FUNCTION_AND_TYPE_RESOLVER),
new CounterStat(),
config.getMaxOutstandingSplits(),
config.getMaxOutstandingSplitsSize(),
@@ -203,7 +203,7 @@ public S3SelectTestHelper(String host,
getDefaultHiveBatchPageSourceFactories(config, metastoreClientConfig),
getDefaultHiveSelectivePageSourceFactories(config, metastoreClientConfig),
getDefaultHiveAggregatedPageSourceFactories(config, metastoreClientConfig),
-FUNCTION_AND_TYPE_MANAGER,
+FUNCTION_AND_TYPE_RESOLVER,
ROW_EXPRESSION_SERVICE);
}

@@ -251,7 +251,7 @@
import static com.facebook.presto.hive.HiveTableProperties.STORAGE_FORMAT_PROPERTY;
import static com.facebook.presto.hive.HiveTestUtils.DO_NOTHING_DIRECTORY_LISTER;
import static com.facebook.presto.hive.HiveTestUtils.FILTER_STATS_CALCULATOR_SERVICE;
-import static com.facebook.presto.hive.HiveTestUtils.FUNCTION_AND_TYPE_MANAGER;
+import static com.facebook.presto.hive.HiveTestUtils.FUNCTION_AND_TYPE_RESOLVER;
import static com.facebook.presto.hive.HiveTestUtils.FUNCTION_RESOLUTION;
import static com.facebook.presto.hive.HiveTestUtils.HDFS_ENVIRONMENT;
import static com.facebook.presto.hive.HiveTestUtils.METADATA;
@@ -1008,7 +1008,7 @@ protected final void setup(String databaseName, HiveClientConfi

setupHive(connectorId.toString(), databaseName, hiveClientConfig.getTimeZone());

-hivePartitionManager = new HivePartitionManager(FUNCTION_AND_TYPE_MANAGER, hiveClientConfig);
+hivePartitionManager = new HivePartitionManager(FUNCTION_AND_TYPE_RESOLVER, hiveClientConfig);
metastoreClient = hiveMetastore;
HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(hiveClientConfig, metastoreClientConfig), ImmutableSet.of(), hiveClientConfig);
hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, metastoreClientConfig, new NoHdfsAuthentication());
@@ -1027,7 +1027,7 @@ protected final void setup(String databaseName, HiveClientConfi
getHiveClientConfig().getMaxPartitionsPerScan(),
false,
10_000,
-FUNCTION_AND_TYPE_MANAGER,
+FUNCTION_AND_TYPE_RESOLVER,
locationService,
FUNCTION_RESOLUTION,
ROW_EXPRESSION_SERVICE,
@@ -1056,7 +1056,7 @@ protected final void setup(String databaseName, HiveClientConfi
hdfsEnvironment,
new CachingDirectoryLister(new HadoopDirectoryLister(), new HiveClientConfig()),
directExecutor(),
-new HiveCoercionPolicy(FUNCTION_AND_TYPE_MANAGER),
+new HiveCoercionPolicy(FUNCTION_AND_TYPE_RESOLVER),
new CounterStat(),
100,
hiveClientConfig.getMaxOutstandingSplitsSize(),
@@ -1073,7 +1073,7 @@ protected final void setup(String databaseName, HiveClientConfi
PAGE_SORTER,
metastoreClient,
new GroupByHashPageIndexerFactory(JOIN_COMPILER),
-FUNCTION_AND_TYPE_MANAGER,
+FUNCTION_AND_TYPE_RESOLVER,
getHiveClientConfig(),
getMetastoreClientConfig(),
getSortingFileWriterConfig(),
@@ -1093,7 +1093,7 @@ protected final void setup(String databaseName, HiveClientConfi
getDefaultHiveBatchPageSourceFactories(hiveClientConfig, metastoreClientConfig),
getDefaultHiveSelectivePageSourceFactories(hiveClientConfig, metastoreClientConfig),
getDefaultHiveAggregatedPageSourceFactories(hiveClientConfig, metastoreClientConfig),
-FUNCTION_AND_TYPE_MANAGER,
+FUNCTION_AND_TYPE_RESOLVER,
ROW_EXPRESSION_SERVICE);
}

@@ -1570,7 +1570,7 @@ protected void doTestMismatchSchemaTable(
ROW_EXPRESSION_SERVICE,
FUNCTION_RESOLUTION,
hivePartitionManager,
-METADATA.getFunctionAndTypeManager(),
+METADATA.getFunctionAndTypeManager().getFunctionAndTypeResolver(),
tableHandle,
predicate,
Optional.empty()).getLayout().getHandle();
@@ -2317,7 +2317,7 @@ private void doTestBucketedTableEvolution(HiveStorageFormat storageFormat, Schem
ROW_EXPRESSION_SERVICE,
FUNCTION_RESOLUTION,
hivePartitionManager,
-METADATA.getFunctionAndTypeManager(),
+METADATA.getFunctionAndTypeManager().getFunctionAndTypeResolver(),
tableHandle,
predicate,
Optional.empty()).getLayout().getHandle();
@@ -2675,7 +2675,7 @@ protected ConnectorTableLayout getTableLayout(ConnectorSession session, Connecto
ROW_EXPRESSION_SERVICE,
FUNCTION_RESOLUTION,
hivePartitionManager,
-METADATA.getFunctionAndTypeManager(),
+METADATA.getFunctionAndTypeManager().getFunctionAndTypeResolver(),
tableHandle,
TRUE_CONSTANT,
Optional.empty()).getLayout();
@@ -103,7 +103,7 @@
import static com.facebook.presto.hive.HiveManifestUtils.getFileSize;
import static com.facebook.presto.hive.HiveStorageFormat.DWRF;
import static com.facebook.presto.hive.HiveStorageFormat.ORC;
-import static com.facebook.presto.hive.HiveTestUtils.FUNCTION_AND_TYPE_MANAGER;
+import static com.facebook.presto.hive.HiveTestUtils.FUNCTION_AND_TYPE_RESOLVER;
import static com.facebook.presto.hive.HiveTestUtils.SESSION;
import static com.facebook.presto.hive.HiveTestUtils.mapType;
import static com.facebook.presto.hive.HiveUtil.isStructuralType;
@@ -510,7 +510,7 @@ public static FileSplit createTestFile(
List<Type> types = testColumns.stream()
.map(TestColumn::getType)
.map(HiveType::valueOf)
-.map(type -> type.getType(FUNCTION_AND_TYPE_MANAGER))
+.map(type -> type.getType(FUNCTION_AND_TYPE_RESOLVER))
.collect(toList());

PageBuilder pageBuilder = new PageBuilder(types);
@@ -692,7 +692,7 @@ protected void checkCursor(RecordCursor cursor, List<TestColumn> testColumns, in
for (int i = 0, testColumnsSize = testColumns.size(); i < testColumnsSize; i++) {
TestColumn testColumn = testColumns.get(i);

-Type type = HiveType.valueOf(testColumn.getObjectInspector().getTypeName()).getType(FUNCTION_AND_TYPE_MANAGER);
+Type type = HiveType.valueOf(testColumn.getObjectInspector().getTypeName()).getType(FUNCTION_AND_TYPE_RESOLVER);
Object fieldFromCursor = getFieldFromCursor(cursor, type, i);
if (fieldFromCursor == null) {
assertNull(testColumn.getExpectedValue(), String.format("Expected null for column %s", testColumn.getName()));