remove group-by v1
clintropolis committed Aug 18, 2023
1 parent b97cc45 commit 6e93044
Showing 115 changed files with 837 additions and 4,115 deletions.
@@ -52,13 +52,10 @@
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.groupby.GroupByQueryConfig;
-import org.apache.druid.query.groupby.GroupByQueryEngine;
import org.apache.druid.query.groupby.GroupByQueryQueryToolChest;
import org.apache.druid.query.groupby.GroupByQueryRunnerFactory;
+import org.apache.druid.query.groupby.GroupingEngine;
import org.apache.druid.query.groupby.ResultRow;
-import org.apache.druid.query.groupby.strategy.GroupByStrategySelector;
-import org.apache.druid.query.groupby.strategy.GroupByStrategyV1;
-import org.apache.druid.query.groupby.strategy.GroupByStrategyV2;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;
import org.apache.druid.segment.IndexIO;
@@ -346,11 +343,6 @@ public void setup() throws IOException
);
final GroupByQueryConfig config = new GroupByQueryConfig()
{
-@Override
-public String getDefaultStrategy()
-{
-return defaultStrategy;
-}

@Override
public int getBufferGrouperInitialBuckets()
@@ -385,27 +377,19 @@ public String getFormatString()
};

final Supplier<GroupByQueryConfig> configSupplier = Suppliers.ofInstance(config);
-final GroupByStrategySelector strategySelector = new GroupByStrategySelector(
+final GroupingEngine groupingEngine = new GroupingEngine(
+druidProcessingConfig,
configSupplier,
-new GroupByStrategyV1(
-configSupplier,
-new GroupByQueryEngine(configSupplier, bufferPool),
-QueryBenchmarkUtil.NOOP_QUERYWATCHER
-),
-new GroupByStrategyV2(
-druidProcessingConfig,
-configSupplier,
-bufferPool,
-mergePool,
-TestHelper.makeJsonMapper(),
-new ObjectMapper(new SmileFactory()),
-QueryBenchmarkUtil.NOOP_QUERYWATCHER
-)
+bufferPool,
+mergePool,
+TestHelper.makeJsonMapper(),
+new ObjectMapper(new SmileFactory()),
+QueryBenchmarkUtil.NOOP_QUERYWATCHER
);

factory = new GroupByQueryRunnerFactory(
-strategySelector,
-new GroupByQueryQueryToolChest(strategySelector)
+groupingEngine,
+new GroupByQueryQueryToolChest(groupingEngine)
);
}

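The hunk above shows the pattern repeated throughout this commit: the v1/v2 strategy selector is dropped and callers construct the grouping engine directly. A minimal before/after sketch, assembled only from lines visible in this diff (druidProcessingConfig, configSupplier, bufferPool, and mergePool are the benchmark's own fields; nothing outside the hunk is assumed):

// Before: a selector chose between the v1 and v2 strategies per query.
final GroupByStrategySelector strategySelector = new GroupByStrategySelector(
    configSupplier,
    new GroupByStrategyV1(
        configSupplier,
        new GroupByQueryEngine(configSupplier, bufferPool),
        QueryBenchmarkUtil.NOOP_QUERYWATCHER
    ),
    new GroupByStrategyV2(
        druidProcessingConfig,
        configSupplier,
        bufferPool,
        mergePool,
        TestHelper.makeJsonMapper(),
        new ObjectMapper(new SmileFactory()),
        QueryBenchmarkUtil.NOOP_QUERYWATCHER
    )
);
factory = new GroupByQueryRunnerFactory(strategySelector, new GroupByQueryQueryToolChest(strategySelector));

// After: the former v2 path is built directly as a GroupingEngine; there is no strategy to select.
final GroupingEngine groupingEngine = new GroupingEngine(
    druidProcessingConfig,
    configSupplier,
    bufferPool,
    mergePool,
    TestHelper.makeJsonMapper(),           // a JSON mapper
    new ObjectMapper(new SmileFactory()),  // a Smile mapper
    QueryBenchmarkUtil.NOOP_QUERYWATCHER
);
factory = new GroupByQueryRunnerFactory(groupingEngine, new GroupByQueryQueryToolChest(groupingEngine));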
--- next changed file ---
@@ -77,14 +77,11 @@
import org.apache.druid.query.expression.TestExprMacroTable;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.groupby.GroupByQueryConfig;
-import org.apache.druid.query.groupby.GroupByQueryEngine;
import org.apache.druid.query.groupby.GroupByQueryQueryToolChest;
import org.apache.druid.query.groupby.GroupByQueryRunnerFactory;
import org.apache.druid.query.groupby.GroupByQueryRunnerTest;
+import org.apache.druid.query.groupby.GroupingEngine;
import org.apache.druid.query.groupby.ResultRow;
-import org.apache.druid.query.groupby.strategy.GroupByStrategySelector;
-import org.apache.druid.query.groupby.strategy.GroupByStrategyV1;
-import org.apache.druid.query.groupby.strategy.GroupByStrategyV2;
import org.apache.druid.query.planning.DataSourceAnalysis;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;
@@ -287,11 +284,6 @@ public int getNumThreads()
GroupByQueryRunnerTest.DEFAULT_MAPPER,
new GroupByQueryConfig()
{
-@Override
-public String getDefaultStrategy()
-{
-return GroupByStrategySelector.STRATEGY_V2;
-}
},
processingConfig
)
@@ -364,25 +356,17 @@ private static GroupByQueryRunnerFactory makeGroupByQueryRunnerFactory(
bufferSupplier,
processingConfig.getNumMergeBuffers()
);
-final GroupByStrategySelector strategySelector = new GroupByStrategySelector(
+final GroupingEngine groupingEngine = new GroupingEngine(
+processingConfig,
configSupplier,
-new GroupByStrategyV1(
-configSupplier,
-new GroupByQueryEngine(configSupplier, bufferPool),
-QueryRunnerTestHelper.NOOP_QUERYWATCHER
-),
-new GroupByStrategyV2(
-processingConfig,
-configSupplier,
-bufferPool,
-mergeBufferPool,
-mapper,
-mapper,
-QueryRunnerTestHelper.NOOP_QUERYWATCHER
-)
+bufferPool,
+mergeBufferPool,
+mapper,
+mapper,
+QueryRunnerTestHelper.NOOP_QUERYWATCHER
);
-final GroupByQueryQueryToolChest toolChest = new GroupByQueryQueryToolChest(strategySelector);
-return new GroupByQueryRunnerFactory(strategySelector, toolChest);
+final GroupByQueryQueryToolChest toolChest = new GroupByQueryQueryToolChest(groupingEngine);
+return new GroupByQueryRunnerFactory(groupingEngine, toolChest);
}

@TearDown(Level.Trial)
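For readability, here is the tail of makeGroupByQueryRunnerFactory reassembled from the new-side lines of the hunk above (note that the same mapper instance is passed twice here; other call sites in this commit pass a JSON mapper and a Smile mapper in those two positions):

final GroupingEngine groupingEngine = new GroupingEngine(
    processingConfig,
    configSupplier,
    bufferPool,
    mergeBufferPool,
    mapper,
    mapper,
    QueryRunnerTestHelper.NOOP_QUERYWATCHER
);
final GroupByQueryQueryToolChest toolChest = new GroupByQueryQueryToolChest(groupingEngine);
return new GroupByQueryRunnerFactory(groupingEngine, toolChest);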
--- next changed file ---
@@ -63,15 +63,12 @@
import org.apache.druid.query.filter.BoundDimFilter;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.groupby.GroupByQueryConfig;
-import org.apache.druid.query.groupby.GroupByQueryEngine;
import org.apache.druid.query.groupby.GroupByQueryQueryToolChest;
import org.apache.druid.query.groupby.GroupByQueryRunnerFactory;
+import org.apache.druid.query.groupby.GroupingEngine;
import org.apache.druid.query.groupby.ResultRow;
import org.apache.druid.query.groupby.orderby.DefaultLimitSpec;
import org.apache.druid.query.groupby.orderby.OrderByColumnSpec;
-import org.apache.druid.query.groupby.strategy.GroupByStrategySelector;
-import org.apache.druid.query.groupby.strategy.GroupByStrategyV1;
-import org.apache.druid.query.groupby.strategy.GroupByStrategyV2;
import org.apache.druid.query.ordering.StringComparators;
import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;
@@ -139,9 +136,6 @@ public class GroupByBenchmark
@Param({"basic.A", "basic.nested"})
private String schemaAndQuery;

-@Param({"v1", "v2"})
-private String defaultStrategy;
-
@Param({"all", "day"})
private String queryGranularity;

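With group-by v1 removed, the "v1"/"v2" strategy axis disappears from the benchmark's parameter matrix. What remains, taken verbatim from the context lines above, is the schema/query axis and the granularity axis, so each benchmark now runs only against the single remaining grouping engine:

@Param({"basic.A", "basic.nested"})
private String schemaAndQuery;

@Param({"all", "day"})
private String queryGranularity;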
@@ -461,11 +455,6 @@ public void setup()
);
final GroupByQueryConfig config = new GroupByQueryConfig()
{
-@Override
-public String getDefaultStrategy()
-{
-return defaultStrategy;
-}

@Override
public int getBufferGrouperInitialBuckets()
@@ -500,27 +489,19 @@ public String getFormatString()
};

final Supplier<GroupByQueryConfig> configSupplier = Suppliers.ofInstance(config);
-final GroupByStrategySelector strategySelector = new GroupByStrategySelector(
+final GroupingEngine groupingEngine = new GroupingEngine(
+druidProcessingConfig,
configSupplier,
-new GroupByStrategyV1(
-configSupplier,
-new GroupByQueryEngine(configSupplier, bufferPool),
-QueryBenchmarkUtil.NOOP_QUERYWATCHER
-),
-new GroupByStrategyV2(
-druidProcessingConfig,
-configSupplier,
-bufferPool,
-mergePool,
-TestHelper.makeJsonMapper(),
-new ObjectMapper(new SmileFactory()),
-QueryBenchmarkUtil.NOOP_QUERYWATCHER
-)
+bufferPool,
+mergePool,
+TestHelper.makeJsonMapper(),
+new ObjectMapper(new SmileFactory()),
+QueryBenchmarkUtil.NOOP_QUERYWATCHER
);

factory = new GroupByQueryRunnerFactory(
-strategySelector,
-new GroupByQueryQueryToolChest(strategySelector)
+groupingEngine,
+new GroupByQueryQueryToolChest(groupingEngine)
);
}

--- next changed file ---
@@ -128,9 +128,6 @@ public Comparator<CompressedBigDecimal> getComparator()
@Override
public abstract AggregateCombiner<CompressedBigDecimal> makeAggregateCombiner();

-@Override
-public abstract List<AggregatorFactory> getRequiredColumns();
-
@Override
public abstract String toString();

--- next changed file ---
@@ -28,9 +28,6 @@
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.ColumnValueSelector;

-import java.util.Collections;
-import java.util.List;
-
public class CompressedBigDecimalMaxAggregatorFactory extends CompressedBigDecimalAggregatorFactoryBase
{
private static final byte CACHE_TYPE_ID = 0x37;
@@ -113,18 +110,6 @@ public AggregateCombiner<CompressedBigDecimal> makeAggregateCombiner()
return new CompressedBigDecimalMaxAggregateCombiner();
}

-@Override
-public List<AggregatorFactory> getRequiredColumns()
-{
-return Collections.singletonList(new CompressedBigDecimalMaxAggregatorFactory(
-fieldName,
-fieldName,
-size,
-scale,
-strictNumberParsing
-));
-}
-
@Override
public String toString()
{
--- next changed file ---
@@ -29,8 +29,6 @@
import org.apache.druid.segment.ColumnValueSelector;

import javax.annotation.Nonnull;
-import java.util.Collections;
-import java.util.List;


/**
@@ -118,18 +116,6 @@ public AggregateCombiner<CompressedBigDecimal> makeAggregateCombiner()
return new CompressedBigDecimalMinAggregateCombiner();
}

-@Override
-public List<AggregatorFactory> getRequiredColumns()
-{
-return Collections.singletonList(new CompressedBigDecimalMinAggregatorFactory(
-name,
-fieldName,
-size,
-scale,
-strictNumberParsing
-));
-}
-
@Override
public String toString()
{
--- next changed file ---
@@ -30,8 +30,6 @@

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
-import java.util.Collections;
-import java.util.List;

public class CompressedBigDecimalSumAggregatorFactory extends CompressedBigDecimalAggregatorFactoryBase
{
@@ -118,18 +116,6 @@ public AggregateCombiner<CompressedBigDecimal> makeAggregateCombiner()
return new CompressedBigDecimalSumAggregateCombiner();
}

-@Override
-public List<AggregatorFactory> getRequiredColumns()
-{
-return Collections.singletonList(new CompressedBigDecimalSumAggregatorFactory(
-name,
-fieldName,
-size,
-scale,
-strictNumberParsing
-));
-}
-
@Override
public String toString()
{
--- next changed file ---
@@ -26,7 +26,6 @@

import java.io.IOException;
import java.math.BigDecimal;
-import java.util.Arrays;


public class CompressedBigDecimalMaxFactoryTest extends CompressedBigDecimalFactoryTestBase
@@ -50,10 +49,6 @@ public void testCompressedBigDecimalMaxAggregatorFactory()
Assert.assertEquals("5", aggregatorFactory.deserialize(new BigDecimal(5)).toString());
Assert.assertEquals("5.0", aggregatorFactory.deserialize(5d).toString());
Assert.assertEquals("5", aggregatorFactory.deserialize("5").toString());
-Assert.assertEquals(
-"[CompressedBigDecimalMaxAggregatorFactory{name='fieldName', type='COMPLEX<compressedBigDecimal>', fieldName='fieldName', requiredFields='[fieldName]', size='9', scale='0', strictNumberParsing='false'}]",
-Arrays.toString(aggregatorFactory.getRequiredColumns().toArray())
-);
Assert.assertNull(aggregatorFactory.combine(null, null));
Assert.assertEquals("4", aggregatorFactory.combine(new BigDecimal(4), null).toString());
Assert.assertEquals("4", aggregatorFactory.combine(null, new BigDecimal(4)).toString());
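The deleted assertion above was this test's only use of getRequiredColumns(), which the commit removes from the aggregator factories. The factories still report their input columns through the existing requiredFields() accessor (its value is even visible in the asserted toString output, requiredFields='[fieldName]'). A small usage sketch, where aggregatorFactory is the instance under test and java.util.List is assumed to be imported:

// Enumerate the input columns the aggregator reads; for this factory that is ["fieldName"].
List<String> inputColumns = aggregatorFactory.requiredFields();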
--- next changed file ---
@@ -26,7 +26,6 @@

import java.io.IOException;
import java.math.BigDecimal;
-import java.util.Arrays;


public class CompressedBigDecimalMinFactoryTest extends CompressedBigDecimalFactoryTestBase
@@ -50,10 +49,7 @@ public void testCompressedBigDecimalMinAggregatorFactory()
Assert.assertEquals("5", aggregatorFactory.deserialize(new BigDecimal(5)).toString());
Assert.assertEquals("5.0", aggregatorFactory.deserialize(5d).toString());
Assert.assertEquals("5", aggregatorFactory.deserialize("5").toString());
-Assert.assertEquals(
-"[CompressedBigDecimalMinAggregatorFactory{name='name', type='COMPLEX<compressedBigDecimal>', fieldName='fieldName', requiredFields='[fieldName]', size='9', scale='0', strictNumberParsing='false'}]",
-Arrays.toString(aggregatorFactory.getRequiredColumns().toArray())
-);

// default is to initialize to
Assert.assertNull(aggregatorFactory.combine(null, null));
Assert.assertEquals("4", aggregatorFactory.combine(new BigDecimal(4), null).toString());
--- next changed file ---
@@ -26,7 +26,6 @@

import java.io.IOException;
import java.math.BigDecimal;
-import java.util.Arrays;

/**
* test CompressedBigDecimalSumFactory and various aggregators and combiner produced
@@ -52,10 +51,7 @@ public void testCompressedBigDecimalAggregatorFactory()
Assert.assertEquals("5", aggregatorFactory.deserialize(new BigDecimal(5)).toString());
Assert.assertEquals("5.0", aggregatorFactory.deserialize(5d).toString());
Assert.assertEquals("5", aggregatorFactory.deserialize("5").toString());
-Assert.assertEquals(
-"[CompressedBigDecimalSumAggregatorFactory{name='name', type='COMPLEX<compressedBigDecimal>', fieldName='fieldName', requiredFields='[fieldName]', size='9', scale='0', strictNumberParsing='false'}]",
-Arrays.toString(aggregatorFactory.getRequiredColumns().toArray())
-);

Assert.assertEquals("0", aggregatorFactory.combine(null, null).toString());
Assert.assertEquals("4", aggregatorFactory.combine(new BigDecimal(4), null).toString());
Assert.assertEquals("4", aggregatorFactory.combine(null, new BigDecimal(4)).toString());
--- next changed file ---
@@ -141,14 +141,6 @@ public AggregatorFactory getCombiningFactory()
return new LongSumAggregatorFactory(name, name);
}

-@Override
-public List<AggregatorFactory> getRequiredColumns()
-{
-return Collections.singletonList(
-new DistinctCountAggregatorFactory(fieldName, fieldName, bitMapFactory)
-);
-}
-
@Override
public Object deserialize(Object object)
{
--- next changed file ---
@@ -167,19 +167,6 @@ public AggregatorFactory getMergingFactory(AggregatorFactory other) throws Aggre
}
}

-@Override
-public List<AggregatorFactory> getRequiredColumns()
-{
-return Collections.singletonList(
-new MomentSketchAggregatorFactory(
-fieldName,
-fieldName,
-k,
-compress
-)
-);
-}
-
private MomentSketchWrapper deserializeFromByteArray(byte[] bytes)
{
return MomentSketchWrapper.fromByteArray(bytes);
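Here, too, only getRequiredColumns() goes away; the merge path shown in the surrounding context is untouched. A hedged usage sketch, where factoryA and factoryB are two MomentSketchAggregatorFactory instances introduced purely for illustration:

// Merging compatible sketch aggregators still goes through getMergingFactory(), which
// declares a checked exception for incompatible factories (its name is truncated in the
// hunk header above).
AggregatorFactory merged = factoryA.getMergingFactory(factoryB);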