diff --git a/core/trino-main/pom.xml b/core/trino-main/pom.xml
index 1363a9fa9a7c..73b9cd58314f 100644
--- a/core/trino-main/pom.xml
+++ b/core/trino-main/pom.xml
@@ -13,15 +13,6 @@
${project.parent.basedir}
-
-
- instances
@@ -390,12 +381,6 @@
provided
-
- org.testng
- testng
- provided
-
-
com.squareup.okhttp3
okhttp
@@ -533,25 +518,35 @@
-
-
-
- org.apache.maven.plugins
- maven-surefire-plugin
-
-
-
- org.apache.maven.surefire
- surefire-junit-platform
- ${dep.plugin.surefire.version}
-
-
- org.apache.maven.surefire
- surefire-testng
- ${dep.plugin.surefire.version}
-
-
-
-
-
+
+
+ benchmarks
+
+
+
+ org.codehaus.mojo
+ exec-maven-plugin
+
+ ${java.home}/bin/java
+
+ -DoutputDirectory=benchmark_outputs
+ -classpath
+
+ io.trino.benchmark.BenchmarkSuite
+
+ test
+
+
+
+ benchmarks
+
+ exec
+
+
+
+
+
+
+
+
diff --git a/core/trino-main/src/test/java/io/trino/operator/TestGroupedTopNRankAccumulator.java b/core/trino-main/src/test/java/io/trino/operator/TestGroupedTopNRankAccumulator.java
index 7cf8f1f114f5..ca21f4cca5e5 100644
--- a/core/trino-main/src/test/java/io/trino/operator/TestGroupedTopNRankAccumulator.java
+++ b/core/trino-main/src/test/java/io/trino/operator/TestGroupedTopNRankAccumulator.java
@@ -15,14 +15,12 @@
import io.trino.array.LongBigArray;
import it.unimi.dsi.fastutil.longs.LongArrayList;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
+import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import static com.google.common.collect.Lists.cartesianProduct;
import static java.lang.Math.min;
import static org.assertj.core.api.Assertions.assertThat;
@@ -43,27 +41,20 @@ public long hashCode(long rowId)
}
};
- @DataProvider
- public static Object[][] parameters()
- {
- List topNs = Arrays.asList(1, 2, 3);
- List valueCounts = Arrays.asList(0, 1, 2, 4, 8);
- List groupCounts = Arrays.asList(1, 2, 3);
- List drainWithRankings = Arrays.asList(true, false);
- return to2DArray(cartesianProduct(topNs, valueCounts, groupCounts, drainWithRankings));
- }
-
- private static Object[][] to2DArray(List> nestedList)
+ @Test
+ public void testSinglePeerGroupInsert()
{
- Object[][] array = new Object[nestedList.size()][];
- for (int i = 0; i < nestedList.size(); i++) {
- array[i] = nestedList.get(i).toArray();
+ for (int topN : Arrays.asList(1, 2, 3)) {
+ for (int valueCount : Arrays.asList(0, 1, 2, 4, 8)) {
+ for (int groupCount : Arrays.asList(1, 2, 3)) {
+ testSinglePeerGroupInsert(topN, valueCount, groupCount, true);
+ testSinglePeerGroupInsert(topN, valueCount, groupCount, false);
+ }
+ }
}
- return array;
}
- @Test(dataProvider = "parameters")
- public void testSinglePeerGroupInsert(int topN, long valueCount, long groupCount, boolean drainWithRanking)
+ private void testSinglePeerGroupInsert(int topN, long valueCount, long groupCount, boolean drainWithRanking)
{
List evicted = new LongArrayList();
GroupedTopNRankAccumulator accumulator = new GroupedTopNRankAccumulator(STRATEGY, topN, evicted::add);
@@ -103,8 +94,20 @@ public void testSinglePeerGroupInsert(int topN, long valueCount, long groupCount
}
}
- @Test(dataProvider = "parameters")
- public void testIncreasingAllUniqueValues(int topN, long valueCount, long groupCount, boolean drainWithRanking)
+ @Test
+ public void testIncreasingAllUniqueValues()
+ {
+ for (int topN : Arrays.asList(1, 2, 3)) {
+ for (int valueCount : Arrays.asList(0, 1, 2, 4, 8)) {
+ for (int groupCount : Arrays.asList(1, 2, 3)) {
+ testIncreasingAllUniqueValues(topN, valueCount, groupCount, true);
+ testIncreasingAllUniqueValues(topN, valueCount, groupCount, false);
+ }
+ }
+ }
+ }
+
+ private void testIncreasingAllUniqueValues(int topN, long valueCount, long groupCount, boolean drainWithRanking)
{
List evicted = new LongArrayList();
GroupedTopNRankAccumulator accumulator = new GroupedTopNRankAccumulator(STRATEGY, topN, evicted::add);
@@ -144,8 +147,20 @@ public void testIncreasingAllUniqueValues(int topN, long valueCount, long groupC
}
}
- @Test(dataProvider = "parameters")
- public void testDecreasingAllUniqueValues(int topN, long valueCount, long groupCount, boolean drainWithRanking)
+ @Test
+ public void testDecreasingAllUniqueValues()
+ {
+ for (int topN : Arrays.asList(1, 2, 3)) {
+ for (int valueCount : Arrays.asList(0, 1, 2, 4, 8)) {
+ for (int groupCount : Arrays.asList(1, 2, 3)) {
+ testDecreasingAllUniqueValues(topN, valueCount, groupCount, true);
+ testDecreasingAllUniqueValues(topN, valueCount, groupCount, false);
+ }
+ }
+ }
+ }
+
+ private void testDecreasingAllUniqueValues(int topN, long valueCount, long groupCount, boolean drainWithRanking)
{
List evicted = new LongArrayList();
GroupedTopNRankAccumulator accumulator = new GroupedTopNRankAccumulator(STRATEGY, topN, evicted::add);
diff --git a/core/trino-main/src/test/java/io/trino/operator/TestGroupedTopNRankBuilder.java b/core/trino-main/src/test/java/io/trino/operator/TestGroupedTopNRankBuilder.java
index 5b5a67a8b7d5..4f6c6a0a43a0 100644
--- a/core/trino-main/src/test/java/io/trino/operator/TestGroupedTopNRankBuilder.java
+++ b/core/trino-main/src/test/java/io/trino/operator/TestGroupedTopNRankBuilder.java
@@ -19,8 +19,7 @@
import io.trino.spi.type.TypeOperators;
import io.trino.sql.gen.JoinCompiler;
import io.trino.type.BlockTypeOperators;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
+import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -39,12 +38,6 @@
public class TestGroupedTopNRankBuilder
{
- @DataProvider
- public static Object[][] produceRanking()
- {
- return new Object[][] {{true}, {false}};
- }
-
@Test
public void testEmptyInput()
{
@@ -74,8 +67,14 @@ public long hashCode(Page page, int position)
assertThat(groupedTopNBuilder.buildResult().hasNext()).isFalse();
}
- @Test(dataProvider = "produceRanking")
- public void testSingleGroupTopN(boolean produceRanking)
+ @Test
+ public void testSingleGroupTopN()
+ {
+ testSingleGroupTopN(true);
+ testSingleGroupTopN(false);
+ }
+
+ private void testSingleGroupTopN(boolean produceRanking)
{
TypeOperators typeOperators = new TypeOperators();
BlockTypeOperators blockTypeOperators = new BlockTypeOperators(typeOperators);
@@ -133,8 +132,14 @@ public void testSingleGroupTopN(boolean produceRanking)
assertPageEquals(outputTypes, getOnlyElement(output), expected);
}
- @Test(dataProvider = "produceRanking")
- public void testMultiGroupTopN(boolean produceRanking)
+ @Test
+ public void testMultiGroupTopN()
+ {
+ testMultiGroupTopN(true);
+ testMultiGroupTopN(false);
+ }
+
+ private void testMultiGroupTopN(boolean produceRanking)
{
TypeOperators typeOperators = new TypeOperators();
BlockTypeOperators blockTypeOperators = new BlockTypeOperators(typeOperators);
diff --git a/core/trino-main/src/test/java/io/trino/operator/TestGroupedTopNRowNumberBuilder.java b/core/trino-main/src/test/java/io/trino/operator/TestGroupedTopNRowNumberBuilder.java
index 06d1acce7219..a150a4e47dcf 100644
--- a/core/trino-main/src/test/java/io/trino/operator/TestGroupedTopNRowNumberBuilder.java
+++ b/core/trino-main/src/test/java/io/trino/operator/TestGroupedTopNRowNumberBuilder.java
@@ -18,8 +18,7 @@
import io.trino.spi.type.Type;
import io.trino.spi.type.TypeOperators;
import io.trino.sql.gen.JoinCompiler;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
+import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -36,19 +35,6 @@ public class TestGroupedTopNRowNumberBuilder
{
private static final TypeOperators TYPE_OPERATORS_CACHE = new TypeOperators();
- @DataProvider
- public static Object[][] produceRowNumbers()
- {
- return new Object[][] {{true}, {false}};
- }
-
- @DataProvider
- public static Object[][] pageRowCounts()
- {
- // make either page or row count > 1024 to expand the big arrays
- return new Object[][] {{10000, 20}, {20, 10000}};
- }
-
@Test
public void testEmptyInput()
{
@@ -64,8 +50,14 @@ public void testEmptyInput()
assertThat(groupedTopNBuilder.buildResult().hasNext()).isFalse();
}
- @Test(dataProvider = "produceRowNumbers")
- public void testMultiGroupTopN(boolean produceRowNumbers)
+ @Test
+ public void testMultiGroupTopN()
+ {
+ testMultiGroupTopN(true);
+ testMultiGroupTopN(false);
+ }
+
+ private void testMultiGroupTopN(boolean produceRowNumbers)
{
List types = ImmutableList.of(BIGINT, DOUBLE);
List input = rowPagesBuilder(types)
@@ -131,8 +123,14 @@ public void testMultiGroupTopN(boolean produceRowNumbers)
}
}
- @Test(dataProvider = "produceRowNumbers")
- public void testSingleGroupTopN(boolean produceRowNumbers)
+ @Test
+ public void testSingleGroupTopN()
+ {
+ testSingleGroupTopN(true);
+ testSingleGroupTopN(false);
+ }
+
+ private void testSingleGroupTopN(boolean produceRowNumbers)
{
List types = ImmutableList.of(BIGINT, DOUBLE);
List input = rowPagesBuilder(types)
diff --git a/core/trino-main/src/test/java/io/trino/operator/TestHashAggregationOperator.java b/core/trino-main/src/test/java/io/trino/operator/TestHashAggregationOperator.java
index babf7dbdf044..cdeb3e748778 100644
--- a/core/trino-main/src/test/java/io/trino/operator/TestHashAggregationOperator.java
+++ b/core/trino-main/src/test/java/io/trino/operator/TestHashAggregationOperator.java
@@ -40,10 +40,10 @@
import io.trino.sql.planner.plan.PlanNodeId;
import io.trino.testing.MaterializedResult;
import io.trino.testing.TestingTaskContext;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.parallel.Execution;
import java.io.IOException;
import java.util.ArrayList;
@@ -94,8 +94,11 @@
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
+import static org.junit.jupiter.api.parallel.ExecutionMode.CONCURRENT;
-@Test(singleThreaded = true)
+@TestInstance(PER_CLASS)
+@Execution(CONCURRENT)
public class TestHashAggregationOperator
{
private static final TestingFunctionResolution FUNCTION_RESOLUTION = new TestingFunctionResolution();
@@ -107,58 +110,36 @@ public class TestHashAggregationOperator
private static final int MAX_BLOCK_SIZE_IN_BYTES = 64 * 1024;
- private ExecutorService executor;
- private ScheduledExecutorService scheduledExecutor;
+ private final ExecutorService executor = newCachedThreadPool(daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
+ private final ScheduledExecutorService scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed(getClass().getSimpleName() + "-scheduledExecutor-%s"));
private final TypeOperators typeOperators = new TypeOperators();
private final JoinCompiler joinCompiler = new JoinCompiler(typeOperators);
- private DummySpillerFactory spillerFactory;
- @BeforeMethod
- public void setUp()
- {
- executor = newCachedThreadPool(daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
- scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed(getClass().getSimpleName() + "-scheduledExecutor-%s"));
- spillerFactory = new DummySpillerFactory();
- }
-
- @AfterMethod(alwaysRun = true)
+ @AfterAll
public void tearDown()
{
- spillerFactory = null;
executor.shutdownNow();
scheduledExecutor.shutdownNow();
}
- @DataProvider(name = "hashEnabled")
- public static Object[][] hashEnabled()
- {
- return new Object[][] {{true}, {false}};
- }
-
- @DataProvider(name = "hashEnabledAndMemoryLimitForMergeValues")
- public static Object[][] hashEnabledAndMemoryLimitForMergeValuesProvider()
+ @Test
+ public void testHashAggregation()
{
- return new Object[][] {
- {true, true, true, 8, Integer.MAX_VALUE},
- {true, true, false, 8, Integer.MAX_VALUE},
- {false, false, false, 0, 0},
- {false, true, true, 0, 0},
- {false, true, false, 0, 0},
- {false, true, true, 8, 0},
- {false, true, false, 8, 0},
- {false, true, true, 8, Integer.MAX_VALUE},
- {false, true, false, 8, Integer.MAX_VALUE}};
+ testHashAggregation(true, true, true, 8, Integer.MAX_VALUE);
+ testHashAggregation(true, true, false, 8, Integer.MAX_VALUE);
+ testHashAggregation(false, false, false, 0, 0);
+ testHashAggregation(false, true, true, 0, 0);
+ testHashAggregation(false, true, false, 0, 0);
+ testHashAggregation(false, true, true, 8, 0);
+ testHashAggregation(false, true, false, 8, 0);
+ testHashAggregation(false, true, true, 8, Integer.MAX_VALUE);
+ testHashAggregation(false, true, false, 8, Integer.MAX_VALUE);
}
- @DataProvider
- public Object[][] dataType()
+ private void testHashAggregation(boolean hashEnabled, boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimitForMerge, long memoryLimitForMergeWithMemory)
{
- return new Object[][] {{VARCHAR}, {BIGINT}};
- }
+ DummySpillerFactory spillerFactory = new DummySpillerFactory();
- @Test(dataProvider = "hashEnabledAndMemoryLimitForMergeValues")
- public void testHashAggregation(boolean hashEnabled, boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimitForMerge, long memoryLimitForMergeWithMemory)
- {
// make operator produce multiple pages during finish phase
int numberOfRows = 40_000;
TestingAggregationFunction countVarcharColumn = FUNCTION_RESOLUTION.getAggregateFunction("count", fromTypes(VARCHAR));
@@ -215,9 +196,24 @@ public void testHashAggregation(boolean hashEnabled, boolean spillEnabled, boole
.isTrue();
}
- @Test(dataProvider = "hashEnabledAndMemoryLimitForMergeValues")
- public void testHashAggregationWithGlobals(boolean hashEnabled, boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimitForMerge, long memoryLimitForMergeWithMemory)
+ @Test
+ public void testHashAggregationWithGlobals()
+ {
+ testHashAggregationWithGlobals(true, true, true, 8, Integer.MAX_VALUE);
+ testHashAggregationWithGlobals(true, true, false, 8, Integer.MAX_VALUE);
+ testHashAggregationWithGlobals(false, false, false, 0, 0);
+ testHashAggregationWithGlobals(false, true, true, 0, 0);
+ testHashAggregationWithGlobals(false, true, false, 0, 0);
+ testHashAggregationWithGlobals(false, true, true, 8, 0);
+ testHashAggregationWithGlobals(false, true, false, 8, 0);
+ testHashAggregationWithGlobals(false, true, true, 8, Integer.MAX_VALUE);
+ testHashAggregationWithGlobals(false, true, false, 8, Integer.MAX_VALUE);
+ }
+
+ private void testHashAggregationWithGlobals(boolean hashEnabled, boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimitForMerge, long memoryLimitForMergeWithMemory)
{
+ DummySpillerFactory spillerFactory = new DummySpillerFactory();
+
TestingAggregationFunction countVarcharColumn = FUNCTION_RESOLUTION.getAggregateFunction("count", fromTypes(VARCHAR));
TestingAggregationFunction countBooleanColumn = FUNCTION_RESOLUTION.getAggregateFunction("count", fromTypes(BOOLEAN));
TestingAggregationFunction maxVarcharColumn = FUNCTION_RESOLUTION.getAggregateFunction("max", fromTypes(VARCHAR));
@@ -263,9 +259,24 @@ public void testHashAggregationWithGlobals(boolean hashEnabled, boolean spillEna
assertOperatorEqualsIgnoreOrder(operatorFactory, driverContext, input, expected, hashEnabled, Optional.of(groupByChannels.size()), revokeMemoryWhenAddingPages);
}
- @Test(dataProvider = "hashEnabledAndMemoryLimitForMergeValues")
- public void testHashAggregationMemoryReservation(boolean hashEnabled, boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimitForMerge, long memoryLimitForMergeWithMemory)
+ @Test
+ public void testHashAggregationMemoryReservation()
+ {
+ testHashAggregationMemoryReservation(true, true, true, 8, Integer.MAX_VALUE);
+ testHashAggregationMemoryReservation(true, true, false, 8, Integer.MAX_VALUE);
+ testHashAggregationMemoryReservation(false, false, false, 0, 0);
+ testHashAggregationMemoryReservation(false, true, true, 0, 0);
+ testHashAggregationMemoryReservation(false, true, false, 0, 0);
+ testHashAggregationMemoryReservation(false, true, true, 8, 0);
+ testHashAggregationMemoryReservation(false, true, false, 8, 0);
+ testHashAggregationMemoryReservation(false, true, true, 8, Integer.MAX_VALUE);
+ testHashAggregationMemoryReservation(false, true, false, 8, Integer.MAX_VALUE);
+ }
+
+ private void testHashAggregationMemoryReservation(boolean hashEnabled, boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimitForMerge, long memoryLimitForMergeWithMemory)
{
+ DummySpillerFactory spillerFactory = new DummySpillerFactory();
+
TestingAggregationFunction arrayAggColumn = FUNCTION_RESOLUTION.getAggregateFunction("array_agg", fromTypes(BIGINT));
List hashChannels = Ints.asList(1);
@@ -308,8 +319,19 @@ public void testHashAggregationMemoryReservation(boolean hashEnabled, boolean sp
assertThat(getOnlyElement(operator.getOperatorContext().getNestedOperatorStats()).getRevocableMemoryReservation().toBytes()).isEqualTo(0);
}
- @Test(dataProvider = "hashEnabled", expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Query exceeded per-node memory limit of 10B.*")
- public void testMemoryLimit(boolean hashEnabled)
+ @Test
+ public void testMemoryLimit()
+ {
+ assertThatThrownBy(() -> testMemoryLimit(true))
+ .isInstanceOf(ExceededMemoryLimitException.class)
+ .hasMessageMatching("Query exceeded per-node memory limit of 10B.*");
+
+ assertThatThrownBy(() -> testMemoryLimit(false))
+ .isInstanceOf(ExceededMemoryLimitException.class)
+ .hasMessageMatching("Query exceeded per-node memory limit of 10B.*");
+ }
+
+ private void testMemoryLimit(boolean hashEnabled)
{
TestingAggregationFunction maxVarcharColumn = FUNCTION_RESOLUTION.getAggregateFunction("max", fromTypes(VARCHAR));
@@ -347,9 +369,24 @@ public void testMemoryLimit(boolean hashEnabled)
toPages(operatorFactory, driverContext, input);
}
- @Test(dataProvider = "hashEnabledAndMemoryLimitForMergeValues")
- public void testHashBuilderResize(boolean hashEnabled, boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimitForMerge, long memoryLimitForMergeWithMemory)
+ @Test
+ public void testHashBuilderResize()
+ {
+ testHashBuilderResize(true, true, true, 8, Integer.MAX_VALUE);
+ testHashBuilderResize(true, true, false, 8, Integer.MAX_VALUE);
+ testHashBuilderResize(false, false, false, 0, 0);
+ testHashBuilderResize(false, true, true, 0, 0);
+ testHashBuilderResize(false, true, false, 0, 0);
+ testHashBuilderResize(false, true, true, 8, 0);
+ testHashBuilderResize(false, true, false, 8, 0);
+ testHashBuilderResize(false, true, true, 8, Integer.MAX_VALUE);
+ testHashBuilderResize(false, true, false, 8, Integer.MAX_VALUE);
+ }
+
+ private void testHashBuilderResize(boolean hashEnabled, boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimitForMerge, long memoryLimitForMergeWithMemory)
{
+ DummySpillerFactory spillerFactory = new DummySpillerFactory();
+
BlockBuilder builder = VARCHAR.createBlockBuilder(null, 1, MAX_BLOCK_SIZE_IN_BYTES);
VARCHAR.writeSlice(builder, Slices.allocate(200_000)); // this must be larger than MAX_BLOCK_SIZE_IN_BYTES, 64K
builder.build();
@@ -388,7 +425,13 @@ public void testHashBuilderResize(boolean hashEnabled, boolean spillEnabled, boo
toPages(operatorFactory, driverContext, input, revokeMemoryWhenAddingPages);
}
- @Test(dataProvider = "dataType")
+ @Test
+ public void testMemoryReservationYield()
+ {
+ testMemoryReservationYield(VARCHAR);
+ testMemoryReservationYield(BIGINT);
+ }
+
- public void testMemoryReservationYield(Type type)
+ private void testMemoryReservationYield(Type type)
{
List input = createPagesWithDistinctHashKeys(type, 6_000, 600);
@@ -426,8 +469,19 @@ public void testMemoryReservationYield(Type type)
assertThat(count).isEqualTo(6_000 * 600);
}
- @Test(dataProvider = "hashEnabled", expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Query exceeded per-node memory limit of 3MB.*")
- public void testHashBuilderResizeLimit(boolean hashEnabled)
+ @Test
+ public void testHashBuilderResizeLimit()
+ {
+ assertThatThrownBy(() -> testHashBuilderResizeLimit(true))
+ .isInstanceOf(ExceededMemoryLimitException.class)
+ .hasMessageMatching("Query exceeded per-node memory limit of 3MB.*");
+
+ assertThatThrownBy(() -> testHashBuilderResizeLimit(false))
+ .isInstanceOf(ExceededMemoryLimitException.class)
+ .hasMessageMatching("Query exceeded per-node memory limit of 3MB.*");
+ }
+
+ private void testHashBuilderResizeLimit(boolean hashEnabled)
{
BlockBuilder builder = VARCHAR.createBlockBuilder(null, 1, MAX_BLOCK_SIZE_IN_BYTES);
VARCHAR.writeSlice(builder, Slices.allocate(5_000_000)); // this must be larger than MAX_BLOCK_SIZE_IN_BYTES, 64K
@@ -464,8 +518,14 @@ public void testHashBuilderResizeLimit(boolean hashEnabled)
toPages(operatorFactory, driverContext, input);
}
- @Test(dataProvider = "hashEnabled")
- public void testMultiSliceAggregationOutput(boolean hashEnabled)
+ @Test
+ public void testMultiSliceAggregationOutput()
+ {
+ testMultiSliceAggregationOutput(true);
+ testMultiSliceAggregationOutput(false);
+ }
+
+ private void testMultiSliceAggregationOutput(boolean hashEnabled)
{
// estimate the number of entries required to create 1.5 pages of results
// See InMemoryHashAggregationBuilder.buildTypes()
@@ -499,8 +559,15 @@ public void testMultiSliceAggregationOutput(boolean hashEnabled)
assertThat(toPages(operatorFactory, createDriverContext(), input).size()).isEqualTo(2);
}
- @Test(dataProvider = "hashEnabled")
- public void testMultiplePartialFlushes(boolean hashEnabled)
+ @Test
+ public void testMultiplePartialFlushes()
+ throws Exception
+ {
+ testMultiplePartialFlushes(true);
+ testMultiplePartialFlushes(false);
+ }
+
+ private void testMultiplePartialFlushes(boolean hashEnabled)
throws Exception
{
List hashChannels = Ints.asList(0);
@@ -584,6 +651,8 @@ public void testMultiplePartialFlushes(boolean hashEnabled)
@Test
public void testMergeWithMemorySpill()
{
+ DummySpillerFactory spillerFactory = new DummySpillerFactory();
+
RowPagesBuilder rowPagesBuilder = rowPagesBuilder(BIGINT);
int smallPagesSpillThresholdSize = 150000;
diff --git a/core/trino-main/src/test/java/io/trino/operator/TestHashSemiJoinOperator.java b/core/trino-main/src/test/java/io/trino/operator/TestHashSemiJoinOperator.java
index c43a6c412aa9..3171e6c59a7b 100644
--- a/core/trino-main/src/test/java/io/trino/operator/TestHashSemiJoinOperator.java
+++ b/core/trino-main/src/test/java/io/trino/operator/TestHashSemiJoinOperator.java
@@ -25,10 +25,11 @@
import io.trino.sql.gen.JoinCompiler;
import io.trino.sql.planner.plan.PlanNodeId;
import io.trino.testing.MaterializedResult;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.parallel.Execution;
import java.util.List;
import java.util.Optional;
@@ -46,8 +47,12 @@
import static io.trino.testing.TestingTaskContext.createTaskContext;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newScheduledThreadPool;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_METHOD;
+import static org.junit.jupiter.api.parallel.ExecutionMode.SAME_THREAD;
-@Test(singleThreaded = true)
+@TestInstance(PER_METHOD)
+@Execution(SAME_THREAD)
public class TestHashSemiJoinOperator
{
private ExecutorService executor;
@@ -55,7 +60,7 @@ public class TestHashSemiJoinOperator
private TaskContext taskContext;
private TypeOperators typeOperators;
- @BeforeMethod
+ @BeforeEach
public void setUp()
{
executor = newCachedThreadPool(daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
@@ -64,27 +69,21 @@ public void setUp()
typeOperators = new TypeOperators();
}
- @AfterMethod(alwaysRun = true)
+ @AfterEach
public void tearDown()
{
executor.shutdownNow();
scheduledExecutor.shutdownNow();
}
- @DataProvider(name = "hashEnabledValues")
- public static Object[][] hashEnabledValuesProvider()
+ @Test
+ public void testSemiJoin()
{
- return new Object[][] {{true}, {false}};
+ testSemiJoin(true);
+ testSemiJoin(false);
}
- @DataProvider
- public Object[][] dataType()
- {
- return new Object[][] {{VARCHAR}, {BIGINT}};
- }
-
- @Test(dataProvider = "hashEnabledValues")
- public void testSemiJoin(boolean hashEnabled)
+ private void testSemiJoin(boolean hashEnabled)
{
DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext();
@@ -148,8 +147,14 @@ public void testSemiJoin(boolean hashEnabled)
OperatorAssertion.assertOperatorEquals(joinOperatorFactory, driverContext, probeInput, expected, hashEnabled, ImmutableList.of(probeTypes.size()));
}
- @Test(dataProvider = "hashEnabledValues")
- public void testSemiJoinOnVarcharType(boolean hashEnabled)
+ @Test
+ public void testSemiJoinOnVarcharType()
+ {
+ testSemiJoinOnVarcharType(true);
+ testSemiJoinOnVarcharType(false);
+ }
+
+ private void testSemiJoinOnVarcharType(boolean hashEnabled)
{
DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext();
@@ -213,8 +218,14 @@ public void testSemiJoinOnVarcharType(boolean hashEnabled)
OperatorAssertion.assertOperatorEquals(joinOperatorFactory, driverContext, probeInput, expected, hashEnabled, ImmutableList.of(probeTypes.size()));
}
- @Test(dataProvider = "hashEnabledValues")
- public void testBuildSideNulls(boolean hashEnabled)
+ @Test
+ public void testBuildSideNulls()
+ {
+ testBuildSideNulls(true);
+ testBuildSideNulls(false);
+ }
+
+ private void testBuildSideNulls(boolean hashEnabled)
{
DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext();
@@ -272,8 +283,14 @@ public void testBuildSideNulls(boolean hashEnabled)
OperatorAssertion.assertOperatorEquals(joinOperatorFactory, driverContext, probeInput, expected, hashEnabled, ImmutableList.of(probeTypes.size()));
}
- @Test(dataProvider = "hashEnabledValues")
- public void testProbeSideNulls(boolean hashEnabled)
+ @Test
+ public void testProbeSideNulls()
+ {
+ testProbeSideNulls(true);
+ testProbeSideNulls(false);
+ }
+
+ private void testProbeSideNulls(boolean hashEnabled)
{
DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext();
@@ -331,8 +348,14 @@ public void testProbeSideNulls(boolean hashEnabled)
OperatorAssertion.assertOperatorEquals(joinOperatorFactory, driverContext, probeInput, expected, hashEnabled, ImmutableList.of(probeTypes.size()));
}
- @Test(dataProvider = "hashEnabledValues")
- public void testProbeAndBuildNulls(boolean hashEnabled)
+ @Test
+ public void testProbeAndBuildNulls()
+ {
+ testProbeAndBuildNulls(true);
+ testProbeAndBuildNulls(false);
+ }
+
+ private void testProbeAndBuildNulls(boolean hashEnabled)
{
DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext();
@@ -391,8 +414,19 @@ public void testProbeAndBuildNulls(boolean hashEnabled)
OperatorAssertion.assertOperatorEquals(joinOperatorFactory, driverContext, probeInput, expected, hashEnabled, ImmutableList.of(probeTypes.size()));
}
- @Test(dataProvider = "hashEnabledValues", expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Query exceeded per-node memory limit of.*")
- public void testMemoryLimit(boolean hashEnabled)
+ @Test
+ public void testMemoryLimit()
+ {
+ assertThatThrownBy(() -> testMemoryLimit(true))
+ .isInstanceOf(ExceededMemoryLimitException.class)
+ .hasMessageMatching("Query exceeded per-node memory limit of.*");
+
+ assertThatThrownBy(() -> testMemoryLimit(false))
+ .isInstanceOf(ExceededMemoryLimitException.class)
+ .hasMessageMatching("Query exceeded per-node memory limit of.*");
+ }
+
+ private void testMemoryLimit(boolean hashEnabled)
{
DriverContext driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION, DataSize.ofBytes(100))
.addPipelineContext(0, true, true, false)
diff --git a/core/trino-main/src/test/java/io/trino/operator/TestMarkDistinctOperator.java b/core/trino-main/src/test/java/io/trino/operator/TestMarkDistinctOperator.java
index 1657f51fb5d7..f79b8b979bce 100644
--- a/core/trino-main/src/test/java/io/trino/operator/TestMarkDistinctOperator.java
+++ b/core/trino-main/src/test/java/io/trino/operator/TestMarkDistinctOperator.java
@@ -25,10 +25,10 @@
import io.trino.sql.gen.JoinCompiler;
import io.trino.sql.planner.plan.PlanNodeId;
import io.trino.testing.MaterializedResult;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.parallel.Execution;
import java.util.List;
import java.util.Optional;
@@ -51,47 +51,33 @@
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
+import static org.junit.jupiter.api.parallel.ExecutionMode.CONCURRENT;
-@Test(singleThreaded = true)
+@TestInstance(PER_CLASS)
+@Execution(CONCURRENT)
public class TestMarkDistinctOperator
{
- private ExecutorService executor;
- private ScheduledExecutorService scheduledExecutor;
- private DriverContext driverContext;
+ private final ExecutorService executor = newCachedThreadPool(daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
+ private final ScheduledExecutorService scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed(getClass().getSimpleName() + "-scheduledExecutor-%s"));
private final TypeOperators typeOperators = new TypeOperators();
private final JoinCompiler joinCompiler = new JoinCompiler(typeOperators);
- @BeforeMethod
- public void setUp()
- {
- executor = newCachedThreadPool(daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
- scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed(getClass().getSimpleName() + "-scheduledExecutor-%s"));
- driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION)
- .addPipelineContext(0, true, true, false)
- .addDriverContext();
- }
-
- @AfterMethod(alwaysRun = true)
+ @AfterAll
public void tearDown()
{
executor.shutdownNow();
scheduledExecutor.shutdownNow();
}
- @DataProvider
- public Object[][] dataType()
+ @Test
+ public void testMarkDistinct()
{
- return new Object[][] {{VARCHAR}, {BIGINT}};
+ testMarkDistinct(true, newDriverContext());
+ testMarkDistinct(false, newDriverContext());
}
- @DataProvider(name = "hashEnabledValues")
- public static Object[][] hashEnabledValuesProvider()
- {
- return new Object[][] {{true}, {false}};
- }
-
- @Test(dataProvider = "hashEnabledValues")
- public void testMarkDistinct(boolean hashEnabled)
+ private void testMarkDistinct(boolean hashEnabled, DriverContext driverContext)
{
RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), BIGINT);
List input = rowPagesBuilder
@@ -116,8 +102,14 @@ public void testMarkDistinct(boolean hashEnabled)
OperatorAssertion.assertOperatorEqualsIgnoreOrder(operatorFactory, driverContext, input, expected.build(), hashEnabled, Optional.of(1));
}
- @Test(dataProvider = "hashEnabledValues")
- public void testRleDistinctMask(boolean hashEnabled)
+ @Test
+ public void testRleDistinctMask()
+ {
+ testRleDistinctMask(true, newDriverContext());
+ testRleDistinctMask(false, newDriverContext());
+ }
+
+ private void testRleDistinctMask(boolean hashEnabled, DriverContext driverContext)
{
RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), BIGINT);
List inputs = rowPagesBuilder
@@ -180,8 +172,14 @@ public void testRleDistinctMask(boolean hashEnabled)
}
}
- @Test(dataProvider = "dataType")
- public void testMemoryReservationYield(Type type)
+ @Test
+ public void testMemoryReservationYield()
+ {
+ testMemoryReservationYield(BIGINT);
+ testMemoryReservationYield(VARCHAR);
+ }
+
+ private void testMemoryReservationYield(Type type)
{
List input = createPagesWithDistinctHashKeys(type, 6_000, 600);
@@ -202,4 +200,11 @@ public void testMemoryReservationYield(Type type)
}
assertThat(count).isEqualTo(6_000 * 600);
}
+
+ private DriverContext newDriverContext()
+ {
+ return createTaskContext(executor, scheduledExecutor, TEST_SESSION)
+ .addPipelineContext(0, true, true, false)
+ .addDriverContext();
+ }
}
diff --git a/core/trino-main/src/test/java/io/trino/operator/TestOrderByOperator.java b/core/trino-main/src/test/java/io/trino/operator/TestOrderByOperator.java
index ed23a1d2860d..2fbe0ac96793 100644
--- a/core/trino-main/src/test/java/io/trino/operator/TestOrderByOperator.java
+++ b/core/trino-main/src/test/java/io/trino/operator/TestOrderByOperator.java
@@ -23,10 +23,10 @@
import io.trino.sql.planner.plan.PlanNodeId;
import io.trino.testing.MaterializedResult;
import io.trino.testing.TestingTaskContext;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.parallel.Execution;
import java.util.List;
import java.util.Optional;
@@ -53,45 +53,38 @@
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
+import static org.junit.jupiter.api.parallel.ExecutionMode.CONCURRENT;
-@Test(singleThreaded = true)
+@TestInstance(PER_CLASS)
+@Execution(CONCURRENT)
public class TestOrderByOperator
{
- private ExecutorService executor;
- private ScheduledExecutorService scheduledExecutor;
- private DummySpillerFactory spillerFactory;
+ private final ExecutorService executor = newCachedThreadPool(daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
+ private final ScheduledExecutorService scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed(getClass().getSimpleName() + "-scheduledExecutor-%s"));
private final TypeOperators typeOperators = new TypeOperators();
- @DataProvider
- public static Object[][] spillEnabled()
+ @AfterAll
+ public void tearDown()
{
- return new Object[][] {
- {false, false, 0},
- {true, false, 8},
- {true, true, 8},
- {true, false, 0},
- {true, true, 0}};
+ executor.shutdownNow();
+ scheduledExecutor.shutdownNow();
}
- @BeforeMethod
- public void setUp()
+ @Test
+ public void testMultipleOutputPages()
{
- executor = newCachedThreadPool(daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
- scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed(getClass().getSimpleName() + "-scheduledExecutor-%s"));
- spillerFactory = new DummySpillerFactory();
+ testMultipleOutputPages(false, false, 0);
+ testMultipleOutputPages(true, false, 8);
+ testMultipleOutputPages(true, true, 8);
+ testMultipleOutputPages(true, false, 0);
+ testMultipleOutputPages(true, true, 0);
}
- @AfterMethod(alwaysRun = true)
- public void tearDown()
+ private void testMultipleOutputPages(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
- executor.shutdownNow();
- scheduledExecutor.shutdownNow();
- spillerFactory = null;
- }
+ DummySpillerFactory spillerFactory = new DummySpillerFactory();
- @Test(dataProvider = "spillEnabled")
- public void testMultipleOutputPages(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
- {
// make operator produce multiple pages during finish phase
int numberOfRows = 80_000;
List input = rowPagesBuilder(BIGINT, DOUBLE)
@@ -129,8 +122,17 @@ public void testMultipleOutputPages(boolean spillEnabled, boolean revokeMemoryWh
.isTrue();
}
- @Test(dataProvider = "spillEnabled")
- public void testSingleFieldKey(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ @Test
+ public void testSingleFieldKey()
+ {
+ testSingleFieldKey(false, false, 0);
+ testSingleFieldKey(true, false, 8);
+ testSingleFieldKey(true, true, 8);
+ testSingleFieldKey(true, false, 0);
+ testSingleFieldKey(true, true, 0);
+ }
+
+ private void testSingleFieldKey(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(BIGINT, DOUBLE)
.row(1L, 0.1)
@@ -150,7 +152,7 @@ public void testSingleFieldKey(boolean spillEnabled, boolean revokeMemoryWhenAdd
ImmutableList.of(ASC_NULLS_LAST),
new PagesIndex.TestingFactory(false),
spillEnabled,
- Optional.of(spillerFactory),
+ Optional.of(new DummySpillerFactory()),
new OrderingCompiler(typeOperators));
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -164,8 +166,17 @@ public void testSingleFieldKey(boolean spillEnabled, boolean revokeMemoryWhenAdd
assertOperatorEquals(operatorFactory, driverContext, input, expected, revokeMemoryWhenAddingPages);
}
- @Test(dataProvider = "spillEnabled")
- public void testMultiFieldKey(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ @Test
+ public void testMultiFieldKey()
+ {
+ testMultiFieldKey(false, false, 0);
+ testMultiFieldKey(true, false, 8);
+ testMultiFieldKey(true, true, 8);
+ testMultiFieldKey(true, false, 0);
+ testMultiFieldKey(true, true, 0);
+ }
+
+ private void testMultiFieldKey(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(VARCHAR, BIGINT)
.row("a", 1L)
@@ -185,7 +196,7 @@ public void testMultiFieldKey(boolean spillEnabled, boolean revokeMemoryWhenAddi
ImmutableList.of(ASC_NULLS_LAST, DESC_NULLS_LAST),
new PagesIndex.TestingFactory(false),
spillEnabled,
- Optional.of(spillerFactory),
+ Optional.of(new DummySpillerFactory()),
new OrderingCompiler(typeOperators));
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -199,8 +210,17 @@ public void testMultiFieldKey(boolean spillEnabled, boolean revokeMemoryWhenAddi
assertOperatorEquals(operatorFactory, driverContext, input, expected, revokeMemoryWhenAddingPages);
}
- @Test(dataProvider = "spillEnabled")
- public void testReverseOrder(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ @Test
+ public void testReverseOrder()
+ {
+ testReverseOrder(false, false, 0);
+ testReverseOrder(true, false, 8);
+ testReverseOrder(true, true, 8);
+ testReverseOrder(true, false, 0);
+ testReverseOrder(true, true, 0);
+ }
+
+ private void testReverseOrder(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(BIGINT, DOUBLE)
.row(1L, 0.1)
@@ -220,7 +240,7 @@ public void testReverseOrder(boolean spillEnabled, boolean revokeMemoryWhenAddin
ImmutableList.of(DESC_NULLS_LAST),
new PagesIndex.TestingFactory(false),
spillEnabled,
- Optional.of(spillerFactory),
+ Optional.of(new DummySpillerFactory()),
new OrderingCompiler(typeOperators));
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -259,7 +279,7 @@ public void testMemoryLimit()
ImmutableList.of(ASC_NULLS_LAST),
new PagesIndex.TestingFactory(false),
false,
- Optional.of(spillerFactory),
+ Optional.of(new DummySpillerFactory()),
new OrderingCompiler(typeOperators));
assertThatThrownBy(() -> toPages(operatorFactory, driverContext, input))
diff --git a/core/trino-main/src/test/java/io/trino/operator/TestTopNPeerGroupLookup.java b/core/trino-main/src/test/java/io/trino/operator/TestTopNPeerGroupLookup.java
index d88a5cb00c7a..a2c6c192026c 100644
--- a/core/trino-main/src/test/java/io/trino/operator/TestTopNPeerGroupLookup.java
+++ b/core/trino-main/src/test/java/io/trino/operator/TestTopNPeerGroupLookup.java
@@ -13,13 +13,10 @@
*/
package io.trino.operator;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
+import org.junit.jupiter.api.Test;
import java.util.Arrays;
-import java.util.List;
-import static com.google.common.collect.Lists.cartesianProduct;
import static org.assertj.core.api.Assertions.assertThat;
public class TestTopNPeerGroupLookup
@@ -41,28 +38,20 @@ public long hashCode(long rowId)
private static final long UNMAPPED_GROUP_ID = Long.MIN_VALUE;
private static final long DEFAULT_RETURN_VALUE = -1L;
- @DataProvider
- public static Object[][] parameters()
+ @Test
+ public void testCombinations()
{
- List expectedSizes = Arrays.asList(0, 1, 2, 3, 1_000);
- List fillFactors = Arrays.asList(0.1f, 0.9f, 1f);
- List totalGroupIds = Arrays.asList(1L, 10L);
- List totalRowIds = Arrays.asList(1L, 1_000L);
-
- return to2DArray(cartesianProduct(expectedSizes, fillFactors, totalGroupIds, totalRowIds));
- }
-
- private static Object[][] to2DArray(List> nestedList)
- {
- Object[][] array = new Object[nestedList.size()][];
- for (int i = 0; i < nestedList.size(); i++) {
- array[i] = nestedList.get(i).toArray();
+ for (int expectedSize : Arrays.asList(0, 1, 2, 3, 1_000)) {
+ for (float fillFactor : Arrays.asList(0.1f, 0.9f, 1f)) {
+ testCombinations(expectedSize, fillFactor, 1L, 1L);
+ testCombinations(expectedSize, fillFactor, 10L, 1L);
+ testCombinations(expectedSize, fillFactor, 1L, 1_000L);
+ testCombinations(expectedSize, fillFactor, 10L, 1_000L);
+ }
}
- return array;
}
- @Test(dataProvider = "parameters")
- public void testCombinations(int expectedSize, float fillFactor, long totalGroupIds, long totalRowIds)
+ private void testCombinations(int expectedSize, float fillFactor, long totalGroupIds, long totalRowIds)
{
TopNPeerGroupLookup lookup = new TopNPeerGroupLookup(expectedSize, fillFactor, HASH_STRATEGY, UNMAPPED_GROUP_ID, DEFAULT_RETURN_VALUE);
diff --git a/core/trino-main/src/test/java/io/trino/operator/TestWindowOperator.java b/core/trino-main/src/test/java/io/trino/operator/TestWindowOperator.java
index f9c3e5f6f4aa..1c55c5d36569 100644
--- a/core/trino-main/src/test/java/io/trino/operator/TestWindowOperator.java
+++ b/core/trino-main/src/test/java/io/trino/operator/TestWindowOperator.java
@@ -38,10 +38,10 @@
import io.trino.sql.planner.plan.PlanNodeId;
import io.trino.testing.MaterializedResult;
import io.trino.testing.TestingTaskContext;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.parallel.Execution;
import java.util.List;
import java.util.Optional;
@@ -72,8 +72,12 @@
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
+import static org.junit.jupiter.api.parallel.ExecutionMode.CONCURRENT;
-@Test(singleThreaded = true)
+@TestInstance(PER_CLASS)
+@Execution(CONCURRENT)
public class TestWindowOperator
{
private static final TypeOperators TYPE_OPERATORS_CACHE = new TypeOperators();
@@ -100,40 +104,30 @@ public class TestWindowOperator
private static final List LEAD = ImmutableList.of(
window(new ReflectionWindowFunctionSupplier(3, LeadFunction.class), VARCHAR, UNBOUNDED_FRAME, false, ImmutableList.of(), 1, 3, 4));
- private ExecutorService executor;
- private ScheduledExecutorService scheduledExecutor;
- private DummySpillerFactory spillerFactory;
+ private final ExecutorService executor = newCachedThreadPool(daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
+ private final ScheduledExecutorService scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed(getClass().getSimpleName() + "-scheduledExecutor-%s"));
- @BeforeMethod
- public void setUp()
- {
- executor = newCachedThreadPool(daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
- scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed(getClass().getSimpleName() + "-scheduledExecutor-%s"));
- spillerFactory = new DummySpillerFactory();
- }
-
- @AfterMethod(alwaysRun = true)
+ @AfterAll
public void tearDown()
{
executor.shutdownNow();
scheduledExecutor.shutdownNow();
- spillerFactory = null;
}
- @DataProvider
- public static Object[][] spillEnabled()
+ @Test
+ public void testMultipleOutputPages()
{
- return new Object[][] {
- {false, false, 0},
- {true, false, 8},
- {true, true, 8},
- {true, false, 0},
- {true, true, 0}};
+ testMultipleOutputPages(false, false, 0);
+ testMultipleOutputPages(true, false, 8);
+ testMultipleOutputPages(true, true, 8);
+ testMultipleOutputPages(true, false, 0);
+ testMultipleOutputPages(true, true, 0);
}
- @Test(dataProvider = "spillEnabled")
- public void testMultipleOutputPages(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ private void testMultipleOutputPages(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
+ DummySpillerFactory spillerFactory = new DummySpillerFactory();
+
// make operator produce multiple pages during finish phase
int numberOfRows = 80_000;
List input = rowPagesBuilder(BIGINT, DOUBLE)
@@ -147,6 +141,7 @@ public void testMultipleOutputPages(boolean spillEnabled, boolean revokeMemoryWh
Ints.asList(),
Ints.asList(0),
ImmutableList.copyOf(new SortOrder[] {SortOrder.DESC_NULLS_FIRST}),
+ spillerFactory,
spillEnabled);
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -167,8 +162,17 @@ public void testMultipleOutputPages(boolean spillEnabled, boolean revokeMemoryWh
.isTrue();
}
- @Test(dataProvider = "spillEnabled")
- public void testRowNumber(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ @Test
+ public void testRowNumber()
+ {
+ testRowNumber(false, false, 0);
+ testRowNumber(true, false, 8);
+ testRowNumber(true, true, 8);
+ testRowNumber(true, false, 0);
+ testRowNumber(true, true, 0);
+ }
+
+ private void testRowNumber(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(BIGINT, DOUBLE)
.row(2L, 0.3)
@@ -186,6 +190,7 @@ public void testRowNumber(boolean spillEnabled, boolean revokeMemoryWhenAddingPa
Ints.asList(),
Ints.asList(0),
ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}),
+ new DummySpillerFactory(),
spillEnabled);
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -200,8 +205,17 @@ public void testRowNumber(boolean spillEnabled, boolean revokeMemoryWhenAddingPa
assertOperatorEquals(operatorFactory, driverContext, input, expected, revokeMemoryWhenAddingPages);
}
- @Test(dataProvider = "spillEnabled")
- public void testRowNumberPartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ @Test
+ public void testRowNumberPartition()
+ {
+ testRowNumberPartition(false, false, 0);
+ testRowNumberPartition(true, false, 8);
+ testRowNumberPartition(true, true, 8);
+ testRowNumberPartition(true, false, 0);
+ testRowNumberPartition(true, true, 0);
+ }
+
+ private void testRowNumberPartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(VARCHAR, BIGINT, DOUBLE, BOOLEAN)
.row("b", -1L, -0.1, true)
@@ -219,6 +233,7 @@ public void testRowNumberPartition(boolean spillEnabled, boolean revokeMemoryWhe
Ints.asList(0),
Ints.asList(1),
ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}),
+ new DummySpillerFactory(),
spillEnabled);
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -255,6 +270,7 @@ public void testRowNumberArbitrary()
Ints.asList(),
Ints.asList(),
ImmutableList.copyOf(new SortOrder[] {}),
+ new DummySpillerFactory(),
false);
DriverContext driverContext = createDriverContext();
@@ -294,6 +310,7 @@ public void testRowNumberArbitraryWithSpill()
Ints.asList(),
Ints.asList(),
ImmutableList.copyOf(new SortOrder[] {}),
+ new DummySpillerFactory(),
true);
DriverContext driverContext = createDriverContext();
@@ -311,7 +328,16 @@ public void testRowNumberArbitraryWithSpill()
assertOperatorEquals(operatorFactory, driverContext, input, expected);
}
- @Test(dataProvider = "spillEnabled")
+ @Test
+ public void testDistinctPartitionAndPeers()
+ {
+ testDistinctPartitionAndPeers(false, false, 0);
+ testDistinctPartitionAndPeers(true, false, 8);
+ testDistinctPartitionAndPeers(true, true, 8);
+ testDistinctPartitionAndPeers(true, false, 0);
+ testDistinctPartitionAndPeers(true, true, 0);
+ }
+
public void testDistinctPartitionAndPeers(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(DOUBLE, DOUBLE)
@@ -344,6 +370,7 @@ public void testDistinctPartitionAndPeers(boolean spillEnabled, boolean revokeMe
Ints.asList(0),
Ints.asList(1),
ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}),
+ new DummySpillerFactory(),
spillEnabled);
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -372,35 +399,49 @@ public void testDistinctPartitionAndPeers(boolean spillEnabled, boolean revokeMe
assertOperatorEquals(operatorFactory, driverContext, input, expected, revokeMemoryWhenAddingPages);
}
- @Test(expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Query exceeded per-node memory limit of 10B.*")
+ @Test
public void testMemoryLimit()
{
- List input = rowPagesBuilder(BIGINT, DOUBLE)
- .row(1L, 0.1)
- .row(2L, 0.2)
- .pageBreak()
- .row(-1L, -0.1)
- .row(4L, 0.4)
- .build();
-
- DriverContext driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION, DataSize.ofBytes(10))
- .addPipelineContext(0, true, true, false)
- .addDriverContext();
-
- WindowOperatorFactory operatorFactory = createFactoryUnbounded(
- ImmutableList.of(BIGINT, DOUBLE),
- Ints.asList(1),
- ROW_NUMBER,
- Ints.asList(),
- Ints.asList(0),
- ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}),
- false);
+ assertThatThrownBy(() -> {
+ List input = rowPagesBuilder(BIGINT, DOUBLE)
+ .row(1L, 0.1)
+ .row(2L, 0.2)
+ .pageBreak()
+ .row(-1L, -0.1)
+ .row(4L, 0.4)
+ .build();
+
+ DriverContext driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION, DataSize.ofBytes(10))
+ .addPipelineContext(0, true, true, false)
+ .addDriverContext();
+
+ WindowOperatorFactory operatorFactory = createFactoryUnbounded(
+ ImmutableList.of(BIGINT, DOUBLE),
+ Ints.asList(1),
+ ROW_NUMBER,
+ Ints.asList(),
+ Ints.asList(0),
+ ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}),
+ new DummySpillerFactory(),
+ false);
+
+ toPages(operatorFactory, driverContext, input);
+ })
+ .isInstanceOf(ExceededMemoryLimitException.class)
+ .hasMessageMatching("Query exceeded per-node memory limit of 10B.*");
+ }
- toPages(operatorFactory, driverContext, input);
+ @Test
+ public void testFirstValuePartition()
+ {
+ testFirstValuePartition(false, false, 0);
+ testFirstValuePartition(true, false, 8);
+ testFirstValuePartition(true, true, 8);
+ testFirstValuePartition(true, false, 0);
+ testFirstValuePartition(true, true, 0);
}
- @Test(dataProvider = "spillEnabled")
- public void testFirstValuePartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ private void testFirstValuePartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(VARCHAR, VARCHAR, BIGINT, BOOLEAN, VARCHAR)
.row("b", "A1", 1L, true, "")
@@ -419,6 +460,7 @@ public void testFirstValuePartition(boolean spillEnabled, boolean revokeMemoryWh
Ints.asList(0),
Ints.asList(2),
ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}),
+ new DummySpillerFactory(),
spillEnabled);
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -454,6 +496,7 @@ public void testClose()
Ints.asList(0),
Ints.asList(1),
ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}),
+ new DummySpillerFactory(),
false);
DriverContext driverContext = createDriverContext(1000);
@@ -469,8 +512,17 @@ public void testClose()
operator.close();
}
- @Test(dataProvider = "spillEnabled")
- public void testLastValuePartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ @Test
+ public void testLastValuePartition()
+ {
+ testLastValuePartition(false, false, 0);
+ testLastValuePartition(true, false, 8);
+ testLastValuePartition(true, true, 8);
+ testLastValuePartition(true, false, 0);
+ testLastValuePartition(true, true, 0);
+ }
+
+ private void testLastValuePartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(VARCHAR, VARCHAR, BIGINT, BOOLEAN, VARCHAR)
.row("b", "A1", 1L, true, "")
@@ -490,6 +542,7 @@ public void testLastValuePartition(boolean spillEnabled, boolean revokeMemoryWhe
Ints.asList(0),
Ints.asList(2),
ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}),
+ new DummySpillerFactory(),
spillEnabled);
MaterializedResult expected = resultBuilder(driverContext.getSession(), VARCHAR, VARCHAR, BIGINT, BOOLEAN, VARCHAR)
@@ -503,8 +556,17 @@ public void testLastValuePartition(boolean spillEnabled, boolean revokeMemoryWhe
assertOperatorEquals(operatorFactory, driverContext, input, expected, revokeMemoryWhenAddingPages);
}
- @Test(dataProvider = "spillEnabled")
- public void testNthValuePartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ @Test
+ public void testNthValuePartition()
+ {
+ testNthValuePartition(false, false, 0);
+ testNthValuePartition(true, false, 8);
+ testNthValuePartition(true, true, 8);
+ testNthValuePartition(true, false, 0);
+ testNthValuePartition(true, true, 0);
+ }
+
+ private void testNthValuePartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(VARCHAR, VARCHAR, BIGINT, BIGINT, BOOLEAN, VARCHAR)
.row("b", "A1", 1L, 2L, true, "")
@@ -523,6 +585,7 @@ public void testNthValuePartition(boolean spillEnabled, boolean revokeMemoryWhen
Ints.asList(0),
Ints.asList(2),
ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}),
+ new DummySpillerFactory(),
spillEnabled);
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -538,8 +601,17 @@ public void testNthValuePartition(boolean spillEnabled, boolean revokeMemoryWhen
assertOperatorEquals(operatorFactory, driverContext, input, expected, revokeMemoryWhenAddingPages);
}
- @Test(dataProvider = "spillEnabled")
- public void testLagPartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ @Test
+ public void testLagPartition()
+ {
+ testLagPartition(false, false, 0);
+ testLagPartition(true, false, 8);
+ testLagPartition(true, true, 8);
+ testLagPartition(true, false, 0);
+ testLagPartition(true, true, 0);
+ }
+
+ private void testLagPartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(VARCHAR, VARCHAR, BIGINT, BIGINT, VARCHAR, BOOLEAN, VARCHAR)
.row("b", "A1", 1L, 1L, "D", true, "")
@@ -558,6 +630,7 @@ public void testLagPartition(boolean spillEnabled, boolean revokeMemoryWhenAddin
Ints.asList(0),
Ints.asList(2),
ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}),
+ new DummySpillerFactory(),
spillEnabled);
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -573,8 +646,17 @@ public void testLagPartition(boolean spillEnabled, boolean revokeMemoryWhenAddin
assertOperatorEquals(operatorFactory, driverContext, input, expected, revokeMemoryWhenAddingPages);
}
- @Test(dataProvider = "spillEnabled")
- public void testLeadPartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ @Test
+ public void testLeadPartition()
+ {
+ testLeadPartition(false, false, 0);
+ testLeadPartition(true, false, 8);
+ testLeadPartition(true, true, 8);
+ testLeadPartition(true, false, 0);
+ testLeadPartition(true, true, 0);
+ }
+
+ private void testLeadPartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(VARCHAR, VARCHAR, BIGINT, BIGINT, VARCHAR, BOOLEAN, VARCHAR)
.row("b", "A1", 1L, 1L, "D", true, "")
@@ -593,6 +675,7 @@ public void testLeadPartition(boolean spillEnabled, boolean revokeMemoryWhenAddi
Ints.asList(0),
Ints.asList(2),
ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}),
+ new DummySpillerFactory(),
spillEnabled);
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -608,8 +691,17 @@ public void testLeadPartition(boolean spillEnabled, boolean revokeMemoryWhenAddi
assertOperatorEquals(operatorFactory, driverContext, input, expected, revokeMemoryWhenAddingPages);
}
- @Test(dataProvider = "spillEnabled")
- public void testPartiallyPreGroupedPartitionWithEmptyInput(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ @Test
+ public void testPartiallyPreGroupedPartitionWithEmptyInput()
+ {
+ testPartiallyPreGroupedPartitionWithEmptyInput(false, false, 0);
+ testPartiallyPreGroupedPartitionWithEmptyInput(true, false, 8);
+ testPartiallyPreGroupedPartitionWithEmptyInput(true, true, 8);
+ testPartiallyPreGroupedPartitionWithEmptyInput(true, false, 0);
+ testPartiallyPreGroupedPartitionWithEmptyInput(true, true, 0);
+ }
+
+ private void testPartiallyPreGroupedPartitionWithEmptyInput(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(BIGINT, VARCHAR, BIGINT, VARCHAR)
.pageBreak()
@@ -625,6 +717,7 @@ public void testPartiallyPreGroupedPartitionWithEmptyInput(boolean spillEnabled,
Ints.asList(3),
ImmutableList.of(SortOrder.ASC_NULLS_LAST),
0,
+ new DummySpillerFactory(),
spillEnabled);
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -634,8 +727,17 @@ public void testPartiallyPreGroupedPartitionWithEmptyInput(boolean spillEnabled,
assertOperatorEquals(operatorFactory, driverContext, input, expected, revokeMemoryWhenAddingPages);
}
- @Test(dataProvider = "spillEnabled")
- public void testPartiallyPreGroupedPartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ @Test
+ public void testPartiallyPreGroupedPartition()
+ {
+ testPartiallyPreGroupedPartition(false, false, 0);
+ testPartiallyPreGroupedPartition(true, false, 8);
+ testPartiallyPreGroupedPartition(true, true, 8);
+ testPartiallyPreGroupedPartition(true, false, 0);
+ testPartiallyPreGroupedPartition(true, true, 0);
+ }
+
+ private void testPartiallyPreGroupedPartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(BIGINT, VARCHAR, BIGINT, VARCHAR)
.pageBreak()
@@ -659,6 +761,7 @@ public void testPartiallyPreGroupedPartition(boolean spillEnabled, boolean revok
Ints.asList(3),
ImmutableList.of(SortOrder.ASC_NULLS_LAST),
0,
+ new DummySpillerFactory(),
spillEnabled);
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -674,8 +777,17 @@ public void testPartiallyPreGroupedPartition(boolean spillEnabled, boolean revok
assertOperatorEqualsIgnoreOrder(operatorFactory, driverContext, input, expected, revokeMemoryWhenAddingPages);
}
- @Test(dataProvider = "spillEnabled")
- public void testFullyPreGroupedPartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ @Test
+ public void testFullyPreGroupedPartition()
+ {
+ testFullyPreGroupedPartition(false, false, 0);
+ testFullyPreGroupedPartition(true, false, 8);
+ testFullyPreGroupedPartition(true, true, 8);
+ testFullyPreGroupedPartition(true, false, 0);
+ testFullyPreGroupedPartition(true, true, 0);
+ }
+
+ private void testFullyPreGroupedPartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(BIGINT, VARCHAR, BIGINT, VARCHAR)
.pageBreak()
@@ -700,6 +812,7 @@ public void testFullyPreGroupedPartition(boolean spillEnabled, boolean revokeMem
Ints.asList(3),
ImmutableList.of(SortOrder.ASC_NULLS_LAST),
0,
+ new DummySpillerFactory(),
spillEnabled);
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -716,8 +829,17 @@ public void testFullyPreGroupedPartition(boolean spillEnabled, boolean revokeMem
assertOperatorEqualsIgnoreOrder(operatorFactory, driverContext, input, expected, revokeMemoryWhenAddingPages);
}
- @Test(dataProvider = "spillEnabled")
- public void testFullyPreGroupedAndPartiallySortedPartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ @Test
+ public void testFullyPreGroupedAndPartiallySortedPartition()
+ {
+ testFullyPreGroupedAndPartiallySortedPartition(false, false, 0);
+ testFullyPreGroupedAndPartiallySortedPartition(true, false, 8);
+ testFullyPreGroupedAndPartiallySortedPartition(true, true, 8);
+ testFullyPreGroupedAndPartiallySortedPartition(true, false, 0);
+ testFullyPreGroupedAndPartiallySortedPartition(true, true, 0);
+ }
+
+ private void testFullyPreGroupedAndPartiallySortedPartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(BIGINT, VARCHAR, BIGINT, VARCHAR)
.pageBreak()
@@ -743,6 +865,7 @@ public void testFullyPreGroupedAndPartiallySortedPartition(boolean spillEnabled,
Ints.asList(3, 2),
ImmutableList.of(SortOrder.ASC_NULLS_LAST, SortOrder.ASC_NULLS_LAST),
1,
+ new DummySpillerFactory(),
spillEnabled);
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -760,8 +883,17 @@ public void testFullyPreGroupedAndPartiallySortedPartition(boolean spillEnabled,
assertOperatorEqualsIgnoreOrder(operatorFactory, driverContext, input, expected, revokeMemoryWhenAddingPages);
}
- @Test(dataProvider = "spillEnabled")
- public void testFullyPreGroupedAndFullySortedPartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
+ @Test
+ public void testFullyPreGroupedAndFullySortedPartition()
+ {
+ testFullyPreGroupedAndFullySortedPartition(false, false, 0);
+ testFullyPreGroupedAndFullySortedPartition(true, false, 8);
+ testFullyPreGroupedAndFullySortedPartition(true, true, 8);
+ testFullyPreGroupedAndFullySortedPartition(true, false, 0);
+ testFullyPreGroupedAndFullySortedPartition(true, true, 0);
+ }
+
+ private void testFullyPreGroupedAndFullySortedPartition(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit)
{
List input = rowPagesBuilder(BIGINT, VARCHAR, BIGINT, VARCHAR)
.pageBreak()
@@ -787,6 +919,7 @@ public void testFullyPreGroupedAndFullySortedPartition(boolean spillEnabled, boo
Ints.asList(3),
ImmutableList.of(SortOrder.ASC_NULLS_LAST),
1,
+ new DummySpillerFactory(),
spillEnabled);
DriverContext driverContext = createDriverContext(memoryLimit);
@@ -844,6 +977,7 @@ private WindowOperatorFactory createFactoryUnbounded(
List partitionChannels,
List sortChannels,
List sortOrder,
+ SpillerFactory spillerFactory,
boolean spillEnabled)
{
return createFactoryUnbounded(
@@ -855,6 +989,7 @@ private WindowOperatorFactory createFactoryUnbounded(
sortChannels,
sortOrder,
0,
+ spillerFactory,
spillEnabled);
}
@@ -867,6 +1002,7 @@ private WindowOperatorFactory createFactoryUnbounded(
List sortChannels,
List sortOrder,
int preSortedChannelPrefix,
+ DummySpillerFactory spillerFactory,
boolean spillEnabled)
{
return new WindowOperatorFactory(
diff --git a/core/trino-main/src/test/java/io/trino/operator/aggregation/AbstractTestApproximateCountDistinct.java b/core/trino-main/src/test/java/io/trino/operator/aggregation/AbstractTestApproximateCountDistinct.java
index a697a552f7e5..6ee64e55a1c9 100644
--- a/core/trino-main/src/test/java/io/trino/operator/aggregation/AbstractTestApproximateCountDistinct.java
+++ b/core/trino-main/src/test/java/io/trino/operator/aggregation/AbstractTestApproximateCountDistinct.java
@@ -21,8 +21,7 @@
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.type.Type;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
+import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.Collections;
@@ -52,35 +51,35 @@ protected int getUniqueValuesCount()
return 20000;
}
- @DataProvider(name = "provideStandardErrors")
- public Object[][] provideStandardErrors()
+ @Test
+ public void testNoPositions()
{
- return new Object[][] {
- {0.0230}, // 2k buckets
- {0.0115}, // 8k buckets
- };
+ assertCount(ImmutableList.of(), 0.0230, 0);
+ assertCount(ImmutableList.of(), 0.0115, 0);
}
- @Test(dataProvider = "provideStandardErrors")
- public void testNoPositions(double maxStandardError)
+ @Test
+ public void testSinglePosition()
{
- assertCount(ImmutableList.of(), maxStandardError, 0);
+ assertCount(ImmutableList.of(randomValue()), 0.0230, 1);
+ assertCount(ImmutableList.of(randomValue()), 0.0115, 1);
}
- @Test(dataProvider = "provideStandardErrors")
- public void testSinglePosition(double maxStandardError)
+ @Test
+ public void testAllPositionsNull()
{
- assertCount(ImmutableList.of(randomValue()), maxStandardError, 1);
+ assertCount(Collections.nCopies(100, null), 0.0230, 0);
+ assertCount(Collections.nCopies(100, null), 0.0115, 0);
}
- @Test(dataProvider = "provideStandardErrors")
- public void testAllPositionsNull(double maxStandardError)
+ @Test
+ public void testMixedNullsAndNonNulls()
{
- assertCount(Collections.nCopies(100, null), maxStandardError, 0);
+ testMixedNullsAndNonNulls(0.0230);
+ testMixedNullsAndNonNulls(0.0115);
}
- @Test(dataProvider = "provideStandardErrors")
- public void testMixedNullsAndNonNulls(double maxStandardError)
+ private void testMixedNullsAndNonNulls(double maxStandardError)
{
int uniques = getUniqueValuesCount();
List