Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -47,10 +47,12 @@
import io.trino.sql.planner.plan.PlanNodeId;
import io.trino.testing.TestingSession;
import io.trino.util.FinalizerService;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.parallel.Execution;

import java.net.InetAddress;
import java.net.URI;
Expand Down Expand Up @@ -85,11 +87,14 @@
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_METHOD;
import static org.junit.jupiter.api.parallel.ExecutionMode.SAME_THREAD;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;

@Test(singleThreaded = true)
@TestInstance(PER_METHOD)
@Execution(SAME_THREAD)
public class TestNodeScheduler
{
private FinalizerService finalizerService;
Expand All @@ -103,7 +108,7 @@ public class TestNodeScheduler
private ScheduledExecutorService remoteTaskScheduledExecutor;
private Session session;

@BeforeMethod
@BeforeEach
public void setUp()
{
session = TestingSession.testSessionBuilder().build();
Expand Down Expand Up @@ -135,7 +140,7 @@ private void setUpNodes()
new InternalNode("other3", URI.create("http://10.0.0.1:13"), NodeVersion.UNKNOWN, false));
}

@AfterMethod(alwaysRun = true)
@AfterEach
public void tearDown()
{
remoteTaskExecutor.shutdown();
Expand Down Expand Up @@ -173,7 +178,8 @@ public void testScheduleLocal()
assertEquals(assignment.getValue(), split);
}

@Test(timeOut = 60 * 1000)
@Test
@Timeout(60)
public void testTopologyAwareScheduling()
{
NodeTaskMap nodeTaskMap = new NodeTaskMap(finalizerService);
Expand Down Expand Up @@ -652,27 +658,27 @@ public void testEquateDistribution()
assertEquals(assignment.get(node4).size(), 4);
}

@DataProvider
public static Object[][] equateDistributionTestParameters()
@Test
public void testEquateDistributionConsistentHashing()
{
return new Object[][] {
{5, 10, 0.00},
{5, 20, 0.055},
{10, 50, 0.00},
{10, 100, 0.045},
{10, 200, 0.090},
{50, 550, 0.045},
{50, 600, 0.047},
{50, 700, 0.045},
{100, 550, 0.036},
{100, 600, 0.054},
{100, 1000, 0.039},
{100, 1500, 0.045}};
testEquateDistributionConsistentHashing(5, 10, 0.00);
testEquateDistributionConsistentHashing(5, 20, 0.055);
testEquateDistributionConsistentHashing(10, 50, 0.00);
testEquateDistributionConsistentHashing(10, 100, 0.045);
testEquateDistributionConsistentHashing(10, 200, 0.090);
testEquateDistributionConsistentHashing(50, 550, 0.045);
testEquateDistributionConsistentHashing(50, 600, 0.047);
testEquateDistributionConsistentHashing(50, 700, 0.045);
testEquateDistributionConsistentHashing(100, 550, 0.036);
testEquateDistributionConsistentHashing(100, 600, 0.054);
testEquateDistributionConsistentHashing(100, 1000, 0.039);
testEquateDistributionConsistentHashing(100, 1500, 0.045);
}

@Test(dataProvider = "equateDistributionTestParameters")
public void testEquateDistributionConsistentHashing(int numberOfNodes, int numberOfSplits, double misassignedSplitsRatio)
private void testEquateDistributionConsistentHashing(int numberOfNodes, int numberOfSplits, double misassignedSplitsRatio)
{
setUp();

ImmutableList.Builder<InternalNode> nodesBuilder = ImmutableList.builder();
for (int i = 0; i < numberOfNodes; ++i) {
InternalNode node = new InternalNode("node" + i, URI.create("http://10.0.0.1:" + (i + 10)), NodeVersion.UNKNOWN, false);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,7 @@
import io.trino.spi.type.DecimalParseResult;
import io.trino.spi.type.Decimals;
import io.trino.spi.type.Type;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.junit.jupiter.api.Test;

import static io.trino.plugin.hive.HiveTimestampPrecision.NANOSECONDS;
import static io.trino.plugin.hive.HiveType.toHiveType;
Expand All @@ -34,8 +33,37 @@

public class TestDecimalCoercers
{
@Test(dataProvider = "dataProvider")
public void testDecimalToIntCoercion(String decimalString, Type coercedType, Object expectedValue)
@Test
public void testDecimalToIntCoercion()
{
    // Case table: {decimal literal to parse, coercion target type, expected boxed value
    // (null when the decimal overflows the target type's range)}.
    Object[][] cases = {
            {"12.120000000000000000", TINYINT, 12L},
            {"-12.120000000000000000", TINYINT, -12L},
            {"12.120", TINYINT, 12L},
            {"-12.120", TINYINT, -12L},
            {"141.120000000000000000", TINYINT, null},
            {"-141.120", TINYINT, null},
            {"130.120000000000000000", SMALLINT, 130L},
            {"-130.120000000000000000", SMALLINT, -130L},
            {"130.120", SMALLINT, 130L},
            {"-130.120", SMALLINT, -130L},
            {"66000.30120000000000000", SMALLINT, null},
            {"-66000.120", SMALLINT, null},
            {"33000.12000000000000000", INTEGER, 33000L},
            {"-33000.12000000000000000", INTEGER, -33000L},
            {"33000.120", INTEGER, 33000L},
            {"-33000.120", INTEGER, -33000L},
            {"3300000000.1200000000000", INTEGER, null},
            {"3300000000.120", INTEGER, null},
            {"3300000000.1200000000000", BIGINT, 3300000000L},
            {"-3300000000.120000000000", BIGINT, -3300000000L},
            {"3300000000.12", BIGINT, 3300000000L},
            {"-3300000000.12", BIGINT, -3300000000L},
            {"330000000000000000000.12000000000", BIGINT, null},
            {"-330000000000000000000.12000000000", BIGINT, null},
            {"3300000", INTEGER, 3300000L},
    };
    // Exercise every case through the same private helper, in the same order
    // as the original straight-line calls.
    for (Object[] testCase : cases) {
        testDecimalToIntCoercion((String) testCase[0], (Type) testCase[1], testCase[2]);
    }
}

private void testDecimalToIntCoercion(String decimalString, Type coercedType, Object expectedValue)
{
DecimalParseResult parseResult = Decimals.parse(decimalString);

Expand All @@ -48,38 +76,6 @@ public void testDecimalToIntCoercion(String decimalString, Type coercedType, Obj
assertDecimalToIntCoercion(parseResult.getType(), parseResult.getObject(), coercedType, expectedValue);
}

// TestNG data provider for the decimal-to-integer coercion test.
// Each row is {decimal string to parse, target integer type, expected coerced value
// as a boxed long — or null when the decimal overflows the target type's range}.
// NOTE(review): superseded by inlined cases in the JUnit 5 migration — TODO confirm removal.
@DataProvider
public static Object[][] dataProvider()
{
return new Object[][] {
{"12.120000000000000000", TINYINT, 12L},
{"-12.120000000000000000", TINYINT, -12L},
{"12.120", TINYINT, 12L},
{"-12.120", TINYINT, -12L},
{"141.120000000000000000", TINYINT, null},
{"-141.120", TINYINT, null},
{"130.120000000000000000", SMALLINT, 130L},
{"-130.120000000000000000", SMALLINT, -130L},
{"130.120", SMALLINT, 130L},
{"-130.120", SMALLINT, -130L},
{"66000.30120000000000000", SMALLINT, null},
{"-66000.120", SMALLINT, null},
{"33000.12000000000000000", INTEGER, 33000L},
{"-33000.12000000000000000", INTEGER, -33000L},
{"33000.120", INTEGER, 33000L},
{"-33000.120", INTEGER, -33000L},
{"3300000000.1200000000000", INTEGER, null},
{"3300000000.120", INTEGER, null},
{"3300000000.1200000000000", BIGINT, 3300000000L},
{"-3300000000.120000000000", BIGINT, -3300000000L},
{"3300000000.12", BIGINT, 3300000000L},
{"-3300000000.12", BIGINT, -3300000000L},
{"330000000000000000000.12000000000", BIGINT, null},
{"-330000000000000000000.12000000000", BIGINT, null},
{"3300000", INTEGER, 3300000L},
};
}

private void assertDecimalToIntCoercion(Type fromType, Object valueToBeCoerced, Type toType, Object expectedValue)
{
Block coercedValue = createCoercer(TESTING_TYPE_MANAGER, toHiveType(fromType), toHiveType(toType), new CoercionUtils.CoercionContext(NANOSECONDS, false)).orElseThrow()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,16 +36,13 @@
import io.trino.testing.containers.Minio;
import org.intellij.lang.annotations.Language;
import org.testng.annotations.AfterClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

import java.io.File;
import java.util.Arrays;

import static com.google.common.base.Preconditions.checkArgument;
import static io.trino.plugin.hive.HiveQueryRunner.TPCH_SCHEMA;
import static io.trino.plugin.hive.HiveTestUtils.HDFS_FILE_SYSTEM_FACTORY;
import static io.trino.testing.DataProviders.toDataProvider;
import static io.trino.testing.MultisetAssertions.assertMultisetsEqual;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static io.trino.testing.containers.Minio.MINIO_ACCESS_KEY;
Expand Down Expand Up @@ -111,63 +108,67 @@ public void tearDown()
minio = null;
}

@Test(dataProvider = "storageFormats")
public void testSelectWithFilter(StorageFormat format)
@Test
public void testSelectWithFilter()
{
assertUpdate("DROP TABLE IF EXISTS test_select_from_where");
String tableLocation = randomTableLocation("test_select_from_where");

assertUpdate("CREATE TABLE test_select_from_where WITH (format = '" + format + "', external_location = '" + tableLocation + "') AS SELECT 2 AS age", 1);

assertFileSystemAccesses(
withSmallFileThreshold(getSession(), DataSize.valueOf("1MB")), // large enough threshold for single request of small file
"SELECT * FROM test_select_from_where WHERE age = 2",
ImmutableMultiset.<String>builder()
.add("S3.GetObject")
.add("S3.ListObjectsV2")
.build());

assertFileSystemAccesses(
withSmallFileThreshold(getSession(), DataSize.valueOf("10B")), // disables single request for small file
"SELECT * FROM test_select_from_where WHERE age = 2",
ImmutableMultiset.<String>builder()
.addCopies("S3.GetObject", occurrences(format, 3, 2))
.add("S3.ListObjectsV2")
.build());

assertUpdate("DROP TABLE test_select_from_where");
for (StorageFormat format : StorageFormat.values()) {
assertUpdate("DROP TABLE IF EXISTS test_select_from_where");
String tableLocation = randomTableLocation("test_select_from_where");

assertUpdate("CREATE TABLE test_select_from_where WITH (format = '" + format + "', external_location = '" + tableLocation + "') AS SELECT 2 AS age", 1);

assertFileSystemAccesses(
withSmallFileThreshold(getSession(), DataSize.valueOf("1MB")), // large enough threshold for single request of small file
"SELECT * FROM test_select_from_where WHERE age = 2",
ImmutableMultiset.<String>builder()
.add("S3.GetObject")
.add("S3.ListObjectsV2")
.build());

assertFileSystemAccesses(
withSmallFileThreshold(getSession(), DataSize.valueOf("10B")), // disables single request for small file
"SELECT * FROM test_select_from_where WHERE age = 2",
ImmutableMultiset.<String>builder()
.addCopies("S3.GetObject", occurrences(format, 3, 2))
.add("S3.ListObjectsV2")
.build());

assertUpdate("DROP TABLE test_select_from_where");
}
}

@Test(dataProvider = "storageFormats")
public void testSelectPartitionTable(StorageFormat format)
@Test
public void testSelectPartitionTable()
{
assertUpdate("DROP TABLE IF EXISTS test_select_from_partition");
String tableLocation = randomTableLocation("test_select_from_partition");

assertUpdate("CREATE TABLE test_select_from_partition (data int, key varchar)" +
"WITH (partitioned_by = ARRAY['key'], format = '" + format + "', external_location = '" + tableLocation + "')");
assertUpdate("INSERT INTO test_select_from_partition VALUES (1, 'part1'), (2, 'part2')", 2);

assertFileSystemAccesses("SELECT * FROM test_select_from_partition",
ImmutableMultiset.<String>builder()
.addCopies("S3.GetObject", 2)
.addCopies("S3.ListObjectsV2", 2)
.build());

assertFileSystemAccesses("SELECT * FROM test_select_from_partition WHERE key = 'part1'",
ImmutableMultiset.<String>builder()
.add("S3.GetObject")
.add("S3.ListObjectsV2")
.build());

assertUpdate("INSERT INTO test_select_from_partition VALUES (11, 'part1')", 1);
assertFileSystemAccesses("SELECT * FROM test_select_from_partition WHERE key = 'part1'",
ImmutableMultiset.<String>builder()
.addCopies("S3.GetObject", 2)
.addCopies("S3.ListObjectsV2", 1)
.build());

assertUpdate("DROP TABLE test_select_from_partition");
for (StorageFormat format : StorageFormat.values()) {
assertUpdate("DROP TABLE IF EXISTS test_select_from_partition");
String tableLocation = randomTableLocation("test_select_from_partition");

assertUpdate("CREATE TABLE test_select_from_partition (data int, key varchar)" +
"WITH (partitioned_by = ARRAY['key'], format = '" + format + "', external_location = '" + tableLocation + "')");
assertUpdate("INSERT INTO test_select_from_partition VALUES (1, 'part1'), (2, 'part2')", 2);

assertFileSystemAccesses("SELECT * FROM test_select_from_partition",
ImmutableMultiset.<String>builder()
.addCopies("S3.GetObject", 2)
.addCopies("S3.ListObjectsV2", 2)
.build());

assertFileSystemAccesses("SELECT * FROM test_select_from_partition WHERE key = 'part1'",
ImmutableMultiset.<String>builder()
.add("S3.GetObject")
.add("S3.ListObjectsV2")
.build());

assertUpdate("INSERT INTO test_select_from_partition VALUES (11, 'part1')", 1);
assertFileSystemAccesses("SELECT * FROM test_select_from_partition WHERE key = 'part1'",
ImmutableMultiset.<String>builder()
.addCopies("S3.GetObject", 2)
.addCopies("S3.ListObjectsV2", 1)
.build());

assertUpdate("DROP TABLE test_select_from_partition");
}
}

private static String randomTableLocation(String tableName)
Expand Down Expand Up @@ -195,13 +196,6 @@ private Multiset<String> getOperations()
.collect(toCollection(HashMultiset::create));
}

// TestNG data provider: one row per Hive storage format, so each format-parameterized
// test runs once for every StorageFormat enum constant.
// NOTE(review): superseded by in-test loops in the JUnit 5 migration — TODO confirm removal.
@DataProvider
public static Object[][] storageFormats()
{
return Arrays.stream(StorageFormat.values())
.collect(toDataProvider());
}

private static int occurrences(StorageFormat tableType, int orcValue, int parquetValue)
{
checkArgument(!(orcValue == parquetValue), "No need to use Occurrences when ORC and Parquet");
Expand Down
Loading