diff --git a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotErrorCode.java b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotErrorCode.java index 04f7be1dd737..cc6eb39a5657 100755 --- a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotErrorCode.java +++ b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotErrorCode.java @@ -24,6 +24,7 @@ public enum PinotErrorCode implements ErrorCodeSupplier { PINOT_UNSUPPORTED_COLUMN_TYPE(0, EXTERNAL), // schema issues + PINOT_AMBIGUOUS_TABLE_NAME(1, EXTERNAL), // Duplicate case insensitive table name PINOT_INSUFFICIENT_SERVER_RESPONSE(2, EXTERNAL), // numServersResponded < numServersQueried PINOT_EXCEPTION(3, EXTERNAL), // Exception reported by pinot PINOT_HTTP_ERROR(4, EXTERNAL), // Some non okay http error code diff --git a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotMetadata.java b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotMetadata.java index 1ef45ed55bef..ac567dd7052b 100755 --- a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotMetadata.java +++ b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotMetadata.java @@ -20,7 +20,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import io.airlift.log.Logger; import io.trino.collect.cache.NonEvictableLoadingCache; import io.trino.plugin.base.aggregation.AggregateFunctionRewriter; import io.trino.plugin.base.aggregation.AggregateFunctionRule; @@ -50,7 +49,6 @@ import io.trino.spi.connector.LimitApplicationResult; import io.trino.spi.connector.SchemaTableName; import io.trino.spi.connector.SchemaTablePrefix; -import io.trino.spi.connector.TableNotFoundException; import io.trino.spi.expression.ConnectorExpression; import io.trino.spi.expression.Variable; import io.trino.spi.predicate.Domain; @@ -66,7 +64,7 @@ import java.util.Optional; import java.util.OptionalLong; import java.util.concurrent.ExecutionException; -import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.function.Function; @@ -89,14 +87,9 @@ public class PinotMetadata implements ConnectorMetadata { public static final String PINOT_COLUMN_NAME_PROPERTY = "pinotColumnName"; - - private static final Logger log = Logger.get(PinotMetadata.class); - - private static final Object ALL_TABLES_CACHE_KEY = new Object(); - private static final String SCHEMA_NAME = "default"; + public static final String SCHEMA_NAME = "default"; private final NonEvictableLoadingCache> pinotTableColumnCache; - private final NonEvictableLoadingCache> allTablesCache; private final int maxRowsPerBrokerQuery; private final AggregateFunctionRewriter aggregateFunctionRewriter; private final ImplementCountDistinct implementCountDistinct; @@ -106,15 +99,11 @@ public class PinotMetadata public PinotMetadata( PinotClient pinotClient, PinotConfig pinotConfig, - @ForPinot Executor executor) + @ForPinot ExecutorService executor) { requireNonNull(pinotConfig, "pinot config"); this.pinotClient = requireNonNull(pinotClient, "pinotClient is null"); long metadataCacheExpiryMillis = pinotConfig.getMetadataCacheExpiry().roundTo(TimeUnit.MILLISECONDS); - this.allTablesCache = buildNonEvictableCache( - CacheBuilder.newBuilder() - .refreshAfterWrite(metadataCacheExpiryMillis, TimeUnit.MILLISECONDS), - asyncReloading(CacheLoader.from(pinotClient::getAllTables), executor)); this.pinotTableColumnCache = 
buildNonEvictableCache( CacheBuilder.newBuilder() .refreshAfterWrite(metadataCacheExpiryMillis, TimeUnit.MILLISECONDS), @@ -129,7 +118,6 @@ public List load(String tableName) } }, executor)); - executor.execute(() -> this.allTablesCache.refresh(ALL_TABLES_CACHE_KEY)); this.maxRowsPerBrokerQuery = pinotConfig.getMaxRowsForBrokerQueries(); Function identifierQuote = identity(); // TODO identifier quoting not needed here? this.implementCountDistinct = new ImplementCountDistinct(identifierQuote); @@ -158,15 +146,11 @@ public PinotTableHandle getTableHandle(ConnectorSession session, SchemaTableName DynamicTable dynamicTable = DynamicTableBuilder.buildFromPql(this, tableName, pinotClient); return new PinotTableHandle(tableName.getSchemaName(), dynamicTable.getTableName(), TupleDomain.all(), OptionalLong.empty(), Optional.of(dynamicTable)); } - - try { - String pinotTableName = getPinotTableNameFromTrinoTableName(tableName.getTableName()); - return new PinotTableHandle(tableName.getSchemaName(), pinotTableName); - } - catch (TableNotFoundException e) { - log.debug(e, "Table not found: %s", tableName); + String pinotTableName = pinotClient.getPinotTableNameFromTrinoTableNameIfExists(tableName.getTableName()); + if (pinotTableName == null) { return null; } + return new PinotTableHandle(tableName.getSchemaName(), pinotTableName); } @Override @@ -192,11 +176,11 @@ public ConnectorTableMetadata getTableMetadata(ConnectorSession session, Connect @Override public List listTables(ConnectorSession session, Optional schemaNameOrNull) { - ImmutableList.Builder builder = ImmutableList.builder(); - for (String table : getPinotTableNames()) { + ImmutableSet.Builder builder = ImmutableSet.builder(); + for (String table : pinotClient.getPinotTableNames()) { builder.add(new SchemaTableName(SCHEMA_NAME, table)); } - return builder.build(); + return ImmutableList.copyOf(builder.build()); } @Override @@ -494,15 +478,10 @@ private static PinotColumnHandle resolveAggregateExpressionWithAlias(PinotColumn @VisibleForTesting public List getColumnsMetadata(String tableName) { - String pinotTableName = getPinotTableNameFromTrinoTableName(tableName); + String pinotTableName = pinotClient.getPinotTableNameFromTrinoTableName(tableName); return getFromCache(pinotTableColumnCache, pinotTableName); } - private List getPinotTableNames() - { - return getFromCache(allTablesCache, ALL_TABLES_CACHE_KEY); - } - private static V getFromCache(LoadingCache cache, K key) { try { @@ -513,22 +492,6 @@ private static V getFromCache(LoadingCache cache, K key) } } - private String getPinotTableNameFromTrinoTableName(String trinoTableName) - { - List allTables = getPinotTableNames(); - String pinotTableName = null; - for (String candidate : allTables) { - if (trinoTableName.equalsIgnoreCase(candidate)) { - pinotTableName = candidate; - break; - } - } - if (pinotTableName == null) { - throw new TableNotFoundException(new SchemaTableName(SCHEMA_NAME, trinoTableName)); - } - return pinotTableName; - } - private Map getDynamicTableColumnHandles(PinotTableHandle pinotTableHandle) { checkState(pinotTableHandle.getQuery().isPresent(), "dynamic table not present"); diff --git a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotModule.java b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotModule.java index 5b2ecea58b61..f09c089211d3 100755 --- a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotModule.java +++ b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotModule.java @@ -35,7 +35,7 @@ import 
javax.management.MBeanServer; import java.io.IOException; -import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; import static com.google.inject.multibindings.OptionalBinder.newOptionalBinder; import static io.airlift.concurrent.Threads.threadsNamed; @@ -72,7 +72,7 @@ public void configure(Binder binder) binder.bind(PinotPageSourceProvider.class).in(Scopes.SINGLETON); binder.bind(PinotClient.class).in(Scopes.SINGLETON); binder.bind(PinotQueryClient.class).in(Scopes.SINGLETON); - binder.bind(Executor.class).annotatedWith(ForPinot.class) + binder.bind(ExecutorService.class).annotatedWith(ForPinot.class) .toInstance(newCachedThreadPool(threadsNamed("pinot-metadata-fetcher-" + catalogName))); binder.bind(PinotSessionProperties.class).in(Scopes.SINGLETON); diff --git a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotTableHandle.java b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotTableHandle.java index 8f6906791956..8c93aded3f45 100755 --- a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotTableHandle.java +++ b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/PinotTableHandle.java @@ -25,7 +25,6 @@ import java.util.OptionalLong; import static com.google.common.base.MoreObjects.toStringHelper; -import static java.util.Locale.ENGLISH; import static java.util.Objects.requireNonNull; public class PinotTableHandle @@ -52,7 +51,7 @@ public PinotTableHandle( { this.schemaName = requireNonNull(schemaName, "schemaName is null"); - this.tableName = requireNonNull(tableName, "tableName is null").toLowerCase(ENGLISH); + this.tableName = requireNonNull(tableName, "tableName is null"); this.constraint = requireNonNull(constraint, "constraint is null"); this.limit = requireNonNull(limit, "limit is null"); this.query = requireNonNull(query, "query is null"); diff --git a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/client/PinotClient.java b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/client/PinotClient.java index 05e930ea0299..c345ae4df7ae 100755 --- a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/client/PinotClient.java +++ b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/client/PinotClient.java @@ -20,8 +20,10 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; import com.google.common.collect.AbstractIterator; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.Multimap; @@ -46,6 +48,8 @@ import io.trino.plugin.pinot.auth.PinotControllerAuthenticationProvider; import io.trino.plugin.pinot.query.PinotQueryInfo; import io.trino.spi.connector.ConnectorSession; +import io.trino.spi.connector.SchemaTableName; +import io.trino.spi.connector.TableNotFoundException; import org.apache.pinot.common.response.broker.BrokerResponseNative; import org.apache.pinot.common.response.broker.ResultTable; import org.apache.pinot.spi.data.Schema; @@ -55,12 +59,14 @@ import java.net.URI; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.ExecutionException; +import 
java.util.concurrent.ExecutorService; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; import java.util.function.Function; @@ -70,6 +76,7 @@ import java.util.stream.IntStream; import static com.google.common.base.Preconditions.checkState; +import static com.google.common.cache.CacheLoader.asyncReloading; import static com.google.common.collect.ImmutableMap.toImmutableMap; import static com.google.common.collect.Iterables.getOnlyElement; import static com.google.common.net.HttpHeaders.ACCEPT; @@ -81,13 +88,16 @@ import static io.airlift.json.JsonCodec.listJsonCodec; import static io.airlift.json.JsonCodec.mapJsonCodec; import static io.trino.collect.cache.SafeCaches.buildNonEvictableCache; +import static io.trino.plugin.pinot.PinotErrorCode.PINOT_AMBIGUOUS_TABLE_NAME; import static io.trino.plugin.pinot.PinotErrorCode.PINOT_EXCEPTION; import static io.trino.plugin.pinot.PinotErrorCode.PINOT_INVALID_CONFIGURATION; import static io.trino.plugin.pinot.PinotErrorCode.PINOT_UNABLE_TO_FIND_BROKER; +import static io.trino.plugin.pinot.PinotMetadata.SCHEMA_NAME; import static java.lang.String.format; import static java.util.Locale.ENGLISH; import static java.util.Objects.requireNonNull; import static java.util.function.UnaryOperator.identity; +import static java.util.stream.Collectors.joining; import static org.apache.pinot.spi.utils.builder.TableNameBuilder.extractRawTableName; public class PinotClient @@ -97,6 +107,7 @@ public class PinotClient private static final Pattern BROKER_PATTERN = Pattern.compile("Broker_(.*)_(\\d+)"); private static final String TIME_BOUNDARY_NOT_FOUND_ERROR_CODE = "404"; private static final JsonCodec>>> ROUTING_TABLE_CODEC = mapJsonCodec(String.class, mapJsonCodec(String.class, listJsonCodec(String.class))); + private static final Object ALL_TABLES_CACHE_KEY = new Object(); private static final JsonCodec QUERY_REQUEST_JSON_CODEC = jsonCodec(QueryRequest.class); private static final String GET_ALL_TABLES_API_TEMPLATE = "tables"; @@ -111,6 +122,7 @@ public class PinotClient private final PinotHostMapper pinotHostMapper; private final NonEvictableLoadingCache> brokersForTableCache; + private final NonEvictableLoadingCache> allTablesCache; private final JsonCodec tablesJsonCodec; private final JsonCodec brokersForTableJsonCodec; @@ -125,6 +137,7 @@ public PinotClient( PinotConfig config, PinotHostMapper pinotHostMapper, @ForPinot HttpClient httpClient, + @ForPinot ExecutorService executor, JsonCodec tablesJsonCodec, JsonCodec brokersForTableJsonCodec, JsonCodec timeBoundaryJsonCodec, @@ -150,6 +163,10 @@ public PinotClient( CacheBuilder.newBuilder() .expireAfterWrite(config.getMetadataCacheExpiry().roundTo(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS), CacheLoader.from(this::getAllBrokersForTable)); + this.allTablesCache = buildNonEvictableCache( + CacheBuilder.newBuilder() + .refreshAfterWrite(config.getMetadataCacheExpiry().roundTo(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS), + asyncReloading(CacheLoader.from(this::getAllTables), executor)); this.controllerAuthenticationProvider = controllerAuthenticationProvider; this.brokerAuthenticationProvider = brokerAuthenticationProvider; } @@ -239,9 +256,14 @@ public List getTables() } } - public List getAllTables() + protected Multimap getAllTables() { - return sendHttpGetToControllerJson(GET_ALL_TABLES_API_TEMPLATE, tablesJsonCodec).getTables(); + List allTables = sendHttpGetToControllerJson(GET_ALL_TABLES_API_TEMPLATE, tablesJsonCodec).getTables(); + ImmutableListMultimap.Builder 
builder = ImmutableListMultimap.builder(); + for (String table : allTables) { + builder.put(table.toLowerCase(ENGLISH), table); + } + return builder.build(); } public Schema getTableSchema(String table) @@ -250,6 +272,46 @@ public Schema getTableSchema(String table) return sendHttpGetToControllerJson(format(TABLE_SCHEMA_API_TEMPLATE, table), schemaJsonCodec); } + public List getPinotTableNames() + { + return ImmutableList.copyOf(getFromCache(allTablesCache, ALL_TABLES_CACHE_KEY).keySet()); + } + + public static V getFromCache(LoadingCache cache, K key) + { + V value = cache.getIfPresent(key); + if (value != null) { + return value; + } + try { + return cache.get(key); + } + catch (ExecutionException e) { + throw new PinotException(PinotErrorCode.PINOT_UNCLASSIFIED_ERROR, Optional.empty(), "Cannot fetch from cache " + key, e.getCause()); + } + } + + public String getPinotTableNameFromTrinoTableNameIfExists(String trinoTableName) + { + Collection candidates = getFromCache(allTablesCache, ALL_TABLES_CACHE_KEY).get(trinoTableName.toLowerCase(ENGLISH)); + if (candidates.isEmpty()) { + return null; + } + if (candidates.size() == 1) { + return getOnlyElement(candidates); + } + throw new PinotException(PINOT_AMBIGUOUS_TABLE_NAME, Optional.empty(), format("Ambiguous table names: %s", candidates.stream().collect(joining(", ")))); + } + + public String getPinotTableNameFromTrinoTableName(String trinoTableName) + { + String pinotTableName = getPinotTableNameFromTrinoTableNameIfExists(trinoTableName); + if (pinotTableName == null) { + throw new TableNotFoundException(new SchemaTableName(SCHEMA_NAME, trinoTableName)); + } + return pinotTableName; + } + public static class BrokersForTable { public static class InstancesInBroker diff --git a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/query/DynamicTableBuilder.java b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/query/DynamicTableBuilder.java index 85560439909a..4b71bd9938ff 100755 --- a/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/query/DynamicTableBuilder.java +++ b/plugin/trino-pinot/src/main/java/io/trino/plugin/pinot/query/DynamicTableBuilder.java @@ -86,10 +86,12 @@ public static DynamicTable buildFromPql(PinotMetadata pinotMetadata, SchemaTable BrokerRequest request = REQUEST_COMPILER.compileToBrokerRequest(query); PinotQuery pinotQuery = request.getPinotQuery(); QueryContext queryContext = BrokerRequestToQueryContextConverter.convert(request); - String pinotTableName = stripSuffix(request.getQuerySource().getTableName()); - Optional suffix = getSuffix(request.getQuerySource().getTableName()); + String tableName = request.getQuerySource().getTableName(); + String trinoTableName = stripSuffix(tableName).toLowerCase(ENGLISH); + String pinotTableName = pinotClient.getPinotTableNameFromTrinoTableName(trinoTableName); + Optional suffix = getSuffix(tableName); - Map columnHandles = pinotMetadata.getPinotColumnHandles(pinotTableName); + Map columnHandles = pinotMetadata.getPinotColumnHandles(trinoTableName); List orderBy = ImmutableList.of(); PinotTypeResolver pinotTypeResolver = new PinotTypeResolver(pinotClient, pinotTableName); List selectColumns = ImmutableList.of(); diff --git a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/AbstractPinotIntegrationSmokeTest.java b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/AbstractPinotIntegrationSmokeTest.java index 2845ca4180d6..03725cc7e3b5 100644 --- a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/AbstractPinotIntegrationSmokeTest.java +++ 
b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/AbstractPinotIntegrationSmokeTest.java @@ -17,6 +17,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import io.confluent.kafka.serializers.KafkaAvroSerializer; import io.trino.Session; import io.trino.plugin.pinot.client.PinotHostMapper; @@ -49,6 +50,7 @@ import java.util.Optional; import java.util.stream.Stream; +import static com.google.common.collect.ImmutableSet.toImmutableSet; import static com.google.common.collect.Iterables.getOnlyElement; import static com.google.inject.multibindings.OptionalBinder.newOptionalBinder; import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG; @@ -79,6 +81,9 @@ public abstract class AbstractPinotIntegrationSmokeTest private static final String MIXED_CASE_DISTINCT_TABLE = "mixed_case_distinct"; private static final String TOO_MANY_ROWS_TABLE = "too_many_rows"; private static final String TOO_MANY_BROKER_ROWS_TABLE = "too_many_broker_rows"; + private static final String MIXED_CASE_TABLE_NAME = "mixedCase"; + private static final String DUPLICATE_TABLE_LOWERCASE = "dup_table"; + private static final String DUPLICATE_TABLE_MIXED_CASE = "dup_Table"; private static final String JSON_TABLE = "my_table"; private static final String RESERVED_KEYWORD_TABLE = "reserved_keyword"; private static final String QUOTES_IN_COLUMN_NAME_TABLE = "quotes_in_column_name"; @@ -188,6 +193,10 @@ protected QueryRunner createQueryRunner() pinot.createSchema(getClass().getClassLoader().getResourceAsStream("mixed_case_distinct_schema.json"), MIXED_CASE_DISTINCT_TABLE); pinot.addRealTimeTable(getClass().getClassLoader().getResourceAsStream("mixed_case_distinct_realtimeSpec.json"), MIXED_CASE_DISTINCT_TABLE); + // Create mixed case table name, populated from the mixed case topic + pinot.createSchema(getClass().getClassLoader().getResourceAsStream("mixed_case_table_name_schema.json"), MIXED_CASE_TABLE_NAME); + pinot.addRealTimeTable(getClass().getClassLoader().getResourceAsStream("mixed_case_table_name_realtimeSpec.json"), MIXED_CASE_TABLE_NAME); + // Create and populate too many rows table and topic kafka.createTopic(TOO_MANY_ROWS_TABLE); Schema tooManyRowsAvroSchema = SchemaBuilder.record(TOO_MANY_ROWS_TABLE).fields() @@ -227,6 +236,15 @@ protected QueryRunner createQueryRunner() pinot.createSchema(getClass().getClassLoader().getResourceAsStream("too_many_broker_rows_schema.json"), TOO_MANY_BROKER_ROWS_TABLE); pinot.addRealTimeTable(getClass().getClassLoader().getResourceAsStream("too_many_broker_rows_realtimeSpec.json"), TOO_MANY_BROKER_ROWS_TABLE); + // Create the duplicate tables and topics + kafka.createTopic(DUPLICATE_TABLE_LOWERCASE); + pinot.createSchema(getClass().getClassLoader().getResourceAsStream("dup_table_lower_case_schema.json"), DUPLICATE_TABLE_LOWERCASE); + pinot.addRealTimeTable(getClass().getClassLoader().getResourceAsStream("dup_table_lower_case_realtimeSpec.json"), DUPLICATE_TABLE_LOWERCASE); + + kafka.createTopic(DUPLICATE_TABLE_MIXED_CASE); + pinot.createSchema(getClass().getClassLoader().getResourceAsStream("dup_table_mixed_case_schema.json"), DUPLICATE_TABLE_MIXED_CASE); + pinot.addRealTimeTable(getClass().getClassLoader().getResourceAsStream("dup_table_mixed_case_realtimeSpec.json"), DUPLICATE_TABLE_MIXED_CASE); + // Create and populate date time fields table and topic 
kafka.createTopic(DATE_TIME_FIELDS_TABLE); Schema dateTimeFieldsAvroSchema = SchemaBuilder.record(DATE_TIME_FIELDS_TABLE).fields() @@ -748,6 +766,67 @@ public void testNonLowerCaseColumnNames() .isFullyPushedDown(); } + @Test + public void testNonLowerTable() + { + long rowCount = (long) computeScalar("SELECT COUNT(*) FROM " + MIXED_CASE_TABLE_NAME); + List rows = new ArrayList<>(); + for (int i = 0; i < rowCount; i++) { + rows.add(format("('string_%s', '%s', '%s')", i, i, initialUpdatedAt.plusMillis(i * 1000).getEpochSecond())); + } + + String mixedCaseColumnNamesTableValues = rows.stream().collect(joining(",", "VALUES ", "")); + + // Test segment query all rows + assertQuery("SELECT stringcol, longcol, updatedatseconds" + + " FROM " + MIXED_CASE_TABLE_NAME, + mixedCaseColumnNamesTableValues); + + // Test broker query all rows + assertQuery("SELECT stringcol, longcol, updatedatseconds" + + " FROM \"SELECT updatedatseconds, longcol, stringcol FROM " + MIXED_CASE_TABLE_NAME + "\"", + mixedCaseColumnNamesTableValues); + + String singleRowValues = "VALUES (VARCHAR 'string_3', BIGINT '3', BIGINT '" + initialUpdatedAt.plusMillis(3 * 1000).getEpochSecond() + "')"; + + // Test segment query single row + assertThat(query("SELECT stringcol, longcol, updatedatseconds" + + " FROM " + MIXED_CASE_TABLE_NAME + + " WHERE longcol = 3")) + .matches(singleRowValues) + .isFullyPushedDown(); + + // Test broker query single row + assertThat(query("SELECT stringcol, longcol, updatedatseconds" + + " FROM \"SELECT updatedatseconds, longcol, stringcol FROM " + MIXED_CASE_TABLE_NAME + + "\" WHERE longcol = 3")) + .matches(singleRowValues) + .isFullyPushedDown(); + + // Test information schema + assertQuery( + "SELECT column_name FROM information_schema.columns WHERE table_schema = 'default' AND table_name = 'mixedcase'", + "VALUES 'stringcol', 'updatedatseconds', 'longcol'"); + assertQuery( + "SELECT column_name FROM information_schema.columns WHERE table_name = 'mixedcase'", + "VALUES 'stringcol', 'updatedatseconds', 'longcol'"); + assertEquals( + computeActual("SHOW COLUMNS FROM default.mixedcase").getMaterializedRows().stream() + .map(row -> row.getField(0)) + .collect(toImmutableSet()), + ImmutableSet.of("stringcol", "updatedatseconds", "longcol")); + } + + @Test + public void testAmbiguousTables() + { + assertQueryFails("SELECT * FROM " + DUPLICATE_TABLE_LOWERCASE, "Ambiguous table names: (" + DUPLICATE_TABLE_LOWERCASE + ", " + DUPLICATE_TABLE_MIXED_CASE + "|" + DUPLICATE_TABLE_MIXED_CASE + ", " + DUPLICATE_TABLE_LOWERCASE + ")"); + assertQueryFails("SELECT * FROM " + DUPLICATE_TABLE_MIXED_CASE, "Ambiguous table names: (" + DUPLICATE_TABLE_LOWERCASE + ", " + DUPLICATE_TABLE_MIXED_CASE + "|" + DUPLICATE_TABLE_MIXED_CASE + ", " + DUPLICATE_TABLE_LOWERCASE + ")"); + assertQueryFails("SELECT * FROM \"SELECT * FROM " + DUPLICATE_TABLE_LOWERCASE + "\"", "Ambiguous table names: (" + DUPLICATE_TABLE_LOWERCASE + ", " + DUPLICATE_TABLE_MIXED_CASE + "|" + DUPLICATE_TABLE_MIXED_CASE + ", " + DUPLICATE_TABLE_LOWERCASE + ")"); + assertQueryFails("SELECT * FROM \"SELECT * FROM " + DUPLICATE_TABLE_MIXED_CASE + "\"", "Ambiguous table names: (" + DUPLICATE_TABLE_LOWERCASE + ", " + DUPLICATE_TABLE_MIXED_CASE + "|" + DUPLICATE_TABLE_MIXED_CASE + ", " + DUPLICATE_TABLE_LOWERCASE + ")"); + assertQueryFails("SELECT * FROM information_schema.columns", "Ambiguous table names: (" + DUPLICATE_TABLE_LOWERCASE + ", " + DUPLICATE_TABLE_MIXED_CASE + "|" + DUPLICATE_TABLE_MIXED_CASE + ", " + DUPLICATE_TABLE_LOWERCASE + ")"); + } + @Test 
public void testReservedKeywordColumnNames() { diff --git a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/MockPinotClient.java b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/MockPinotClient.java index e7a9d11d27ea..67a6ba33c648 100755 --- a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/MockPinotClient.java +++ b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/MockPinotClient.java @@ -14,6 +14,7 @@ package io.trino.plugin.pinot; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Multimap; import io.airlift.http.client.Request; @@ -26,15 +27,20 @@ import io.trino.plugin.pinot.client.PinotClient; import org.apache.pinot.spi.data.Schema; +import java.util.AbstractMap; import java.util.List; import java.util.Map; import java.util.Optional; +import static io.airlift.concurrent.Threads.threadsNamed; import static io.trino.plugin.pinot.MetadataUtil.BROKERS_FOR_TABLE_JSON_CODEC; import static io.trino.plugin.pinot.MetadataUtil.BROKER_RESPONSE_NATIVE_JSON_CODEC; import static io.trino.plugin.pinot.MetadataUtil.TABLES_JSON_CODEC; import static io.trino.plugin.pinot.MetadataUtil.TEST_TABLE; import static io.trino.plugin.pinot.MetadataUtil.TIME_BOUNDARY_JSON_CODEC; +import static java.util.Locale.ENGLISH; +import static java.util.concurrent.Executors.newCachedThreadPool; +import static java.util.stream.Collectors.toList; public class MockPinotClient extends PinotClient @@ -58,6 +64,7 @@ public MockPinotClient(PinotConfig pinotConfig, Map metadata, St pinotConfig, new IdentityPinotHostMapper(), new TestingHttpClient(request -> null), + newCachedThreadPool(threadsNamed("pinot-metadata-fetcher-testing")), TABLES_JSON_CODEC, BROKERS_FOR_TABLE_JSON_CODEC, TIME_BOUNDARY_JSON_CODEC, @@ -85,13 +92,15 @@ public T doHttpActionWithHeadersJson( } @Override - public List getAllTables() + public Multimap getAllTables() { - return ImmutableList.builder() - .add(TestPinotSplitManager.realtimeOnlyTable.getTableName()) - .add(TestPinotSplitManager.hybridTable.getTableName()) - .add(TEST_TABLE) - .addAll(metadata.keySet()) + return ImmutableListMultimap.builder() + .put(TestPinotSplitManager.realtimeOnlyTable.getTableName().toLowerCase(ENGLISH), TestPinotSplitManager.realtimeOnlyTable.getTableName()) + .put(TestPinotSplitManager.hybridTable.getTableName().toLowerCase(ENGLISH), TestPinotSplitManager.hybridTable.getTableName()) + .put(TEST_TABLE.toLowerCase(ENGLISH), TEST_TABLE) + .putAll(metadata.keySet().stream() + .map(key -> new AbstractMap.SimpleEntry<>(key.toLowerCase(ENGLISH), key)) + .collect(toList())) .build(); } diff --git a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestDynamicTable.java b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestDynamicTable.java index f1b0e1cc5435..f1a359122a4b 100755 --- a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestDynamicTable.java +++ b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestDynamicTable.java @@ -96,12 +96,12 @@ public void testFilter() "OR ((OriginCityName != 'catfish paradise') AND (OriginState != 'az') AND (AirTime between 1 and 5)) " + "AND AirTime NOT IN (7,8,9) " + "OR ((DepDelayMinutes < 10) AND (Distance >= 3) AND (ArrDelay > 4) AND (SecurityDelay < 5) AND (LateAircraftDelay <= 7)) limit 60", - tableName.toLowerCase(ENGLISH)); + tableName); String expected = format("select \"FlightNum\", \"AirlineID\" from %s where OR(AND(\"CancellationCode\" IN 
('strike', 'weather', 'pilot_bac'), (\"Origin\") = 'jfk'), " + "AND((\"OriginCityName\") != 'catfish paradise', (\"OriginState\") != 'az', (\"AirTime\") BETWEEN '1' AND '5', \"AirTime\" NOT IN ('7', '8', '9')), " + "AND((\"DepDelayMinutes\") < '10', (\"Distance\") >= '3', (\"ArrDelay\") > '4', (\"SecurityDelay\") < '5', (\"LateAircraftDelay\") <= '7')) limit 60", - tableName.toLowerCase(ENGLISH)); + tableName); DynamicTable dynamicTable = buildFromPql(pinotMetadata, new SchemaTableName("default", query), mockClusterInfoFetcher); assertEquals(extractPql(dynamicTable, TupleDomain.all(), ImmutableList.of()), expected); } @@ -155,12 +155,12 @@ public void testFilterWithCaseStatements() String query = format("select FlightNum, AirlineID from %s " + "where case when cancellationcode = 'strike' then 3 else 4 end != 5 " + "AND case origincityname when 'nyc' then 'pizza' when 'la' then 'burrito' when 'boston' then 'clam chowder' " + - "else 'burger' end != 'salad'", tableName.toLowerCase(ENGLISH)); + "else 'burger' end != 'salad'", tableName); String expected = format("select \"FlightNum\", \"AirlineID\" from %s where AND((CASE WHEN equals(\"CancellationCode\", 'strike') " + "THEN '3' ELSE '4' END) != '5', (CASE WHEN equals(\"OriginCityName\", 'nyc') " + "THEN 'pizza' WHEN equals(\"OriginCityName\", 'la') THEN 'burrito' WHEN equals(\"OriginCityName\", 'boston') " + "THEN 'clam chowder' ELSE 'burger' END) != 'salad') limit 10", - tableName.toLowerCase(ENGLISH)); + tableName); DynamicTable dynamicTable = buildFromPql(pinotMetadata, new SchemaTableName("default", query), mockClusterInfoFetcher); assertEquals(extractPql(dynamicTable, TupleDomain.all(), ImmutableList.of()), expected); } @@ -176,7 +176,7 @@ public void testFilterWithPushdownConstraint() .put(columnHandle, Domain.create(ValueSet.ofRanges(Range.equal(VARCHAR, Slices.utf8Slice("Catfish Paradise"))), false)) .buildOrThrow()); - String expectedPql = "select \"FlightNum\" from realtimeonly where (\"OriginCityName\" = 'Catfish Paradise') limit 60"; + String expectedPql = "select \"FlightNum\" from realtimeOnly where (\"OriginCityName\" = 'Catfish Paradise') limit 60"; assertEquals(extractPql(dynamicTable, tupleDomain, ImmutableList.builder() .add(columnHandle) .build()), expectedPql); @@ -188,7 +188,7 @@ public void testFilterWithUdf() String tableName = realtimeOnlyTable.getTableName(); String query = format("select FlightNum from %s where DivLongestGTimes = FLOOR(EXP(2 * LN(3))) AND 5 < EXP(CarrierDelay) limit 60", tableName.toLowerCase(ENGLISH)); DynamicTable dynamicTable = buildFromPql(pinotMetadata, new SchemaTableName("default", query), mockClusterInfoFetcher); - String expectedPql = "select \"FlightNum\" from realtimeonly where AND((\"DivLongestGTimes\") = '9.0', (exp(\"CarrierDelay\")) > '5') limit 60"; + String expectedPql = "select \"FlightNum\" from realtimeOnly where AND((\"DivLongestGTimes\") = '9.0', (exp(\"CarrierDelay\")) > '5') limit 60"; assertEquals(extractPql(dynamicTable, TupleDomain.all(), ImmutableList.of()), expectedPql); } @@ -198,7 +198,7 @@ public void testSelectStarDynamicTable() String tableName = realtimeOnlyTable.getTableName(); String query = format("select * from %s limit 70", tableName.toLowerCase(ENGLISH)); DynamicTable dynamicTable = buildFromPql(pinotMetadata, new SchemaTableName("default", query), mockClusterInfoFetcher); - String expectedPql = format("select %s from %s limit 70", getColumnNames(tableName).stream().map(TestDynamicTable::quoteIdentifier).collect(joining(", ")), 
tableName.toLowerCase(ENGLISH)); + String expectedPql = format("select %s from %s limit 70", getColumnNames(tableName).stream().map(TestDynamicTable::quoteIdentifier).collect(joining(", ")), tableName); assertEquals(extractPql(dynamicTable, TupleDomain.all(), ImmutableList.of()), expectedPql); } diff --git a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestPinotClient.java b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestPinotClient.java index 90a3186aaa2d..43d79534f509 100755 --- a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestPinotClient.java +++ b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestPinotClient.java @@ -30,6 +30,9 @@ import java.util.concurrent.TimeUnit; +import static io.airlift.concurrent.Threads.threadsNamed; +import static java.util.concurrent.Executors.newCachedThreadPool; + public class TestPinotClient { @Test @@ -75,12 +78,13 @@ public void testBrokersParsed() " ]\n" + "}")); PinotConfig pinotConfig = new PinotConfig() - .setMetadataCacheExpiry(new Duration(0, TimeUnit.MILLISECONDS)) + .setMetadataCacheExpiry(new Duration(1, TimeUnit.MILLISECONDS)) .setControllerUrls("localhost:7900"); PinotClient pinotClient = new PinotClient( pinotConfig, new IdentityPinotHostMapper(), httpClient, + newCachedThreadPool(threadsNamed("pinot-metadata-fetcher-testing")), MetadataUtil.TABLES_JSON_CODEC, MetadataUtil.BROKERS_FOR_TABLE_JSON_CODEC, MetadataUtil.TIME_BOUNDARY_JSON_CODEC, diff --git a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestPinotMetadata.java b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestPinotMetadata.java index b28049336674..96a48b486e79 100755 --- a/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestPinotMetadata.java +++ b/plugin/trino-pinot/src/test/java/io/trino/plugin/pinot/TestPinotMetadata.java @@ -49,6 +49,6 @@ public void testTables() PinotTableHandle withAnotherSchema = metadata.getTableHandle(session, new SchemaTableName(TestPinotSplitManager.realtimeOnlyTable.getTableName(), TestPinotSplitManager.realtimeOnlyTable.getTableName())); assertEquals(withAnotherSchema.getTableName(), TestPinotSplitManager.realtimeOnlyTable.getTableName()); PinotTableHandle withUppercaseTable = metadata.getTableHandle(session, new SchemaTableName("default", TEST_TABLE)); - assertEquals(withUppercaseTable.getTableName(), "airlinestats"); + assertEquals(withUppercaseTable.getTableName(), "airlineStats"); } } diff --git a/plugin/trino-pinot/src/test/resources/dup_table_lower_case_realtimeSpec.json b/plugin/trino-pinot/src/test/resources/dup_table_lower_case_realtimeSpec.json new file mode 100644 index 000000000000..6418ee884c72 --- /dev/null +++ b/plugin/trino-pinot/src/test/resources/dup_table_lower_case_realtimeSpec.json @@ -0,0 +1,43 @@ +{ + "tableName": "dup_table", + "tableType": "REALTIME", + "segmentsConfig": { + "timeColumnName": "updated_at_seconds", + "timeType": "SECONDS", + "retentionTimeUnit": "DAYS", + "retentionTimeValue": "365", + "segmentPushType": "APPEND", + "segmentPushFrequency": "daily", + "segmentAssignmentStrategy": "BalanceNumSegmentAssignmentStrategy", + "schemaName": "dup_table", + "replicasPerPartition": "1" + }, + "tenants": { + "broker": "DefaultTenant", + "server": "DefaultTenant" + }, + "tableIndexConfig": { + "loadMode": "MMAP", + "invertedIndexColumns": ["string_col"], + "sortedColumn": ["updated_at_seconds"], + "streamConfigs": { + "streamType": "kafka", + "stream.kafka.consumer.type": "LowLevel", + "stream.kafka.topic.name": "too_many_rows", + 
"stream.kafka.decoder.class.name": "org.apache.pinot.plugin.inputformat.avro.confluent.KafkaConfluentSchemaRegistryAvroMessageDecoder", + "stream.kafka.consumer.factory.class.name": "org.apache.pinot.plugin.stream.kafka20.KafkaConsumerFactory", + "stream.kafka.decoder.prop.schema.registry.rest.url": "http://schema-registry:8081", + "stream.kafka.zk.broker.url": "zookeeper:2181/", + "stream.kafka.broker.list": "kafka:9092", + "realtime.segment.flush.threshold.time": "1m", + "realtime.segment.flush.threshold.size": "0", + "realtime.segment.flush.desired.size": "1M", + "isolation.level": "read_committed", + "stream.kafka.consumer.prop.auto.offset.reset": "smallest", + "stream.kafka.consumer.prop.group.id": "pinot_dup_table" + } + }, + "metadata": { + "customConfigs": {} + } +} diff --git a/plugin/trino-pinot/src/test/resources/dup_table_lower_case_schema.json b/plugin/trino-pinot/src/test/resources/dup_table_lower_case_schema.json new file mode 100644 index 000000000000..4d5b6596f9b2 --- /dev/null +++ b/plugin/trino-pinot/src/test/resources/dup_table_lower_case_schema.json @@ -0,0 +1,19 @@ +{ + "schemaName": "dup_table", + "dimensionFieldSpecs": [ + { + "name": "string_col", + "dataType": "STRING" + } + ], + "dateTimeFieldSpecs": [ + { + "name": "updated_at_seconds", + "dataType": "LONG", + "defaultNullValue" : 0, + "format": "1:SECONDS:EPOCH", + "transformFunction": "toEpochSeconds(updatedAt)", + "granularity" : "1:SECONDS" + } + ] +} diff --git a/plugin/trino-pinot/src/test/resources/dup_table_mixed_case_realtimeSpec.json b/plugin/trino-pinot/src/test/resources/dup_table_mixed_case_realtimeSpec.json new file mode 100644 index 000000000000..e9c5bf5f1a2b --- /dev/null +++ b/plugin/trino-pinot/src/test/resources/dup_table_mixed_case_realtimeSpec.json @@ -0,0 +1,43 @@ +{ + "tableName": "dup_Table", + "tableType": "REALTIME", + "segmentsConfig": { + "timeColumnName": "updated_at_seconds", + "timeType": "SECONDS", + "retentionTimeUnit": "DAYS", + "retentionTimeValue": "365", + "segmentPushType": "APPEND", + "segmentPushFrequency": "daily", + "segmentAssignmentStrategy": "BalanceNumSegmentAssignmentStrategy", + "schemaName": "dup_Table", + "replicasPerPartition": "1" + }, + "tenants": { + "broker": "DefaultTenant", + "server": "DefaultTenant" + }, + "tableIndexConfig": { + "loadMode": "MMAP", + "invertedIndexColumns": ["string_col"], + "sortedColumn": ["updated_at_seconds"], + "streamConfigs": { + "streamType": "kafka", + "stream.kafka.consumer.type": "LowLevel", + "stream.kafka.topic.name": "too_many_rows", + "stream.kafka.decoder.class.name": "org.apache.pinot.plugin.inputformat.avro.confluent.KafkaConfluentSchemaRegistryAvroMessageDecoder", + "stream.kafka.consumer.factory.class.name": "org.apache.pinot.plugin.stream.kafka20.KafkaConsumerFactory", + "stream.kafka.decoder.prop.schema.registry.rest.url": "http://schema-registry:8081", + "stream.kafka.zk.broker.url": "zookeeper:2181/", + "stream.kafka.broker.list": "kafka:9092", + "realtime.segment.flush.threshold.time": "1m", + "realtime.segment.flush.threshold.size": "0", + "realtime.segment.flush.desired.size": "1M", + "isolation.level": "read_committed", + "stream.kafka.consumer.prop.auto.offset.reset": "smallest", + "stream.kafka.consumer.prop.group.id": "pinot_dup_table2" + } + }, + "metadata": { + "customConfigs": {} + } +} diff --git a/plugin/trino-pinot/src/test/resources/dup_table_mixed_case_schema.json b/plugin/trino-pinot/src/test/resources/dup_table_mixed_case_schema.json new file mode 100644 index 000000000000..8abbafbe5761 --- 
/dev/null +++ b/plugin/trino-pinot/src/test/resources/dup_table_mixed_case_schema.json @@ -0,0 +1,19 @@ +{ + "schemaName": "dup_Table", + "dimensionFieldSpecs": [ + { + "name": "string_col", + "dataType": "STRING" + } + ], + "dateTimeFieldSpecs": [ + { + "name": "updated_at_seconds", + "dataType": "LONG", + "defaultNullValue" : 0, + "format": "1:SECONDS:EPOCH", + "transformFunction": "toEpochSeconds(updatedAt)", + "granularity" : "1:SECONDS" + } + ] +} diff --git a/plugin/trino-pinot/src/test/resources/mixed_case_table_name_realtimeSpec.json b/plugin/trino-pinot/src/test/resources/mixed_case_table_name_realtimeSpec.json new file mode 100644 index 000000000000..334070792b84 --- /dev/null +++ b/plugin/trino-pinot/src/test/resources/mixed_case_table_name_realtimeSpec.json @@ -0,0 +1,58 @@ +{ + "tableName": "mixedCase", + "tableType": "REALTIME", + "segmentsConfig": { + "timeColumnName": "updatedAtSeconds", + "timeType": "SECONDS", + "retentionTimeUnit": "DAYS", + "retentionTimeValue": "365", + "segmentPushType": "APPEND", + "segmentPushFrequency": "daily", + "segmentAssignmentStrategy": "BalanceNumSegmentAssignmentStrategy", + "schemaName": "mixedCase", + "replicasPerPartition": "1" + }, + "tenants": { + "broker": "DefaultTenant", + "server": "DefaultTenant" + }, + "tableIndexConfig": { + "loadMode": "MMAP", + "invertedIndexColumns": ["stringCol"], + "noDictionaryColumns": ["longCol"], + "sortedColumn": ["updatedAtSeconds"], + "starTreeIndexConfigs": [ + { + "dimensionsSplitOrder": ["stringCol"], + "functionColumnPairs": [ + "COUNT__longCol", + "MIN__longCol", + "MAX__longCol", + "AVG__longCol", + "SUM__longCol" + ] + } + ], + "aggregateMetrics": "true", + "nullHandlingEnabled": "true", + "streamConfigs": { + "streamType": "kafka", + "stream.kafka.consumer.type": "LowLevel", + "stream.kafka.topic.name": "mixed_case", + "stream.kafka.decoder.class.name": "org.apache.pinot.plugin.inputformat.avro.confluent.KafkaConfluentSchemaRegistryAvroMessageDecoder", + "stream.kafka.consumer.factory.class.name": "org.apache.pinot.plugin.stream.kafka20.KafkaConsumerFactory", + "stream.kafka.decoder.prop.schema.registry.rest.url": "http://schema-registry:8081", + "stream.kafka.zk.broker.url": "zookeeper:2181/", + "stream.kafka.broker.list": "kafka:9092", + "realtime.segment.flush.threshold.time": "1m", + "realtime.segment.flush.threshold.size": "0", + "realtime.segment.flush.desired.size": "1M", + "isolation.level": "read_committed", + "stream.kafka.consumer.prop.auto.offset.reset": "smallest", + "stream.kafka.consumer.prop.group.id": "pinot_mixedCase" + } + }, + "metadata": { + "customConfigs": {} + } +} diff --git a/plugin/trino-pinot/src/test/resources/mixed_case_table_name_schema.json b/plugin/trino-pinot/src/test/resources/mixed_case_table_name_schema.json new file mode 100644 index 000000000000..fa19a60ee681 --- /dev/null +++ b/plugin/trino-pinot/src/test/resources/mixed_case_table_name_schema.json @@ -0,0 +1,25 @@ +{ + "schemaName": "mixedCase", + "dimensionFieldSpecs": [ + { + "name": "stringCol", + "dataType": "STRING" + } + ], + "metricFieldSpecs": [ + { + "name": "longCol", + "dataType": "LONG" + } + ], + "dateTimeFieldSpecs": [ + { + "name": "updatedAtSeconds", + "dataType": "LONG", + "defaultNullValue" : 0, + "format": "1:SECONDS:EPOCH", + "transformFunction": "toEpochSeconds(updatedAt)", + "granularity" : "1:SECONDS" + } + ] +}
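The change above centralizes case-insensitive table-name handling in PinotClient: the controller's table list is cached as a multimap keyed by the lower-cased name, PinotMetadata and DynamicTableBuilder resolve Trino's lower-cased names through that cache, and a lookup that matches more than one Pinot table fails with the new PINOT_AMBIGUOUS_TABLE_NAME error instead of silently picking one. The standalone sketch below restates that resolution scheme with plain JDK collections rather than the connector's Guava caches and PinotException/TableNotFoundException types; the class name TableNameResolver, the Optional-returning resolve method, and the IllegalStateException stand-in are illustrative assumptions, not the connector's API.

import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

// Illustrative stand-in for the lookup scheme in PinotClient: index every Pinot
// table name under its lower-cased form, resolve Trino's lower-cased table names
// back to the exact Pinot spelling, and fail loudly on duplicates.
public final class TableNameResolver
{
    private final Map<String, List<String>> tablesByLowerCaseName;

    public TableNameResolver(List<String> pinotTableNames)
    {
        // Equivalent in spirit to the multimap built in PinotClient.getAllTables()
        this.tablesByLowerCaseName = pinotTableNames.stream()
                .collect(Collectors.groupingBy(name -> name.toLowerCase(Locale.ENGLISH)));
    }

    // Mirrors getPinotTableNames(): Trino only ever sees the lower-cased names
    public List<String> listTables()
    {
        return List.copyOf(tablesByLowerCaseName.keySet());
    }

    // Mirrors getPinotTableNameFromTrinoTableNameIfExists(): empty when unknown,
    // the single Pinot spelling when unique, an error when the name is ambiguous
    public Optional<String> resolve(String trinoTableName)
    {
        List<String> candidates = tablesByLowerCaseName.getOrDefault(
                trinoTableName.toLowerCase(Locale.ENGLISH), List.of());
        if (candidates.isEmpty()) {
            return Optional.empty();
        }
        if (candidates.size() > 1) {
            // The connector raises a PinotException with PINOT_AMBIGUOUS_TABLE_NAME here
            throw new IllegalStateException("Ambiguous table names: " + String.join(", ", candidates));
        }
        return Optional.of(candidates.get(0));
    }

    public static void main(String[] args)
    {
        TableNameResolver resolver = new TableNameResolver(List.of("airlineStats", "dup_table", "dup_Table"));
        System.out.println(resolver.resolve("airlinestats"));   // Optional[airlineStats]
        System.out.println(resolver.resolve("missing_table"));  // Optional.empty
        try {
            resolver.resolve("dup_table");
        }
        catch (IllegalStateException e) {
            System.out.println(e.getMessage());                 // Ambiguous table names: dup_table, dup_Table
        }
    }
}

Keeping a single cache inside PinotClient lets the dynamic-query path (DynamicTableBuilder) and the metadata path share one resolution, and dropping the toLowerCase call in PinotTableHandle preserves the exact Pinot spelling that broker and segment queries need for mixed-case tables such as mixedCase and airlineStats, which is what the new testNonLowerTable and testAmbiguousTables cases exercise.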