diff --git a/core/trino-main/src/test/java/io/trino/operator/BenchmarkWindowOperator.java b/core/trino-main/src/test/java/io/trino/operator/BenchmarkWindowOperator.java
index 2070b8aa2c79..ee08482f6be7 100644
--- a/core/trino-main/src/test/java/io/trino/operator/BenchmarkWindowOperator.java
+++ b/core/trino-main/src/test/java/io/trino/operator/BenchmarkWindowOperator.java
@@ -79,7 +79,7 @@ public static class Context
         public int rowsPerPartition;
 
         @Param({"0", "1", "2", "3"})
-        public int numberOfPregroupedColumns;
+        public int numberOfPreGroupedColumns;
 
         @Param({"10", "50", "100"})
         public int partitionsPerGroup;
@@ -96,7 +96,7 @@ public void setup()
             executor = newCachedThreadPool(daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
             scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed(getClass().getSimpleName() + "-scheduledExecutor-%s"));
 
-            createOperatorFactoryAndGenerateTestData(numberOfPregroupedColumns);
+            createOperatorFactoryAndGenerateTestData(numberOfPreGroupedColumns);
         }
 
         @TearDown
@@ -311,7 +311,7 @@ private void verify(
         Context context = new Context();
         context.rowsPerPartition = numberOfRowsPerPartition;
-        context.numberOfPregroupedColumns = numberOfPreGroupedColumns;
+        context.numberOfPreGroupedColumns = numberOfPreGroupedColumns;
 
         if (useSinglePartition) {
             context.partitionsPerGroup = 1;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
index 22088226995c..cc747f3325d9 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
@@ -1588,15 +1588,15 @@ private void createEmptyFiles(ConnectorSession session, Path path, Table table,
         }
         hdfsEnvironment.doAs(session.getIdentity(), () -> {
             for (String fileName : fileNames) {
-                writeEmptyFile(session, new Path(path, fileName), conf, schema, format.getSerDe(), format.getOutputFormat());
+                writeEmptyFile(session, new Path(path, fileName), conf, schema, format.getSerde(), format.getOutputFormat());
             }
         });
     }
 
-    private static void writeEmptyFile(ConnectorSession session, Path target, JobConf conf, Properties properties, String serDe, String outputFormatName)
+    private static void writeEmptyFile(ConnectorSession session, Path target, JobConf conf, Properties properties, String serde, String outputFormatName)
     {
         // Some serializers such as Avro set a property in the schema.
-        initializeSerializer(conf, properties, serDe);
+        initializeSerializer(conf, properties, serde);
 
         // The code below is not a try with resources because RecordWriter is not Closeable.
         FileSinkOperator.RecordWriter recordWriter = HiveWriteUtils.createRecordWriter(target, conf, properties, outputFormatName, session);
@@ -3135,10 +3135,10 @@ private static HiveStorageFormat extractHiveStorageFormat(Table table)
     {
         StorageFormat storageFormat = table.getStorage().getStorageFormat();
         String outputFormat = storageFormat.getOutputFormat();
-        String serde = storageFormat.getSerDe();
+        String serde = storageFormat.getSerde();
 
         for (HiveStorageFormat format : HiveStorageFormat.values()) {
-            if (format.getOutputFormat().equals(outputFormat) && format.getSerDe().equals(serde)) {
+            if (format.getOutputFormat().equals(outputFormat) && format.getSerde().equals(serde)) {
                 return format;
             }
         }
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveStorageFormat.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveStorageFormat.java
index b9fe578ab838..84ed64130fbf 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveStorageFormat.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveStorageFormat.java
@@ -116,7 +116,7 @@ public enum HiveStorageFormat
         this.estimatedWriterSystemMemoryUsage = requireNonNull(estimatedWriterSystemMemoryUsage, "estimatedWriterSystemMemoryUsage is null");
     }
 
-    public String getSerDe()
+    public String getSerde()
     {
         return serde;
     }
@@ -168,16 +168,16 @@ else if (type.getCategory() == Category.PRIMITIVE) {
     }
 
     private static final Map<SerdeAndInputFormat, HiveStorageFormat> HIVE_STORAGE_FORMAT_FROM_STORAGE_FORMAT = Arrays.stream(HiveStorageFormat.values())
-            .collect(toImmutableMap(format -> new SerdeAndInputFormat(format.getSerDe(), format.getInputFormat()), identity()));
+            .collect(toImmutableMap(format -> new SerdeAndInputFormat(format.getSerde(), format.getInputFormat()), identity()));
 
     private static final class SerdeAndInputFormat
     {
-        private final String serDe;
+        private final String serde;
         private final String inputFormat;
 
-        public SerdeAndInputFormat(String serDe, String inputFormat)
+        public SerdeAndInputFormat(String serde, String inputFormat)
         {
-            this.serDe = serDe;
+            this.serde = serde;
             this.inputFormat = inputFormat;
         }
 
@@ -191,19 +191,19 @@ public boolean equals(Object o)
                 return false;
             }
             SerdeAndInputFormat that = (SerdeAndInputFormat) o;
-            return serDe.equals(that.serDe) && inputFormat.equals(that.inputFormat);
+            return serde.equals(that.serde) && inputFormat.equals(that.inputFormat);
         }
 
         @Override
         public int hashCode()
         {
-            return Objects.hash(serDe, inputFormat);
+            return Objects.hash(serde, inputFormat);
         }
     }
 
     public static Optional<HiveStorageFormat> getHiveStorageFormat(StorageFormat storageFormat)
     {
-        return Optional.ofNullable(HIVE_STORAGE_FORMAT_FROM_STORAGE_FORMAT.get(new SerdeAndInputFormat(storageFormat.getSerDe(), storageFormat.getInputFormat())));
+        return Optional.ofNullable(HIVE_STORAGE_FORMAT_FROM_STORAGE_FORMAT.get(new SerdeAndInputFormat(storageFormat.getSerde(), storageFormat.getInputFormat())));
     }
 
     private static PrimitiveTypeInfo primitiveTypeInfo(TypeInfo typeInfo)
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveType.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveType.java
index 6b9ef6755439..a5c49f7e18cf 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveType.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveType.java
@@ -179,7 +179,7 @@ public static boolean isSupportedType(TypeInfo typeInfo, StorageFormat storageFo
             // 3. The Parquet format doesn't support uniontypes itself so there's no need to add support for it in Trino.
             // 4. TODO: RCFile tables are not supported yet.
             // 5. TODO: The support for Avro is done in SerDeUtils so it's possible that formats other than Avro are also supported. But verification is needed.
-            if (storageFormat.getSerDe().equalsIgnoreCase(AVRO.getSerDe()) || storageFormat.getSerDe().equalsIgnoreCase(ORC.getSerDe())) {
+            if (storageFormat.getSerde().equalsIgnoreCase(AVRO.getSerde()) || storageFormat.getSerde().equalsIgnoreCase(ORC.getSerde())) {
                 UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
                 return unionTypeInfo.getAllUnionObjectTypeInfos().stream()
                         .allMatch(fieldTypeInfo -> isSupportedType(fieldTypeInfo, storageFormat));
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/RcFileFileWriterFactory.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/RcFileFileWriterFactory.java
index 3886324db6fa..8b42e1e5e79c 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/RcFileFileWriterFactory.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/RcFileFileWriterFactory.java
@@ -108,10 +108,10 @@ public Optional<FileWriter> createFileWriter(
         }
 
         RcFileEncoding rcFileEncoding;
-        if (LazyBinaryColumnarSerDe.class.getName().equals(storageFormat.getSerDe())) {
+        if (LazyBinaryColumnarSerDe.class.getName().equals(storageFormat.getSerde())) {
            rcFileEncoding = new BinaryRcFileEncoding(timeZone);
         }
-        else if (ColumnarSerDe.class.getName().equals(storageFormat.getSerDe())) {
+        else if (ColumnarSerDe.class.getName().equals(storageFormat.getSerde())) {
             rcFileEncoding = createTextVectorEncoding(schema);
         }
         else {
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/RecordFileWriter.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/RecordFileWriter.java
index 3b4453af21c2..07c1bbd22d34 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/RecordFileWriter.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/RecordFileWriter.java
@@ -99,8 +99,8 @@ public RecordFileWriter(
 
         fieldCount = fileColumnNames.size();
 
-        String serDe = storageFormat.getSerDe();
-        serializer = initializeSerializer(conf, schema, serDe);
+        String serde = storageFormat.getSerde();
+        serializer = initializeSerializer(conf, schema, serde);
 
         List<ObjectInspector> objectInspectors = getRowColumnInspectors(fileColumnTypes);
         tableInspector = getStandardStructObjectInspector(fileColumnNames, objectInspectors);
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/ViewReaderUtil.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/ViewReaderUtil.java
index 7d3a83518dcd..1f63284d6317 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/ViewReaderUtil.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/ViewReaderUtil.java
@@ -175,10 +175,10 @@ public static class HiveViewReader
         private final HiveMetastoreClient metastoreClient;
         private final TypeManager typeManager;
 
-        public HiveViewReader(HiveMetastoreClient hiveMetastoreClient, TypeManager typemanager)
+        public HiveViewReader(HiveMetastoreClient hiveMetastoreClient, TypeManager typeManager)
         {
             this.metastoreClient = requireNonNull(hiveMetastoreClient, "hiveMetastoreClient is null");
-            this.typeManager = requireNonNull(typemanager, "typeManager is null");
+            this.typeManager = requireNonNull(typeManager, "typeManager is null");
         }
 
         @Override
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/MetastoreUtil.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/MetastoreUtil.java
index 00ec193ac3d0..ccf62e70375d 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/MetastoreUtil.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/MetastoreUtil.java
@@ -149,7 +149,7 @@ private static Properties getHiveSchema(
         for (Map.Entry<String, String> param : sd.getSerdeParameters().entrySet()) {
             schema.setProperty(param.getKey(), (param.getValue() != null) ? param.getValue() : "");
         }
-        schema.setProperty(SERIALIZATION_LIB, sd.getStorageFormat().getSerDe());
+        schema.setProperty(SERIALIZATION_LIB, sd.getStorageFormat().getSerde());
 
         StringBuilder columnNameBuilder = new StringBuilder();
         StringBuilder columnTypeBuilder = new StringBuilder();
@@ -217,7 +217,7 @@ public static ProtectMode getProtectMode(Table table)
 
     public static boolean isAvroTableWithSchemaSet(Table table)
     {
-        return AVRO.getSerDe().equals(table.getStorage().getStorageFormat().getSerDeNullable()) &&
+        return AVRO.getSerde().equals(table.getStorage().getStorageFormat().getSerDeNullable()) &&
                 (table.getParameters().get(AVRO_SCHEMA_URL_KEY) != null ||
                 (table.getStorage().getSerdeParameters().get(AVRO_SCHEMA_URL_KEY) != null));
     }
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/StorageFormat.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/StorageFormat.java
index 105f00717421..d87f0ddf9d0d 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/StorageFormat.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/StorageFormat.java
@@ -32,23 +32,23 @@ public class StorageFormat
 {
     public static final StorageFormat VIEW_STORAGE_FORMAT = createNullable(null, null, null);
 
-    private final String serDe;
+    private final String serde;
     private final String inputFormat;
     private final String outputFormat;
 
-    private StorageFormat(String serDe, String inputFormat, String outputFormat)
+    private StorageFormat(String serde, String inputFormat, String outputFormat)
     {
-        this.serDe = serDe;
+        this.serde = serde;
         this.inputFormat = inputFormat;
         this.outputFormat = outputFormat;
     }
 
-    public String getSerDe()
+    public String getSerde()
     {
-        if (serDe == null) {
+        if (serde == null) {
             throw new TrinoException(HIVE_INVALID_METADATA, "SerDe is not present in StorageFormat");
         }
-        return serDe;
+        return serde;
     }
 
     public String getInputFormat()
@@ -67,10 +67,10 @@ public String getOutputFormat()
         return outputFormat;
     }
 
-    @JsonProperty("serDe")
+    @JsonProperty("serde")
     public String getSerDeNullable()
     {
-        return serDe;
+        return serde;
     }
 
     @JsonProperty("inputFormat")
@@ -87,24 +87,24 @@ public String getOutputFormatNullable()
 
     public static StorageFormat fromHiveStorageFormat(HiveStorageFormat hiveStorageFormat)
     {
-        return new StorageFormat(hiveStorageFormat.getSerDe(), hiveStorageFormat.getInputFormat(), hiveStorageFormat.getOutputFormat());
+        return new StorageFormat(hiveStorageFormat.getSerde(), hiveStorageFormat.getInputFormat(), hiveStorageFormat.getOutputFormat());
     }
 
     public static StorageFormat create(String serde, String inputFormat, String outputFormat)
     {
         return new StorageFormat(
-                requireNonNull(serde, "serDe is null"),
+                requireNonNull(serde, "serde is null"),
                 requireNonNull(inputFormat, "inputFormat is null"),
                 requireNonNull(outputFormat, "outputFormat is null"));
     }
 
     @JsonCreator
     public static StorageFormat createNullable(
-            @JsonProperty("serDe") String serDe,
+            @JsonProperty("serde") String serde,
             @JsonProperty("inputFormat") String inputFormat,
             @JsonProperty("outputFormat") String outputFormat)
     {
-        return new StorageFormat(serDe, inputFormat, outputFormat);
+        return new StorageFormat(serde, inputFormat, outputFormat);
     }
 
     @Override
@@ -117,7 +117,7 @@ public boolean equals(Object o)
             return false;
         }
         StorageFormat that = (StorageFormat) o;
-        return Objects.equals(serDe, that.serDe) &&
+        return Objects.equals(serde, that.serde) &&
                 Objects.equals(inputFormat, that.inputFormat) &&
                 Objects.equals(outputFormat, that.outputFormat);
     }
@@ -125,14 +125,14 @@ public boolean equals(Object o)
     @Override
     public int hashCode()
     {
-        return Objects.hash(serDe, inputFormat, outputFormat);
+        return Objects.hash(serde, inputFormat, outputFormat);
     }
 
     @Override
     public String toString()
     {
         return toStringHelper(this)
-                .add("serDe", serDe)
+                .add("serde", serde)
                 .add("inputFormat", inputFormat)
                 .add("outputFormat", outputFormat)
                 .toString();
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/glue/converter/GlueToTrinoConverter.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/glue/converter/GlueToTrinoConverter.java
index c80579a23af3..47ee81b7aa26 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/glue/converter/GlueToTrinoConverter.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/glue/converter/GlueToTrinoConverter.java
@@ -94,11 +94,11 @@ public static Table convertTable(com.amazonaws.services.glue.model.Table glueTab
         return tableBuilder.build();
     }
 
-    private static Column convertColumn(com.amazonaws.services.glue.model.Column glueColumn, String serDe)
+    private static Column convertColumn(com.amazonaws.services.glue.model.Column glueColumn, String serde)
     {
         // OpenCSVSerde deserializes columns from csv file into strings, so we set the column type from the metastore
         // to string to avoid cast exceptions.
-        if (HiveStorageFormat.CSV.getSerDe().equals(serDe)) {
+        if (HiveStorageFormat.CSV.getSerde().equals(serde)) {
             //TODO(https://github.com/trinodb/trino/issues/7240) Add tests
             return new Column(glueColumn.getName(), HiveType.HIVE_STRING, Optional.ofNullable(glueColumn.getComment()));
         }
@@ -107,9 +107,9 @@ private static Column convertColumn(com.amazonaws.services.glue.model.Column glu
         }
     }
 
-    private static List<Column> convertColumns(List<com.amazonaws.services.glue.model.Column> glueColumns, String serDe)
+    private static List<Column> convertColumns(List<com.amazonaws.services.glue.model.Column> glueColumns, String serde)
     {
-        return mappedCopy(glueColumns, glueColumn -> convertColumn(glueColumn, serDe));
+        return mappedCopy(glueColumns, glueColumn -> convertColumn(glueColumn, serde));
     }
 
     private static Map<String, String> convertParameters(Map<String, String> parameters)
@@ -147,7 +147,7 @@ public GluePartitionConverter(Table table)
             this.tableName = requireNonNull(table.getTableName(), "tableName is null");
             this.tableParameters = convertParameters(table.getParameters());
             this.columnsConverter = memoizeLast(glueColumns -> convertColumns(glueColumns,
-                    table.getStorage().getStorageFormat().getSerDe()));
+                    table.getStorage().getStorageFormat().getSerde()));
         }
 
         @Override
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java
index 26a67259afe7..16153cdd329b 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java
@@ -417,12 +417,12 @@ public static boolean isAvroTableWithSchemaSet(org.apache.hadoop.hive.metastore.
         return serdeInfo.getSerializationLib() != null &&
                 (table.getParameters().get(AVRO_SCHEMA_URL_KEY) != null ||
                         (serdeInfo.getParameters() != null && serdeInfo.getParameters().get(AVRO_SCHEMA_URL_KEY) != null)) &&
-                serdeInfo.getSerializationLib().equals(AVRO.getSerDe());
+                serdeInfo.getSerializationLib().equals(AVRO.getSerde());
     }
 
     public static boolean isCsvTable(org.apache.hadoop.hive.metastore.api.Table table)
     {
-        return CSV.getSerDe().equals(getSerdeInfo(table).getSerializationLib());
+        return CSV.getSerde().equals(getSerdeInfo(table).getSerializationLib());
     }
 
     public static List<FieldSchema> csvSchemaFields(List<FieldSchema> schemas)
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveUtil.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveUtil.java
index 948481f8c533..02e4a8910433 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveUtil.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveUtil.java
@@ -326,14 +326,14 @@ public static Optional<CompressionCodec> getCompressionCodec(TextInputFormat inp
         Class<? extends InputFormat<?, ?>> inputFormatClass = getInputFormatClass(jobConf, inputFormatName);
         if (symlinkTarget && inputFormatClass == SymlinkTextInputFormat.class) {
-            String serDe = getDeserializerClassName(schema);
+            String serde = getDeserializerClassName(schema);
             for (HiveStorageFormat format : HiveStorageFormat.values()) {
-                if (serDe.equals(format.getSerDe())) {
+                if (serde.equals(format.getSerde())) {
                     inputFormatClass = getInputFormatClass(jobConf, format.getInputFormat());
                     return ReflectionUtils.newInstance(inputFormatClass, jobConf);
                 }
             }
-            throw new TrinoException(HIVE_UNSUPPORTED_FORMAT, "Unknown SerDe for SymlinkTextInputFormat: " + serDe);
+            throw new TrinoException(HIVE_UNSUPPORTED_FORMAT, "Unknown SerDe for SymlinkTextInputFormat: " + serde);
         }
 
         return ReflectionUtils.newInstance(inputFormatClass, jobConf);
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java
index 79da1f6a6064..2c0f06271104 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java
@@ -171,7 +171,6 @@
 import static com.google.common.base.Preconditions.checkState;
 import static com.google.common.base.Verify.verify;
 import static com.google.common.collect.ImmutableList.toImmutableList;
-import static com.google.common.collect.ImmutableMap.builder;
 import static com.google.common.collect.ImmutableMap.toImmutableMap;
 import static com.google.common.collect.ImmutableSet.toImmutableSet;
 import static com.google.common.collect.Iterables.concat;
@@ -5149,7 +5148,7 @@ protected void createEmptyTable(
 
         tableBuilder.getStorageBuilder()
                 .setLocation(targetPath.toString())
-                .setStorageFormat(StorageFormat.create(hiveStorageFormat.getSerDe(), hiveStorageFormat.getInputFormat(), hiveStorageFormat.getOutputFormat()))
+                .setStorageFormat(StorageFormat.create(hiveStorageFormat.getSerde(), hiveStorageFormat.getInputFormat(), hiveStorageFormat.getOutputFormat()))
                 .setBucketProperty(bucketProperty)
                 .setSerdeParameters(ImmutableMap.of());
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileFormats.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileFormats.java
index 3c70a17d00fc..1a70e508e65d 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileFormats.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHiveFileFormats.java
@@ -608,7 +608,7 @@ public static FileSplit createTestFileHive(
             throws Exception
     {
         HiveOutputFormat<?, ?> outputFormat = newInstance(storageFormat.getOutputFormat(), HiveOutputFormat.class);
-        Serializer serializer = newInstance(storageFormat.getSerDe(), Serializer.class);
+        Serializer serializer = newInstance(storageFormat.getSerde(), Serializer.class);
 
         // filter out partition keys, which are not written to the file
         testColumns = testColumns.stream()
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHiveLocal.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHiveLocal.java
index db6270b83484..8614f36b2078 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHiveLocal.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHiveLocal.java
@@ -251,7 +251,7 @@ private void createExternalTable(SchemaTableName schemaTableName, HiveStorageFor
 
         tableBuilder.getStorageBuilder()
                 .setLocation(externalLocation.toString())
-                .setStorageFormat(StorageFormat.create(hiveStorageFormat.getSerDe(), hiveStorageFormat.getInputFormat(), hiveStorageFormat.getOutputFormat()))
+                .setStorageFormat(StorageFormat.create(hiveStorageFormat.getSerde(), hiveStorageFormat.getInputFormat(), hiveStorageFormat.getOutputFormat()))
                 .setBucketProperty(bucketProperty)
                 .setSerdeParameters(ImmutableMap.of());
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestBackgroundHiveSplitLoader.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestBackgroundHiveSplitLoader.java
index 0723df324733..f22b8efc8e16 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestBackgroundHiveSplitLoader.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestBackgroundHiveSplitLoader.java
@@ -862,7 +862,7 @@ public void testBuildManifestFileIterator()
         CachingDirectoryLister directoryLister = new CachingDirectoryLister(new Duration(5, TimeUnit.MINUTES), 1000, ImmutableList.of());
         Properties schema = new Properties();
         schema.setProperty(FILE_INPUT_FORMAT, SymlinkTextInputFormat.class.getName());
-        schema.setProperty(SERIALIZATION_LIB, AVRO.getSerDe());
+        schema.setProperty(SERIALIZATION_LIB, AVRO.getSerde());
 
         Path firstFilePath = new Path("hdfs://VOL1:9000/db_name/table_name/file1");
         Path secondFilePath = new Path("hdfs://VOL1:9000/db_name/table_name/file2");
@@ -900,7 +900,7 @@ public void testBuildManifestFileIteratorNestedDirectory()
         CachingDirectoryLister directoryLister = new CachingDirectoryLister(new Duration(5, TimeUnit.MINUTES), 1000, ImmutableList.of());
         Properties schema = new Properties();
         schema.setProperty(FILE_INPUT_FORMAT, SymlinkTextInputFormat.class.getName());
-        schema.setProperty(SERIALIZATION_LIB, AVRO.getSerDe());
+        schema.setProperty(SERIALIZATION_LIB, AVRO.getSerde());
 
         Path filePath = new Path("hdfs://VOL1:9000/db_name/table_name/file1");
         Path directoryPath = new Path("hdfs://VOL1:9000/db_name/table_name/dir");
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveFileFormats.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveFileFormats.java
index e708f2c9334b..80f4fa178c7a 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveFileFormats.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveFileFormats.java
@@ -387,7 +387,7 @@ public void testAvroFileInSymlinkTable(int rowCount)
             FileSplit split = createTestFileHive(file.getAbsolutePath(), AVRO, HiveCompressionCodec.NONE, getTestColumnsSupportedByAvro(), rowCount);
             Properties splitProperties = new Properties();
             splitProperties.setProperty(FILE_INPUT_FORMAT, SymlinkTextInputFormat.class.getName());
-            splitProperties.setProperty(SERIALIZATION_LIB, AVRO.getSerDe());
+            splitProperties.setProperty(SERIALIZATION_LIB, AVRO.getSerde());
             testCursorProvider(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT), split, splitProperties, getTestColumnsSupportedByAvro(), SESSION, file.length(), rowCount);
         }
         finally {
@@ -846,7 +846,7 @@ private void testRecordPageSource(
     {
         Properties splitProperties = new Properties();
         splitProperties.setProperty(FILE_INPUT_FORMAT, storageFormat.getInputFormat());
-        splitProperties.setProperty(SERIALIZATION_LIB, storageFormat.getSerDe());
+        splitProperties.setProperty(SERIALIZATION_LIB, storageFormat.getSerde());
         ConnectorPageSource pageSource = createPageSourceFromCursorProvider(cursorProvider, split, splitProperties, fileSize, testReadColumns, session);
         checkPageSource(pageSource, testReadColumns, getTypes(getColumnHandles(testReadColumns)), rowCount);
     }
@@ -862,7 +862,7 @@ private void testCursorProvider(
     {
         Properties splitProperties = new Properties();
         splitProperties.setProperty(FILE_INPUT_FORMAT, storageFormat.getInputFormat());
-        splitProperties.setProperty(SERIALIZATION_LIB, storageFormat.getSerDe());
+        splitProperties.setProperty(SERIALIZATION_LIB, storageFormat.getSerde());
         testCursorProvider(cursorProvider, split, splitProperties, testReadColumns, session, fileSize, rowCount);
     }
@@ -973,7 +973,7 @@ private void testPageSourceFactory(
     {
         Properties splitProperties = new Properties();
         splitProperties.setProperty(FILE_INPUT_FORMAT, storageFormat.getInputFormat());
-        splitProperties.setProperty(SERIALIZATION_LIB, storageFormat.getSerDe());
+        splitProperties.setProperty(SERIALIZATION_LIB, storageFormat.getSerde());
 
         // Use full columns in split properties
         ImmutableList.Builder<String> splitPropertiesColumnNames = ImmutableList.builder();
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHivePageSink.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHivePageSink.java
index 4d03d289fe7c..d97297350d82 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHivePageSink.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHivePageSink.java
@@ -220,7 +220,7 @@ private static ConnectorPageSource createPageSource(HiveTransactionHandle transa
     {
         Properties splitProperties = new Properties();
         splitProperties.setProperty(FILE_INPUT_FORMAT, config.getHiveStorageFormat().getInputFormat());
-        splitProperties.setProperty(SERIALIZATION_LIB, config.getHiveStorageFormat().getSerDe());
+        splitProperties.setProperty(SERIALIZATION_LIB, config.getHiveStorageFormat().getSerde());
         splitProperties.setProperty("columns", Joiner.on(',').join(getColumnHandles().stream().map(HiveColumnHandle::getName).collect(toImmutableList())));
         splitProperties.setProperty("columns.types", Joiner.on(',').join(getColumnHandles().stream().map(HiveColumnHandle::getHiveType).map(hiveType -> hiveType.getHiveTypeName().toString()).collect(toImmutableList())));
         HiveSplit split = new HiveSplit(
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestNodeLocalDynamicSplitPruning.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestNodeLocalDynamicSplitPruning.java
index 3e351df85c95..a5abee6c539e 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestNodeLocalDynamicSplitPruning.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestNodeLocalDynamicSplitPruning.java
@@ -115,7 +115,7 @@ private static ConnectorPageSource createTestingPageSource(HiveTransactionHandle
     {
         Properties splitProperties = new Properties();
         splitProperties.setProperty(FILE_INPUT_FORMAT, hiveConfig.getHiveStorageFormat().getInputFormat());
-        splitProperties.setProperty(SERIALIZATION_LIB, hiveConfig.getHiveStorageFormat().getSerDe());
+        splitProperties.setProperty(SERIALIZATION_LIB, hiveConfig.getHiveStorageFormat().getSerde());
         HiveSplit split = new HiveSplit(
                 SCHEMA_NAME,
                 TABLE_NAME,
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java
index 1f3731169ed8..a12cb0d6a861 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java
@@ -252,7 +252,7 @@ static List<HiveColumnHandle> getBaseColumns(List<String> columnNames, List<Hi
     static Properties createSchema(HiveStorageFormat format, List<String> columnNames, List<Type> columnTypes)
     {
         Properties schema = new Properties();
-        schema.setProperty(SERIALIZATION_LIB, format.getSerDe());
+        schema.setProperty(SERIALIZATION_LIB, format.getSerde());
         schema.setProperty(FILE_INPUT_FORMAT, format.getInputFormat());
         schema.setProperty(META_TABLE_COLUMNS, join(",", columnNames));
         schema.setProperty(META_TABLE_COLUMN_TYPES, columnTypes.stream()
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/alluxio/TestProtoUtils.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/alluxio/TestProtoUtils.java
index be1a83f0984c..4e0f842030bc 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/alluxio/TestProtoUtils.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/alluxio/TestProtoUtils.java
@@ -145,7 +145,7 @@ public void testStorageFormat()
     {
         alluxio.grpc.table.layout.hive.StorageFormat.Builder storageFormat = TestingAlluxioMetastoreObjects.getTestingStorageFormat();
         StorageFormat fmt = ProtoUtils.fromProto(storageFormat.build());
-        assertEquals(storageFormat.getSerde(), fmt.getSerDe());
+        assertEquals(storageFormat.getSerde(), fmt.getSerde());
         assertEquals(storageFormat.getInputFormat(), fmt.getInputFormat());
         assertEquals(storageFormat.getOutputFormat(), fmt.getOutputFormat());
     }
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestGlueInputConverter.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestGlueInputConverter.java
index d5ad59676a5a..9b812db8b8cc 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestGlueInputConverter.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestGlueInputConverter.java
@@ -100,7 +100,7 @@ private static void assertColumn(com.amazonaws.services.glue.model.Column actual
     private static void assertStorage(StorageDescriptor actual, Storage expected)
     {
         assertEquals(actual.getLocation(), expected.getLocation());
-        assertEquals(actual.getSerdeInfo().getSerializationLibrary(), expected.getStorageFormat().getSerDe());
+        assertEquals(actual.getSerdeInfo().getSerializationLibrary(), expected.getStorageFormat().getSerde());
         assertEquals(actual.getInputFormat(), expected.getStorageFormat().getInputFormat());
         assertEquals(actual.getOutputFormat(), expected.getStorageFormat().getOutputFormat());
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestGlueToTrinoConverter.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestGlueToTrinoConverter.java
index a6fd96957af4..9abba0a8999f 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestGlueToTrinoConverter.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestGlueToTrinoConverter.java
@@ -239,7 +239,7 @@ private static void assertColumn(Column actual, com.amazonaws.services.glue.mode
     private static void assertStorage(Storage actual, StorageDescriptor expected)
     {
         assertEquals(actual.getLocation(), expected.getLocation());
-        assertEquals(actual.getStorageFormat().getSerDe(), expected.getSerdeInfo().getSerializationLibrary());
+        assertEquals(actual.getStorageFormat().getSerde(), expected.getSerdeInfo().getSerializationLibrary());
         assertEquals(actual.getStorageFormat().getInputFormat(), expected.getInputFormat());
         assertEquals(actual.getStorageFormat().getOutputFormat(), expected.getOutputFormat());
         if (!isNullOrEmpty(expected.getBucketColumns())) {
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestingMetastoreObjects.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestingMetastoreObjects.java
index b13fbed6a848..aa18a77e816b 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestingMetastoreObjects.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/glue/TestingMetastoreObjects.java
@@ -81,14 +81,14 @@ public static StorageDescriptor getGlueTestStorageDescriptor()
         return getGlueTestStorageDescriptor(ImmutableList.of(getGlueTestColumn()), "SerdeLib");
     }
 
-    public static StorageDescriptor getGlueTestStorageDescriptor(List<Column> columns, String serDe)
+    public static StorageDescriptor getGlueTestStorageDescriptor(List<Column> columns, String serde)
     {
         return new StorageDescriptor()
                 .withBucketColumns(ImmutableList.of("test-bucket-col"))
                 .withColumns(columns)
                 .withParameters(ImmutableMap.of())
                 .withSerdeInfo(new SerDeInfo()
-                        .withSerializationLibrary(serDe)
+                        .withSerializationLibrary(serde)
                         .withParameters(ImmutableMap.of()))
                 .withInputFormat("InputFormat")
                 .withOutputFormat("OutputFormat")
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/orc/TestOrcPageSourceFactory.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/orc/TestOrcPageSourceFactory.java
index c119e29338c7..4e071ff58f92 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/orc/TestOrcPageSourceFactory.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/orc/TestOrcPageSourceFactory.java
@@ -316,7 +316,7 @@ private static HiveColumnHandle toHiveColumnHandle(NationColumn nationColumn, in
     private static Properties createSchema()
     {
         Properties schema = new Properties();
-        schema.setProperty(SERIALIZATION_LIB, ORC.getSerDe());
+        schema.setProperty(SERIALIZATION_LIB, ORC.getSerde());
         schema.setProperty(FILE_INPUT_FORMAT, ORC.getInputFormat());
         schema.setProperty(TABLE_IS_TRANSACTIONAL, "true");
         return schema;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/orc/TestOrcPredicates.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/orc/TestOrcPredicates.java
index c9f08de90892..43acc230117b 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/orc/TestOrcPredicates.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/orc/TestOrcPredicates.java
@@ -167,7 +167,7 @@ private ConnectorPageSource createPageSource(
 
         Properties splitProperties = new Properties();
         splitProperties.setProperty(FILE_INPUT_FORMAT, ORC.getInputFormat());
-        splitProperties.setProperty(SERIALIZATION_LIB, ORC.getSerDe());
+        splitProperties.setProperty(SERIALIZATION_LIB, ORC.getSerde());
 
         // Use full columns in split properties
         ImmutableList.Builder<String> splitPropertiesColumnNames = ImmutableList.builder();
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/parquet/TestTimestampMicros.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/parquet/TestTimestampMicros.java
index 05c259bf8fd7..f639f75ad9f5 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/parquet/TestTimestampMicros.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/parquet/TestTimestampMicros.java
@@ -106,7 +106,7 @@ private ConnectorPageSource createPageSource(ConnectorSession session, File parq
         HivePageSourceFactory pageSourceFactory = StandardFileFormats.TRINO_PARQUET.getHivePageSourceFactory(HDFS_ENVIRONMENT).orElseThrow();
 
         Properties schema = new Properties();
-        schema.setProperty(SERIALIZATION_LIB, HiveStorageFormat.PARQUET.getSerDe());
+        schema.setProperty(SERIALIZATION_LIB, HiveStorageFormat.PARQUET.getSerde());
 
         ReaderPageSource pageSourceWithProjections = pageSourceFactory.createPageSource(
                 new Configuration(false),
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestHiveUtil.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestHiveUtil.java
index bdbe9e987bf0..c3e228d11903 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestHiveUtil.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestHiveUtil.java
@@ -89,13 +89,13 @@ public void testGetInputFormat()
 
         Properties avroSymlinkSchema = new Properties();
         avroSymlinkSchema.setProperty(FILE_INPUT_FORMAT, SymlinkTextInputFormat.class.getName());
-        avroSymlinkSchema.setProperty(SERIALIZATION_LIB, AVRO.getSerDe());
+        avroSymlinkSchema.setProperty(SERIALIZATION_LIB, AVRO.getSerde());
         assertInstanceOf(getInputFormat(configuration, avroSymlinkSchema, false), SymlinkTextInputFormat.class);
         assertInstanceOf(getInputFormat(configuration, avroSymlinkSchema, true), AvroContainerInputFormat.class);
 
         Properties parquetSymlinkSchema = new Properties();
         parquetSymlinkSchema.setProperty(FILE_INPUT_FORMAT, SymlinkTextInputFormat.class.getName());
-        parquetSymlinkSchema.setProperty(SERIALIZATION_LIB, PARQUET.getSerDe());
+        parquetSymlinkSchema.setProperty(SERIALIZATION_LIB, PARQUET.getSerde());
         assertInstanceOf(getInputFormat(configuration, parquetSymlinkSchema, false), SymlinkTextInputFormat.class);
         assertInstanceOf(getInputFormat(configuration, parquetSymlinkSchema, true), MapredParquetInputFormat.class);
diff --git a/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestCredentialPassthrough.java b/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestCredentialPassthrough.java
index 5b7c7b4c6b3a..502b0f86f51b 100644
--- a/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestCredentialPassthrough.java
+++ b/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestCredentialPassthrough.java
@@ -29,32 +29,32 @@
 public class TestCredentialPassthrough
 {
-    private TestingMySqlServer mysqlServer;
+    private TestingMySqlServer mySqlServer;
     private QueryRunner queryRunner;
 
     @Test
     public void testCredentialPassthrough()
     {
-        queryRunner.execute(getSession(mysqlServer), "CREATE TABLE test_create (a bigint, b double, c varchar)");
+        queryRunner.execute(getSession(mySqlServer), "CREATE TABLE test_create (a bigint, b double, c varchar)");
     }
 
     @BeforeClass
     public void createQueryRunner()
             throws Exception
     {
-        mysqlServer = new TestingMySqlServer();
+        mySqlServer = new TestingMySqlServer();
         try {
             queryRunner = DistributedQueryRunner.builder(testSessionBuilder().build()).build();
             queryRunner.installPlugin(new MySqlPlugin());
             Map<String, String> properties = ImmutableMap.<String, String>builder()
-                    .put("connection-url", mysqlServer.getJdbcUrl())
+                    .put("connection-url", mySqlServer.getJdbcUrl())
                     .put("user-credential-name", "mysql.user")
                     .put("password-credential-name", "mysql.password")
                     .build();
             queryRunner.createCatalog("mysql", "mysql", properties);
         }
         catch (Exception e) {
-            closeAllSuppress(e, queryRunner, mysqlServer);
+            closeAllSuppress(e, queryRunner, mySqlServer);
             throw e;
         }
     }
@@ -64,8 +64,8 @@ public final void destroy()
     {
         queryRunner.close();
         queryRunner = null;
-        mysqlServer.close();
-        mysqlServer = null;
+        mySqlServer.close();
+        mySqlServer = null;
     }
 
     private static Session getSession(TestingMySqlServer mySqlServer)
diff --git a/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestMySqlConnectorTest.java b/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestMySqlConnectorTest.java
index 2554552175a9..822025b64174 100644
--- a/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestMySqlConnectorTest.java
+++ b/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestMySqlConnectorTest.java
@@ -25,20 +25,20 @@
 public class TestMySqlConnectorTest
         extends BaseMySqlConnectorTest
 {
-    private TestingMySqlServer mysqlServer;
+    private TestingMySqlServer mySqlServer;
 
     @Override
     protected QueryRunner createQueryRunner()
             throws Exception
     {
-        mysqlServer = closeAfterClass(new TestingMySqlServer(false));
-        return createMySqlQueryRunner(mysqlServer, ImmutableMap.of(), ImmutableMap.of(), REQUIRED_TPCH_TABLES);
+        mySqlServer = closeAfterClass(new TestingMySqlServer(false));
+        return createMySqlQueryRunner(mySqlServer, ImmutableMap.of(), ImmutableMap.of(), REQUIRED_TPCH_TABLES);
     }
 
     @Override
     protected SqlExecutor onRemoteDatabase()
     {
-        return mysqlServer::execute;
+        return mySqlServer::execute;
     }
 
     /**
@@ -50,7 +50,7 @@ public void testNativeLargeIn()
         // Using IN list of size 140_000 as bigger list causes error:
         // "com.mysql.jdbc.PacketTooBigException: Packet for query is too large (XXX > 1048576).
         // You can change this value on the server by setting the max_allowed_packet' variable."
-        mysqlServer.execute("SELECT count(*) FROM tpch.orders WHERE " + getLongInClause(0, 140_000));
+        mySqlServer.execute("SELECT count(*) FROM tpch.orders WHERE " + getLongInClause(0, 140_000));
     }
 
     /**
@@ -62,7 +62,7 @@ public void testNativeMultipleInClauses()
         String longInClauses = range(0, 14)
                 .mapToObj(value -> getLongInClause(value * 10_000, 10_000))
                 .collect(joining(" OR "));
-        mysqlServer.execute("SELECT count(*) FROM tpch.orders WHERE " + longInClauses);
+        mySqlServer.execute("SELECT count(*) FROM tpch.orders WHERE " + longInClauses);
     }
 
     private String getLongInClause(int start, int length)
diff --git a/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestMySqlGlobalTransactionMyConnectorSmokeTest.java b/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestMySqlGlobalTransactionMyConnectorSmokeTest.java
index 4ff6ede2f686..fa28159c0d87 100644
--- a/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestMySqlGlobalTransactionMyConnectorSmokeTest.java
+++ b/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestMySqlGlobalTransactionMyConnectorSmokeTest.java
@@ -23,14 +23,14 @@
 public class TestMySqlGlobalTransactionMyConnectorSmokeTest
         extends BaseJdbcConnectorSmokeTest
 {
-    private TestingMySqlServer mysqlServer;
+    private TestingMySqlServer mySqlServer;
 
     @Override
     protected QueryRunner createQueryRunner()
             throws Exception
     {
-        mysqlServer = closeAfterClass(new TestingMySqlServer(true));
-        return createMySqlQueryRunner(mysqlServer, ImmutableMap.of(), ImmutableMap.of(), REQUIRED_TPCH_TABLES);
+        mySqlServer = closeAfterClass(new TestingMySqlServer(true));
+        return createMySqlQueryRunner(mySqlServer, ImmutableMap.of(), ImmutableMap.of(), REQUIRED_TPCH_TABLES);
     }
 
     @Override
diff --git a/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestMySqlTypeMapping.java b/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestMySqlTypeMapping.java
index c1b7aebc0313..44478529b139 100644
--- a/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestMySqlTypeMapping.java
+++ b/plugin/trino-mysql/src/test/java/io/trino/plugin/mysql/TestMySqlTypeMapping.java
@@ -71,7 +71,7 @@
 public class TestMySqlTypeMapping
         extends AbstractTestQueryFramework
 {
-    private TestingMySqlServer mysqlServer;
+    private TestingMySqlServer mySqlServer;
 
     private final ZoneId jvmZone = ZoneId.systemDefault();
     // no DST in 1970, but has DST in later years (e.g. 2018)
@@ -94,8 +94,8 @@ public void setUp()
     protected QueryRunner createQueryRunner()
             throws Exception
     {
-        mysqlServer = closeAfterClass(new TestingMySqlServer());
-        return createMySqlQueryRunner(mysqlServer, ImmutableMap.of(), ImmutableMap.of(), ImmutableList.of());
+        mySqlServer = closeAfterClass(new TestingMySqlServer());
+        return createMySqlQueryRunner(mySqlServer, ImmutableMap.of(), ImmutableMap.of(), ImmutableList.of());
     }
 
     @Test
@@ -240,7 +240,7 @@ public void testDecimalExceedingPrecisionMax()
     public void testDecimalExceedingPrecisionMaxWithExceedingIntegerValues()
     {
         try (TestTable testTable = new TestTable(
-                mysqlServer::execute,
+                mySqlServer::execute,
                 "tpch.test_exceeding_max_decimal",
                 "(d_col decimal(65,25))",
                 asList("1234567890123456789012345678901234567890.123456789", "-1234567890123456789012345678901234567890.123456789"))) {
@@ -271,7 +271,7 @@ public void testDecimalExceedingPrecisionMaxWithExceedingIntegerValues()
     public void testDecimalExceedingPrecisionMaxWithNonExceedingIntegerValues()
     {
         try (TestTable testTable = new TestTable(
-                mysqlServer::execute,
+                mySqlServer::execute,
                 "tpch.test_exceeding_max_decimal",
                 "(d_col decimal(60,20))",
                 asList("123456789012345678901234567890.123456789012345", "-123456789012345678901234567890.123456789012345"))) {
@@ -326,7 +326,7 @@ public void testDecimalExceedingPrecisionMaxWithNonExceedingIntegerValues()
     public void testDecimalExceedingPrecisionMaxWithSupportedValues(int typePrecision, int typeScale)
     {
         try (TestTable testTable = new TestTable(
-                mysqlServer::execute,
+                mySqlServer::execute,
                 "tpch.test_exceeding_max_decimal",
                 format("(d_col decimal(%d,%d))", typePrecision, typeScale),
                 asList("12.01", "-12.01", "123", "-123", "1.12345678", "-1.12345678"))) {
@@ -900,7 +900,7 @@ public void testUnsignedTypes()
 
     private void testUnsupportedDataType(String databaseDataType)
     {
-        SqlExecutor jdbcSqlExecutor = mysqlServer::execute;
+        SqlExecutor jdbcSqlExecutor = mySqlServer::execute;
         jdbcSqlExecutor.execute(format("CREATE TABLE tpch.test_unsupported_data_type(supported_column varchar(5), unsupported_column %s)", databaseDataType));
         try {
             assertQuery(
@@ -929,6 +929,6 @@ private DataSetup trinoCreateAndInsert(Session session, String tableNamePrefix)
 
     private DataSetup mysqlCreateAndInsert(String tableNamePrefix)
     {
-        return new CreateAndInsertDataSetup(mysqlServer::execute, tableNamePrefix);
+        return new CreateAndInsertDataSetup(mySqlServer::execute, tableNamePrefix);
     }
 }
diff --git a/plugin/trino-resource-group-managers/src/test/java/io/trino/plugin/resourcegroups/db/TestDbResourceGroupsFlywayMigration.java b/plugin/trino-resource-group-managers/src/test/java/io/trino/plugin/resourcegroups/db/TestDbResourceGroupsFlywayMigration.java
index 5c27cf957a0a..3f3d4f910c0e 100644
--- a/plugin/trino-resource-group-managers/src/test/java/io/trino/plugin/resourcegroups/db/TestDbResourceGroupsFlywayMigration.java
+++ b/plugin/trino-resource-group-managers/src/test/java/io/trino/plugin/resourcegroups/db/TestDbResourceGroupsFlywayMigration.java
@@ -26,24 +26,24 @@
 @Test(singleThreaded = true)
 public class TestDbResourceGroupsFlywayMigration
 {
-    private TestingMysqlServer mysqlServer;
+    private TestingMysqlServer mySqlServer;
     private Jdbi jdbi;
 
     @BeforeClass
     public final void setup()
     {
-        mysqlServer = new TestingMysqlServer()
+        mySqlServer = new TestingMysqlServer()
                 .withDatabaseName("resource_groups")
                 .withUsername("test")
                 .withPassword("test");
-        mysqlServer.start();
-        jdbi = Jdbi.create(mysqlServer.getJdbcUrl(), mysqlServer.getUsername(), mysqlServer.getPassword());
+        mySqlServer.start();
+        jdbi = Jdbi.create(mySqlServer.getJdbcUrl(), mySqlServer.getUsername(), mySqlServer.getPassword());
    }
 
     @AfterClass(alwaysRun = true)
     public final void close()
     {
-        mysqlServer.close();
+        mySqlServer.close();
     }
 
     @AfterMethod(alwaysRun = true)
@@ -56,9 +56,9 @@ public void cleanup()
     public void testMigrationWithEmptyDatabase()
     {
         DbResourceGroupConfig config = new DbResourceGroupConfig()
-                .setConfigDbUrl(mysqlServer.getJdbcUrl())
-                .setConfigDbUser(mysqlServer.getUsername())
-                .setConfigDbPassword(mysqlServer.getPassword());
+                .setConfigDbUrl(mySqlServer.getJdbcUrl())
+                .setConfigDbUser(mySqlServer.getUsername())
+                .setConfigDbPassword(mySqlServer.getPassword());
         FlywayMigration.migrate(config);
         verifyResourceGroupsSchema(0);
     }
@@ -68,18 +68,18 @@ public void testMigrationWithNonEmptyDatabase()
     {
         String t1Create = "CREATE TABLE t1 (id INT)";
         String t2Create = "CREATE TABLE t2 (id INT)";
-        mysqlServer.executeSql(t1Create);
-        mysqlServer.executeSql(t2Create);
+        mySqlServer.executeSql(t1Create);
+        mySqlServer.executeSql(t2Create);
         DbResourceGroupConfig config = new DbResourceGroupConfig()
-                .setConfigDbUrl(mysqlServer.getJdbcUrl())
-                .setConfigDbUser(mysqlServer.getUsername())
-                .setConfigDbPassword(mysqlServer.getPassword());
+                .setConfigDbUrl(mySqlServer.getJdbcUrl())
+                .setConfigDbUser(mySqlServer.getUsername())
+                .setConfigDbPassword(mySqlServer.getPassword());
         FlywayMigration.migrate(config);
         verifyResourceGroupsSchema(0);
         String t1Drop = "DROP TABLE t1";
         String t2Drop = "DROP TABLE t2";
-        mysqlServer.executeSql(t1Drop);
-        mysqlServer.executeSql(t2Drop);
+        mySqlServer.executeSql(t1Drop);
+        mySqlServer.executeSql(t2Drop);
     }
 
     @Test
@@ -87,11 +87,11 @@ public void testMigrationWithOldResourceGroupsSchema()
     {
         createOldSchema();
         // add a row to one of the existing tables before migration
-        mysqlServer.executeSql("INSERT INTO resource_groups_global_properties VALUES ('a_name', 'a_value')");
+        mySqlServer.executeSql("INSERT INTO resource_groups_global_properties VALUES ('a_name', 'a_value')");
         DbResourceGroupConfig config = new DbResourceGroupConfig()
-                .setConfigDbUrl(mysqlServer.getJdbcUrl())
-                .setConfigDbUser(mysqlServer.getUsername())
-                .setConfigDbPassword(mysqlServer.getPassword());
+                .setConfigDbUrl(mySqlServer.getJdbcUrl())
+                .setConfigDbUser(mySqlServer.getUsername())
+                .setConfigDbPassword(mySqlServer.getPassword());
         FlywayMigration.migrate(config);
         verifyResourceGroupsSchema(1);
     }
@@ -145,9 +145,9 @@ private void createOldSchema()
                 "    selector_resource_estimate VARCHAR(1024),\n" +
                 "    FOREIGN KEY (resource_group_id) REFERENCES resource_groups (resource_group_id) ON DELETE CASCADE\n" +
                 ");";
-        mysqlServer.executeSql(propertiesTable);
-        mysqlServer.executeSql(resourceGroupsTable);
-        mysqlServer.executeSql(selectorsTable);
+        mySqlServer.executeSql(propertiesTable);
+        mySqlServer.executeSql(resourceGroupsTable);
+        mySqlServer.executeSql(selectorsTable);
     }
 
     private void dropAllTables()
@@ -157,10 +157,10 @@ private void dropAllTables()
         String selectorsTable = "DROP TABLE IF EXISTS selectors";
         String exactMatchTable = "DROP TABLE IF EXISTS exact_match_source_selectors";
         String flywayHistoryTable = "DROP TABLE IF EXISTS flyway_schema_history";
-        mysqlServer.executeSql(propertiesTable);
-        mysqlServer.executeSql(selectorsTable);
-        mysqlServer.executeSql(resourceGroupsTable);
-        mysqlServer.executeSql(exactMatchTable);
-        mysqlServer.executeSql(flywayHistoryTable);
+        mySqlServer.executeSql(propertiesTable);
+        mySqlServer.executeSql(selectorsTable);
+        mySqlServer.executeSql(resourceGroupsTable);
+        mySqlServer.executeSql(exactMatchTable);
+        mySqlServer.executeSql(flywayHistoryTable);
     }
 }
diff --git a/pom.xml b/pom.xml
index 5e39787ef27d..8e68aeed6ba9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1823,6 +1823,7 @@
                             -Xep:EqualsIncompatibleType:ERROR \
                             -Xep:FallThrough:ERROR \
                             -Xep:ImmutableSetForContains:ERROR \
+                            -Xep:InconsistentCapitalization:ERROR \
                             -Xep:InconsistentHashCode:ERROR \
                             -Xep:InjectOnConstructorOfAbstractClass:ERROR \
                             -Xep:MissingCasesInEnumSwitch:ERROR \
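
Note on the pom.xml hunk above: it promotes Error Prone's InconsistentCapitalization check to ERROR, and every rename in this change set (serDe -> serde, numberOfPregroupedColumns -> numberOfPreGroupedColumns, typemanager -> typeManager, mysqlServer -> mySqlServer) makes a field, parameter, or accessor spelling consistent so the build passes with the new check enabled. A minimal sketch of the pattern the check reports; the class below is hypothetical, not taken from this change set:

public class StorageConfig
{
    private final String serDe;

    // Error Prone flags the constructor parameter with a message along the lines of:
    // [InconsistentCapitalization] It is confusing to have a field and a parameter
    // under the same scope that differ only in capitalization.
    public StorageConfig(String serde)
    {
        this.serDe = serde;
    }
}

One of the renames is more than cosmetic: @JsonProperty("serDe") on StorageFormat.getSerDeNullable() and createNullable(...) becomes @JsonProperty("serde"), so the key emitted in serialized StorageFormat JSON changes along with the Java identifiers.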