diff --git a/java/adapter/jdbc/pom.xml b/java/adapter/jdbc/pom.xml new file mode 100644 index 00000000000..17d2381213a --- /dev/null +++ b/java/adapter/jdbc/pom.xml @@ -0,0 +1,106 @@ + + + + + 4.0.0 + + + org.apache.arrow + arrow-java-root + 0.10.0-SNAPSHOT + + + arrow-jdbc + Arrow JDBC Adapter + http://maven.apache.org + + + + + + org.apache.arrow + arrow-memory + ${project.version} + + + + + org.apache.arrow + arrow-vector + ${project.version} + + + + com.google.guava + guava + ${dep.guava.version} + + + + + junit + junit + ${dep.junit.version} + test + + + + + com.h2database + h2 + 1.4.196 + test + + + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + ${dep.jackson.version} + test + + + + com.fasterxml.jackson.core + jackson-databind + ${dep.jackson.version} + test + + + + com.fasterxml.jackson.core + jackson-core + ${dep.jackson.version} + test + + + + com.fasterxml.jackson.core + jackson-annotations + ${dep.jackson.version} + test + + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + + UTC + + + + + + \ No newline at end of file diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrow.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrow.java new file mode 100644 index 00000000000..436a570b14d --- /dev/null +++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrow.java @@ -0,0 +1,181 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.adapter.jdbc; + +import org.apache.arrow.memory.BaseAllocator; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.VectorSchemaRoot; + +import java.io.IOException; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.ResultSet; +import java.sql.Statement; +import java.util.Calendar; +import java.util.Locale; +import java.util.TimeZone; + +import com.google.common.base.Preconditions; + +/** + * Utility class to convert JDBC objects to columnar Arrow format objects. + *

+ * This utility uses the following data mapping to map JDBC/SQL data types to Arrow data types. + *

+ * CHAR --> ArrowType.Utf8
+ * NCHAR --> ArrowType.Utf8
+ * VARCHAR --> ArrowType.Utf8
+ * NVARCHAR --> ArrowType.Utf8
+ * LONGVARCHAR --> ArrowType.Utf8
+ * LONGNVARCHAR --> ArrowType.Utf8
+ * NUMERIC --> ArrowType.Decimal(precision, scale)
+ * DECIMAL --> ArrowType.Decimal(precision, scale)
+ * BIT --> ArrowType.Bool
+ * TINYINT --> ArrowType.Int(8, signed)
+ * SMALLINT --> ArrowType.Int(16, signed)
+ * INTEGER --> ArrowType.Int(32, signed)
+ * BIGINT --> ArrowType.Int(64, signed)
+ * REAL --> ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE)
+ * FLOAT --> ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE)
+ * DOUBLE --> ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE)
+ * BINARY --> ArrowType.Binary
+ * VARBINARY --> ArrowType.Binary
+ * LONGVARBINARY --> ArrowType.Binary
+ * DATE --> ArrowType.Date(DateUnit.MILLISECOND)
+ * TIME --> ArrowType.Time(TimeUnit.MILLISECOND, 32)
+ * TIMESTAMP --> ArrowType.Timestamp(TimeUnit.MILLISECOND, timezone=null)
+ * CLOB --> ArrowType.Utf8
+ * BLOB --> ArrowType.Binary
+ *
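A minimal usage sketch of the sqlToArrow entry points added by this patch; the class name, the in-memory H2 JDBC URL, and the table name are illustrative placeholders, not part of the patch.

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Calendar;
import java.util.Locale;
import java.util.TimeZone;
import org.apache.arrow.adapter.jdbc.JdbcToArrow;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.VectorSchemaRoot;

public class JdbcToArrowExample {
  public static void main(String[] args) throws Exception {
    // Assumed in-memory H2 database; any JDBC-compliant source works the same way.
    try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:example");
         RootAllocator allocator = new RootAllocator(Integer.MAX_VALUE)) {
      // Uses the default UTC Calendar for DATE/TIME/TIMESTAMP columns;
      // an explicit Calendar can be passed via the overload that takes one.
      try (VectorSchemaRoot root =
               JdbcToArrow.sqlToArrow(conn, "SELECT * FROM EXAMPLE_TABLE", allocator,
                   Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT))) {
        System.out.println("rows read: " + root.getRowCount());
      }
    }
  }
}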

+ * TODO: At this time, SQL Data type java.sql.Types.ARRAY is still not supported. + * + * @since 0.10.0 + */ +public class JdbcToArrow { + + /** + * For the given SQL query, execute and fetch the data from Relational DB and convert it to Arrow objects. + * This method uses the default Calendar instance with default TimeZone and Locale as returned by the JVM. + * If you wish to use specific TimeZone or Locale for any Date, Time and Timestamp datasets, you may want use + * overloaded API that taken Calendar object instance. + * + * @param connection Database connection to be used. This method will not close the passed connection object. Since hte caller has passed + * the connection object it's the responsibility of the caller to close or return the connection to the pool. + * @param query The DB Query to fetch the data. + * @param allocator Memory allocator + * @return Arrow Data Objects {@link VectorSchemaRoot} + * @throws SQLException Propagate any SQL Exceptions to the caller after closing any resources opened such as ResultSet and Statement objects. + */ + public static VectorSchemaRoot sqlToArrow(Connection connection, String query, BaseAllocator allocator) throws SQLException, IOException { + Preconditions.checkNotNull(connection, "JDBC connection object can not be null"); + Preconditions.checkArgument(query != null && query.length() > 0, "SQL query can not be null or empty"); + Preconditions.checkNotNull(allocator, "Memory allocator object can not be null"); + + return sqlToArrow(connection, query, allocator, Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT)); + } + + /** + * For the given SQL query, execute and fetch the data from Relational DB and convert it to Arrow objects. + * + * @param connection Database connection to be used. This method will not close the passed connection object. Since hte caller has passed + * the connection object it's the responsibility of the caller to close or return the connection to the pool. + * @param query The DB Query to fetch the data. + * @param allocator Memory allocator + * @param calendar Calendar object to use to handle Date, Time and Timestamp datasets. + * @return Arrow Data Objects {@link VectorSchemaRoot} + * @throws SQLException Propagate any SQL Exceptions to the caller after closing any resources opened such as ResultSet and Statement objects. + */ + public static VectorSchemaRoot sqlToArrow(Connection connection, String query, BaseAllocator allocator, Calendar calendar) throws SQLException, IOException { + Preconditions.checkNotNull(connection, "JDBC connection object can not be null"); + Preconditions.checkArgument(query != null && query.length() > 0, "SQL query can not be null or empty"); + Preconditions.checkNotNull(allocator, "Memory allocator object can not be null"); + Preconditions.checkNotNull(calendar, "Calendar object can not be null"); + + try (Statement stmt = connection.createStatement()) { + return sqlToArrow(stmt.executeQuery(query), allocator, calendar); + } + } + + /** + * For the given JDBC {@link ResultSet}, fetch the data from Relational DB and convert it to Arrow objects. This method + * uses the default RootAllocator and Calendar object. 
+ * + * @param resultSet + * @return Arrow Data Objects {@link VectorSchemaRoot} + * @throws SQLException + */ + public static VectorSchemaRoot sqlToArrow(ResultSet resultSet) throws SQLException, IOException { + Preconditions.checkNotNull(resultSet, "JDBC ResultSet object can not be null"); + + return sqlToArrow(resultSet, Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT)); + } + + /** + * For the given JDBC {@link ResultSet}, fetch the data from Relational DB and convert it to Arrow objects. + * + * @param resultSet + * @param allocator Memory allocator + * @return Arrow Data Objects {@link VectorSchemaRoot} + * @throws SQLException + */ + public static VectorSchemaRoot sqlToArrow(ResultSet resultSet, BaseAllocator allocator) throws SQLException, IOException { + Preconditions.checkNotNull(resultSet, "JDBC ResultSet object can not be null"); + Preconditions.checkNotNull(allocator, "Memory Allocator object can not be null"); + + return sqlToArrow(resultSet, allocator, Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT)); + } + + /** + * For the given JDBC {@link ResultSet}, fetch the data from Relational DB and convert it to Arrow objects. + * + * @param resultSet + * @param calendar Calendar instance to use for Date, Time and Timestamp datasets. + * @return Arrow Data Objects {@link VectorSchemaRoot} + * @throws SQLException + */ + public static VectorSchemaRoot sqlToArrow(ResultSet resultSet, Calendar calendar) throws SQLException, IOException { + Preconditions.checkNotNull(resultSet, "JDBC ResultSet object can not be null"); + Preconditions.checkNotNull(calendar, "Calendar object can not be null"); + + RootAllocator rootAllocator = new RootAllocator(Integer.MAX_VALUE); + VectorSchemaRoot root = sqlToArrow(resultSet, rootAllocator, calendar); + + return root; + } + + /** + * For the given JDBC {@link ResultSet}, fetch the data from Relational DB and convert it to Arrow objects. + * + * @param resultSet + * @param allocator Memory allocator to use. + * @param calendar Calendar instance to use for Date, Time and Timestamp datasets. + * @return Arrow Data Objects {@link VectorSchemaRoot} + * @throws SQLException + */ + public static VectorSchemaRoot sqlToArrow(ResultSet resultSet, BaseAllocator allocator, Calendar calendar) throws SQLException, IOException { + Preconditions.checkNotNull(resultSet, "JDBC ResultSet object can not be null"); + Preconditions.checkNotNull(allocator, "Memory Allocator object can not be null"); + Preconditions.checkNotNull(calendar, "Calendar object can not be null"); + + VectorSchemaRoot root = VectorSchemaRoot.create( + JdbcToArrowUtils.jdbcToArrowSchema(resultSet.getMetaData(), calendar), allocator); + JdbcToArrowUtils.jdbcToArrowVectors(resultSet, root, calendar); + return root; + } +} diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowUtils.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowUtils.java new file mode 100644 index 00000000000..8621c9f1cd6 --- /dev/null +++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowUtils.java @@ -0,0 +1,518 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.adapter.jdbc; + + +import com.google.common.base.Preconditions; +import io.netty.buffer.ArrowBuf; +import org.apache.arrow.vector.BaseFixedWidthVector; +import org.apache.arrow.vector.BigIntVector; +import org.apache.arrow.vector.BitVector; +import org.apache.arrow.vector.DateMilliVector; +import org.apache.arrow.vector.DecimalVector; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.Float4Vector; +import org.apache.arrow.vector.Float8Vector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.SmallIntVector; +import org.apache.arrow.vector.TimeMilliVector; +import org.apache.arrow.vector.TimeStampVector; +import org.apache.arrow.vector.TinyIntVector; +import org.apache.arrow.vector.VarBinaryVector; +import org.apache.arrow.vector.VarCharVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.holders.NullableBigIntHolder; +import org.apache.arrow.vector.holders.NullableBitHolder; +import org.apache.arrow.vector.holders.NullableDateMilliHolder; +import org.apache.arrow.vector.holders.NullableDecimalHolder; +import org.apache.arrow.vector.holders.NullableFloat4Holder; +import org.apache.arrow.vector.holders.NullableFloat8Holder; +import org.apache.arrow.vector.holders.NullableIntHolder; +import org.apache.arrow.vector.holders.NullableSmallIntHolder; +import org.apache.arrow.vector.holders.NullableTimeMilliHolder; +import org.apache.arrow.vector.holders.NullableTinyIntHolder; +import org.apache.arrow.vector.holders.NullableVarCharHolder; +import org.apache.arrow.vector.holders.VarBinaryHolder; +import org.apache.arrow.vector.holders.VarCharHolder; +import org.apache.arrow.vector.types.DateUnit; +import org.apache.arrow.vector.types.TimeUnit; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.arrow.vector.types.pojo.Schema; +import org.apache.arrow.vector.util.DecimalUtility; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.math.BigDecimal; + +import java.nio.charset.StandardCharsets; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.List; + +import static org.apache.arrow.vector.types.FloatingPointPrecision.DOUBLE; +import static org.apache.arrow.vector.types.FloatingPointPrecision.SINGLE; + + +/** + * Class that does most of the work to convert JDBC ResultSet data into Arrow columnar format Vector objects. + * + * @since 0.10.0 + */ +public class JdbcToArrowUtils { + + private static final int DEFAULT_BUFFER_SIZE = 256; + private static final int DEFAULT_STREAM_BUFFER_SIZE = 1024; + private static final int DEFAULT_CLOB_SUBSTRING_READ_SIZE = 256; + + /** + * Create Arrow {@link Schema} object for the given JDBC {@link ResultSetMetaData}. + *
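For orientation, a sketch of the two-step flow that the JdbcToArrow entry points wrap: build an Arrow Schema from the ResultSetMetaData, then transpose the rows into vectors. The class name and the convert helper are illustrative; the ResultSet is assumed to come from an already executed query.

import java.sql.ResultSet;
import java.util.Calendar;
import java.util.Locale;
import java.util.TimeZone;
import org.apache.arrow.adapter.jdbc.JdbcToArrowUtils;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.vector.types.pojo.Schema;

public class JdbcToArrowUtilsExample {
  public static VectorSchemaRoot convert(ResultSet resultSet, RootAllocator allocator) throws Exception {
    Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT);
    // 1. Map the JDBC column types to an Arrow Schema.
    Schema schema = JdbcToArrowUtils.jdbcToArrowSchema(resultSet.getMetaData(), calendar);
    // 2. Allocate vectors for that schema and populate them from the row-oriented ResultSet.
    VectorSchemaRoot root = VectorSchemaRoot.create(schema, allocator);
    JdbcToArrowUtils.jdbcToArrowVectors(resultSet, root, calendar);
    return root;
  }
}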

+ * This method currently performs the following type mapping from JDBC SQL data types to the corresponding Arrow data types. + *

+ * CHAR --> ArrowType.Utf8 + * NCHAR --> ArrowType.Utf8 + * VARCHAR --> ArrowType.Utf8 + * NVARCHAR --> ArrowType.Utf8 + * LONGVARCHAR --> ArrowType.Utf8 + * LONGNVARCHAR --> ArrowType.Utf8 + * NUMERIC --> ArrowType.Decimal(precision, scale) + * DECIMAL --> ArrowType.Decimal(precision, scale) + * BIT --> ArrowType.Bool + * TINYINT --> ArrowType.Int(8, signed) + * SMALLINT --> ArrowType.Int(16, signed) + * INTEGER --> ArrowType.Int(32, signed) + * BIGINT --> ArrowType.Int(64, signed) + * REAL --> ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE) + * FLOAT --> ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE) + * DOUBLE --> ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE) + * BINARY --> ArrowType.Binary + * VARBINARY --> ArrowType.Binary + * LONGVARBINARY --> ArrowType.Binary + * DATE --> ArrowType.Date(DateUnit.MILLISECOND) + * TIME --> ArrowType.Time(TimeUnit.MILLISECOND, 32) + * TIMESTAMP --> ArrowType.Timestamp(TimeUnit.MILLISECOND, timezone=null) + * CLOB --> ArrowType.Utf8 + * BLOB --> ArrowType.Binary + * + * @param rsmd ResultSetMetaData + * @return {@link Schema} + * @throws SQLException + */ + public static Schema jdbcToArrowSchema(ResultSetMetaData rsmd, Calendar calendar) throws SQLException { + + Preconditions.checkNotNull(rsmd, "JDBC ResultSetMetaData object can't be null"); + Preconditions.checkNotNull(calendar, "Calendar object can't be null"); + + List fields = new ArrayList<>(); + int columnCount = rsmd.getColumnCount(); + for (int i = 1; i <= columnCount; i++) { + String columnName = rsmd.getColumnName(i); + switch (rsmd.getColumnType(i)) { + case Types.BOOLEAN: + case Types.BIT: + fields.add(new Field(columnName, FieldType.nullable(new ArrowType.Bool()), null)); + break; + case Types.TINYINT: + fields.add(new Field(columnName, FieldType.nullable(new ArrowType.Int(8, true)), null)); + break; + case Types.SMALLINT: + fields.add(new Field(columnName, FieldType.nullable(new ArrowType.Int(16, true)), null)); + break; + case Types.INTEGER: + fields.add(new Field(columnName, FieldType.nullable(new ArrowType.Int(32, true)), null)); + break; + case Types.BIGINT: + fields.add(new Field(columnName, FieldType.nullable(new ArrowType.Int(64, true)), null)); + break; + case Types.NUMERIC: + case Types.DECIMAL: + int precision = rsmd.getPrecision(i); + int scale = rsmd.getScale(i); + fields.add(new Field(columnName, FieldType.nullable(new ArrowType.Decimal(precision, scale)), null)); + break; + case Types.REAL: + case Types.FLOAT: + fields.add(new Field(columnName, FieldType.nullable(new ArrowType.FloatingPoint(SINGLE)), null)); + break; + case Types.DOUBLE: + fields.add(new Field(columnName, FieldType.nullable(new ArrowType.FloatingPoint(DOUBLE)), null)); + break; + case Types.CHAR: + case Types.NCHAR: + case Types.VARCHAR: + case Types.NVARCHAR: + case Types.LONGVARCHAR: + case Types.LONGNVARCHAR: + fields.add(new Field(columnName, FieldType.nullable(new ArrowType.Utf8()), null)); + break; + case Types.DATE: + fields.add(new Field(columnName, FieldType.nullable(new ArrowType.Date(DateUnit.MILLISECOND)), null)); + break; + case Types.TIME: + fields.add(new Field(columnName, FieldType.nullable(new ArrowType.Time(TimeUnit.MILLISECOND, 32)), null)); + break; + case Types.TIMESTAMP: + fields.add(new Field(columnName, FieldType.nullable(new ArrowType.Timestamp(TimeUnit.MILLISECOND, calendar.getTimeZone().getID())), null)); + break; + case Types.BINARY: + case Types.VARBINARY: + case Types.LONGVARBINARY: + fields.add(new Field(columnName, FieldType.nullable(new 
ArrowType.Binary()), null)); + break; + case Types.ARRAY: +// TODO Need to handle this type +// fields.add(new Field("list", FieldType.nullable(new ArrowType.List()), null)); + break; + case Types.CLOB: + fields.add(new Field(columnName, FieldType.nullable(new ArrowType.Utf8()), null)); + break; + case Types.BLOB: + fields.add(new Field(columnName, FieldType.nullable(new ArrowType.Binary()), null)); + break; + + default: + // no-op, shouldn't get here + break; + } + } + + return new Schema(fields, null); + } + + private static void allocateVectors(VectorSchemaRoot root, int size) { + List vectors = root.getFieldVectors(); + for (FieldVector fieldVector : vectors) { + if (fieldVector instanceof BaseFixedWidthVector) { + ((BaseFixedWidthVector) fieldVector).allocateNew(size); + } else { + fieldVector.allocateNew(); + } + fieldVector.setInitialCapacity(size); + } + } + + /** + * Iterate the given JDBC {@link ResultSet} object to fetch the data and transpose it to populate + * the given Arrow Vector objects. + * + * @param rs ResultSet to use to fetch the data from underlying database + * @param root Arrow {@link VectorSchemaRoot} object to populate + * @throws SQLException + */ + public static void jdbcToArrowVectors(ResultSet rs, VectorSchemaRoot root, Calendar calendar) throws SQLException, IOException { + + Preconditions.checkNotNull(rs, "JDBC ResultSet object can't be null"); + Preconditions.checkNotNull(root, "JDBC ResultSet object can't be null"); + Preconditions.checkNotNull(calendar, "Calendar object can't be null"); + + ResultSetMetaData rsmd = rs.getMetaData(); + int columnCount = rsmd.getColumnCount(); + + allocateVectors(root, DEFAULT_BUFFER_SIZE); + + int rowCount = 0; + while (rs.next()) { + for (int i = 1; i <= columnCount; i++) { + String columnName = rsmd.getColumnName(i); + switch (rsmd.getColumnType(i)) { + case Types.BOOLEAN: + case Types.BIT: + updateVector((BitVector) root.getVector(columnName), + rs.getBoolean(i), !rs.wasNull(), rowCount); + break; + case Types.TINYINT: + updateVector((TinyIntVector) root.getVector(columnName), + rs.getInt(i), !rs.wasNull(), rowCount); + break; + case Types.SMALLINT: + updateVector((SmallIntVector) root.getVector(columnName), + rs.getInt(i), !rs.wasNull(), rowCount); + break; + case Types.INTEGER: + updateVector((IntVector) root.getVector(columnName), + rs.getInt(i), !rs.wasNull(), rowCount); + break; + case Types.BIGINT: + updateVector((BigIntVector) root.getVector(columnName), + rs.getLong(i), !rs.wasNull(), rowCount); + break; + case Types.NUMERIC: + case Types.DECIMAL: + updateVector((DecimalVector) root.getVector(columnName), + rs.getBigDecimal(i), !rs.wasNull(), rowCount); + break; + case Types.REAL: + case Types.FLOAT: + updateVector((Float4Vector) root.getVector(columnName), + rs.getFloat(i), !rs.wasNull(), rowCount); + break; + case Types.DOUBLE: + updateVector((Float8Vector) root.getVector(columnName), + rs.getDouble(i), !rs.wasNull(), rowCount); + break; + case Types.CHAR: + case Types.NCHAR: + case Types.VARCHAR: + case Types.NVARCHAR: + case Types.LONGVARCHAR: + case Types.LONGNVARCHAR: + updateVector((VarCharVector) root.getVector(columnName), + rs.getString(i), !rs.wasNull(), rowCount); + break; + case Types.DATE: + updateVector((DateMilliVector) root.getVector(columnName), + rs.getDate(i, calendar), !rs.wasNull(), rowCount); + break; + case Types.TIME: + updateVector((TimeMilliVector) root.getVector(columnName), + rs.getTime(i, calendar), !rs.wasNull(), rowCount); + break; + case Types.TIMESTAMP: + // TODO: Need to 
handle precision such as milli, micro, nano + updateVector((TimeStampVector) root.getVector(columnName), + rs.getTimestamp(i, calendar), !rs.wasNull(), rowCount); + break; + case Types.BINARY: + case Types.VARBINARY: + case Types.LONGVARBINARY: + updateVector((VarBinaryVector) root.getVector(columnName), + rs.getBinaryStream(i), !rs.wasNull(), rowCount); + break; + case Types.ARRAY: + // TODO Need to handle this type + // fields.add(new Field("list", FieldType.nullable(new ArrowType.List()), null)); + break; + case Types.CLOB: + updateVector((VarCharVector) root.getVector(columnName), + rs.getClob(i), !rs.wasNull(), rowCount); + break; + case Types.BLOB: + updateVector((VarBinaryVector) root.getVector(columnName), + rs.getBlob(i), !rs.wasNull(), rowCount); + break; + + default: + // no-op, shouldn't get here + break; + } + } + rowCount++; + } + root.setRowCount(rowCount); + } + + private static void updateVector(BitVector bitVector, boolean value, boolean isNonNull, int rowCount) { + NullableBitHolder holder = new NullableBitHolder(); + holder.isSet = isNonNull ? 1 : 0; + if (isNonNull) { + holder.value = value ? 1 : 0; + } + bitVector.setSafe(rowCount, holder); + bitVector.setValueCount(rowCount + 1); + } + + private static void updateVector(TinyIntVector tinyIntVector, int value, boolean isNonNull, int rowCount) { + NullableTinyIntHolder holder = new NullableTinyIntHolder(); + holder.isSet = isNonNull ? 1 : 0; + if (isNonNull) { + holder.value = (byte) value; + } + tinyIntVector.setSafe(rowCount, holder); + tinyIntVector.setValueCount(rowCount + 1); + } + + private static void updateVector(SmallIntVector smallIntVector, int value, boolean isNonNull, int rowCount) { + NullableSmallIntHolder holder = new NullableSmallIntHolder(); + holder.isSet = isNonNull ? 1 : 0; + if (isNonNull) { + holder.value = (short) value; + } + smallIntVector.setSafe(rowCount, holder); + smallIntVector.setValueCount(rowCount + 1); + } + + private static void updateVector(IntVector intVector, int value, boolean isNonNull, int rowCount) { + NullableIntHolder holder = new NullableIntHolder(); + holder.isSet = isNonNull ? 1 : 0; + if (isNonNull) { + holder.value = value; + } + intVector.setSafe(rowCount, holder); + intVector.setValueCount(rowCount + 1); + } + + private static void updateVector(BigIntVector bigIntVector, long value, boolean isNonNull, int rowCount) { + NullableBigIntHolder holder = new NullableBigIntHolder(); + holder.isSet = isNonNull ? 1 : 0; + if (isNonNull) { + holder.value = value; + } + bigIntVector.setSafe(rowCount, holder); + bigIntVector.setValueCount(rowCount + 1); + } + + private static void updateVector(DecimalVector decimalVector, BigDecimal value, boolean isNonNull, int rowCount) { + NullableDecimalHolder holder = new NullableDecimalHolder(); + holder.isSet = isNonNull ? 1 : 0; + if (isNonNull) { + holder.precision = value.precision(); + holder.scale = value.scale(); + holder.buffer = decimalVector.getAllocator().buffer(DEFAULT_BUFFER_SIZE); + holder.start = 0; + DecimalUtility.writeBigDecimalToArrowBuf(value, holder.buffer, holder.start); + } + decimalVector.setSafe(rowCount, holder); + decimalVector.setValueCount(rowCount + 1); + } + + private static void updateVector(Float4Vector float4Vector, float value, boolean isNonNull, int rowCount) { + NullableFloat4Holder holder = new NullableFloat4Holder(); + holder.isSet = isNonNull ? 
1 : 0; + if (isNonNull) { + holder.value = value; + } + float4Vector.setSafe(rowCount, holder); + float4Vector.setValueCount(rowCount + 1); + } + + private static void updateVector(Float8Vector float8Vector, double value, boolean isNonNull, int rowCount) { + NullableFloat8Holder holder = new NullableFloat8Holder(); + holder.isSet = isNonNull ? 1 : 0; + if (isNonNull) { + holder.value = value; + } + float8Vector.setSafe(rowCount, holder); + float8Vector.setValueCount(rowCount + 1); + } + + private static void updateVector(VarCharVector varcharVector, String value, boolean isNonNull, int rowCount) { + NullableVarCharHolder holder = new NullableVarCharHolder(); + holder.isSet = isNonNull ? 1 : 0; + varcharVector.setIndexDefined(rowCount); + if (isNonNull) { + byte[] bytes = value.getBytes(StandardCharsets.UTF_8); + holder.buffer = varcharVector.getAllocator().buffer(bytes.length); + holder.buffer.setBytes(0, bytes, 0, bytes.length); + holder.start = 0; + holder.end = bytes.length; + } else { + holder.buffer = varcharVector.getAllocator().buffer(0); + } + varcharVector.setSafe(rowCount, holder); + varcharVector.setValueCount(rowCount + 1); + } + + private static void updateVector(DateMilliVector dateMilliVector, Date date, boolean isNonNull, int rowCount) { + NullableDateMilliHolder holder = new NullableDateMilliHolder(); + holder.isSet = isNonNull ? 1 : 0; + if (isNonNull) { + holder.value = date.getTime(); + } + dateMilliVector.setSafe(rowCount, holder); + dateMilliVector.setValueCount(rowCount + 1); + } + + private static void updateVector(TimeMilliVector timeMilliVector, Time time, boolean isNonNull, int rowCount) { + NullableTimeMilliHolder holder = new NullableTimeMilliHolder(); + holder.isSet = isNonNull ? 1 : 0; + if (isNonNull && time != null) { + holder.value = (int) time.getTime(); + } + timeMilliVector.setSafe(rowCount, holder); + timeMilliVector.setValueCount(rowCount + 1); + } + + private static void updateVector(TimeStampVector timeStampVector, Timestamp timestamp, boolean isNonNull, int rowCount) { + //TODO: Need to handle precision such as milli, micro, nano + timeStampVector.setValueCount(rowCount + 1); + if (timestamp != null) { + timeStampVector.setSafe(rowCount, timestamp.getTime()); + } else { + timeStampVector.setNull(rowCount); + } + } + + private static void updateVector(VarBinaryVector varBinaryVector, InputStream is, boolean isNonNull, int rowCount) throws IOException { + varBinaryVector.setValueCount(rowCount + 1); + if (isNonNull && is != null) { + VarBinaryHolder holder = new VarBinaryHolder(); + ArrowBuf arrowBuf = varBinaryVector.getDataBuffer(); + holder.start = 0; + byte[] bytes = new byte[DEFAULT_STREAM_BUFFER_SIZE]; + int total = 0; + while (true) { + int read = is.read(bytes, 0, DEFAULT_STREAM_BUFFER_SIZE); + if (read == -1) { + break; + } + arrowBuf.setBytes(total, bytes, total, read); + total += read; + } + holder.end = total; + holder.buffer = arrowBuf; + varBinaryVector.set(rowCount, holder); + varBinaryVector.setIndexDefined(rowCount); + } else { + varBinaryVector.setNull(rowCount); + } + } + + private static void updateVector(VarCharVector varcharVector, Clob clob, boolean isNonNull, int rowCount) throws SQLException, IOException { + varcharVector.setValueCount(rowCount + 1); + if (isNonNull && clob != null) { + VarCharHolder holder = new VarCharHolder(); + ArrowBuf arrowBuf = varcharVector.getDataBuffer(); + holder.start = 0; + long length = clob.length(); + int read = 1; + int readSize = length < DEFAULT_CLOB_SUBSTRING_READ_SIZE ? 
(int) length : DEFAULT_CLOB_SUBSTRING_READ_SIZE; + int totalBytes = 0; + while (read <= length) { + String str = clob.getSubString(read, readSize); + byte[] bytes = str.getBytes(StandardCharsets.UTF_8); + arrowBuf.setBytes(totalBytes, new ByteArrayInputStream(bytes, 0, bytes.length), bytes.length); + totalBytes += bytes.length; + read += readSize; + } + holder.end = totalBytes; + holder.buffer = arrowBuf; + varcharVector.set(rowCount, holder); + varcharVector.setIndexDefined(rowCount); + } else { + varcharVector.setNull(rowCount); + } + } + + private static void updateVector(VarBinaryVector varBinaryVector, Blob blob, boolean isNonNull, int rowCount) throws SQLException, IOException { + updateVector(varBinaryVector, blob != null ? blob.getBinaryStream() : null, isNonNull, rowCount); + } + +} diff --git a/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/AbstractJdbcToArrowTest.java b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/AbstractJdbcToArrowTest.java new file mode 100644 index 00000000000..102824777ad --- /dev/null +++ b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/AbstractJdbcToArrowTest.java @@ -0,0 +1,120 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.adapter.jdbc; + +import java.io.IOException; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.sql.Statement; + +import org.apache.arrow.vector.VectorSchemaRoot; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; + +/** + * Class to abstract out some common test functionality for testing JDBC to Arrow. + */ +public abstract class AbstractJdbcToArrowTest { + protected Connection conn = null; + protected Table table; + + /** + * This method creates Table object after reading YAML file + * + * @param ymlFilePath + * @return + * @throws IOException + */ + protected static Table getTable(String ymlFilePath, Class clss) throws IOException { + return new ObjectMapper(new YAMLFactory()).readValue( + clss.getClassLoader().getResourceAsStream(ymlFilePath), Table.class); + } + + + /** + * This method creates Connection object and DB table and also populate data into table for test + * + * @throws SQLException + * @throws ClassNotFoundException + */ + @Before + public void setUp() throws SQLException, ClassNotFoundException { + String url = "jdbc:h2:mem:JdbcToArrowTest"; + String driver = "org.h2.Driver"; + Class.forName(driver); + conn = DriverManager.getConnection(url); + try (Statement stmt = conn.createStatement();) { + stmt.executeUpdate(table.getCreate()); + for (String insert : table.getData()) { + stmt.executeUpdate(insert); + } + } + } + + /** + * Clean up method to close connection after test completes + * + * @throws SQLException + */ + @After + public void destroy() throws SQLException { + if (conn != null) { + conn.close(); + conn = null; + } + } + + /** + * This method returns collection of Table object for each test iteration + * + * @return + * @throws SQLException + * @throws ClassNotFoundException + * @throws IOException + */ + public static Object[][] prepareTestData(String[] testFiles, Class clss) throws SQLException, ClassNotFoundException, IOException { + Object[][] tableArr = new Object[testFiles.length][]; + int i = 0; + for (String testFile : testFiles) { + tableArr[i++] = new Object[]{getTable(testFile, clss)}; + } + return tableArr; + } + + /** + * Abstract method to implement test Functionality to test JdbcToArrow methods + * + * @throws SQLException + * @throws IOException + */ + @Test + public abstract void testJdbcToArroValues() throws SQLException, IOException; + + /** + * Abstract method to implement logic to assert test various datatype values + * + * @param root + */ + public abstract void testDataSets(VectorSchemaRoot root); + +} diff --git a/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/JdbcToArrowTestHelper.java b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/JdbcToArrowTestHelper.java new file mode 100644 index 00000000000..71a829d71f7 --- /dev/null +++ b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/JdbcToArrowTestHelper.java @@ -0,0 +1,280 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.adapter.jdbc; + +import java.math.BigDecimal; +import java.nio.charset.Charset; + +import org.apache.arrow.vector.BaseValueVector; +import org.apache.arrow.vector.BigIntVector; +import org.apache.arrow.vector.BitVector; +import org.apache.arrow.vector.DateMilliVector; +import org.apache.arrow.vector.DecimalVector; +import org.apache.arrow.vector.Float4Vector; +import org.apache.arrow.vector.Float8Vector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.SmallIntVector; +import org.apache.arrow.vector.TimeMilliVector; +import org.apache.arrow.vector.TimeStampVector; +import org.apache.arrow.vector.TinyIntVector; +import org.apache.arrow.vector.VarBinaryVector; +import org.apache.arrow.vector.VarCharVector; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertArrayEquals; + +/** + * This is a Helper class which has functionalities to read and assert the values from the given FieldVector object + */ +public class JdbcToArrowTestHelper { + + public static void assertIntVectorValues(IntVector intVector, int rowCount, Integer[] values) { + assertEquals(rowCount, intVector.getValueCount()); + + for (int j = 0; j < intVector.getValueCount(); j++) { + assertEquals(values[j].intValue(), intVector.get(j)); + } + } + + public static void assertBooleanVectorValues(BitVector bitVector, int rowCount, Boolean[] values) { + assertEquals(rowCount, bitVector.getValueCount()); + + for (int j = 0; j < bitVector.getValueCount(); j++) { + assertEquals(values[j].booleanValue(), bitVector.get(j) == 1); + } + } + + public static void assertBitVectorValues(BitVector bitVector, int rowCount, Integer[] values) { + assertEquals(rowCount, bitVector.getValueCount()); + + for (int j = 0; j < bitVector.getValueCount(); j++) { + assertEquals(values[j].intValue(), bitVector.get(j)); + } + } + + public static void assertTinyIntVectorValues(TinyIntVector tinyIntVector, int rowCount, Integer[] values) { + assertEquals(rowCount, tinyIntVector.getValueCount()); + + for (int j = 0; j < tinyIntVector.getValueCount(); j++) { + assertEquals(values[j].intValue(), tinyIntVector.get(j)); + } + } + + public static void assertSmallIntVectorValues(SmallIntVector smallIntVector, int rowCount, Integer[] values) { + assertEquals(rowCount, smallIntVector.getValueCount()); + + for (int j = 0; j < smallIntVector.getValueCount(); j++) { + assertEquals(values[j].intValue(), smallIntVector.get(j)); + } + } + + public static void assertBigIntVectorValues(BigIntVector bigIntVector, int rowCount, Long[] values) { + assertEquals(rowCount, bigIntVector.getValueCount()); + + for (int j = 0; j < bigIntVector.getValueCount(); j++) { + assertEquals(values[j].longValue(), bigIntVector.get(j)); + } + } + + public static void assertDecimalVectorValues(DecimalVector decimalVector, int rowCount, BigDecimal[] values) { + assertEquals(rowCount, decimalVector.getValueCount()); + + for (int j = 0; j < decimalVector.getValueCount(); j++) { + assertNotNull(decimalVector.getObject(j)); + assertEquals(values[j].doubleValue(), decimalVector.getObject(j).doubleValue(), 0); 
+ } + } + + public static void assertFloat8VectorValues(Float8Vector float8Vector, int rowCount, Double[] values) { + assertEquals(rowCount, float8Vector.getValueCount()); + + for (int j = 0; j < float8Vector.getValueCount(); j++) { + assertEquals(values[j], float8Vector.get(j), 0.01); + } + } + + public static void assertFloat4VectorValues(Float4Vector float4Vector, int rowCount, Float[] values) { + assertEquals(rowCount, float4Vector.getValueCount()); + + for (int j = 0; j < float4Vector.getValueCount(); j++) { + assertEquals(values[j], float4Vector.get(j), 0.01); + } + } + + public static void assertTimeVectorValues(TimeMilliVector timeMilliVector, int rowCount, Long[] values) { + assertEquals(rowCount, timeMilliVector.getValueCount()); + + for (int j = 0; j < timeMilliVector.getValueCount(); j++) { + assertEquals(values[j].longValue(), timeMilliVector.get(j)); + } + } + + public static void assertDateVectorValues(DateMilliVector dateMilliVector, int rowCount, Long[] values) { + assertEquals(rowCount, dateMilliVector.getValueCount()); + + for (int j = 0; j < dateMilliVector.getValueCount(); j++) { + assertEquals(values[j].longValue(), dateMilliVector.get(j)); + } + } + + public static void assertTimeStampVectorValues(TimeStampVector timeStampVector, int rowCount, Long[] values) { + assertEquals(rowCount, timeStampVector.getValueCount()); + + for (int j = 0; j < timeStampVector.getValueCount(); j++) { + assertEquals(values[j].longValue(), timeStampVector.get(j)); + } + } + + public static void assertVarBinaryVectorValues(VarBinaryVector varBinaryVector, int rowCount, byte[][] values) { + assertEquals(rowCount, varBinaryVector.getValueCount()); + + for (int j = 0; j < varBinaryVector.getValueCount(); j++) { + assertArrayEquals(values[j], varBinaryVector.get(j)); + } + } + + public static void assertVarcharVectorValues(VarCharVector varCharVector, int rowCount, byte[][] values) { + assertEquals(rowCount, varCharVector.getValueCount()); + + for (int j = 0; j < varCharVector.getValueCount(); j++) { + assertArrayEquals(values[j], varCharVector.get(j)); + } + } + + public static void assertNullValues(BaseValueVector vector, int rowCount) { + assertEquals(rowCount, vector.getValueCount()); + + for (int j = 0; j < vector.getValueCount(); j++) { + assertTrue(vector.isNull(j)); + } + } + + public static byte[] hexStringToByteArray(String s) { + int len = s.length(); + byte[] data = new byte[len / 2]; + for (int i = 0; i < len; i += 2) { + data[i / 2] = (byte) ((Character.digit(s.charAt(i), 16) << 4) + + Character.digit(s.charAt(i + 1), 16)); + } + return data; + } + + public static Integer[] getIntValues(String[] values, String dataType) { + String[] dataArr = getValues(values, dataType); + Integer[] valueArr = new Integer[dataArr.length]; + int i = 0; + for (String data : dataArr) { + valueArr[i++] = Integer.parseInt(data); + } + return valueArr; + } + + public static Boolean[] getBooleanValues(String[] values, String dataType) { + String[] dataArr = getValues(values, dataType); + Boolean[] valueArr = new Boolean[dataArr.length]; + int i = 0; + for (String data : dataArr) { + valueArr[i++] = data.trim().equals("1"); + } + return valueArr; + } + + public static BigDecimal[] getDecimalValues(String[] values, String dataType) { + String[] dataArr = getValues(values, dataType); + BigDecimal[] valueArr = new BigDecimal[dataArr.length]; + int i = 0; + for (String data : dataArr) { + valueArr[i++] = new BigDecimal(data); + } + return valueArr; + } + + public static Double[] getDoubleValues(String[] 
values, String dataType) { + String[] dataArr = getValues(values, dataType); + Double[] valueArr = new Double[dataArr.length]; + int i = 0; + for (String data : dataArr) { + valueArr[i++] = Double.parseDouble(data); + } + return valueArr; + } + + public static Float[] getFloatValues(String[] values, String dataType) { + String[] dataArr = getValues(values, dataType); + Float[] valueArr = new Float[dataArr.length]; + int i = 0; + for (String data : dataArr) { + valueArr[i++] = Float.parseFloat(data); + } + return valueArr; + } + + public static Long[] getLongValues(String[] values, String dataType) { + String[] dataArr = getValues(values, dataType); + Long[] valueArr = new Long[dataArr.length]; + int i = 0; + for (String data : dataArr) { + valueArr[i++] = Long.parseLong(data); + } + return valueArr; + } + + public static byte[][] getCharArray(String[] values, String dataType) { + String[] dataArr = getValues(values, dataType); + byte[][] valueArr = new byte[dataArr.length][]; + int i = 0; + for (String data : dataArr) { + valueArr[i++] = data.trim().getBytes(); + } + return valueArr; + } + + public static byte[][] getCharArrayWithCharSet(String[] values, String dataType, Charset charSet) { + String[] dataArr = getValues(values, dataType); + byte[][] valueArr = new byte[dataArr.length][]; + int i = 0; + for (String data : dataArr) { + valueArr[i++] = data.trim().getBytes(charSet); + } + return valueArr; + } + + public static byte[][] getBinaryValues(String[] values, String dataType) { + String[] dataArr = getValues(values, dataType); + byte[][] valueArr = new byte[dataArr.length][]; + int i = 0; + for (String data : dataArr) { + valueArr[i++] = hexStringToByteArray(data.trim()); + } + return valueArr; + } + + public static String[] getValues(String[] values, String dataType) { + String value = ""; + for (String val : values) { + if (val.startsWith(dataType)) { + value = val.split("=")[1]; + break; + } + } + return value.split(","); + } +} diff --git a/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/Table.java b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/Table.java new file mode 100644 index 00000000000..5bfdf756403 --- /dev/null +++ b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/Table.java @@ -0,0 +1,235 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.adapter.jdbc; + +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +/** + * POJO to handle the YAML data from the test YAML file. + */ +@JsonIgnoreProperties(ignoreUnknown = true) +public class Table { + private String name; + private String type; + private String vector; + private String timezone; + private String create; + private String[] data; + private String query; + private String drop; + private String[] values; + private String[] vectors; + private int rowCount; + + public Table() { + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getVector() { + return vector; + } + + public void setVector(String vector) { + this.vector = vector; + } + + public String[] getValues() { + return values; + } + + public void setValues(String[] values) { + this.values = values; + } + + public Long[] getLongValues() { + Long[] arr = new Long[values.length]; + int i = 0; + for (String str : values) { + arr[i++] = Long.parseLong(str); + } + return arr; + } + + public Integer[] getIntValues() { + Integer[] arr = new Integer[values.length]; + int i = 0; + for (String str : values) { + arr[i++] = Integer.parseInt(str); + } + return arr; + } + + public Boolean[] getBoolValues() { + Boolean[] arr = new Boolean[values.length]; + int i = 0; + for (String str : values) { + arr[i++] = Boolean.parseBoolean(str); + } + return arr; + } + + public BigDecimal[] getBigDecimalValues() { + BigDecimal[] arr = new BigDecimal[values.length]; + int i = 0; + for (String str : values) { + arr[i++] = new BigDecimal(str); + } + return arr; + } + + public Double[] getDoubleValues() { + Double[] arr = new Double[values.length]; + int i = 0; + for (String str : values) { + arr[i++] = Double.parseDouble(str); + } + return arr; + } + + public Float[] getFloatValues() { + Float[] arr = new Float[values.length]; + int i = 0; + for (String str : values) { + arr[i++] = Float.parseFloat(str); + } + return arr; + } + + public byte[][] getBinaryValues() { + return getHexToByteArray(values); + } + + public byte[][] getVarCharValues() { + return getByteArray(values); + } + + public byte[][] getBlobValues() { + return getBinaryValues(); + } + + public byte[][] getClobValues() { + return getByteArray(values); + } + + public byte[][] getCharValues() { + return getByteArray(values); + } + + public String getCreate() { + return create; + } + + public void setCreate(String create) { + this.create = create; + } + + public String[] getData() { + return data; + } + + public void setData(String[] data) { + this.data = data; + } + + public String getQuery() { + return query; + } + + public void setQuery(String query) { + this.query = query; + } + + public String getDrop() { + return drop; + } + + public void setDrop(String drop) { + this.drop = drop; + } + + public String getTimezone() { + return timezone; + } + + public void setTimezone(String timezone) { + this.timezone = timezone; + } + + public String[] getVectors() { 
+ return vectors; + } + + public void setVectors(String[] vectors) { + this.vectors = vectors; + } + + public int getRowCount() { + return rowCount; + } + + public void setRowCount(int rowCount) { + this.rowCount = rowCount; + } + + private byte[][] getByteArray(String[] data) { + byte[][] byteArr = new byte[data.length][]; + + for (int i = 0; i < data.length; i++) { + byteArr[i] = data[i].getBytes(StandardCharsets.UTF_8); + } + return byteArr; + } + + private byte[][] getHexToByteArray(String[] data) { + byte[][] byteArr = new byte[data.length][]; + + for (int i = 0; i < data.length; i++) { + byteArr[i] = hexStringToByteArray(data[i]); + } + return byteArr; + } + + private static byte[] hexStringToByteArray(String s) { + int len = s.length(); + byte[] data = new byte[len / 2]; + for (int i = 0; i < len; i += 2) { + data[i / 2] = (byte) ((Character.digit(s.charAt(i), 16) << 4) + + Character.digit(s.charAt(i + 1), 16)); + } + return data; + } +} \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowCharSetTest.java b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowCharSetTest.java new file mode 100644 index 00000000000..473b8f70263 --- /dev/null +++ b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowCharSetTest.java @@ -0,0 +1,134 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.adapter.jdbc.h2; + +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertVarcharVectorValues; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Collection; + +import org.apache.arrow.adapter.jdbc.AbstractJdbcToArrowTest; +import org.apache.arrow.adapter.jdbc.JdbcToArrow; +import org.apache.arrow.adapter.jdbc.Table; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.VarCharVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.getCharArrayWithCharSet; + +/** + * JUnit Test Class which contains methods to test JDBC to Arrow data conversion functionality with UTF-8 Charset, including + * the multi-byte CJK characters for H2 database + */ +@RunWith(Parameterized.class) +public class JdbcToArrowCharSetTest extends AbstractJdbcToArrowTest { + private static final String VARCHAR = "VARCHAR_FIELD13"; + private static final String CHAR = "CHAR_FIELD16"; + private static final String CLOB = "CLOB_FIELD15"; + + private static final String[] testFiles = { + "h2/test1_charset_h2.yml", + "h2/test1_charset_ch_h2.yml", + "h2/test1_charset_jp_h2.yml", + "h2/test1_charset_kr_h2.yml" + }; + + /** + * Constructor which populate table object for each test iteration + * + * @param table + */ + public JdbcToArrowCharSetTest(Table table) { + this.table = table; + } + + /** + * This method creates Connection object and DB table and also populate data into table for test + * + * @throws SQLException + * @throws ClassNotFoundException + */ + @Before + public void setUp() throws SQLException, ClassNotFoundException { + String url = "jdbc:h2:mem:JdbcToArrowTest?characterEncoding=UTF-8"; + String driver = "org.h2.Driver"; + Class.forName(driver); + conn = DriverManager.getConnection(url); + try (Statement stmt = conn.createStatement();) { + stmt.executeUpdate(table.getCreate()); + for (String insert : table.getData()) { + stmt.executeUpdate(insert); + } + } + } + + /** + * This method returns collection of Table object for each test iteration + * + * @return + * @throws SQLException + * @throws ClassNotFoundException + * @throws IOException + */ + @Parameters + public static Collection getTestData() throws SQLException, ClassNotFoundException, IOException { + return Arrays.asList(prepareTestData(testFiles, JdbcToArrowCharSetTest.class)); + } + + /** + * Test Method to test JdbcToArrow Functionality for various H2 DB based datatypes with UTF-8 Charset, including + * the multi-byte CJK characters + */ + @Test + public void testJdbcToArroValues() throws SQLException, IOException { + testDataSets(JdbcToArrow.sqlToArrow(conn, table.getQuery(), new RootAllocator(Integer.MAX_VALUE), Calendar.getInstance())); + testDataSets(JdbcToArrow.sqlToArrow(conn, table.getQuery(), new 
RootAllocator(Integer.MAX_VALUE))); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()), new RootAllocator(Integer.MAX_VALUE), + Calendar.getInstance())); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()))); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()), new RootAllocator(Integer.MAX_VALUE))); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()), Calendar.getInstance())); + } + + /** + * This method calls the assert methods for various DataSets + * + * @param root + */ + public void testDataSets(VectorSchemaRoot root) { + assertVarcharVectorValues((VarCharVector) root.getVector(CLOB), table.getRowCount(), + getCharArrayWithCharSet(table.getValues(), CLOB, StandardCharsets.UTF_8)); + + assertVarcharVectorValues((VarCharVector) root.getVector(VARCHAR), table.getRowCount(), + getCharArrayWithCharSet(table.getValues(), VARCHAR, StandardCharsets.UTF_8)); + + assertVarcharVectorValues((VarCharVector) root.getVector(CHAR), table.getRowCount(), + getCharArrayWithCharSet(table.getValues(), CHAR, StandardCharsets.UTF_8)); + } +} diff --git a/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowDataTypesTest.java b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowDataTypesTest.java new file mode 100644 index 00000000000..aa256643008 --- /dev/null +++ b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowDataTypesTest.java @@ -0,0 +1,200 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.adapter.jdbc.h2; + +import org.apache.arrow.adapter.jdbc.AbstractJdbcToArrowTest; +import org.apache.arrow.adapter.jdbc.JdbcToArrow; +import org.apache.arrow.adapter.jdbc.Table; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.BigIntVector; +import org.apache.arrow.vector.BitVector; +import org.apache.arrow.vector.DateMilliVector; +import org.apache.arrow.vector.DecimalVector; +import org.apache.arrow.vector.Float4Vector; +import org.apache.arrow.vector.Float8Vector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.SmallIntVector; +import org.apache.arrow.vector.TimeMilliVector; +import org.apache.arrow.vector.TimeStampVector; +import org.apache.arrow.vector.TinyIntVector; +import org.apache.arrow.vector.VarBinaryVector; +import org.apache.arrow.vector.VarCharVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Collection; + +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertBigIntVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertBitVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertBooleanVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertDateVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertDecimalVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertFloat4VectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertFloat8VectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertIntVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertSmallIntVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertTimeStampVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertTimeVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertTinyIntVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertVarBinaryVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertVarcharVectorValues; + +/** + * JUnit Test Class which contains methods to test JDBC to Arrow data conversion functionality with various data types for H2 database + * using multiple test data files + */ +@RunWith(Parameterized.class) +public class JdbcToArrowDataTypesTest extends AbstractJdbcToArrowTest { + + private static final String BIGINT = "big_int"; + private static final String BINARY = "binary"; + private static final String BIT = "bit"; + private static final String BLOB = "blob"; + private static final String BOOL = "bool"; + private static final String CHAR = "char"; + private static final String CLOB = "clob"; + private static final String DATE = "date"; + private static final String DECIMAL = "decimal"; + 
private static final String DOUBLE = "double"; + private static final String INT = "int"; + private static final String REAL = "real"; + private static final String SMALLINT = "small_int"; + private static final String TIME = "time"; + private static final String TIMESTAMP = "timestamp"; + private static final String TINYINT = "tiny_int"; + private static final String VARCHAR = "varchar"; + + private static final String[] testFiles = { + "h2/test1_bigint_h2.yml", + "h2/test1_binary_h2.yml", + "h2/test1_bit_h2.yml", + "h2/test1_blob_h2.yml", + "h2/test1_bool_h2.yml", + "h2/test1_char_h2.yml", + "h2/test1_clob_h2.yml", + "h2/test1_date_h2.yml", + "h2/test1_decimal_h2.yml", + "h2/test1_double_h2.yml", + "h2/test1_int_h2.yml", + "h2/test1_real_h2.yml", + "h2/test1_smallint_h2.yml", + "h2/test1_time_h2.yml", + "h2/test1_timestamp_h2.yml", + "h2/test1_tinyint_h2.yml", + "h2/test1_varchar_h2.yml" + }; + + /** + * Constructor which populate table object for each test iteration + * + * @param table + */ + public JdbcToArrowDataTypesTest(Table table) { + this.table = table; + } + + /** + * This method returns collection of Table object for each test iteration + * + * @return + * @throws SQLException + * @throws ClassNotFoundException + * @throws IOException + */ + @Parameters + public static Collection getTestData() throws SQLException, ClassNotFoundException, IOException { + return Arrays.asList(prepareTestData(testFiles, JdbcToArrowDataTypesTest.class)); + } + + /** + * Test Method to test JdbcToArrow Functionality for various H2 DB based datatypes + */ + @Test + public void testJdbcToArroValues() throws SQLException, IOException { + testDataSets(JdbcToArrow.sqlToArrow(conn, table.getQuery(), new RootAllocator(Integer.MAX_VALUE), Calendar.getInstance())); + testDataSets(JdbcToArrow.sqlToArrow(conn, table.getQuery(), new RootAllocator(Integer.MAX_VALUE))); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()), new RootAllocator(Integer.MAX_VALUE), + Calendar.getInstance())); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()))); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()), new RootAllocator(Integer.MAX_VALUE))); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()), Calendar.getInstance())); + } + + /** + * This method calls the assert methods for various DataSets + * + * @param root + */ + public void testDataSets(VectorSchemaRoot root) { + switch (table.getType()) { + case BIGINT: + assertBigIntVectorValues((BigIntVector) root.getVector(table.getVector()), table.getValues().length, table.getLongValues()); + break; + case BINARY: + case BLOB: + assertVarBinaryVectorValues((VarBinaryVector) root.getVector(table.getVector()), table.getValues().length, table.getBinaryValues()); + break; + case BIT: + assertBitVectorValues((BitVector) root.getVector(table.getVector()), table.getValues().length, table.getIntValues()); + break; + case BOOL: + assertBooleanVectorValues((BitVector) root.getVector(table.getVector()), table.getValues().length, table.getBoolValues()); + break; + case CHAR: + case VARCHAR: + case CLOB: + assertVarcharVectorValues((VarCharVector) root.getVector(table.getVector()), table.getValues().length, table.getCharValues()); + break; + case DATE: + assertDateVectorValues((DateMilliVector) root.getVector(table.getVector()), table.getValues().length, table.getLongValues()); + break; + case TIME: + 
assertTimeVectorValues((TimeMilliVector) root.getVector(table.getVector()), table.getValues().length, table.getLongValues()); + break; + case TIMESTAMP: + assertTimeStampVectorValues((TimeStampVector) root.getVector(table.getVector()), table.getValues().length, table.getLongValues()); + break; + case DECIMAL: + assertDecimalVectorValues((DecimalVector) root.getVector(table.getVector()), table.getValues().length, table.getBigDecimalValues()); + break; + case DOUBLE: + assertFloat8VectorValues((Float8Vector) root.getVector(table.getVector()), table.getValues().length, table.getDoubleValues()); + break; + case INT: + assertIntVectorValues((IntVector) root.getVector(table.getVector()), table.getValues().length, table.getIntValues()); + break; + case SMALLINT: + assertSmallIntVectorValues((SmallIntVector) root.getVector(table.getVector()), table.getValues().length, table.getIntValues()); + break; + case TINYINT: + assertTinyIntVectorValues((TinyIntVector) root.getVector(table.getVector()), table.getValues().length, table.getIntValues()); + break; + case REAL: + assertFloat4VectorValues((Float4Vector) root.getVector(table.getVector()), table.getValues().length, table.getFloatValues()); + break; + } + } +} + diff --git a/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowNullTest.java b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowNullTest.java new file mode 100644 index 00000000000..7df5278288a --- /dev/null +++ b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowNullTest.java @@ -0,0 +1,168 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.adapter.jdbc.h2; + +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertNullValues; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Collection; + +import org.apache.arrow.adapter.jdbc.AbstractJdbcToArrowTest; +import org.apache.arrow.adapter.jdbc.JdbcToArrow; +import org.apache.arrow.adapter.jdbc.Table; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.BigIntVector; +import org.apache.arrow.vector.BitVector; +import org.apache.arrow.vector.DateMilliVector; +import org.apache.arrow.vector.DecimalVector; +import org.apache.arrow.vector.Float4Vector; +import org.apache.arrow.vector.Float8Vector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.SmallIntVector; +import org.apache.arrow.vector.TimeMilliVector; +import org.apache.arrow.vector.TimeStampVector; +import org.apache.arrow.vector.TinyIntVector; +import org.apache.arrow.vector.VarBinaryVector; +import org.apache.arrow.vector.VarCharVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +/** + * JUnit Test Class which contains methods to test JDBC to Arrow data conversion functionality with null values for H2 database + */ +@RunWith(Parameterized.class) +public class JdbcToArrowNullTest extends AbstractJdbcToArrowTest { + + private static final String NULL = "null"; + private static final String SELECTED_NULL_COLUMN = "selected_null_column"; + + private static final String[] testFiles = { + "h2/test1_all_datatypes_null_h2.yml", + "h2/test1_selected_datatypes_null_h2.yml" + }; + + /** + * Constructor which populate table object for each test iteration + * + * @param table + */ + public JdbcToArrowNullTest(Table table) { + this.table = table; + } + + /** + * This method returns collection of Table object for each test iteration + * + * @return + * @throws SQLException + * @throws ClassNotFoundException + * @throws IOException + */ + @Parameters + public static Collection getTestData() throws SQLException, ClassNotFoundException, IOException { + return Arrays.asList(prepareTestData(testFiles, JdbcToArrowNullTest.class)); + } + + /** + * Test Method to test JdbcToArrow Functionality for various H2 DB based datatypes with null values + */ + @Test + public void testJdbcToArroValues() throws SQLException, IOException { + testDataSets(JdbcToArrow.sqlToArrow(conn, table.getQuery(), new RootAllocator(Integer.MAX_VALUE), Calendar.getInstance())); + testDataSets(JdbcToArrow.sqlToArrow(conn, table.getQuery(), new RootAllocator(Integer.MAX_VALUE))); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()), new RootAllocator(Integer.MAX_VALUE), + Calendar.getInstance())); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()))); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()), new RootAllocator(Integer.MAX_VALUE))); + 
testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()), Calendar.getInstance())); + } + + + /** + * This method calls the assert methods for various DataSets + * + * @param root + */ + public void testDataSets(VectorSchemaRoot root) { + switch (table.getType()) { + case NULL: + sqlToArrowTestNullValues(table.getVectors(), root, table.getRowCount()); + break; + case SELECTED_NULL_COLUMN: + sqlToArrowTestSelectedNullColumnsValues(table.getVectors(), root, table.getRowCount()); + break; + } + } + + /** + * This method assert tests null values in vectors for all the datatypes + * + * @param vectors + * @param root + * @param rowCount + */ + public void sqlToArrowTestNullValues(String[] vectors, VectorSchemaRoot root, int rowCount) { + assertNullValues((IntVector) root.getVector(vectors[0]), rowCount); + assertNullValues((BitVector) root.getVector(vectors[1]), rowCount); + assertNullValues((TinyIntVector) root.getVector(vectors[2]), rowCount); + assertNullValues((SmallIntVector) root.getVector(vectors[3]), rowCount); + assertNullValues((BigIntVector) root.getVector(vectors[4]), rowCount); + assertNullValues((DecimalVector) root.getVector(vectors[5]), rowCount); + assertNullValues((Float8Vector) root.getVector(vectors[6]), rowCount); + assertNullValues((Float4Vector) root.getVector(vectors[7]), rowCount); + assertNullValues((TimeMilliVector) root.getVector(vectors[8]), rowCount); + assertNullValues((DateMilliVector) root.getVector(vectors[9]), rowCount); + assertNullValues((TimeStampVector) root.getVector(vectors[10]), rowCount); + assertNullValues((VarBinaryVector) root.getVector(vectors[11]), rowCount); + assertNullValues((VarCharVector) root.getVector(vectors[12]), rowCount); + assertNullValues((VarBinaryVector) root.getVector(vectors[13]), rowCount); + assertNullValues((VarCharVector) root.getVector(vectors[14]), rowCount); + assertNullValues((VarCharVector) root.getVector(vectors[15]), rowCount); + assertNullValues((BitVector) root.getVector(vectors[16]), rowCount); + } + + /** + * This method assert tests null values in vectors for some selected datatypes + * + * @param vectors + * @param root + * @param rowCount + */ + public void sqlToArrowTestSelectedNullColumnsValues(String[] vectors, VectorSchemaRoot root, int rowCount) { + assertNullValues((BigIntVector) root.getVector(vectors[0]), rowCount); + assertNullValues((DecimalVector) root.getVector(vectors[1]), rowCount); + assertNullValues((Float8Vector) root.getVector(vectors[2]), rowCount); + assertNullValues((Float4Vector) root.getVector(vectors[3]), rowCount); + assertNullValues((TimeMilliVector) root.getVector(vectors[4]), rowCount); + assertNullValues((DateMilliVector) root.getVector(vectors[5]), rowCount); + assertNullValues((TimeStampVector) root.getVector(vectors[6]), rowCount); + assertNullValues((VarBinaryVector) root.getVector(vectors[7]), rowCount); + assertNullValues((VarCharVector) root.getVector(vectors[8]), rowCount); + assertNullValues((VarBinaryVector) root.getVector(vectors[9]), rowCount); + assertNullValues((VarCharVector) root.getVector(vectors[10]), rowCount); + assertNullValues((VarCharVector) root.getVector(vectors[11]), rowCount); + assertNullValues((BitVector) root.getVector(vectors[12]), rowCount); + } + +} diff --git a/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowTest.java b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowTest.java new file mode 100644 index 00000000000..8de6f3eeaea --- /dev/null +++ 
b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowTest.java @@ -0,0 +1,194 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.arrow.adapter.jdbc.h2; + +import org.apache.arrow.adapter.jdbc.AbstractJdbcToArrowTest; +import org.apache.arrow.adapter.jdbc.JdbcToArrow; +import org.apache.arrow.adapter.jdbc.Table; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.BigIntVector; +import org.apache.arrow.vector.BitVector; +import org.apache.arrow.vector.DateMilliVector; +import org.apache.arrow.vector.DecimalVector; +import org.apache.arrow.vector.Float4Vector; +import org.apache.arrow.vector.Float8Vector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.SmallIntVector; +import org.apache.arrow.vector.TimeMilliVector; +import org.apache.arrow.vector.TimeStampVector; +import org.apache.arrow.vector.TinyIntVector; +import org.apache.arrow.vector.VarBinaryVector; +import org.apache.arrow.vector.VarCharVector; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Collection; + +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertBigIntVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertBitVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertBooleanVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertDateVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertDecimalVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertFloat4VectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertFloat8VectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertIntVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertSmallIntVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertTimeStampVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertTimeVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertTinyIntVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertVarBinaryVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertVarcharVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.getLongValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.getIntValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.getBooleanValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.getDecimalValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.getDoubleValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.getFloatValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.getCharArray; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.getBinaryValues; + +/** + * JUnit Test Class which contains 
methods to test JDBC to Arrow data conversion functionality with various data types for H2 database + * using single test data file + */ +@RunWith(Parameterized.class) +public class JdbcToArrowTest extends AbstractJdbcToArrowTest { + + private static final String BIGINT = "BIGINT_FIELD5"; + private static final String BINARY = "BINARY_FIELD12"; + private static final String BIT = "BIT_FIELD17"; + private static final String BLOB = "BLOB_FIELD14"; + private static final String BOOL = "BOOL_FIELD2"; + private static final String CHAR = "CHAR_FIELD16"; + private static final String CLOB = "CLOB_FIELD15"; + private static final String DATE = "DATE_FIELD10"; + private static final String DECIMAL = "DECIMAL_FIELD6"; + private static final String DOUBLE = "DOUBLE_FIELD7"; + private static final String INT = "INT_FIELD1"; + private static final String REAL = "REAL_FIELD8"; + private static final String SMALLINT = "SMALLINT_FIELD4"; + private static final String TIME = "TIME_FIELD9"; + private static final String TIMESTAMP = "TIMESTAMP_FIELD11"; + private static final String TINYINT = "TINYINT_FIELD3"; + private static final String VARCHAR = "VARCHAR_FIELD13"; + + private static final String[] testFiles = {"h2/test1_all_datatypes_h2.yml"}; + + /** + * Constructor which populate table object for each test iteration + * + * @param table + */ + public JdbcToArrowTest(Table table) { + this.table = table; + } + + /** + * This method returns collection of Table object for each test iteration + * + * @return + * @throws SQLException + * @throws ClassNotFoundException + * @throws IOException + */ + @Parameters + public static Collection getTestData() throws SQLException, ClassNotFoundException, IOException { + return Arrays.asList(prepareTestData(testFiles, JdbcToArrowTest.class)); + } + + /** + * Test Method to test JdbcToArrow Functionality for various H2 DB based datatypes with only one test data file + */ + @Test + public void testJdbcToArroValues() throws SQLException, IOException { + testDataSets(JdbcToArrow.sqlToArrow(conn, table.getQuery(), new RootAllocator(Integer.MAX_VALUE), Calendar.getInstance())); + testDataSets(JdbcToArrow.sqlToArrow(conn, table.getQuery(), new RootAllocator(Integer.MAX_VALUE))); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()), new RootAllocator(Integer.MAX_VALUE), + Calendar.getInstance())); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()))); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()), new RootAllocator(Integer.MAX_VALUE))); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()), Calendar.getInstance())); + } + + /** + * This method calls the assert methods for various DataSets + * + * @param root + */ + public void testDataSets(VectorSchemaRoot root) { + assertBigIntVectorValues((BigIntVector) root.getVector(BIGINT), table.getRowCount(), + getLongValues(table.getValues(), BIGINT)); + + assertTinyIntVectorValues((TinyIntVector) root.getVector(TINYINT), table.getRowCount(), + getIntValues(table.getValues(), TINYINT)); + + assertSmallIntVectorValues((SmallIntVector) root.getVector(SMALLINT), table.getRowCount(), + getIntValues(table.getValues(), SMALLINT)); + + assertVarBinaryVectorValues((VarBinaryVector) root.getVector(BINARY), table.getRowCount(), + getBinaryValues(table.getValues(), BINARY)); + + assertVarBinaryVectorValues((VarBinaryVector) root.getVector(BLOB), table.getRowCount(), + 
getBinaryValues(table.getValues(), BLOB)); + + assertVarcharVectorValues((VarCharVector) root.getVector(CLOB), table.getRowCount(), + getCharArray(table.getValues(), CLOB)); + + assertVarcharVectorValues((VarCharVector) root.getVector(VARCHAR), table.getRowCount(), + getCharArray(table.getValues(), VARCHAR)); + + assertVarcharVectorValues((VarCharVector) root.getVector(CHAR), table.getRowCount(), + getCharArray(table.getValues(), CHAR)); + + assertIntVectorValues((IntVector) root.getVector(INT), table.getRowCount(), + getIntValues(table.getValues(), INT)); + + assertBitVectorValues((BitVector) root.getVector(BIT), table.getRowCount(), + getIntValues(table.getValues(), BIT)); + + assertBooleanVectorValues((BitVector) root.getVector(BOOL), table.getRowCount(), + getBooleanValues(table.getValues(), BOOL)); + + assertDateVectorValues((DateMilliVector) root.getVector(DATE), table.getRowCount(), + getLongValues(table.getValues(), DATE)); + + assertTimeVectorValues((TimeMilliVector) root.getVector(TIME), table.getRowCount(), + getLongValues(table.getValues(), TIME)); + + assertTimeStampVectorValues((TimeStampVector) root.getVector(TIMESTAMP), table.getRowCount(), + getLongValues(table.getValues(), TIMESTAMP)); + + assertDecimalVectorValues((DecimalVector) root.getVector(DECIMAL), table.getRowCount(), + getDecimalValues(table.getValues(), DECIMAL)); + + assertFloat8VectorValues((Float8Vector) root.getVector(DOUBLE), table.getRowCount(), + getDoubleValues(table.getValues(), DOUBLE)); + + assertFloat4VectorValues((Float4Vector) root.getVector(REAL), table.getRowCount(), + getFloatValues(table.getValues(), REAL)); + } + +} diff --git a/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowTimeZoneTest.java b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowTimeZoneTest.java new file mode 100644 index 00000000000..87003d001ed --- /dev/null +++ b/java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/h2/JdbcToArrowTimeZoneTest.java @@ -0,0 +1,135 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.adapter.jdbc.h2; + +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertDateVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertTimeStampVectorValues; +import static org.apache.arrow.adapter.jdbc.JdbcToArrowTestHelper.assertTimeVectorValues; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Collection; +import java.util.TimeZone; + +import org.apache.arrow.adapter.jdbc.AbstractJdbcToArrowTest; +import org.apache.arrow.adapter.jdbc.JdbcToArrow; +import org.apache.arrow.adapter.jdbc.Table; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.DateMilliVector; +import org.apache.arrow.vector.TimeMilliVector; +import org.apache.arrow.vector.TimeStampVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +/** + * JUnit Test Class which contains methods to test JDBC to Arrow data conversion functionality with TimeZone based Date, + * Time and Timestamp datatypes for H2 database + */ + +@RunWith(Parameterized.class) +public class JdbcToArrowTimeZoneTest extends AbstractJdbcToArrowTest { + + private static final String EST_DATE = "est_date"; + private static final String EST_TIME = "est_time"; + private static final String EST_TIMESTAMP = "est_timestamp"; + private static final String GMT_DATE = "gmt_date"; + private static final String GMT_TIME = "gmt_time"; + private static final String GMT_TIMESTAMP = "gmt_timestamp"; + private static final String PST_DATE = "pst_date"; + private static final String PST_TIME = "pst_time"; + private static final String PST_TIMESTAMP = "pst_timestamp"; + + private static final String[] testFiles = { + "h2/test1_est_date_h2.yml", + "h2/test1_est_time_h2.yml", + "h2/test1_est_timestamp_h2.yml", + "h2/test1_gmt_date_h2.yml", + "h2/test1_gmt_time_h2.yml", + "h2/test1_gmt_timestamp_h2.yml", + "h2/test1_pst_date_h2.yml", + "h2/test1_pst_time_h2.yml", + "h2/test1_pst_timestamp_h2.yml" + }; + + /** + * Constructor which populate table object for each test iteration + * + * @param table + */ + public JdbcToArrowTimeZoneTest(Table table) { + this.table = table; + } + + /** + * This method returns collection of Table object for each test iteration + * + * @return + * @throws SQLException + * @throws ClassNotFoundException + * @throws IOException + */ + @Parameters + public static Collection getTestData() throws SQLException, ClassNotFoundException, IOException { + return Arrays.asList(prepareTestData(testFiles, JdbcToArrowTimeZoneTest.class)); + } + + /** + * Test Method to test JdbcToArrow Functionality for various H2 DB based datatypes with TimeZone based Date, + * Time and Timestamp datatype + */ + @Test + public void testJdbcToArroValues() throws SQLException, IOException { + testDataSets(JdbcToArrow.sqlToArrow(conn, table.getQuery(), new RootAllocator(Integer.MAX_VALUE), + Calendar.getInstance(TimeZone.getTimeZone(table.getTimezone())))); + 
testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()), new RootAllocator(Integer.MAX_VALUE), + Calendar.getInstance(TimeZone.getTimeZone(table.getTimezone())))); + testDataSets(JdbcToArrow.sqlToArrow(conn.createStatement().executeQuery(table.getQuery()), + Calendar.getInstance(TimeZone.getTimeZone(table.getTimezone())))); + } + + /** + * This method calls the assert methods for various DataSets + * + * @param root + */ + public void testDataSets(VectorSchemaRoot root) { + switch (table.getType()) { + case EST_DATE: + case GMT_DATE: + case PST_DATE: + assertDateVectorValues((DateMilliVector) root.getVector(table.getVector()), table.getValues().length, table.getLongValues()); + break; + case EST_TIME: + case GMT_TIME: + case PST_TIME: + assertTimeVectorValues((TimeMilliVector) root.getVector(table.getVector()), table.getValues().length, table.getLongValues()); + break; + case EST_TIMESTAMP: + case GMT_TIMESTAMP: + case PST_TIMESTAMP: + assertTimeStampVectorValues((TimeStampVector) root.getVector(table.getVector()), table.getValues().length, table.getLongValues()); + break; + } + } + +} diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_all_datatypes_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_all_datatypes_h2.yml new file mode 100644 index 00000000000..03b3d3fed66 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_all_datatypes_h2.yml @@ -0,0 +1,120 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
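[Editor's note] The H2 fixture that follows, like the others in this directory, is a small YAML document describing one test scenario: the DDL to run (create), the rows to insert (data), the query to execute, the drop statement, the expected row count, and the expected per-column values that the assertion helpers compare against. The tests obtain these fixtures through AbstractJdbcToArrowTest.prepareTestData, which is not part of this excerpt; as a rough illustration only, and assuming the Table class is a plain Jackson-compatible bean whose properties match these YAML keys (Jackson's YAML dataformat is on the test classpath), loading one fixture could look roughly like this:

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import java.io.InputStream;
import org.apache.arrow.adapter.jdbc.Table;

public class FixtureLoadingSketch {
  public static void main(String[] args) throws Exception {
    // Assumption: Table maps the YAML keys (name, create, data, query, drop, rowCount, values, ...)
    // as ordinary bean properties; the real prepareTestData helper may do this differently.
    ObjectMapper mapper = new ObjectMapper(new YAMLFactory())
        .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    try (InputStream in = FixtureLoadingSketch.class.getClassLoader()
        .getResourceAsStream("h2/test1_all_datatypes_h2.yml")) {
      Table table = mapper.readValue(in, Table.class);
      System.out.println(table.getQuery()); // the SQL the parameterized tests run against H2
    }
  }
}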
+ +name: 'table1' + +create: 'CREATE TABLE table1 (int_field1 INT, bool_field2 BOOLEAN, tinyint_field3 TINYINT, smallint_field4 SMALLINT, bigint_field5 BIGINT, + decimal_field6 DECIMAL(20,2), double_field7 DOUBLE, real_field8 REAL, time_field9 TIME, date_field10 DATE, timestamp_field11 TIMESTAMP, + binary_field12 BINARY(100), varchar_field13 VARCHAR(256), blob_field14 BLOB, clob_field15 CLOB, char_field16 CHAR(16), bit_field17 BIT);' + +data: + - 'INSERT INTO table1 VALUES (101, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1);' + + - 'INSERT INTO table1 VALUES (101, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1);' + + - 'INSERT INTO table1 VALUES (101, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1);' + + - 'INSERT INTO table1 VALUES (101, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1);' + + - 'INSERT INTO table1 VALUES (101, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1);' + + - 'INSERT INTO table1 
VALUES (101, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1);' + + - 'INSERT INTO table1 VALUES (101, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1);' + + - 'INSERT INTO table1 VALUES (101, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1);' + + - 'INSERT INTO table1 VALUES (101, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1);' + + - 'INSERT INTO table1 VALUES (101, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1);' + +query: 'select int_field1, bool_field2, tinyint_field3, smallint_field4, bigint_field5, decimal_field6, double_field7, real_field8, + time_field9, date_field10, timestamp_field11, binary_field12, varchar_field13, blob_field14, clob_field15, char_field16, bit_field17 from table1' + +drop: 'DROP table table1;' + +rowCount: '10' + +values: + - 'INT_FIELD1=101,101,101,101,101,101,101,101,101,101' + - 'BOOL_FIELD2=1,1,1,1,1,1,1,1,1,1' + - 'BIT_FIELD17=1,1,1,1,1,1,1,1,1,1' + - 
'TINYINT_FIELD3=45,45,45,45,45,45,45,45,45,45' + - 'SMALLINT_FIELD4=12000,12000,12000,12000,12000,12000,12000,12000,12000,12000' + - 'BIGINT_FIELD5=92233720,92233720,92233720,92233720,92233720,92233720,92233720,92233720,92233720,92233720' + - 'REAL_FIELD8=56478356785.345f,56478356785.345f,56478356785.345f,56478356785.345f,56478356785.345f,56478356785.345f,56478356785.345f,56478356785.345f,56478356785.345f,56478356785.345f' + - 'DECIMAL_FIELD6=17345667789.23,17345667789.23,17345667789.23,17345667789.23,17345667789.23,17345667789.23,17345667789.23,17345667789.23,17345667789.23,17345667789.23' + - 'DOUBLE_FIELD7=56478356785.345,56478356785.345,56478356785.345,56478356785.345,56478356785.345,56478356785.345,56478356785.345,56478356785.345,56478356785.345,56478356785.345' + - 'TIME_FIELD9=45935000,45935000,45935000,45935000,45935000,45935000,45935000,45935000,45935000,45935000' + - 'DATE_FIELD10=1518393600000,1518393600000,1518393600000,1518393600000,1518393600000,1518393600000,1518393600000,1518393600000,1518393600000,1518393600000' + - 'TIMESTAMP_FIELD11=1518439535000,1518439535000,1518439535000,1518439535000,1518439535000,1518439535000,1518439535000,1518439535000,1518439535000,1518439535000' + - 'CHAR_FIELD16=some char text,some char text,some char text,some char text,some char text, + some char text,some char text,some char text,some char text,some char text' + - 'VARCHAR_FIELD13=some text that needs to be converted to varchar,some text that needs to be converted to varchar, + some text that needs to be converted to varchar,some text that needs to be converted to varchar, + some text that needs to be converted to varchar,some text that needs to be converted to varchar, + some text that needs to be converted to varchar,some text that needs to be converted to varchar, + some text that needs to be converted to varchar,some text that needs to be converted to varchar' + - 'BINARY_FIELD12=736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - 'BLOB_FIELD14=736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 
736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - 'CLOB_FIELD15=some text that needs to be converted to clob,some text that needs to be converted to clob, + some text that needs to be converted to clob,some text that needs to be converted to clob, + some text that needs to be converted to clob,some text that needs to be converted to clob, + some text that needs to be converted to clob,some text that needs to be converted to clob, + some text that needs to be converted to clob,some text that needs to be converted to clob' \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_all_datatypes_null_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_all_datatypes_null_h2.yml new file mode 100644 index 00000000000..977879df491 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_all_datatypes_null_h2.yml @@ -0,0 +1,51 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
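[Editor's note] The two null fixtures that follow drive JdbcToArrowNullTest: either every column or only a selected subset is inserted as SQL NULL, and the test then checks that each resulting Arrow vector reports null for every row. The real check lives in JdbcToArrowTestHelper.assertNullValues, which is not shown in this excerpt; a minimal sketch of what such a check amounts to, using only the public ValueVector API, might be:

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import org.apache.arrow.vector.ValueVector;

public class NullAssertionSketch {
  // Sketch only: the real assertNullValues helper may differ in detail.
  static void assertAllNull(ValueVector vector, int rowCount) {
    assertEquals(rowCount, vector.getValueCount());
    assertEquals(rowCount, vector.getNullCount());
    for (int i = 0; i < rowCount; i++) {
      assertTrue("row " + i + " should be null", vector.isNull(i));
    }
  }
}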
+ +name: 'table1' + +type: 'null' + +vectors: + - 'INT_FIELD1' + - 'BOOL_FIELD2' + - 'TINYINT_FIELD3' + - 'SMALLINT_FIELD4' + - 'BIGINT_FIELD5' + - 'DECIMAL_FIELD6' + - 'DOUBLE_FIELD7' + - 'REAL_FIELD8' + - 'TIME_FIELD9' + - 'DATE_FIELD10' + - 'TIMESTAMP_FIELD11' + - 'BINARY_FIELD12' + - 'VARCHAR_FIELD13' + - 'BLOB_FIELD14' + - 'CLOB_FIELD15' + - 'CHAR_FIELD16' + - 'BIT_FIELD17' + +rowCount: '5' + +create: 'CREATE TABLE table1 (int_field1 INT, bool_field2 BOOLEAN, tinyint_field3 TINYINT, smallint_field4 SMALLINT, bigint_field5 BIGINT, + decimal_field6 DECIMAL(20,2), double_field7 DOUBLE, real_field8 REAL, time_field9 TIME, date_field10 DATE, timestamp_field11 TIMESTAMP, + binary_field12 BINARY(100), varchar_field13 VARCHAR(256), blob_field14 BLOB, clob_field15 CLOB, char_field16 CHAR(16), bit_field17 BIT);' + +data: + - 'INSERT INTO table1 VALUES (null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);' + - 'INSERT INTO table1 VALUES (null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);' + - 'INSERT INTO table1 VALUES (null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);' + - 'INSERT INTO table1 VALUES (null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);' + - 'INSERT INTO table1 VALUES (null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);' + +query: 'select int_field1, bool_field2, tinyint_field3, smallint_field4, bigint_field5, decimal_field6, double_field7, real_field8, + time_field9, date_field10, timestamp_field11, binary_field12, varchar_field13, blob_field14, clob_field15, char_field16, bit_field17 from table1' + +drop: 'DROP table table1;' \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_bigint_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_bigint_h2.yml new file mode 100644 index 00000000000..066bececfd7 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_bigint_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
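[Editor's note] The single-column fixtures that follow (bigint, binary, bit, blob, bool, char, ...) each create a one-column table, load ten identical rows, and list the values the corresponding Arrow vector should hold. Outside the parameterized harness, the same conversion can be reproduced against an in-memory H2 database with the sqlToArrow(Connection, query, allocator, Calendar) overload that the tests exercise; the class name and printed summary below are illustrative only:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
import java.util.Calendar;

import org.apache.arrow.adapter.jdbc.JdbcToArrow;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.BigIntVector;
import org.apache.arrow.vector.VectorSchemaRoot;

public class BigIntFixtureSketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:sketch");
         Statement stmt = conn.createStatement()) {
      // DDL and one sample row taken from the bigint fixture below.
      stmt.execute("CREATE TABLE table1 (bigint_field5 BIGINT)");
      stmt.execute("INSERT INTO table1 VALUES (92233720)");

      VectorSchemaRoot root = JdbcToArrow.sqlToArrow(
          conn, "select bigint_field5 from table1",
          new RootAllocator(Integer.MAX_VALUE), Calendar.getInstance());

      // H2 upper-cases unquoted identifiers, so the vector is named BIGINT_FIELD5.
      BigIntVector vector = (BigIntVector) root.getVector("BIGINT_FIELD5");
      System.out.println(root.getRowCount() + " row(s), first value = " + vector.get(0));
    }
  }
}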
+ +name: 'table1' + +type: 'big_int' + +vector: 'BIGINT_FIELD5' + +create: 'CREATE TABLE table1 (bigint_field5 BIGINT);' + +data: + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + +query: 'select bigint_field5 from table1;' + +drop: 'DROP table table1;' + +values: + - '92233720' + - '92233720' + - '92233720' + - '92233720' + - '92233720' + - '92233720' + - '92233720' + - '92233720' + - '92233720' + - '92233720' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_binary_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_binary_h2.yml new file mode 100644 index 00000000000..ce3e4f12717 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_binary_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
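[Editor's note] The binary and blob fixtures store their expected values as hex strings (the bytes of "some text that needs to be converted to binary"). Presumably the getBinaryValues helper decodes that hex before comparing against VarBinaryVector contents; a minimal sketch of such a comparison, with a hand-rolled hex decoder, could look like this:

import java.util.Arrays;

import org.apache.arrow.vector.VarBinaryVector;

public class BinaryAssertionSketch {
  // Sketch only: the real getBinaryValues/assertVarBinaryVectorValues helpers may differ.
  static byte[] decodeHex(String hex) {
    byte[] out = new byte[hex.length() / 2];
    for (int i = 0; i < out.length; i++) {
      out[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
    }
    return out;
  }

  static boolean rowMatches(VarBinaryVector vector, int row, String expectedHex) {
    return Arrays.equals(decodeHex(expectedHex), vector.get(row));
  }
}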
+ +name: 'table1' + +type: 'binary' + +vector: 'BINARY_FIELD12' + +create: 'CREATE TABLE table1 (binary_field12 BINARY(100));' + +data: + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + +query: 'select binary_field12 from table1;' + +drop: 'DROP table table1;' + +values: + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_bit_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_bit_h2.yml new file mode 100644 index 00000000000..aeb7a20e0b3 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_bit_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. 
See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'bit' + +vector: 'BIT_FIELD17' + +create: 'CREATE TABLE table1 (bit_field17 BIT);' + +data: + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + +query: 'select bit_field17 from table1;' + +drop: 'DROP table table1;' + +values: + - '1' + - '1' + - '1' + - '1' + - '1' + - '1' + - '1' + - '1' + - '1' + - '1' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_blob_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_blob_h2.yml new file mode 100644 index 00000000000..b4cd2ca80ca --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_blob_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
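[Editor's note] Both the BIT fixture above and the BOOLEAN fixture a little further down surface as an Arrow BitVector; the tests only differ in whether the expected values are compared as 0/1 integers (assertBitVectorValues) or as booleans (assertBooleanVectorValues). A small sketch of reading a BitVector row both ways:

import org.apache.arrow.vector.BitVector;

public class BitReadSketch {
  // Sketch only: get(i) exposes the raw 0/1 value, getObject(i) the boxed Boolean.
  static String describeRow(BitVector vector, int row) {
    if (vector.isNull(row)) {
      return "null";
    }
    return vector.get(row) + " (" + vector.getObject(row) + ")";
  }
}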
+ +name: 'table1' + +type: 'blob' + +vector: 'BLOB_FIELD14' + +create: 'CREATE TABLE table1 (blob_field14 BLOB);' + +data: + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + +query: 'select blob_field14 from table1;' + +drop: 'DROP table table1;' + +values: + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_bool_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_bool_h2.yml new file mode 100644 index 00000000000..8219a55ecab --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_bool_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. 
See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'bool' + +vector: 'BOOL_FIELD2' + +create: 'CREATE TABLE table1 (bool_field2 BOOLEAN);' + +data: + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + +query: 'select bool_field2 from table1;' + +drop: 'DROP table table1;' + +values: + - 'true' + - 'true' + - 'true' + - 'true' + - 'true' + - 'true' + - 'true' + - 'true' + - 'true' + - 'true' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_char_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_char_h2.yml new file mode 100644 index 00000000000..6e2cb185c3b --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_char_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'char' + +vector: 'CHAR_FIELD16' + +create: 'CREATE TABLE table1 (char_field16 CHAR(16));' + +data: + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + +query: 'select char_field16 from table1;' + +drop: 'DROP table table1;' + +values: + - 'some char text' + - 'some char text' + - 'some char text' + - 'some char text' + - 'some char text' + - 'some char text' + - 'some char text' + - 'some char text' + - 'some char text' + - 'some char text' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_charset_ch_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_charset_ch_h2.yml new file mode 100644 index 00000000000..1b6d7d503b8 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_charset_ch_h2.yml @@ -0,0 +1,43 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. 
You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'charset' + +vectors: + - 'VARCHAR_FIELD13' + - 'CHAR_FIELD16' + - 'CLOB_FIELD15' + +rowCount: '5' + +charSet: 'GBK' + +create: 'CREATE TABLE table1 (int_field1 INT, varchar_field13 VARCHAR(256), clob_field15 CLOB, char_field16 CHAR(128));' + +data: + - 'INSERT INTO table1 VALUES (101,''一些帶有char編碼的文本需要轉換為varchar'', ''一些带有char编码的文本需要转换为clob'', ''一些char编码的字符文本'');' + - 'INSERT INTO table1 VALUES (101,''一些帶有char編碼的文本需要轉換為varchar'', ''一些带有char编码的文本需要转换为clob'', ''一些char编码的字符文本'');' + - 'INSERT INTO table1 VALUES (101,''一些帶有char編碼的文本需要轉換為varchar'', ''一些带有char编码的文本需要转换为clob'', ''一些char编码的字符文本'');' + - 'INSERT INTO table1 VALUES (101,''一些帶有char編碼的文本需要轉換為varchar'', ''一些带有char编码的文本需要转换为clob'', ''一些char编码的字符文本'');' + - 'INSERT INTO table1 VALUES (101,''一些帶有char編碼的文本需要轉換為varchar'', ''一些带有char编码的文本需要转换为clob'', ''一些char编码的字符文本'');' + +query: 'select varchar_field13, clob_field15, char_field16 from table1' + +drop: 'DROP table table1;' + +values: + - 'VARCHAR_FIELD13=一些帶有char編碼的文本需要轉換為varchar,一些帶有char編碼的文本需要轉換為varchar,一些帶有char編碼的文本需要轉換為varchar, + 一些帶有char編碼的文本需要轉換為varchar,一些帶有char編碼的文本需要轉換為varchar' + - 'CLOB_FIELD15=一些带有char编码的文本需要转换为clob,一些带有char编码的文本需要转换为clob,一些带有char编码的文本需要转换为clob, + 一些带有char编码的文本需要转换为clob,一些带有char编码的文本需要转换为clob' + - 'CHAR_FIELD16=一些char编码的字符文本,一些char编码的字符文本,一些char编码的字符文本,一些char编码的字符文本,一些char编码的字符文本' \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_charset_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_charset_h2.yml new file mode 100644 index 00000000000..75734a221b8 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_charset_h2.yml @@ -0,0 +1,53 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'table1' + +type: 'charset' + +vectors: + - 'VARCHAR_FIELD13' + - 'CHAR_FIELD16' + - 'CLOB_FIELD15' + +rowCount: '10' + +create: 'CREATE TABLE table1 (int_field1 INT, varchar_field13 VARCHAR(256), clob_field15 CLOB, char_field16 CHAR(128));' + +data: + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + +query: 'select varchar_field13, clob_field15, char_field16 from table1' + +drop: 'DROP table table1;' + +values: + - 'VARCHAR_FIELD13=some text with char encoding that needs to be converted to varchar,some text with char encoding that needs to be converted to varchar, + some text with char encoding that needs to be converted to varchar,some text with char encoding that needs to be converted to varchar, + some text with char encoding that needs to be converted to varchar,some text with char encoding that needs to be converted to varchar, + some text with char encoding that needs to be converted to varchar,some text with char encoding that needs to be converted to varchar, + some text with char encoding that needs to be converted to varchar,some text with char encoding that needs to be converted to varchar' + - 'CLOB_FIELD15=some text with char encoding that needs to be converted to clob,some text with char encoding that needs to be converted to clob, + some text with char encoding that needs to be converted to clob,some text with char encoding that needs to be converted to clob, + some text with 
char encoding that needs to be converted to clob,some text with char encoding that needs to be converted to clob, + some text with char encoding that needs to be converted to clob,some text with char encoding that needs to be converted to clob, + some text with char encoding that needs to be converted to clob,some text with char encoding that needs to be converted to clob' + - 'CHAR_FIELD16=some char text with char encoding,some char text with char encoding,some char text with char encoding,some char text with char encoding,some char text with char encoding, + some char text with char encoding,some char text with char encoding,some char text with char encoding,some char text with char encoding,some char text with char encoding' \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_charset_jp_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_charset_jp_h2.yml new file mode 100644 index 00000000000..10c33f443b6 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_charset_jp_h2.yml @@ -0,0 +1,43 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'table1' + +type: 'charset' + +vectors: + - 'VARCHAR_FIELD13' + - 'CHAR_FIELD16' + - 'CLOB_FIELD15' + +rowCount: '5' + +charSet: 'SJIS' + +create: 'CREATE TABLE table1 (int_field1 INT, varchar_field13 VARCHAR(256), clob_field15 CLOB, char_field16 CHAR(128));' + +data: + - 'INSERT INTO table1 VALUES (101,''varcharに変換する必要があるcharエンコーディングのテキスト'', ''charエンコーディングのあるテキストをclobに変換する必要がある'', ''charエンコーディングのあるcharテキスト'');' + - 'INSERT INTO table1 VALUES (101,''varcharに変換する必要があるcharエンコーディングのテキスト'', ''charエンコーディングのあるテキストをclobに変換する必要がある'', ''charエンコーディングのあるcharテキスト'');' + - 'INSERT INTO table1 VALUES (101,''varcharに変換する必要があるcharエンコーディングのテキスト'', ''charエンコーディングのあるテキストをclobに変換する必要がある'', ''charエンコーディングのあるcharテキスト'');' + - 'INSERT INTO table1 VALUES (101,''varcharに変換する必要があるcharエンコーディングのテキスト'', ''charエンコーディングのあるテキストをclobに変換する必要がある'', ''charエンコーディングのあるcharテキスト'');' + - 'INSERT INTO table1 VALUES (101,''varcharに変換する必要があるcharエンコーディングのテキスト'', ''charエンコーディングのあるテキストをclobに変換する必要がある'', ''charエンコーディングのあるcharテキスト'');' + +query: 'select varchar_field13, clob_field15, char_field16 from table1' + +drop: 'DROP table table1;' + +values: + - 'VARCHAR_FIELD13=varcharに変換する必要があるcharエンコーディングのテキスト,varcharに変換する必要があるcharエンコーディングのテキスト,varcharに変換する必要があるcharエンコーディングのテキスト, + varcharに変換する必要があるcharエンコーディングのテキスト,varcharに変換する必要があるcharエンコーディングのテキスト' + - 'CLOB_FIELD15=charエンコーディングのあるテキストをclobに変換する必要がある,charエンコーディングのあるテキストをclobに変換する必要がある,charエンコーディングのあるテキストをclobに変換する必要がある, + charエンコーディングのあるテキストをclobに変換する必要がある,charエンコーディングのあるテキストをclobに変換する必要がある' + - 'CHAR_FIELD16=charエンコーディングのあるcharテキスト,charエンコーディングのあるcharテキスト,charエンコーディングのあるcharテキスト,charエンコーディングのあるcharテキスト,charエンコーディングのあるcharテキスト' \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_charset_kr_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_charset_kr_h2.yml new file mode 100644 index 00000000000..a00a41b539a --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_charset_kr_h2.yml @@ -0,0 +1,43 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'table1' + +type: 'charset' + +vectors: + - 'VARCHAR_FIELD13' + - 'CHAR_FIELD16' + - 'CLOB_FIELD15' + +rowCount: '5' + +charSet: 'EUC-KR' + +create: 'CREATE TABLE table1 (int_field1 INT, varchar_field13 VARCHAR(256), clob_field15 CLOB, char_field16 CHAR(128));' + +data: + - 'INSERT INTO table1 VALUES (101,''char 인코딩을 사용하는 일부 텍스트를 varchar로 변환해야합니다.'', ''clob로 변환해야하는 char 인코딩을 가진 텍스트'', ''char 인코딩을 사용한 char 텍스트'');' + - 'INSERT INTO table1 VALUES (101,''char 인코딩을 사용하는 일부 텍스트를 varchar로 변환해야합니다.'', ''clob로 변환해야하는 char 인코딩을 가진 텍스트'', ''char 인코딩을 사용한 char 텍스트'');' + - 'INSERT INTO table1 VALUES (101,''char 인코딩을 사용하는 일부 텍스트를 varchar로 변환해야합니다.'', ''clob로 변환해야하는 char 인코딩을 가진 텍스트'', ''char 인코딩을 사용한 char 텍스트'');' + - 'INSERT INTO table1 VALUES (101,''char 인코딩을 사용하는 일부 텍스트를 varchar로 변환해야합니다.'', ''clob로 변환해야하는 char 인코딩을 가진 텍스트'', ''char 인코딩을 사용한 char 텍스트'');' + - 'INSERT INTO table1 VALUES (101,''char 인코딩을 사용하는 일부 텍스트를 varchar로 변환해야합니다.'', ''clob로 변환해야하는 char 인코딩을 가진 텍스트'', ''char 인코딩을 사용한 char 텍스트'');' + +query: 'select varchar_field13, clob_field15, char_field16 from table1' + +drop: 'DROP table table1;' + +values: + - 'VARCHAR_FIELD13=char 인코딩을 사용하는 일부 텍스트를 varchar로 변환해야합니다.,char 인코딩을 사용하는 일부 텍스트를 varchar로 변환해야합니다.,char 인코딩을 사용하는 일부 텍스트를 varchar로 변환해야합니다., + char 인코딩을 사용하는 일부 텍스트를 varchar로 변환해야합니다.,char 인코딩을 사용하는 일부 텍스트를 varchar로 변환해야합니다.' + - 'CLOB_FIELD15=clob로 변환해야하는 char 인코딩을 가진 텍스트,clob로 변환해야하는 char 인코딩을 가진 텍스트,clob로 변환해야하는 char 인코딩을 가진 텍스트, + clob로 변환해야하는 char 인코딩을 가진 텍스트,clob로 변환해야하는 char 인코딩을 가진 텍스트' + - 'CHAR_FIELD16=char 인코딩을 사용한 char 텍스트,char 인코딩을 사용한 char 텍스트,char 인코딩을 사용한 char 텍스트,char 인코딩을 사용한 char 텍스트,char 인코딩을 사용한 char 텍스트' \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_clob_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_clob_h2.yml new file mode 100644 index 00000000000..57c69ffe06d --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_clob_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'table1' + +type: 'clob' + +vector: 'CLOB_FIELD15' + +create: 'CREATE TABLE table1 (clob_field15 CLOB);' + +data: + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + +query: 'select CLOB_FIELD15 from table1;' + +drop: 'DROP table table1;' + +values: + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_date_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_date_h2.yml new file mode 100644 index 00000000000..45aa56c7417 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_date_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'table1' + +type: 'date' + +vector: 'DATE_FIELD10' + +create: 'CREATE TABLE table1 (date_field10 DATE);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + +query: 'select date_field10 from table1;' + +drop: 'DROP table table1;' + +values: + - '1518393600000' + - '1518393600000' + - '1518393600000' + - '1518393600000' + - '1518393600000' + - '1518393600000' + - '1518393600000' + - '1518393600000' + - '1518393600000' + - '1518393600000' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_decimal_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_decimal_h2.yml new file mode 100644 index 00000000000..3ee15c40942 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_decimal_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'decimal' + +vector: 'DECIMAL_FIELD6' + +create: 'CREATE TABLE table1 (decimal_field6 DECIMAL(20,2));' + +data: + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + +query: 'select decimal_field6 from table1;' + +drop: 'DROP table table1;' + +values: + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_double_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_double_h2.yml new file mode 100644 index 00000000000..f4190092871 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_double_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. 
You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'double' + +vector: 'DOUBLE_FIELD7' + +create: 'CREATE TABLE table1 (double_field7 DOUBLE);' + +data: + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + +query: 'select double_field7 from table1;' + +drop: 'DROP table table1;' + +values: + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_est_date_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_est_date_h2.yml new file mode 100644 index 00000000000..290c32ebafa --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_est_date_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'table1' + +type: 'est_date' + +timezone: 'EST' + +vector: 'DATE_FIELD10' + +create: 'CREATE TABLE table1 (date_field10 DATE);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + +query: 'select date_field10 from table1;' + +drop: 'DROP table table1;' + +values: + - '1518411600000' + - '1518411600000' + - '1518411600000' + - '1518411600000' + - '1518411600000' + - '1518411600000' + - '1518411600000' + - '1518411600000' + - '1518411600000' + - '1518411600000' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_est_time_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_est_time_h2.yml new file mode 100644 index 00000000000..c6fc7a18035 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_est_time_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'est_time' + +timezone: 'EST' + +vector: 'TIME_FIELD9' + +create: 'CREATE TABLE table1 (time_field9 TIME);' + +data: + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + +query: 'select time_field9 from table1;' + +drop: 'DROP table table1;' + +values: + - '63935000' + - '63935000' + - '63935000' + - '63935000' + - '63935000' + - '63935000' + - '63935000' + - '63935000' + - '63935000' + - '63935000' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_est_timestamp_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_est_timestamp_h2.yml new file mode 100644 index 00000000000..b0ec5b70839 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_est_timestamp_h2.yml @@ -0,0 +1,49 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. 
You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'est_timestamp' + +timezone: 'EST' + +vector: 'TIMESTAMP_FIELD11' + +create: 'CREATE TABLE table1 (timestamp_field11 TIMESTAMP);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + +query: 'select timestamp_field11 from table1;' + +drop: 'DROP table table1;' + +values: + - '1518457535000' + - '1518457535000' + - '1518457535000' + - '1518457535000' + - '1518457535000' + - '1518457535000' + - '1518457535000' + - '1518457535000' + - '1518457535000' + - '1518457535000' + \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_gmt_date_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_gmt_date_h2.yml new file mode 100644 index 00000000000..03929c936e4 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_gmt_date_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'table1' + +type: 'gmt_date' + +timezone: 'GMT' + +vector: 'DATE_FIELD10' + +create: 'CREATE TABLE table1 (date_field10 DATE);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + +query: 'select date_field10 from table1;' + +drop: 'DROP table table1;' + +values: + - '1518393600000' + - '1518393600000' + - '1518393600000' + - '1518393600000' + - '1518393600000' + - '1518393600000' + - '1518393600000' + - '1518393600000' + - '1518393600000' + - '1518393600000' \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_gmt_time_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_gmt_time_h2.yml new file mode 100644 index 00000000000..ae28c51e285 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_gmt_time_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'gmt_time' + +timezone: 'GMT' + +vector: 'TIME_FIELD9' + +create: 'CREATE TABLE table1 (time_field9 TIME);' + +data: + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + +query: 'select time_field9 from table1;' + +drop: 'DROP table table1;' + +values: + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_gmt_timestamp_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_gmt_timestamp_h2.yml new file mode 100644 index 00000000000..b468f5af948 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_gmt_timestamp_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. 
The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'gmt_timestamp' + +timezone: 'GMT' + +vector: 'TIMESTAMP_FIELD11' + +create: 'CREATE TABLE table1 (timestamp_field11 TIMESTAMP);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + +query: 'select timestamp_field11 from table1;' + +drop: 'DROP table table1;' + +values: + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_int_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_int_h2.yml new file mode 100644 index 00000000000..7fb6d686720 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_int_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'table1' + +type: 'int' + +vector: 'INT_FIELD1' + +create: 'CREATE TABLE table1 (int_field1 INT);' + +data: + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + +values: + - '101' + - '101' + - '101' + - '101' + - '101' + - '101' + - '101' + - '101' + - '101' + - '101' + +query: 'select int_field1 from table1;' + +drop: 'DROP table table1;' \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_pst_date_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_pst_date_h2.yml new file mode 100644 index 00000000000..81a668f37a4 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_pst_date_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'pst_date' + +timezone: 'PST' + +vector: 'DATE_FIELD10' + +create: 'CREATE TABLE table1 (date_field10 DATE);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + +query: 'select date_field10 from table1;' + +drop: 'DROP table table1;' + +values: + - '1518422400000' + - '1518422400000' + - '1518422400000' + - '1518422400000' + - '1518422400000' + - '1518422400000' + - '1518422400000' + - '1518422400000' + - '1518422400000' + - '1518422400000' \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_pst_time_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_pst_time_h2.yml new file mode 100644 index 00000000000..058d54d2027 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_pst_time_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. 
You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'pst_time' + +timezone: 'PST' + +vector: 'TIME_FIELD9' + +create: 'CREATE TABLE table1 (time_field9 TIME);' + +data: + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + +query: 'select time_field9 from table1;' + +drop: 'DROP table table1;' + +values: + - '74735000' + - '74735000' + - '74735000' + - '74735000' + - '74735000' + - '74735000' + - '74735000' + - '74735000' + - '74735000' + - '74735000' \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_pst_timestamp_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_pst_timestamp_h2.yml new file mode 100644 index 00000000000..19b6b5f4405 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_pst_timestamp_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'table1' + +type: 'pst_timestamp' + +timezone: 'PST' + +vector: 'TIMESTAMP_FIELD11' + +create: 'CREATE TABLE table1 (timestamp_field11 TIMESTAMP);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + +query: 'select timestamp_field11 from table1;' + +drop: 'DROP table table1;' + +values: + - '1518468335000' + - '1518468335000' + - '1518468335000' + - '1518468335000' + - '1518468335000' + - '1518468335000' + - '1518468335000' + - '1518468335000' + - '1518468335000' + - '1518468335000' \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_real_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_real_h2.yml new file mode 100644 index 00000000000..c8f8aeb7810 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_real_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'real' + +vector: 'REAL_FIELD8' + +create: 'CREATE TABLE table1 (real_field8 REAL);' + +data: + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + +query: 'select real_field8 from table1;' + +drop: 'DROP table table1;' + +values: + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_selected_datatypes_null_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_selected_datatypes_null_h2.yml new file mode 100644 index 00000000000..93b1aae556f --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_selected_datatypes_null_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. 
See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'selected_null_column' + +vectors: + - 'BIGINT_FIELD5' + - 'DECIMAL_FIELD6' + - 'DOUBLE_FIELD7' + - 'REAL_FIELD8' + - 'TIME_FIELD9' + - 'DATE_FIELD10' + - 'TIMESTAMP_FIELD11' + - 'BINARY_FIELD12' + - 'VARCHAR_FIELD13' + - 'BLOB_FIELD14' + - 'CLOB_FIELD15' + - 'CHAR_FIELD16' + - 'BIT_FIELD17' + +rowCount: '5' + +create: 'CREATE TABLE table1 (int_field1 INT, bool_field2 BOOLEAN, tinyint_field3 TINYINT, smallint_field4 SMALLINT, bigint_field5 BIGINT, + decimal_field6 DECIMAL(20,2), double_field7 DOUBLE, real_field8 REAL, time_field9 TIME, date_field10 DATE, timestamp_field11 TIMESTAMP, + binary_field12 BINARY(100), varchar_field13 VARCHAR(256), blob_field14 BLOB, clob_field15 CLOB, char_field16 CHAR(16), bit_field17 BIT);' + +data: + - 'INSERT INTO table1 (int_field1, bool_field2, tinyint_field3, smallint_field4) VALUES (102, 0, 46, 12001);' + - 'INSERT INTO table1 (int_field1, bool_field2, tinyint_field3, smallint_field4) VALUES (102, 0, 46, 12001);' + - 'INSERT INTO table1 (int_field1, bool_field2, tinyint_field3, smallint_field4) VALUES (102, 0, 46, 12001);' + - 'INSERT INTO table1 (int_field1, bool_field2, tinyint_field3, smallint_field4) VALUES (102, 0, 46, 12001);' + - 'INSERT INTO table1 (int_field1, bool_field2, tinyint_field3, smallint_field4) VALUES (102, 0, 46, 12001);' + +query: 'select bigint_field5, decimal_field6, double_field7, real_field8, time_field9, date_field10, timestamp_field11, binary_field12, varchar_field13, blob_field14, clob_field15, char_field16, bit_field17 from table1' + +drop: 'DROP table table1;' \ No newline at end of file diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_smallint_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_smallint_h2.yml new file mode 100644 index 00000000000..887c74f4db5 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_smallint_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'table1' + +type: 'small_int' + +vector: 'SMALLINT_FIELD4' + +create: 'CREATE TABLE table1 (smallint_field4 SMALLINT);' + +data: + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + +query: 'select smallint_field4 from table1;' + +drop: 'DROP table table1;' + +values: + - '12000' + - '12000' + - '12000' + - '12000' + - '12000' + - '12000' + - '12000' + - '12000' + - '12000' + - '12000' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_time_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_time_h2.yml new file mode 100644 index 00000000000..c9baaee8dd1 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_time_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'time' + +vector: 'TIME_FIELD9' + +create: 'CREATE TABLE table1 (time_field9 TIME);' + +data: + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + +query: 'select time_field9 from table1;' + +drop: 'DROP table table1;' + +values: + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_timestamp_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_timestamp_h2.yml new file mode 100644 index 00000000000..7d93faad1fa --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_timestamp_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. 
You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'timestamp' + +vector: 'TIMESTAMP_FIELD11' + +create: 'CREATE TABLE table1 (timestamp_field11 TIMESTAMP);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + +query: 'select timestamp_field11 from table1;' + +drop: 'DROP table table1;' + +values: + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_tinyint_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_tinyint_h2.yml new file mode 100644 index 00000000000..a419416c8f1 --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_tinyint_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'table1' + +type: 'tinyint' + +vector: 'TINYINT_FIELD3' + +create: 'CREATE TABLE table1 (tinyint_field3 TINYINT);' + +data: + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + +query: 'select tinyint_field3 from table1;' + +drop: 'DROP table table1;' + +values: + - '45' + - '45' + - '45' + - '45' + - '45' + - '45' + - '45' + - '45' + - '45' + - '45' diff --git a/java/adapter/jdbc/src/test/resources/h2/test1_varchar_h2.yml b/java/adapter/jdbc/src/test/resources/h2/test1_varchar_h2.yml new file mode 100644 index 00000000000..0bd142178cc --- /dev/null +++ b/java/adapter/jdbc/src/test/resources/h2/test1_varchar_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'table1' + +type: 'varchar' + +vector: 'VARCHAR_FIELD13' + +create: 'CREATE TABLE table1 (varchar_field13 VARCHAR(256));' + +data: + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + +query: 'select varchar_field13 from table1;' + +drop: 'DROP table table1;' + +values: + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' diff --git a/java/pom.xml b/java/pom.xml index ce7c550ddcc..834465aa655 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -118,7 +118,6 @@ **/*.proto **/*.fmpp **/target/** 
- **/*.iml **/*.tdd **/*.project **/TAGS @@ -132,6 +131,7 @@ **/*.linux **/client/build/** **/*.tbl + **/*.iml @@ -623,6 +623,7 @@ memory vector tools + adapter/jdbc plasma
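
Every fixture above follows the same layout: `create` and `data` build an H2 table, `query` is the statement handed to the adapter, `drop` tears the table down, and `values` (together with `vector`/`vectors` and the optional `rowCount`, `timezone`, and `charSet` keys) lists the expected contents of the resulting Arrow vectors. Below is a minimal, illustrative sketch of how one of these YAML files could be consumed against an in-memory H2 database; the class name is hypothetical, and the `JdbcToArrow.sqlToArrow(Connection, String, allocator)` entry point is an assumption about the adapter's API rather than a confirmed signature.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;

import org.apache.arrow.adapter.jdbc.JdbcToArrow;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.ValueVector;
import org.apache.arrow.vector.VectorSchemaRoot;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
import java.util.List;
import java.util.Map;

public class JdbcToArrowYamlSketch {

  public static void main(String[] args) throws Exception {
    // Parse one of the fixtures above (path matches src/test/resources/h2/...).
    ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
    Map<String, Object> config = mapper.readValue(
        JdbcToArrowYamlSketch.class.getResourceAsStream("/h2/test1_int_h2.yml"), Map.class);

    // Stand up an in-memory H2 database and load the table described by the fixture.
    try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:JdbcToArrowYamlSketch");
         Statement stmt = conn.createStatement()) {
      stmt.execute((String) config.get("create"));
      for (Object insert : (List<?>) config.get("data")) {
        stmt.execute((String) insert);
      }

      // Convert the query result to Arrow. sqlToArrow(Connection, String, allocator) is
      // an assumed entry point of the adapter, not a confirmed signature.
      try (RootAllocator allocator = new RootAllocator(Integer.MAX_VALUE);
           VectorSchemaRoot root =
               JdbcToArrow.sqlToArrow(conn, (String) config.get("query"), allocator)) {

        // Compare each row against the 'values' section of the fixture.
        ValueVector vector = root.getVector((String) config.get("vector"));
        List<?> expected = (List<?>) config.get("values");
        for (int i = 0; i < expected.size(); i++) {
          System.out.println("expected=" + expected.get(i) + " actual=" + vector.getObject(i));
        }
      }

      stmt.execute((String) config.get("drop"));
    }
  }
}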