diff --git a/docker/demo/config/test-suite/target.avsc b/docker/demo/config/test-suite/target.avsc index 11e23a493b874..29a2500a97263 100644 --- a/docker/demo/config/test-suite/target.avsc +++ b/docker/demo/config/test-suite/target.avsc @@ -21,7 +21,7 @@ "fields" : [ { "name" : "timestamp", - "type" : "double" + "type" : "long" }, { "name" : "_row_key", "type" : "string" diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java index 7ac1d61b0e199..0cb278e634619 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestBootstrapCommand.java @@ -64,7 +64,7 @@ public void init() { // generate test data partitions = Arrays.asList("2018", "2019", "2020"); - double timestamp = new Double(Instant.now().toEpochMilli()).longValue(); + long timestamp = Instant.now().toEpochMilli(); for (int i = 0; i < partitions.size(); i++) { Dataset df = TestBootstrap.generateTestRawTripDataset(timestamp, i * NUM_OF_RECORDS, i * NUM_OF_RECORDS + NUM_OF_RECORDS, null, jsc, sqlContext); diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java index f68b59c4e84c8..17b997ac26f23 100644 --- a/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java +++ b/hudi-cli/src/test/java/org/apache/hudi/cli/integ/ITTestHDFSParquetImportCommand.java @@ -163,12 +163,12 @@ private void verifyResultData(List expectData) { List readData = ds.select("timestamp", "_row_key", "rider", "driver", "begin_lat", "begin_lon", "end_lat", "end_lon").collectAsList(); List result = readData.stream().map(row -> - new HoodieTripModel(row.getDouble(0), row.getString(1), row.getString(2), row.getString(3), row.getDouble(4), + new HoodieTripModel(row.getLong(0), 
row.getString(1), row.getString(2), row.getString(3), row.getDouble(4), row.getDouble(5), row.getDouble(6), row.getDouble(7))) .collect(Collectors.toList()); List expected = expectData.stream().map(g -> - new HoodieTripModel(Double.parseDouble(g.get("timestamp").toString()), + new HoodieTripModel(Long.parseLong(g.get("timestamp").toString()), g.get("_row_key").toString(), g.get("rider").toString(), g.get("driver").toString(), diff --git a/hudi-client/src/test/java/org/apache/hudi/table/action/commit/TestCopyOnWriteActionExecutor.java b/hudi-client/src/test/java/org/apache/hudi/table/action/commit/TestCopyOnWriteActionExecutor.java index 564ed080204d0..0e115d0b193e0 100644 --- a/hudi-client/src/test/java/org/apache/hudi/table/action/commit/TestCopyOnWriteActionExecutor.java +++ b/hudi-client/src/test/java/org/apache/hudi/table/action/commit/TestCopyOnWriteActionExecutor.java @@ -401,7 +401,8 @@ public void testFileSizeUpsertRecords() throws Exception { @Test public void testInsertUpsertWithHoodieAvroPayload() throws Exception { - HoodieWriteConfig config = makeHoodieClientConfigBuilder() + Schema schema = getSchemaFromResource(TestCopyOnWriteActionExecutor.class, "/testDataGeneratorSchema.txt"); + HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath).withSchema(schema.toString()) .withStorageConfig(HoodieStorageConfig.newBuilder() .parquetMaxFileSize(1000 * 1024).hfileMaxFileSize(1000 * 1024).build()).build(); metaClient = HoodieTableMetaClient.reload(metaClient); diff --git a/hudi-client/src/test/resources/testDataGeneratorSchema.txt b/hudi-client/src/test/resources/testDataGeneratorSchema.txt new file mode 100644 index 0000000000000..ada01b3530ff5 --- /dev/null +++ b/hudi-client/src/test/resources/testDataGeneratorSchema.txt @@ -0,0 +1,128 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +{ + "type" : "record", + "name" : "triprec", + "fields" : [ + { + "name" : "timestamp", + "type" : "long" + }, { + "name" : "_row_key", + "type" : "string" + }, { + "name" : "rider", + "type" : "string" + }, { + "name" : "driver", + "type" : "string" + }, { + "name" : "begin_lat", + "type" : "double" + }, { + "name" : "begin_lon", + "type" : "double" + }, { + "name" : "end_lat", + "type" : "double" + }, { + "name" : "end_lon", + "type" : "double" + }, { + "name" : "distance_in_meters", + "type" : "int" + }, { + "name" : "seconds_since_epoch", + "type" : "long" + }, { + "name" : "weight", + "type" : "float" + },{ + "name" : "nation", + "type" : "bytes" + },{ + "name" : "current_date", + "type" : { + "type" : "int", + "logicalType" : "date" + } + },{ + "name" : "current_ts", + "type" : { + "type" : "long" + } + },{ + "name" : "height", + "type" : { + "type" : "fixed", + "name" : "abc", + "size" : 5, + "logicalType" : "decimal", + "precision" : 10, + "scale": 6 + } + }, { + "name" :"city_to_state", + "type" : { + "type" : "map", + "values": "string" + } + }, + { + "name" : "fare", + "type" : { + "type" : "record", + "name" : "fare", + "fields" : [ + { + "name" : "amount", + "type" : "double" + }, + { + "name" : "currency", + "type" : "string" + } + ] + } + }, + { + "name" : 
"tip_history", + "type" : { + "type" : "array", + "items" : { + "type" : "record", + "name" : "tip_history", + "fields" : [ + { + "name" : "amount", + "type" : "double" + }, + { + "name" : "currency", + "type" : "string" + } + ] + } + } + }, + { + "name" : "_hoodie_is_deleted", + "type" : "boolean", + "default" : false + } ] +} diff --git a/hudi-common/src/test/java/org/apache/hudi/common/testutils/HoodieTestDataGenerator.java b/hudi-common/src/test/java/org/apache/hudi/common/testutils/HoodieTestDataGenerator.java index 90b15d023cce2..68b66b0dab06d 100644 --- a/hudi-common/src/test/java/org/apache/hudi/common/testutils/HoodieTestDataGenerator.java +++ b/hudi-common/src/test/java/org/apache/hudi/common/testutils/HoodieTestDataGenerator.java @@ -89,7 +89,7 @@ public class HoodieTestDataGenerator { {DEFAULT_FIRST_PARTITION_PATH, DEFAULT_SECOND_PARTITION_PATH, DEFAULT_THIRD_PARTITION_PATH}; public static final int DEFAULT_PARTITION_DEPTH = 3; public static final String TRIP_SCHEMA_PREFIX = "{\"type\": \"record\"," + "\"name\": \"triprec\"," + "\"fields\": [ " - + "{\"name\": \"timestamp\",\"type\": \"double\"}," + "{\"name\": \"_row_key\", \"type\": \"string\"}," + + "{\"name\": \"timestamp\",\"type\": \"long\"}," + "{\"name\": \"_row_key\", \"type\": \"string\"}," + "{\"name\": \"rider\", \"type\": \"string\"}," + "{\"name\": \"driver\", \"type\": \"string\"}," + "{\"name\": \"begin_lat\", \"type\": \"double\"}," + "{\"name\": \"begin_lon\", \"type\": \"double\"}," + "{\"name\": \"end_lat\", \"type\": \"double\"}," + "{\"name\": \"end_lon\", \"type\": \"double\"},"; @@ -116,14 +116,14 @@ public class HoodieTestDataGenerator { TRIP_SCHEMA_PREFIX + FARE_FLATTENED_SCHEMA + TRIP_SCHEMA_SUFFIX; public static final String TRIP_SCHEMA = "{\"type\":\"record\",\"name\":\"tripUberRec\",\"fields\":[" - + "{\"name\":\"timestamp\",\"type\":\"double\"},{\"name\":\"_row_key\",\"type\":\"string\"},{\"name\":\"rider\",\"type\":\"string\"}," + + 
"{\"name\":\"timestamp\",\"type\":\"long\"},{\"name\":\"_row_key\",\"type\":\"string\"},{\"name\":\"rider\",\"type\":\"string\"}," + "{\"name\":\"driver\",\"type\":\"string\"},{\"name\":\"fare\",\"type\":\"double\"},{\"name\": \"_hoodie_is_deleted\", \"type\": \"boolean\", \"default\": false}]}"; public static final String SHORT_TRIP_SCHEMA = "{\"type\":\"record\",\"name\":\"shortTripRec\",\"fields\":[" - + "{\"name\":\"timestamp\",\"type\":\"double\"},{\"name\":\"_row_key\",\"type\":\"string\"},{\"name\":\"rider\",\"type\":\"string\"}," + + "{\"name\":\"timestamp\",\"type\":\"long\"},{\"name\":\"_row_key\",\"type\":\"string\"},{\"name\":\"rider\",\"type\":\"string\"}," + "{\"name\":\"driver\",\"type\":\"string\"},{\"name\":\"fare\",\"type\":\"double\"},{\"name\": \"_hoodie_is_deleted\", \"type\": \"boolean\", \"default\": false}]}"; public static final String NULL_SCHEMA = Schema.create(Schema.Type.NULL).toString(); - public static final String TRIP_HIVE_COLUMN_TYPES = "double,string,string,string,double,double,double,double,int,bigint,float,binary,int,bigint,decimal(10,6)," + public static final String TRIP_HIVE_COLUMN_TYPES = "bigint,string,string,string,double,double,double,double,int,bigint,float,binary,int,bigint,decimal(10,6)," + "map,struct,array>,boolean"; @@ -204,7 +204,7 @@ public static RawTripTestPayload generateRandomValue(HoodieKey key, String insta public static RawTripTestPayload generateRandomValue( HoodieKey key, String instantTime, boolean isFlattened) throws IOException { GenericRecord rec = generateGenericRecord( - key.getRecordKey(), "rider-" + instantTime, "driver-" + instantTime, 0.0, + key.getRecordKey(), "rider-" + instantTime, "driver-" + instantTime, 0, false, isFlattened); return new RawTripTestPayload(rec.toString(), key.getRecordKey(), key.getPartitionPath(), TRIP_EXAMPLE_SCHEMA); } @@ -213,12 +213,12 @@ public static RawTripTestPayload generateRandomValue( * Generates a new avro record with TRIP_SCHEMA, retaining the key if 
optionally provided. */ public RawTripTestPayload generatePayloadForTripSchema(HoodieKey key, String commitTime) throws IOException { - GenericRecord rec = generateRecordForTripSchema(key.getRecordKey(), "rider-" + commitTime, "driver-" + commitTime, 0.0); + GenericRecord rec = generateRecordForTripSchema(key.getRecordKey(), "rider-" + commitTime, "driver-" + commitTime, 0); return new RawTripTestPayload(rec.toString(), key.getRecordKey(), key.getPartitionPath(), TRIP_SCHEMA); } public RawTripTestPayload generatePayloadForShortTripSchema(HoodieKey key, String commitTime) throws IOException { - GenericRecord rec = generateRecordForShortTripSchema(key.getRecordKey(), "rider-" + commitTime, "driver-" + commitTime, 0.0); + GenericRecord rec = generateRecordForShortTripSchema(key.getRecordKey(), "rider-" + commitTime, "driver-" + commitTime, 0); return new RawTripTestPayload(rec.toString(), key.getRecordKey(), key.getPartitionPath(), SHORT_TRIP_SCHEMA); } @@ -226,7 +226,7 @@ public RawTripTestPayload generatePayloadForShortTripSchema(HoodieKey key, Strin * Generates a new avro record of the above schema format for a delete. */ public static RawTripTestPayload generateRandomDeleteValue(HoodieKey key, String instantTime) throws IOException { - GenericRecord rec = generateGenericRecord(key.getRecordKey(), "rider-" + instantTime, "driver-" + instantTime, 0.0, + GenericRecord rec = generateGenericRecord(key.getRecordKey(), "rider-" + instantTime, "driver-" + instantTime, 0, true, false); return new RawTripTestPayload(Option.of(rec.toString()), key.getRecordKey(), key.getPartitionPath(), TRIP_EXAMPLE_SCHEMA, true); } @@ -235,17 +235,17 @@ public static RawTripTestPayload generateRandomDeleteValue(HoodieKey key, String * Generates a new avro record of the above schema format, retaining the key if optionally provided. 
*/ public static HoodieAvroPayload generateAvroPayload(HoodieKey key, String instantTime) { - GenericRecord rec = generateGenericRecord(key.getRecordKey(), "rider-" + instantTime, "driver-" + instantTime, 0.0); + GenericRecord rec = generateGenericRecord(key.getRecordKey(), "rider-" + instantTime, "driver-" + instantTime, 0); return new HoodieAvroPayload(Option.of(rec)); } public static GenericRecord generateGenericRecord(String rowKey, String riderName, String driverName, - double timestamp) { + long timestamp) { return generateGenericRecord(rowKey, riderName, driverName, timestamp, false, false); } public static GenericRecord generateGenericRecord(String rowKey, String riderName, String driverName, - double timestamp, boolean isDeleteRecord, + long timestamp, boolean isDeleteRecord, boolean isFlattened) { GenericRecord rec = new GenericData.Record(isFlattened ? FLATTENED_AVRO_SCHEMA : AVRO_SCHEMA); rec.put("_row_key", rowKey); @@ -303,7 +303,7 @@ public static GenericRecord generateGenericRecord(String rowKey, String riderNam /* Generate random record using TRIP_SCHEMA */ - public GenericRecord generateRecordForTripSchema(String rowKey, String riderName, String driverName, double timestamp) { + public GenericRecord generateRecordForTripSchema(String rowKey, String riderName, String driverName, long timestamp) { GenericRecord rec = new GenericData.Record(AVRO_TRIP_SCHEMA); rec.put("_row_key", rowKey); rec.put("timestamp", timestamp); @@ -314,7 +314,7 @@ public GenericRecord generateRecordForTripSchema(String rowKey, String riderName return rec; } - public GenericRecord generateRecordForShortTripSchema(String rowKey, String riderName, String driverName, double timestamp) { + public GenericRecord generateRecordForShortTripSchema(String rowKey, String riderName, String driverName, long timestamp) { GenericRecord rec = new GenericData.Record(AVRO_SHORT_TRIP_SCHEMA); rec.put("_row_key", rowKey); rec.put("timestamp", timestamp); @@ -780,7 +780,7 @@ public List 
generateGenericRecords(int numRecords) { List list = new ArrayList<>(); IntStream.range(0, numRecords).forEach(i -> { list.add(generateGenericRecord(UUID.randomUUID().toString(), UUID.randomUUID().toString(), UUID.randomUUID() - .toString(), RAND.nextDouble())); + .toString(), RAND.nextLong())); }); return list; } diff --git a/hudi-examples/src/main/java/org/apache/hudi/examples/common/HoodieExampleDataGenerator.java b/hudi-examples/src/main/java/org/apache/hudi/examples/common/HoodieExampleDataGenerator.java index a9847ecc0524f..4a9868bd39fea 100644 --- a/hudi-examples/src/main/java/org/apache/hudi/examples/common/HoodieExampleDataGenerator.java +++ b/hudi-examples/src/main/java/org/apache/hudi/examples/common/HoodieExampleDataGenerator.java @@ -86,12 +86,12 @@ public HoodieExampleDataGenerator(String[] partitionPaths, Map,array>,boolean"; @TempDir @@ -155,7 +155,7 @@ private void reloadInputFormats() { rtInputFormat.setConf(rtJobConf); } - public Schema generateNewDataSetAndReturnSchema(double timestamp, int numRecords, List partitionPaths, + public Schema generateNewDataSetAndReturnSchema(long timestamp, int numRecords, List partitionPaths, String srcPath) throws Exception { boolean isPartitioned = partitionPaths != null && !partitionPaths.isEmpty(); Dataset df = generateTestRawTripDataset(timestamp, 0, numRecords, partitionPaths, jsc, sqlContext); @@ -234,7 +234,7 @@ private void testBootstrapCommon(boolean partitioned, boolean deltaCommit, Effec break; } List partitions = Arrays.asList("2020/04/01", "2020/04/02", "2020/04/03"); - double timestamp = new Double(Instant.now().toEpochMilli()).longValue(); + long timestamp = Instant.now().toEpochMilli(); Schema schema = generateNewDataSetAndReturnSchema(timestamp, totalRecords, partitions, bootstrapBasePath); HoodieWriteConfig config = getConfigBuilder(schema.toString()) .withAutoCommit(true) @@ -282,7 +282,7 @@ private void testBootstrapCommon(boolean partitioned, boolean deltaCommit, Effec 
numInstantsAfterBootstrap, timestamp, timestamp, deltaCommit, bootstrapInstants); // Upsert case - double updateTimestamp = new Double(Instant.now().toEpochMilli()).longValue(); + long updateTimestamp = Instant.now().toEpochMilli(); String updateSPath = tmpFolder.toAbsolutePath().toString() + "/data2"; generateNewDataSetAndReturnSchema(updateTimestamp, totalRecords, partitions, updateSPath); JavaRDD updateBatch = @@ -329,13 +329,13 @@ public void testMetadataAndFullBootstrapWithUpdatesMOR() throws Exception { } private void checkBootstrapResults(int totalRecords, Schema schema, String maxInstant, boolean checkNumRawFiles, - int expNumInstants, double expTimestamp, double expROTimestamp, boolean isDeltaCommit) throws Exception { + int expNumInstants, long expTimestamp, long expROTimestamp, boolean isDeltaCommit) throws Exception { checkBootstrapResults(totalRecords, schema, maxInstant, checkNumRawFiles, expNumInstants, expNumInstants, expTimestamp, expROTimestamp, isDeltaCommit, Arrays.asList(maxInstant)); } private void checkBootstrapResults(int totalRecords, Schema schema, String instant, boolean checkNumRawFiles, - int expNumInstants, int numVersions, double expTimestamp, double expROTimestamp, boolean isDeltaCommit, + int expNumInstants, int numVersions, long expTimestamp, long expROTimestamp, boolean isDeltaCommit, List instantsWithValidRecords) throws Exception { metaClient.reloadActiveTimeline(); assertEquals(expNumInstants, metaClient.getCommitsTimeline().filterCompletedInstants().countInstants()); @@ -378,7 +378,7 @@ private void checkBootstrapResults(int totalRecords, Schema schema, String insta Set seenKeys = new HashSet<>(); for (GenericRecord r : records) { assertEquals(r.get("_row_key").toString(), r.get("_hoodie_record_key").toString(), "Record :" + r); - assertEquals(expROTimestamp, ((DoubleWritable)r.get("timestamp")).get(), 0.1, "Record :" + r); + assertEquals(expROTimestamp, ((LongWritable)r.get("timestamp")).get(), "Record :" + r); 
assertFalse(seenKeys.contains(r.get("_hoodie_record_key").toString())); seenKeys.add(r.get("_hoodie_record_key").toString()); } @@ -395,7 +395,7 @@ private void checkBootstrapResults(int totalRecords, Schema schema, String insta assertEquals(totalRecords, records.size()); for (GenericRecord r : records) { assertEquals(r.get("_row_key").toString(), r.get("_hoodie_record_key").toString(), "Realtime Record :" + r); - assertEquals(expTimestamp, ((DoubleWritable)r.get("timestamp")).get(),0.1, "Realtime Record :" + r); + assertEquals(expTimestamp, ((LongWritable)r.get("timestamp")).get(), "Realtime Record :" + r); assertFalse(seenKeys.contains(r.get("_hoodie_record_key").toString())); seenKeys.add(r.get("_hoodie_record_key").toString()); } @@ -550,7 +550,7 @@ public HoodieWriteConfig.Builder getConfigBuilder(String schemaStr) { return builder; } - public static Dataset generateTestRawTripDataset(double timestamp, int from, int to, List partitionPaths, + public static Dataset generateTestRawTripDataset(long timestamp, int from, int to, List partitionPaths, JavaSparkContext jsc, SQLContext sqlContext) { boolean isPartitioned = partitionPaths != null && !partitionPaths.isEmpty(); final List records = new ArrayList<>(); diff --git a/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/TestHDFSParquetImporter.java b/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/TestHDFSParquetImporter.java index 414cfb8fcd585..1fe6800eb7e1e 100644 --- a/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/TestHDFSParquetImporter.java +++ b/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/TestHDFSParquetImporter.java @@ -160,12 +160,12 @@ public void testImportWithInsert() throws IOException, ParseException { List readData = ds.select("timestamp", "_row_key", "rider", "driver", "begin_lat", "begin_lon", "end_lat", "end_lon").collectAsList(); List result = readData.stream().map(row -> - new HoodieTripModel(row.getDouble(0), 
row.getString(1), row.getString(2), row.getString(3), row.getDouble(4), + new HoodieTripModel(row.getLong(0), row.getString(1), row.getString(2), row.getString(3), row.getDouble(4), row.getDouble(5), row.getDouble(6), row.getDouble(7))) .collect(Collectors.toList()); List expected = insertData.stream().map(g -> - new HoodieTripModel(Double.parseDouble(g.get("timestamp").toString()), + new HoodieTripModel(Long.parseLong(g.get("timestamp").toString()), g.get("_row_key").toString(), g.get("rider").toString(), g.get("driver").toString(), @@ -207,13 +207,13 @@ public void testImportWithUpsert() throws IOException, ParseException { List readData = ds.select("timestamp", "_row_key", "rider", "driver", "begin_lat", "begin_lon", "end_lat", "end_lon").collectAsList(); List result = readData.stream().map(row -> - new HoodieTripModel(row.getDouble(0), row.getString(1), row.getString(2), row.getString(3), row.getDouble(4), + new HoodieTripModel(row.getLong(0), row.getString(1), row.getString(2), row.getString(3), row.getDouble(4), row.getDouble(5), row.getDouble(6), row.getDouble(7))) .collect(Collectors.toList()); // get expected result. 
List expected = expectData.stream().map(g -> - new HoodieTripModel(Double.parseDouble(g.get("timestamp").toString()), + new HoodieTripModel(Long.parseLong(g.get("timestamp").toString()), g.get("_row_key").toString(), g.get("rider").toString(), g.get("driver").toString(), @@ -336,7 +336,7 @@ public HDFSParquetImporter.Config getHDFSParquetImporterConfig(String srcPath, S */ public static class HoodieTripModel { - double timestamp; + long timestamp; String rowKey; String rider; String driver; @@ -345,7 +345,7 @@ public static class HoodieTripModel { double endLat; double endLon; - public HoodieTripModel(double timestamp, String rowKey, String rider, String driver, double beginLat, + public HoodieTripModel(long timestamp, String rowKey, String rider, String driver, double beginLat, double beginLon, double endLat, double endLon) { this.timestamp = timestamp; this.rowKey = rowKey; diff --git a/hudi-utilities/src/test/resources/delta-streamer-config/source-flattened.avsc b/hudi-utilities/src/test/resources/delta-streamer-config/source-flattened.avsc index ed3a7be358efa..7b3ee50d9fd92 100644 --- a/hudi-utilities/src/test/resources/delta-streamer-config/source-flattened.avsc +++ b/hudi-utilities/src/test/resources/delta-streamer-config/source-flattened.avsc @@ -21,7 +21,7 @@ "fields" : [ { "name" : "timestamp", - "type" : "double" + "type" : "long" }, { "name" : "_row_key", "type" : "string" diff --git a/hudi-utilities/src/test/resources/delta-streamer-config/source.avsc b/hudi-utilities/src/test/resources/delta-streamer-config/source.avsc index e912573a6d5fb..4b4beb35dd8f3 100644 --- a/hudi-utilities/src/test/resources/delta-streamer-config/source.avsc +++ b/hudi-utilities/src/test/resources/delta-streamer-config/source.avsc @@ -21,7 +21,7 @@ "fields" : [ { "name" : "timestamp", - "type" : "double" + "type" : "long" }, { "name" : "_row_key", "type" : "string" diff --git a/hudi-utilities/src/test/resources/delta-streamer-config/source_short_trip_uber.avsc 
b/hudi-utilities/src/test/resources/delta-streamer-config/source_short_trip_uber.avsc index 8a589bdf7380b..f0c6098320f14 100644 --- a/hudi-utilities/src/test/resources/delta-streamer-config/source_short_trip_uber.avsc +++ b/hudi-utilities/src/test/resources/delta-streamer-config/source_short_trip_uber.avsc @@ -21,7 +21,7 @@ "fields" : [ { "name" : "timestamp", - "type" : "double" + "type" : "long" }, { "name" : "_row_key", "type" : "string" diff --git a/hudi-utilities/src/test/resources/delta-streamer-config/source_uber.avsc b/hudi-utilities/src/test/resources/delta-streamer-config/source_uber.avsc index 324862ee90473..4d5248975d2c6 100644 --- a/hudi-utilities/src/test/resources/delta-streamer-config/source_uber.avsc +++ b/hudi-utilities/src/test/resources/delta-streamer-config/source_uber.avsc @@ -21,7 +21,7 @@ "fields" : [ { "name" : "timestamp", - "type" : "double" + "type" : "long" }, { "name" : "_row_key", "type" : "string" diff --git a/hudi-utilities/src/test/resources/delta-streamer-config/target-flattened.avsc b/hudi-utilities/src/test/resources/delta-streamer-config/target-flattened.avsc index 4e9e4afa37726..dcea4cb090b4a 100644 --- a/hudi-utilities/src/test/resources/delta-streamer-config/target-flattened.avsc +++ b/hudi-utilities/src/test/resources/delta-streamer-config/target-flattened.avsc @@ -21,7 +21,7 @@ "fields" : [ { "name" : "timestamp", - "type" : "double" + "type" : "long" }, { "name" : "_row_key", "type" : "string" diff --git a/hudi-utilities/src/test/resources/delta-streamer-config/target.avsc b/hudi-utilities/src/test/resources/delta-streamer-config/target.avsc index a6234f41922b5..4252b7e66ce19 100644 --- a/hudi-utilities/src/test/resources/delta-streamer-config/target.avsc +++ b/hudi-utilities/src/test/resources/delta-streamer-config/target.avsc @@ -21,7 +21,7 @@ "fields" : [ { "name" : "timestamp", - "type" : "double" + "type" : "long" }, { "name" : "_row_key", "type" : "string" diff --git 
a/hudi-utilities/src/test/resources/delta-streamer-config/target_short_trip_uber.avsc b/hudi-utilities/src/test/resources/delta-streamer-config/target_short_trip_uber.avsc index 8a589bdf7380b..f0c6098320f14 100644 --- a/hudi-utilities/src/test/resources/delta-streamer-config/target_short_trip_uber.avsc +++ b/hudi-utilities/src/test/resources/delta-streamer-config/target_short_trip_uber.avsc @@ -21,7 +21,7 @@ "fields" : [ { "name" : "timestamp", - "type" : "double" + "type" : "long" }, { "name" : "_row_key", "type" : "string" diff --git a/hudi-utilities/src/test/resources/delta-streamer-config/target_uber.avsc b/hudi-utilities/src/test/resources/delta-streamer-config/target_uber.avsc index 324862ee90473..4d5248975d2c6 100644 --- a/hudi-utilities/src/test/resources/delta-streamer-config/target_uber.avsc +++ b/hudi-utilities/src/test/resources/delta-streamer-config/target_uber.avsc @@ -21,7 +21,7 @@ "fields" : [ { "name" : "timestamp", - "type" : "double" + "type" : "long" }, { "name" : "_row_key", "type" : "string"