
Commit 8abd4d3

Fix hudi-spark2 module tests
1 parent 4b1d448 commit 8abd4d3

4 files changed, +10 −20 lines

hudi-spark-datasource/hudi-spark-common/src/test/java/org/apache/hudi/internal/HoodieBulkInsertInternalWriterTestBase.java

Lines changed: 6 additions & 5 deletions

@@ -29,7 +29,6 @@
 import org.apache.hudi.keygen.SimpleKeyGenerator;
 import org.apache.hudi.testutils.HoodieClientTestHarness;
 import org.apache.hudi.testutils.SparkDatasetTestUtils;
-
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
 import org.junit.jupiter.api.AfterEach;
@@ -62,6 +61,7 @@ public void setUp() throws Exception {
     initFileSystem();
     initTestDataGenerator();
     initMetaClient();
+    initTimelineService();
   }

   @AfterEach
@@ -87,11 +87,11 @@ protected HoodieWriteConfig getWriteConfig(boolean populateMetaFields, String hi

   protected void assertWriteStatuses(List<HoodieInternalWriteStatus> writeStatuses, int batches, int size,
                                      Option<List<String>> fileAbsPaths, Option<List<String>> fileNames) {
-    assertWriteStatuses(writeStatuses, batches, size, false, fileAbsPaths, fileNames);
+    assertWriteStatuses(writeStatuses, batches, size, false, fileAbsPaths, fileNames, false);
   }

   protected void assertWriteStatuses(List<HoodieInternalWriteStatus> writeStatuses, int batches, int size, boolean areRecordsSorted,
-                                     Option<List<String>> fileAbsPaths, Option<List<String>> fileNames) {
+                                     Option<List<String>> fileAbsPaths, Option<List<String>> fileNames, boolean isHiveStylePartitioning) {
     if (areRecordsSorted) {
       assertEquals(batches, writeStatuses.size());
     } else {
@@ -114,7 +114,8 @@ protected void assertWriteStatuses(List<HoodieInternalWriteStatus> writeStatuses
     int counter = 0;
     for (HoodieInternalWriteStatus writeStatus : writeStatuses) {
      // verify write status
-      assertEquals(HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter % 3], writeStatus.getPartitionPath());
+      String actualPartitionPathFormat = isHiveStylePartitioning ? SparkDatasetTestUtils.PARTITION_PATH_FIELD_NAME + "=%s" : "%s";
+      assertEquals(String.format(actualPartitionPathFormat, HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter % 3]), writeStatus.getPartitionPath());
       if (areRecordsSorted) {
         assertEquals(writeStatus.getTotalRecords(), size);
       } else {
@@ -142,7 +143,7 @@ protected void assertWriteStatuses(List<HoodieInternalWriteStatus> writeStatuses
         assertEquals(sizeMap.get(HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter % 3]), writeStat.getNumWrites());
       }
       assertEquals(fileId, writeStat.getFileId());
-      assertEquals(HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter++ % 3], writeStat.getPartitionPath());
+      assertEquals(String.format(actualPartitionPathFormat, HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter++ % 3]), writeStat.getPartitionPath());
       assertEquals(0, writeStat.getNumDeletes());
       assertEquals(0, writeStat.getNumUpdateWrites());
       assertEquals(0, writeStat.getTotalWriteErrors());
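
For reference, the only behavioural change in the base class is the expected partition-path format: with hive-style partitioning the partition field name is prefixed to the value. Below is a minimal standalone sketch of that formatting logic; the concrete field name and partition value are assumed stand-ins for SparkDatasetTestUtils.PARTITION_PATH_FIELD_NAME and a HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS entry, not values taken from this commit.

// Sketch of the partition-path format toggle introduced in assertWriteStatuses.
// "partition" and "2016/03/15" are assumed stand-ins, not values read from the test utilities.
public class PartitionPathFormatSketch {
  public static void main(String[] args) {
    String partitionPathFieldName = "partition";  // assumed stand-in for SparkDatasetTestUtils.PARTITION_PATH_FIELD_NAME
    String partitionValue = "2016/03/15";         // assumed stand-in for one DEFAULT_PARTITION_PATHS entry
    boolean isHiveStylePartitioning = true;

    // Same ternary as the new base-class code: hive-style prefixes "<field>=" to the value.
    String format = isHiveStylePartitioning ? partitionPathFieldName + "=%s" : "%s";
    String expected = String.format(format, partitionValue);

    // Prints "partition=2016/03/15"; with the flag off it would print "2016/03/15".
    System.out.println(expected);
  }
}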

hudi-spark-datasource/hudi-spark2/src/test/java/org/apache/hudi/internal/TestHoodieBulkInsertDataInternalWriter.java

Lines changed: 3 additions & 12 deletions

@@ -18,19 +18,14 @@

 package org.apache.hudi.internal;

-import org.apache.hudi.common.model.HoodieRecord;
 import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.table.HoodieSparkTable;
 import org.apache.hudi.table.HoodieTable;
-import org.apache.hudi.testutils.SparkDatasetTestUtils;
-
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.catalyst.InternalRow;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
@@ -50,7 +45,6 @@
 /**
  * Unit tests {@link HoodieBulkInsertDataInternalWriter}.
  */
-@Disabled("temp")
 public class TestHoodieBulkInsertDataInternalWriter extends
     HoodieBulkInsertInternalWriterTestBase {

@@ -106,7 +100,7 @@ public void testDataInternalWriter(boolean sorted, boolean populateMetaFields) t
       Option<List<String>> fileNames = Option.of(new ArrayList<>());

       // verify write statuses
-      assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames);
+      assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames, false);

       // verify rows
       Dataset<Row> result = sqlContext.read().parquet(fileAbsPaths.get().toArray(new String[0]));
@@ -148,14 +142,11 @@ public void testDataInternalWriterHiveStylePartitioning() throws Exception {
       Option<List<String>> fileNames = Option.of(new ArrayList<>());

       // verify write statuses
-      assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames);
+      assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames, true);

       // verify rows
       Dataset<Row> result = sqlContext.read().parquet(fileAbsPaths.get().toArray(new String[0]));
       assertOutput(totalInputRows, result, instantTime, fileNames, populateMetaFields);
-
-      result.collectAsList().forEach(entry -> Assertions.assertTrue(entry.getAs(HoodieRecord.PARTITION_PATH_METADATA_FIELD).toString()
-          .contains(SparkDatasetTestUtils.PARTITION_PATH_FIELD_NAME + "=")));
     }
   }

@@ -204,7 +195,7 @@ public void testGlobalFailure() throws Exception {
     Option<List<String>> fileAbsPaths = Option.of(new ArrayList<>());
     Option<List<String>> fileNames = Option.of(new ArrayList<>());
     // verify write statuses
-    assertWriteStatuses(commitMetadata.getWriteStatuses(), 1, size / 2, false, fileAbsPaths, fileNames);
+    assertWriteStatuses(commitMetadata.getWriteStatuses(), 1, size / 2, false, fileAbsPaths, fileNames, false);

     // verify rows
     Dataset<Row> result = sqlContext.read().parquet(fileAbsPaths.get().toArray(new String[0]));
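
The per-row check that testDataInternalWriterHiveStylePartitioning used to run inline is now covered by passing true for the new isHiveStylePartitioning flag. A rough plain-Java sketch of what the removed forEach verified, using assumed example metadata values rather than rows read back through Spark:

import java.util.Arrays;
import java.util.List;

// Sketch of the per-row assertion that the hive-style test previously did inline
// via result.collectAsList().forEach(...). The metadata values below are assumed
// examples; in the real test they come from the _hoodie_partition_path column.
public class HiveStylePartitionPathCheckSketch {
  public static void main(String[] args) {
    String partitionPathFieldName = "partition";  // assumed stand-in for SparkDatasetTestUtils.PARTITION_PATH_FIELD_NAME
    List<String> partitionPathMetadataValues =
        Arrays.asList("partition=2016/03/15", "partition=2015/03/16");  // assumed example values

    // Every row's partition path must carry the "<field>=" prefix when hive-style partitioning is on.
    partitionPathMetadataValues.forEach(value -> {
      if (!value.contains(partitionPathFieldName + "=")) {
        throw new AssertionError("Not hive-style partitioned: " + value);
      }
    });
    System.out.println("all rows use hive-style partition paths");
  }
}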

hudi-spark-datasource/hudi-spark2/src/test/java/org/apache/hudi/internal/TestHoodieDataSourceInternalWriter.java

Lines changed: 0 additions & 2 deletions

@@ -24,7 +24,6 @@
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.config.HoodieWriteConfig;
 import org.apache.hudi.testutils.HoodieClientTestUtils;
-
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.catalyst.InternalRow;
@@ -54,7 +53,6 @@
 /**
  * Unit tests {@link HoodieDataSourceInternalWriter}.
  */
-@Disabled("temp")
 public class TestHoodieDataSourceInternalWriter extends
     HoodieBulkInsertInternalWriterTestBase {

hudi-spark-datasource/hudi-spark3/src/test/java/org/apache/hudi/spark3/internal/TestHoodieBulkInsertDataInternalWriter.java

Lines changed: 1 addition & 1 deletion

@@ -102,7 +102,7 @@ public void testDataInternalWriter(boolean sorted, boolean populateMetaFields) t
       Option<List<String>> fileNames = Option.of(new ArrayList<>());

       // verify write statuses
-      assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames);
+      assertWriteStatuses(commitMetadata.getWriteStatuses(), batches, size, sorted, fileAbsPaths, fileNames, false);

       // verify rows
       Dataset<Row> result = sqlContext.read().parquet(fileAbsPaths.get().toArray(new String[0]));
