@@ -23,7 +23,7 @@ import org.apache.hudi.DataSourceWriteOptions._
import org.apache.hudi.client.SparkRDDWriteClient
import org.apache.hudi.client.common.HoodieSparkEngineContext
import org.apache.hudi.client.utils.MetadataConversionUtils
import org.apache.hudi.common.config.HoodieMetadataConfig
import org.apache.hudi.common.config.{HoodieMetadataConfig, TypedProperties}
import org.apache.hudi.common.model._
import org.apache.hudi.common.table.timeline.HoodieInstant
import org.apache.hudi.common.table.{HoodieTableConfig, HoodieTableMetaClient}
@@ -37,12 +37,10 @@ import org.apache.spark.sql.functions.{col, not}
import org.junit.jupiter.api.Assertions.{assertEquals, assertTrue}
import org.junit.jupiter.api._

import java.util.Properties
import java.util.concurrent.atomic.AtomicInteger
import java.util.stream.Collectors
import scala.collection.JavaConverters._
import scala.collection.{JavaConverters, mutable}
import scala.util.Using

class RecordLevelIndexTestBase extends HoodieSparkClientTestBase {
var spark: SparkSession = _
@@ -230,8 +228,7 @@ class RecordLevelIndexTestBase extends HoodieSparkClientTestBase {
}

protected def getWriteConfig(hudiOpts: Map[String, String]): HoodieWriteConfig = {
val props = new Properties()
props.putAll(JavaConverters.mapAsJavaMapConverter(hudiOpts).asJava)
val props = TypedProperties.fromMap(JavaConverters.mapAsJavaMapConverter(hudiOpts).asJava)
HoodieWriteConfig.newBuilder()
.withProps(props)
.withPath(basePath)
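The change is the same in each of these test helpers: rather than allocating a `java.util.Properties` and copying the options in with `putAll`, the options map is passed straight to `TypedProperties.fromMap`, so the now-unused `java.util.Properties` import can be dropped. A minimal sketch of the before/after, assuming only the calls visible in this diff plus standard `java.util.Properties`; the `hudiOpts` value, the object name, and the builder usage below are illustrative, not copied from the tests:

```scala
import org.apache.hudi.common.config.TypedProperties
import org.apache.hudi.config.HoodieWriteConfig

import java.util.Properties
import scala.collection.JavaConverters._

object TypedPropsFromMapSketch {
  // Illustrative options map; the tests pass their own hudiOpts.
  val hudiOpts: Map[String, String] = Map("hoodie.metadata.enable" -> "true")

  // Old pattern: allocate java.util.Properties, then copy the converted map in.
  val legacyProps: Properties = {
    val p = new Properties()
    p.putAll(hudiOpts.asJava)
    p
  }

  // New pattern: one call builds Hudi's TypedProperties from the converted map.
  val typedProps: TypedProperties = TypedProperties.fromMap(hudiOpts.asJava)

  // TypedProperties extends java.util.Properties, so it drops into the same
  // builder call the tests already use (further builder options omitted here).
  val writeConfig: HoodieWriteConfig = HoodieWriteConfig.newBuilder()
    .withProps(typedProps)
    .withPath("/tmp/hudi_test_table") // placeholder; the tests pass basePath
    .build()
}
```

The net effect in each helper is one less temporary object and one less import.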
@@ -22,7 +22,7 @@ import org.apache.hudi.DataSourceWriteOptions.{DELETE_OPERATION_OPT_VAL, PRECOMB
import org.apache.hudi.client.SparkRDDWriteClient
import org.apache.hudi.client.common.HoodieSparkEngineContext
import org.apache.hudi.client.utils.MetadataConversionUtils
import org.apache.hudi.common.config.HoodieMetadataConfig
import org.apache.hudi.common.config.{HoodieMetadataConfig, TypedProperties}
import org.apache.hudi.common.fs.FSUtils
import org.apache.hudi.common.model.{HoodieCommitMetadata, HoodieTableType, WriteOperationType}
import org.apache.hudi.common.table.HoodieTableConfig
@@ -40,7 +40,6 @@ import org.junit.jupiter.api.Assertions.{assertEquals, assertFalse, assertTrue}
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.MethodSource

import java.util.Properties
import scala.collection.JavaConverters
import scala.jdk.CollectionConverters.{asScalaIteratorConverter, collectionAsScalaIterableConverter}

@@ -299,8 +298,7 @@ class TestColumnStatsIndexWithSQL extends ColumnStatIndexTestBase {
}

protected def getWriteConfig(hudiOpts: Map[String, String]): HoodieWriteConfig = {
val props = new Properties()
props.putAll(JavaConverters.mapAsJavaMapConverter(hudiOpts).asJava)
val props = TypedProperties.fromMap(JavaConverters.mapAsJavaMapConverter(hudiOpts).asJava)
HoodieWriteConfig.newBuilder()
.withProps(props)
.withPath(basePath)
@@ -20,7 +20,7 @@ package org.apache.hudi.functional

import org.apache.hadoop.fs.Path
import org.apache.hudi.DataSourceWriteOptions._
import org.apache.hudi.common.config.HoodieMetadataConfig
import org.apache.hudi.common.config.{HoodieMetadataConfig, TypedProperties}
import org.apache.hudi.common.model.HoodieTableType
import org.apache.hudi.common.table.timeline.HoodieInstant
import org.apache.hudi.common.table.{HoodieTableConfig, HoodieTableMetaClient}
@@ -35,7 +35,6 @@ import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.EnumSource

import java.util.concurrent.atomic.AtomicInteger
import java.util.Properties
import scala.collection.JavaConverters._
import scala.collection.mutable

@@ -158,8 +157,7 @@ class TestMetadataRecordIndex extends HoodieSparkClientTestBase {
}

private def getWriteConfig(hudiOpts: Map[String, String]): HoodieWriteConfig = {
val props = new Properties()
props.putAll(hudiOpts.asJava)
val props = TypedProperties.fromMap(hudiOpts.asJava)
HoodieWriteConfig.newBuilder()
.withProps(props)
.withPath(basePath)
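The `TestMetadataRecordIndex` helper differs only in using the implicit `asJava` from `scala.collection.JavaConverters._` instead of the explicit `mapAsJavaMapConverter(...)` call; both produce the same `java.util.Map`. A short hedged sketch of why `TypedProperties` is a convenient target for these helpers, assuming the usual typed getters on Hudi's `TypedProperties`; the object name and option values are illustrative:

```scala
import org.apache.hudi.common.config.TypedProperties

import scala.collection.JavaConverters._

object TypedPropsReadbackSketch {
  // Illustrative options; any string-keyed map built by a test would do.
  val hudiOpts: Map[String, String] = Map(
    "hoodie.metadata.enable" -> "true",
    "hoodie.table.name" -> "rli_test_table")

  val props: TypedProperties = TypedProperties.fromMap(hudiOpts.asJava)

  // TypedProperties layers typed accessors over java.util.Properties, so the
  // options can be read back without manual string parsing in assertions.
  val metadataEnabled: Boolean = props.getBoolean("hoodie.metadata.enable", false)
  val tableName: String = props.getString("hoodie.table.name", "unknown")
}
```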