diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/utils/PolarisCatalogUtils.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/utils/PolarisCatalogUtils.java
index 5493f0dc36..8d78807e0f 100644
--- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/utils/PolarisCatalogUtils.java
+++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/utils/PolarisCatalogUtils.java
@@ -40,6 +40,9 @@
 import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Utils;
 import org.apache.spark.sql.util.CaseInsensitiveStringMap;
 import scala.Option;
+import scala.Tuple2;
+import scala.collection.immutable.Map$;
+import scala.collection.mutable.Builder;
 
 public class PolarisCatalogUtils {
 
@@ -125,10 +128,10 @@ public static Table loadV1SparkTable(
         new TableIdentifier(
             identifier.name(), Option.apply(namespacePath), Option.apply(catalogName));
 
-    scala.collection.immutable.Map<String, String> scalaOptions =
-        (scala.collection.immutable.Map<String, String>)
-            scala.collection.immutable.Map$.MODULE$.apply(
-                scala.collection.JavaConverters.mapAsScalaMap(tableProperties).toSeq());
+    Builder<Tuple2<String, String>, scala.collection.immutable.Map<String, String>> mb =
+        Map$.MODULE$.newBuilder();
+    tableProperties.forEach((k, v) -> mb.$plus$eq(Tuple2.apply(k, v)));
+    scala.collection.immutable.Map<String, String> scalaOptions = mb.result();
 
     org.apache.spark.sql.catalyst.catalog.CatalogStorageFormat storage =
         DataSource.buildStorageFormatFromOptions(scalaOptions);