diff --git a/common/utils/src/main/scala/org/apache/spark/ErrorClassesJSONReader.scala b/common/utils/src/main/scala/org/apache/spark/ErrorClassesJSONReader.scala
index ca7b9cc1bd647..3c1a9a27bb565 100644
--- a/common/utils/src/main/scala/org/apache/spark/ErrorClassesJSONReader.scala
+++ b/common/utils/src/main/scala/org/apache/spark/ErrorClassesJSONReader.scala
@@ -19,8 +19,8 @@ package org.apache.spark
 import java.net.URL
-import scala.collection.JavaConverters._
 import scala.collection.immutable.Map
+import scala.jdk.CollectionConverters._
 import com.fasterxml.jackson.annotation.JsonIgnore
 import com.fasterxml.jackson.core.`type`.TypeReference
diff --git a/common/utils/src/main/scala/org/apache/spark/SparkException.scala b/common/utils/src/main/scala/org/apache/spark/SparkException.scala
index 5c5bf17c942d6..828948b48c134 100644
--- a/common/utils/src/main/scala/org/apache/spark/SparkException.scala
+++ b/common/utils/src/main/scala/org/apache/spark/SparkException.scala
@@ -22,7 +22,7 @@ import java.sql.{SQLException, SQLFeatureNotSupportedException}
 import java.time.DateTimeException
 import java.util.ConcurrentModificationException
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 class SparkException(
     message: String,
diff --git a/common/utils/src/main/scala/org/apache/spark/SparkThrowableHelper.scala b/common/utils/src/main/scala/org/apache/spark/SparkThrowableHelper.scala
index 0f329b5655b32..5d58d66eec364 100644
--- a/common/utils/src/main/scala/org/apache/spark/SparkThrowableHelper.scala
+++ b/common/utils/src/main/scala/org/apache/spark/SparkThrowableHelper.scala
@@ -17,7 +17,7 @@ package org.apache.spark
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.util.JsonUtils.toJsonString
 import org.apache.spark.util.SparkClassUtils
diff --git a/common/utils/src/main/scala/org/apache/spark/internal/Logging.scala b/common/utils/src/main/scala/org/apache/spark/internal/Logging.scala
index 83e01330ce3f6..80c622bd53288 100644
--- a/common/utils/src/main/scala/org/apache/spark/internal/Logging.scala
+++ b/common/utils/src/main/scala/org/apache/spark/internal/Logging.scala
@@ -17,7 +17,7 @@ package org.apache.spark.internal
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.logging.log4j.{Level, LogManager}
 import org.apache.logging.log4j.core.{Filter, LifeCycle, LogEvent, Logger => Log4jLogger, LoggerContext}
diff --git a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala
index a78ee89a3e933..a46997fcc0a5e 100644
--- a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala
+++ b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroDeserializer.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.avro
 import java.math.BigDecimal
 import java.nio.ByteBuffer
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.avro.{LogicalTypes, Schema, SchemaBuilder}
 import org.apache.avro.Conversions.DecimalConversion
diff --git a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOutputWriter.scala b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOutputWriter.scala
index 23dfe86b6bce1..999955492545d 100644
--- a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOutputWriter.scala
+++ b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOutputWriter.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.avro
 import java.io.{IOException, OutputStream}
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.avro.Schema
 import org.apache.avro.generic.GenericRecord
diff --git a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala
index 34bf47613e7bf..5d052c3672bda 100644
--- a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala
+++ b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroSerializer.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.avro
 import java.nio.ByteBuffer
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.avro.Conversions.DecimalConversion
 import org.apache.avro.LogicalTypes
diff --git a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroUtils.scala b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroUtils.scala
index 2554106d78e9d..55dea6ed959f8 100644
--- a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroUtils.scala
+++ b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroUtils.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.avro
 import java.io.{FileNotFoundException, IOException}
 import java.util.Locale
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.avro.Schema
 import org.apache.avro.file.{DataFileReader, FileReader}
diff --git a/connector/avro/src/main/scala/org/apache/spark/sql/avro/SchemaConverters.scala b/connector/avro/src/main/scala/org/apache/spark/sql/avro/SchemaConverters.scala
index 6f21639e28d68..90d09aa1e41ed 100644
--- a/connector/avro/src/main/scala/org/apache/spark/sql/avro/SchemaConverters.scala
+++ b/connector/avro/src/main/scala/org/apache/spark/sql/avro/SchemaConverters.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql.avro
 import java.util.Locale
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import org.apache.avro.{LogicalTypes, Schema, SchemaBuilder}
 import org.apache.avro.LogicalTypes.{Date, Decimal, LocalTimestampMicros, LocalTimestampMillis, TimestampMicros, TimestampMillis}
diff --git a/connector/avro/src/main/scala/org/apache/spark/sql/avro/functions.scala b/connector/avro/src/main/scala/org/apache/spark/sql/avro/functions.scala
index 74bfaaed9d8a4..5830b2ec42383 100755
--- a/connector/avro/src/main/scala/org/apache/spark/sql/avro/functions.scala
+++ b/connector/avro/src/main/scala/org/apache/spark/sql/avro/functions.scala
@@ -17,7 +17,7 @@ package org.apache.spark.sql.avro
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.sql.Column
diff --git a/connector/avro/src/main/scala/org/apache/spark/sql/v2/avro/AvroScan.scala b/connector/avro/src/main/scala/org/apache/spark/sql/v2/avro/AvroScan.scala
index 763b9abe4f91b..a9f3bb78658ca 100644
--- a/connector/avro/src/main/scala/org/apache/spark/sql/v2/avro/AvroScan.scala
+++ b/connector/avro/src/main/scala/org/apache/spark/sql/v2/avro/AvroScan.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql.v2.avro
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.hadoop.fs.Path
diff --git a/connector/avro/src/main/scala/org/apache/spark/sql/v2/avro/AvroTable.scala b/connector/avro/src/main/scala/org/apache/spark/sql/v2/avro/AvroTable.scala
index f19d856252b50..fe61fe3db8786 100644
--- a/connector/avro/src/main/scala/org/apache/spark/sql/v2/avro/AvroTable.scala
+++ b/connector/avro/src/main/scala/org/apache/spark/sql/v2/avro/AvroTable.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql.v2.avro
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.hadoop.fs.FileStatus
diff --git a/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroFunctionsSuite.scala b/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroFunctionsSuite.scala
index 62f61da75b5dc..07865787d2875 100644
--- a/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroFunctionsSuite.scala
+++ b/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroFunctionsSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.avro
 import java.io.ByteArrayOutputStream
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.avro.{Schema, SchemaBuilder}
 import org.apache.avro.generic.{GenericDatumWriter, GenericRecord, GenericRecordBuilder}
diff --git a/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroSuite.scala b/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroSuite.scala
index d22a2d3697579..47a2fc41fcb01 100644
--- a/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroSuite.scala
+++ b/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroSuite.scala
@@ -23,7 +23,7 @@ import java.nio.file.{Files, Paths, StandardCopyOption}
 import java.sql.{Date, Timestamp}
 import java.util.UUID
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.avro.{AvroTypeException, Schema, SchemaBuilder}
 import org.apache.avro.Schema.{Field, Type}
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Column.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Column.scala
index 4a527040d80cf..326b9ee2c7a85 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Column.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Column.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.connect.proto
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameNaFunctions.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameNaFunctions.scala
index 17b95018f8986..37c2ae1b26288 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameNaFunctions.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameNaFunctions.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 import java.util.Locale
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.connect.proto.{NAReplace, Relation}
 import org.apache.spark.connect.proto.Expression.{Literal => GLiteral}
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameReader.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
index b01d1479a8c78..1ad98dc91b216 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 import java.util.Properties
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.annotation.Stable
 import org.apache.spark.connect.proto.Parse.ParseFormat
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameStatFunctions.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameStatFunctions.scala
index 4d35b4e876795..4daa9fa88e660 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameStatFunctions.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameStatFunctions.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql
 import java.{lang => jl, util => ju}
 import java.io.ByteArrayInputStream
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.SparkException
 import org.apache.spark.connect.proto.{Relation, StatSampleBy}
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
index 0cadcf54af3e2..563a9865e73f2 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 import java.util.{Locale, Properties}
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.annotation.Stable
 import org.apache.spark.connect.proto
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameWriterV2.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameWriterV2.scala
index b698e1dfaa1c9..7107895c0ad2f 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameWriterV2.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/DataFrameWriterV2.scala
@@ -17,8 +17,8 @@ package org.apache.spark.sql
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.connect.proto
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
index 1d83f196b53b1..eb5460cbc86a0 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -18,8 +18,8 @@ package org.apache.spark.sql
 import java.util.{Collections, Locale}
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import scala.reflect.ClassTag
 import scala.reflect.runtime.universe.TypeTag
 import scala.util.control.NonFatal
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/KeyValueGroupedDataset.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/KeyValueGroupedDataset.scala
index 88c8b6a4f8bad..9e7d8e1320eff 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/KeyValueGroupedDataset.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/KeyValueGroupedDataset.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 import java.util.Arrays
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.language.existentials
 import org.apache.spark.api.java.function._
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/RelationalGroupedDataset.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/RelationalGroupedDataset.scala
index c19314a0d5cfa..5ed97e45c7701 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/RelationalGroupedDataset.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/RelationalGroupedDataset.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 import java.util.Locale
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.connect.proto
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 5aa8c5a2bd5c7..abe4d6a96e60a 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -21,7 +21,7 @@ import java.net.URI
 import java.util.concurrent.TimeUnit._
 import java.util.concurrent.atomic.{AtomicLong, AtomicReference}
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.reflect.runtime.universe.TypeTag
 import com.google.common.cache.{CacheBuilder, CacheLoader}
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/avro/functions.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/avro/functions.scala
index c4b16ca0d5e75..61a9c908c7a4b 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/avro/functions.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/avro/functions.scala
@@ -17,7 +17,7 @@ package org.apache.spark.sql.avro
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.sql.Column
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
index 11c3f4e3d1881..cf0fef147ee84 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
@@ -17,7 +17,7 @@ package org.apache.spark.sql.catalog
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.sql.{AnalysisException, DataFrame, Dataset}
 import org.apache.spark.sql.types.StructType
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/expressions/UserDefinedFunction.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/expressions/UserDefinedFunction.scala
index e060dba0b7e42..dcc038eb51da1 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/expressions/UserDefinedFunction.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/expressions/UserDefinedFunction.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql.expressions
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.reflect.runtime.universe.TypeTag
 import scala.util.control.NonFatal
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/expressions/WindowSpec.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/expressions/WindowSpec.scala
index cecfb6a0d919f..681eec17cf22e 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/expressions/WindowSpec.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/expressions/WindowSpec.scala
@@ -17,7 +17,7 @@ package org.apache.spark.sql.expressions
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.annotation.Stable
 import org.apache.spark.connect.proto
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/functions.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/functions.scala
index 5bb8a92c1d2e5..36f1aeb3a6f94 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/functions.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/functions.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql
 import java.util.Collections
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.reflect.runtime.universe.{typeTag, TypeTag}
 import org.apache.spark.connect.proto
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/protobuf/functions.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/protobuf/functions.scala
index 293490928a278..40d4dd0642db1 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/protobuf/functions.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/protobuf/functions.scala
@@ -20,7 +20,7 @@ import java.io.FileNotFoundException
 import java.nio.file.{Files, NoSuchFileException, Paths}
 import java.util.Collections
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.util.control.NonFatal
 import org.apache.spark.annotation.Experimental
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala
index 3d2b8c7be2599..bc8e30cd300c6 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala
@@ -17,7 +17,7 @@ package org.apache.spark.sql.streaming
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.annotation.Evolving
 import org.apache.spark.connect.proto.Read.DataSource
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala
index 54eb6e761407c..f05d29c6f1ab4 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.streaming
 import java.util.Locale
 import java.util.concurrent.TimeoutException
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import com.google.protobuf.ByteString
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/StreamingQuery.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/StreamingQuery.scala
index ceb096b9aff1b..a48367b468d2e 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/StreamingQuery.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/StreamingQuery.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.streaming
 import java.util.UUID
 import java.util.concurrent.TimeoutException
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.annotation.Evolving
 import org.apache.spark.connect.proto.Command
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryManager.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryManager.scala
index d16638e594599..bea1038801497 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryManager.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryManager.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.streaming
 import java.util.UUID
 import java.util.concurrent.{ConcurrentHashMap, ConcurrentMap}
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import com.google.protobuf.ByteString
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/progress.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/progress.scala
index 4002ae21f8c41..a0c124f810e92 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/progress.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/streaming/progress.scala
@@ -21,7 +21,7 @@ import java.{util => ju}
 import java.lang.{Long => JLong}
 import java.util.UUID
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.util.control.NonFatal
 import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala
index 55718ed9c0be8..72892097c7786 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala
@@ -21,8 +21,8 @@ import java.nio.file.Files
 import java.time.DateTimeException
 import java.util.Properties
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import org.apache.commons.io.FileUtils
 import org.apache.commons.io.output.TeeOutputStream
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ColumnTestSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ColumnTestSuite.scala
index a88d6ec116a42..c1e4399ccb054 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ColumnTestSuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ColumnTestSuite.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql
 import java.io.ByteArrayOutputStream
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.sql.{functions => fn}
 import org.apache.spark.sql.test.ConnectFunSuite
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/DataFrameNaFunctionSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/DataFrameNaFunctionSuite.scala
index 393fa19fa70b4..ef1cf789d1e5b 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/DataFrameNaFunctionSuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/DataFrameNaFunctionSuite.scala
@@ -17,7 +17,7 @@ package org.apache.spark.sql
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.sql.internal.SqlApiConf
 import org.apache.spark.sql.test.{QueryTest, SQLHelper}
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/FunctionTestSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/FunctionTestSuite.scala
index 65dd5862d811e..e350bde9946f2 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/FunctionTestSuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/FunctionTestSuite.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql
 import java.util.Collections
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.sql.avro.{functions => avroFn}
 import org.apache.spark.sql.functions._
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/PlanGenerationTestSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/PlanGenerationTestSuite.scala
index aa15fbd75ff4c..db6ddfc53db71 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/PlanGenerationTestSuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/PlanGenerationTestSuite.scala
@@ -20,8 +20,8 @@ import java.nio.file.{Files, Path}
 import java.util.{Collections, Properties}
 import java.util.concurrent.atomic.AtomicLong
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import scala.util.{Failure, Success, Try}
 import com.google.protobuf.util.JsonFormat
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/UserDefinedFunctionE2ETestSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/UserDefinedFunctionE2ETestSuite.scala
index fbc2c1c266262..609fad5a4adbe 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/UserDefinedFunctionE2ETestSuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/UserDefinedFunctionE2ETestSuite.scala
@@ -21,7 +21,7 @@ import java.util.{Iterator => JIterator}
 import java.util.Arrays
 import java.util.concurrent.atomic.AtomicLong
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.api.java.function._
 import org.apache.spark.sql.api.java.UDF2
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/ArtifactSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/ArtifactSuite.scala
index 770143f2e9b4e..9c06f9428154c 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/ArtifactSuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/ArtifactSuite.scala
@@ -20,7 +20,7 @@ import java.io.InputStream
 import java.nio.file.{Files, Path, Paths}
 import java.util.concurrent.TimeUnit
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import com.google.protobuf.ByteString
 import io.grpc.{ManagedChannel, Server}
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala
index 80e245ec78b7d..488118d055287 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql.connect.client
 import java.util.UUID
 import java.util.concurrent.TimeUnit
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import io.grpc.{CallOptions, Channel, ClientCall, ClientInterceptor, MethodDescriptor, Server, Status, StatusRuntimeException}
 import io.grpc.netty.NettyServerBuilder
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/streaming/ClientStreamingQuerySuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/streaming/ClientStreamingQuerySuite.scala
index 5d281cfbfeb35..8bb83a1ac258c 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/streaming/ClientStreamingQuerySuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/streaming/ClientStreamingQuerySuite.scala
@@ -20,8 +20,8 @@ package org.apache.spark.sql.streaming
 import java.io.{File, FileWriter}
 import java.util.concurrent.TimeUnit
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import org.scalatest.concurrent.Eventually.eventually
 import org.scalatest.concurrent.Futures.timeout
diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ArtifactManager.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ArtifactManager.scala
index b1a7746a84ad6..6b08737ed213e 100644
--- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ArtifactManager.scala
+++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ArtifactManager.scala
@@ -23,10 +23,10 @@ import java.util.Arrays
 import java.util.concurrent.CopyOnWriteArrayList
 import java.util.zip.{CheckedInputStream, CRC32}
-import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.concurrent.Promise
 import scala.concurrent.duration.Duration
+import scala.jdk.CollectionConverters._
 import scala.util.control.NonFatal
 import Artifact._
diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ClassFinder.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ClassFinder.scala
index ff6473bfcb1fa..94486c31a1636 100644
--- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ClassFinder.scala
+++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ClassFinder.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.connect.client
 import java.nio.file.{Files, LinkOption, Path, Paths}
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.sql.connect.client.Artifact.LocalFile
diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/CustomSparkConnectBlockingStub.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/CustomSparkConnectBlockingStub.scala
index f02704b2a02c4..f2efa26f6b609 100644
--- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/CustomSparkConnectBlockingStub.scala
+++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/CustomSparkConnectBlockingStub.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql.connect.client
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import io.grpc.ManagedChannel
diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ExecutePlanResponseReattachableIterator.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ExecutePlanResponseReattachableIterator.scala
index 57a629264be10..4c0c1d1f39047 100644
--- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ExecutePlanResponseReattachableIterator.scala
+++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/ExecutePlanResponseReattachableIterator.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.connect.client
 import java.util.UUID
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.util.control.NonFatal
 import io.grpc.{ManagedChannel, StatusRuntimeException}
diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala
index 2d86e8c1e417e..0cc0fed52b096 100644
--- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala
+++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.connect.client
 import java.time.DateTimeException
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.reflect.ClassTag
 import com.google.rpc.ErrorInfo
diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClient.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClient.scala
index a0853cc0621fa..86ed2a2966730 100644
--- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClient.scala
+++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClient.scala
@@ -21,8 +21,8 @@ import java.net.URI
 import java.util.UUID
 import java.util.concurrent.Executor
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import com.google.protobuf.ByteString
 import io.grpc._
diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/arrow/ArrowEncoderUtils.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/arrow/ArrowEncoderUtils.scala
index b9badc5c936fa..18a290ba52620 100644
--- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/arrow/ArrowEncoderUtils.scala
+++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/arrow/ArrowEncoderUtils.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql.connect.client.arrow
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.reflect.ClassTag
 import org.apache.arrow.vector.{FieldVector, VectorSchemaRoot}
diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/arrow/ArrowSerializer.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/arrow/ArrowSerializer.scala
index 4c14489947f92..7dc437d8cbc45 100644
--- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/arrow/ArrowSerializer.scala
+++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/arrow/ArrowSerializer.scala
@@ -23,7 +23,7 @@ import java.nio.channels.Channels
 import java.time.{Duration, Instant, LocalDate, LocalDateTime, Period}
 import java.util.{Map => JMap, Objects}
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import com.google.protobuf.ByteString
 import org.apache.arrow.memory.BufferAllocator
diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/LiteralValueProtoConverter.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/LiteralValueProtoConverter.scala
index 00546c02bc7cf..de7b964e8a72e 100644
--- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/LiteralValueProtoConverter.scala
+++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/LiteralValueProtoConverter.scala
@@ -22,8 +22,8 @@ import java.math.{BigDecimal => JBigDecimal}
 import java.sql.{Date, Timestamp}
 import java.time._
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import scala.reflect.ClassTag
 import scala.reflect.runtime.universe.TypeTag
 import scala.util.Try
diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/ProtoUtils.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/ProtoUtils.scala
index efbdb6ac19f89..c7bf3f93bd0f2 100644
--- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/ProtoUtils.scala
+++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/ProtoUtils.scala
@@ -17,7 +17,7 @@ package org.apache.spark.sql.connect.common
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import com.google.protobuf.{ByteString, Message}
 import com.google.protobuf.Descriptors.FieldDescriptor
diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/UdfUtils.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/UdfUtils.scala
index 433614a4afc81..d7be199f033e1 100644
--- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/UdfUtils.scala
+++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/UdfUtils.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql.connect.common
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.api.java.function._
 import org.apache.spark.sql.Row
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/SparkConnectPlugin.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/SparkConnectPlugin.scala
index ca8617cbe1a74..c801c7e32a483 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/SparkConnectPlugin.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/SparkConnectPlugin.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.connect
 import java.util
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.SparkContext
 import org.apache.spark.api.plugin.{DriverPlugin, ExecutorPlugin, PluginContext, SparkPlugin}
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/artifact/SparkConnectArtifactManager.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/artifact/SparkConnectArtifactManager.scala
index fee99532bd55f..804c314ce67a2 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/artifact/SparkConnectArtifactManager.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/artifact/SparkConnectArtifactManager.scala
@@ -23,7 +23,7 @@ import java.nio.file.{Files, Path, Paths, StandardCopyOption}
 import java.util.concurrent.CopyOnWriteArrayList
 import javax.ws.rs.core.UriBuilder
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.reflect.ClassTag
 import org.apache.commons.io.{FilenameUtils, FileUtils}
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/dsl/package.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/dsl/package.scala
index 86c38277c1b9b..7c41491ba0674 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/dsl/package.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/dsl/package.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql.connect
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.language.implicitConversions
 import org.apache.spark.connect.proto
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/execution/SparkConnectPlanExecution.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/execution/SparkConnectPlanExecution.scala
index 1ad11490c3507..869608a9ab900 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/execution/SparkConnectPlanExecution.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/execution/SparkConnectPlanExecution.scala
@@ -17,8 +17,8 @@ package org.apache.spark.sql.connect.execution
-import scala.collection.JavaConverters._
 import scala.concurrent.duration.Duration
+import scala.jdk.CollectionConverters._
 import scala.util.{Failure, Success}
 import com.google.protobuf.ByteString
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
index dda7a713fa094..33dc4bb34b191 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
@@ -17,8 +17,8 @@ package org.apache.spark.sql.connect.planner
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import scala.util.Try
 import scala.util.control.NonFatal
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/StreamingForeachBatchHelper.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/StreamingForeachBatchHelper.scala
index 5ef0aea6b61c8..a5c0f863a1748 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/StreamingForeachBatchHelper.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/StreamingForeachBatchHelper.scala
@@ -22,7 +22,7 @@ import java.util.UUID
 import java.util.concurrent.ConcurrentHashMap
 import java.util.concurrent.ConcurrentMap
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.util.control.NonFatal
 import org.apache.spark.SparkException
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/ExecuteHolder.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/ExecuteHolder.scala
index 7e0e4725f93c0..0593edc2f6fda 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/ExecuteHolder.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/ExecuteHolder.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql.connect.service
 import java.util.UUID
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import org.apache.spark.{SparkEnv, SparkSQLException}
 import org.apache.spark.connect.proto
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala
index 0748cd237bf02..27f471233f1b2 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala
@@ -21,8 +21,8 @@ import java.nio.file.Path
 import java.util.UUID
 import java.util.concurrent.{ConcurrentHashMap, ConcurrentMap, TimeUnit}
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import com.google.common.base.Ticker
 import com.google.common.cache.CacheBuilder
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectAnalyzeHandler.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectAnalyzeHandler.scala
index 414a852380fd2..9d1cf9e36d094 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectAnalyzeHandler.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectAnalyzeHandler.scala
@@ -17,7 +17,7 @@ package org.apache.spark.sql.connect.service
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import io.grpc.stub.StreamObserver
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectArtifactStatusesHandler.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectArtifactStatusesHandler.scala
index da5843ed81949..5699dd11bde3f 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectArtifactStatusesHandler.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectArtifactStatusesHandler.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql.connect.service
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import io.grpc.stub.StreamObserver
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectConfigHandler.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectConfigHandler.scala
index 38fd88297f354..9e514f4f65d8c 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectConfigHandler.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectConfigHandler.scala
@@ -17,7 +17,7 @@ package org.apache.spark.sql.connect.service
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import io.grpc.stub.StreamObserver
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectExecutionManager.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectExecutionManager.scala
index 21f59bdd68ea5..3c72548978222 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectExecutionManager.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectExecutionManager.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql.connect.service
 import java.util.concurrent.{Executors, ScheduledExecutorService, TimeUnit}
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import scala.util.control.NonFatal
 import com.google.common.cache.CacheBuilder
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectInterruptHandler.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectInterruptHandler.scala
index a9ed391460ca9..97b57c4940b62 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectInterruptHandler.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectInterruptHandler.scala
@@ -17,7 +17,7 @@ package org.apache.spark.sql.connect.service
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import io.grpc.stub.StreamObserver
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/utils/MetricGenerator.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/utils/MetricGenerator.scala
index 6395fb588ab84..0ddaf4d0c1312 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/utils/MetricGenerator.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/utils/MetricGenerator.scala
@@ -17,7 +17,7 @@ package org.apache.spark.sql.connect.utils
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.connect.proto.ExecutePlanResponse
 import org.apache.spark.sql.DataFrame
diff --git a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectPlannerSuite.scala b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectPlannerSuite.scala
index 0caa02a0b6112..eb84dfc4e3df8 100644
--- a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectPlannerSuite.scala
+++ b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectPlannerSuite.scala
@@ -17,7 +17,7 @@ package org.apache.spark.sql.connect.planner
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import com.google.protobuf.ByteString
 import io.grpc.stub.StreamObserver
diff --git a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala
index 0c12bf5e625a9..21c3e8f3740e8 100644
--- a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala
+++ b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectProtoSuite.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.connect.planner
 import java.nio.file.{Files, Paths}
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import com.google.protobuf.ByteString
diff --git a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectServiceSuite.scala b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectServiceSuite.scala
index 90c9d13def616..abbb181f3fc2c 100644
--- a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectServiceSuite.scala
+++ b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/planner/SparkConnectServiceSuite.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql.connect.planner
 import java.util.UUID
 import java.util.concurrent.Semaphore
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import com.google.protobuf
 import com.google.protobuf.ByteString
diff --git a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/AddArtifactsHandlerSuite.scala b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/AddArtifactsHandlerSuite.scala
index 2e199bff5e7dc..7dedf5ed94109 100644
--- a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/AddArtifactsHandlerSuite.scala
+++ b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/AddArtifactsHandlerSuite.scala
@@ -20,10 +20,10 @@ import java.io.InputStream
 import java.nio.file.{Files, Path}
 import java.util.UUID
-import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.concurrent.Promise
 import scala.concurrent.duration._
+import scala.jdk.CollectionConverters._
 import com.google.protobuf.ByteString
 import io.grpc.stub.StreamObserver
diff --git a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/ArtifactStatusesHandlerSuite.scala b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/ArtifactStatusesHandlerSuite.scala
index b2e7f52825b4f..b7a92fa68c328 100644
--- a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/ArtifactStatusesHandlerSuite.scala
+++ b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/ArtifactStatusesHandlerSuite.scala
@@ -16,9 +16,9 @@
  */
 package org.apache.spark.sql.connect.service
-import scala.collection.JavaConverters._
 import scala.concurrent.Promise
 import scala.concurrent.duration._
+import scala.jdk.CollectionConverters._
 import io.grpc.stub.StreamObserver
 import org.apache.commons.codec.digest.DigestUtils.sha256Hex
diff --git a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/SparkConnectSessionHodlerSuite.scala b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/SparkConnectSessionHodlerSuite.scala
index 6e3271d5e6996..a6451de8fc27b 100644
--- a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/SparkConnectSessionHodlerSuite.scala
+++ b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/service/SparkConnectSessionHodlerSuite.scala
@@ -20,8 +20,8 @@ package org.apache.spark.sql.connect.service
 import java.nio.charset.StandardCharsets
 import java.nio.file.Files
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import scala.sys.process.Process
 import com.google.common.collect.Lists
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DockerJDBCIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DockerJDBCIntegrationSuite.scala
index 40e8cbb6546b5..bcad9ae874eb3 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DockerJDBCIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DockerJDBCIntegrationSuite.scala
@@ -21,7 +21,7 @@ import java.net.ServerSocket
 import java.sql.{Connection, DriverManager}
 import java.util.Properties
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.util.control.NonFatal
 import com.spotify.docker.client._
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2NamespaceSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2NamespaceSuite.scala
index f53dc1d5f6da7..3bf3e785cdae8 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2NamespaceSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/DB2NamespaceSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.jdbc.v2
 import java.sql.Connection
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.sql.jdbc.{DatabaseOnDocker, DockerJDBCIntegrationSuite}
 import org.apache.spark.sql.util.CaseInsensitiveStringMap
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MsSqlServerNamespaceSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MsSqlServerNamespaceSuite.scala
index b0a2d37e465ac..4bdc80dedfbe1 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MsSqlServerNamespaceSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MsSqlServerNamespaceSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.jdbc.v2
 import java.sql.Connection
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.sql.jdbc.{DatabaseOnDocker, DockerJDBCIntegrationSuite}
 import org.apache.spark.sql.util.CaseInsensitiveStringMap
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLNamespaceSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLNamespaceSuite.scala
index d58146fecdf42..03d6ff3f3c17f 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLNamespaceSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLNamespaceSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.jdbc.v2
 import java.sql.Connection
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.SparkSQLFeatureNotSupportedException
 import org.apache.spark.sql.connector.catalog.NamespaceChange
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleNamespaceSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleNamespaceSuite.scala
index a365a1c4e82e4..6cc0bb1239400 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleNamespaceSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/OracleNamespaceSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.jdbc.v2
 import java.sql.Connection
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.sql.jdbc.{DatabaseOnDocker, DockerJDBCIntegrationSuite}
 import org.apache.spark.sql.util.CaseInsensitiveStringMap
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresNamespaceSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresNamespaceSuite.scala
index cf7266e67e325..980e0a1ec7e6c 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresNamespaceSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/PostgresNamespaceSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.jdbc.v2
 import java.sql.Connection
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.spark.sql.jdbc.{DatabaseOnDocker, DockerJDBCIntegrationSuite}
 import org.apache.spark.sql.util.CaseInsensitiveStringMap
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCNamespaceTest.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCNamespaceTest.scala
index b7c6e0aff20a7..4eacfbfbd8804 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCNamespaceTest.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCNamespaceTest.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.jdbc.v2
 import java.util
 import java.util.Collections
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.logging.log4j.Level
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/util/DockerUtils.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/util/DockerUtils.scala
index 5abca8df77dcd..99096da56c732 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/util/DockerUtils.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/util/DockerUtils.scala
@@ -19,7 +19,7 @@ package org.apache.spark.util
 import java.net.{Inet4Address, InetAddress, NetworkInterface}
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.sys.process._
 import scala.util.Try
diff --git a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/ConsumerStrategy.scala b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/ConsumerStrategy.scala
index a0331d7889e04..10d5062848b50 100644
--- a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/ConsumerStrategy.scala
+++ b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/ConsumerStrategy.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql.kafka010
 import java.{util => ju}
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import org.apache.kafka.clients.admin.Admin
 import org.apache.kafka.clients.consumer.{Consumer, KafkaConsumer}
diff --git a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchStream.scala b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchStream.scala
index 24244c6de63b3..3287761b1f5d4 100644
--- a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchStream.scala
+++ b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchStream.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.kafka010
 import java.{util => ju}
 import java.util.Optional
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import org.apache.kafka.common.TopicPartition
diff --git a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaOffsetReaderAdmin.scala b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaOffsetReaderAdmin.scala
index b443bbcee0fc3..7c4c35998e4f2 100644
--- a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaOffsetReaderAdmin.scala
+++ b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaOffsetReaderAdmin.scala
@@ -20,8 +20,8 @@ package org.apache.spark.sql.kafka010
 import java.{util => ju}
 import java.util.Locale
-import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
+import scala.jdk.CollectionConverters._
 import scala.util.control.NonFatal
 import org.apache.kafka.clients.admin.{Admin, ListOffsetsOptions, ListOffsetsResult, OffsetSpec}
diff --git a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaOffsetReaderConsumer.scala b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaOffsetReaderConsumer.scala
index a1f7f71d5f302..6198b6461223d 100644
--- a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaOffsetReaderConsumer.scala
+++ b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaOffsetReaderConsumer.scala
@@ -19,8 +19,8 @@ package org.apache.spark.sql.kafka010
 import java.{util => ju}
-import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
+import scala.jdk.CollectionConverters._
 import scala.util.control.NonFatal
 import org.apache.kafka.clients.consumer.{Consumer, ConsumerConfig, OffsetAndTimestamp}
diff --git a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaRecordToRowConverter.scala b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaRecordToRowConverter.scala
index aed099c142bc3..56456f9b1f776 100644
a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaRecordToRowConverter.scala +++ b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaRecordToRowConverter.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.kafka010 import java.sql.Timestamp -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.consumer.ConsumerRecord diff --git a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaSourceProvider.scala b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaSourceProvider.scala index d9e3a1256ea47..27ebfaad617b4 100644 --- a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaSourceProvider.scala +++ b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaSourceProvider.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.kafka010 import java.{util => ju} import java.util.{Locale, UUID} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.consumer.ConsumerConfig import org.apache.kafka.clients.producer.ProducerConfig diff --git a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaWriteTask.scala b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaWriteTask.scala index fddba3f0f9919..e8f98262a8972 100644 --- a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaWriteTask.scala +++ b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaWriteTask.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.kafka010 import java.{util => ju} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.producer.{Callback, KafkaProducer, ProducerRecord, RecordMetadata} import org.apache.kafka.common.header.Header diff --git a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/consumer/KafkaDataConsumer.scala b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/consumer/KafkaDataConsumer.scala index a9e394d3c888b..245700d5cc837 100644 --- a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/consumer/KafkaDataConsumer.scala +++ b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/consumer/KafkaDataConsumer.scala @@ -22,7 +22,7 @@ import java.io.Closeable import java.time.Duration import java.util.concurrent.TimeoutException -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.CommonClientConfigs import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord, KafkaConsumer, OffsetOutOfRangeException} diff --git a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/producer/InternalKafkaProducerPool.scala b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/producer/InternalKafkaProducerPool.scala index 8d1f9b8d37f60..2876b7037f8bb 100644 --- a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/producer/InternalKafkaProducerPool.scala +++ b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/producer/InternalKafkaProducerPool.scala @@ -21,8 +21,8 @@ import java.{util => ju} import java.util.concurrent.{ScheduledExecutorService, ScheduledFuture, TimeUnit} import javax.annotation.concurrent.GuardedBy -import scala.collection.JavaConverters._ import scala.collection.mutable +import 
scala.jdk.CollectionConverters._ import org.apache.kafka.clients.producer.KafkaProducer diff --git a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/ConsumerStrategySuite.scala b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/ConsumerStrategySuite.scala index 939cf0bb36a8c..44baab7f24686 100644 --- a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/ConsumerStrategySuite.scala +++ b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/ConsumerStrategySuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.kafka010 import java.util.UUID -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.CommonClientConfigs import org.apache.kafka.clients.admin.Admin diff --git a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchSourceSuite.scala b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchSourceSuite.scala index d63b9805e5530..2315147bb8cfc 100644 --- a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchSourceSuite.scala +++ b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchSourceSuite.scala @@ -24,9 +24,9 @@ import java.util.{Locale, Optional} import java.util.concurrent.ConcurrentLinkedQueue import java.util.concurrent.atomic.AtomicInteger -import scala.collection.JavaConverters._ import scala.collection.mutable.ListBuffer import scala.io.Source +import scala.jdk.CollectionConverters._ import scala.util.Random import org.apache.commons.io.FileUtils diff --git a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaOffsetRangeCalculatorSuite.scala b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaOffsetRangeCalculatorSuite.scala index 4ef019cb5aca9..89ab0902f4d6f 100644 --- a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaOffsetRangeCalculatorSuite.scala +++ b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaOffsetRangeCalculatorSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.kafka010 -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.common.TopicPartition diff --git a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSourceProviderSuite.scala b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSourceProviderSuite.scala index 8c35bf1308e55..303394b445792 100644 --- a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSourceProviderSuite.scala +++ b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSourceProviderSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.kafka010 import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.mockito.Mockito.{mock, when} diff --git a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala index 4e11a66bc2e84..c54afc6290b13 100644 --- a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala +++ b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala @@ -24,8 +24,8 @@ import java.util.{Collections, Properties, UUID} import java.util.concurrent.TimeUnit 
import javax.security.auth.login.Configuration -import scala.collection.JavaConverters._ import scala.io.Source +import scala.jdk.CollectionConverters._ import com.google.common.io.Files import kafka.api.Request diff --git a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/RecordBuilder.scala b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/RecordBuilder.scala index ef07798442e56..9b70fc11ad16f 100644 --- a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/RecordBuilder.scala +++ b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/RecordBuilder.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.kafka010 import java.lang.{Integer => JInt, Long => JLong} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.producer.ProducerRecord import org.apache.kafka.common.header.Header diff --git a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/consumer/FetchedDataPoolSuite.scala b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/consumer/FetchedDataPoolSuite.scala index 09d50ef0660cf..0b47a9527ee2b 100644 --- a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/consumer/FetchedDataPoolSuite.scala +++ b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/consumer/FetchedDataPoolSuite.scala @@ -20,8 +20,8 @@ package org.apache.spark.sql.kafka010.consumer import java.{util => ju} import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.consumer.ConsumerRecord import org.apache.kafka.common.TopicPartition diff --git a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/consumer/InternalKafkaConsumerPoolSuite.scala b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/consumer/InternalKafkaConsumerPoolSuite.scala index 3797d5b5bd6a3..2e18dfa45ba44 100644 --- a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/consumer/InternalKafkaConsumerPoolSuite.scala +++ b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/consumer/InternalKafkaConsumerPoolSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.kafka010.consumer import java.{util => ju} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.consumer.ConsumerConfig._ import org.apache.kafka.common.TopicPartition diff --git a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/consumer/KafkaDataConsumerSuite.scala b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/consumer/KafkaDataConsumerSuite.scala index 30e8e348f74d2..8c5289a826929 100644 --- a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/consumer/KafkaDataConsumerSuite.scala +++ b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/consumer/KafkaDataConsumerSuite.scala @@ -21,8 +21,8 @@ import java.{util => ju} import java.nio.charset.StandardCharsets import java.util.concurrent.{Executors, TimeUnit} -import scala.collection.JavaConverters._ import scala.collection.immutable +import scala.jdk.CollectionConverters._ import scala.util.Random import org.apache.kafka.clients.consumer.ConsumerConfig._
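Nearly every hunk in this patch has the same mechanical shape: the scala.collection.JavaConverters import, deprecated since Scala 2.13.0, is swapped for scala.jdk.CollectionConverters, and the new import often slides down a line or two so the scala.* group stays alphabetized (jdk sorts after collection, concurrent, and io). Call sites are untouched because both objects supply the same asScala/asJava extension methods. A minimal before/after sketch in Scala; juParams is a hypothetical value, not taken from this patch:

  // Before (deprecated since Scala 2.13.0):
  //   import scala.collection.JavaConverters._
  // After:
  import scala.jdk.CollectionConverters._

  val juParams = new java.util.HashMap[String, String]()  // hypothetical example value
  juParams.put("bootstrap.servers", "localhost:9092")

  // Same extension methods as before; both produce O(1) wrapper views, not copies.
  val sParams: scala.collection.mutable.Map[String, String] = juParams.asScala
  val jAgain: java.util.Map[String, String] = sParams.asJava

diff --git a/connector/kafka-0-10-token-provider/src/main/scala/org/apache/spark/kafka010/KafkaConfigUpdater.scala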
b/connector/kafka-0-10-token-provider/src/main/scala/org/apache/spark/kafka010/KafkaConfigUpdater.scala index f54ff0d146f7a..2873264098e7b 100644 --- a/connector/kafka-0-10-token-provider/src/main/scala/org/apache/spark/kafka010/KafkaConfigUpdater.scala +++ b/connector/kafka-0-10-token-provider/src/main/scala/org/apache/spark/kafka010/KafkaConfigUpdater.scala @@ -19,7 +19,7 @@ package org.apache.spark.kafka010 import java.{util => ju} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.CommonClientConfigs import org.apache.kafka.common.config.SaslConfigs diff --git a/connector/kafka-0-10-token-provider/src/main/scala/org/apache/spark/kafka010/KafkaTokenUtil.scala b/connector/kafka-0-10-token-provider/src/main/scala/org/apache/spark/kafka010/KafkaTokenUtil.scala index 2b28ccaddc636..93cc522e47578 100644 --- a/connector/kafka-0-10-token-provider/src/main/scala/org/apache/spark/kafka010/KafkaTokenUtil.scala +++ b/connector/kafka-0-10-token-provider/src/main/scala/org/apache/spark/kafka010/KafkaTokenUtil.scala @@ -21,7 +21,7 @@ import java.{util => ju} import java.text.SimpleDateFormat import java.util.regex.Pattern -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.hadoop.io.Text diff --git a/connector/kafka-0-10-token-provider/src/test/scala/org/apache/spark/kafka010/KafkaConfigUpdaterSuite.scala b/connector/kafka-0-10-token-provider/src/test/scala/org/apache/spark/kafka010/KafkaConfigUpdaterSuite.scala index dc1e7cb8d979e..c3f98de93e519 100644 --- a/connector/kafka-0-10-token-provider/src/test/scala/org/apache/spark/kafka010/KafkaConfigUpdaterSuite.scala +++ b/connector/kafka-0-10-token-provider/src/test/scala/org/apache/spark/kafka010/KafkaConfigUpdaterSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.kafka010 import java.{util => ju} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.CommonClientConfigs import org.apache.kafka.common.config.SaslConfigs diff --git a/connector/kafka-0-10-token-provider/src/test/scala/org/apache/spark/kafka010/KafkaTokenUtilSuite.scala b/connector/kafka-0-10-token-provider/src/test/scala/org/apache/spark/kafka010/KafkaTokenUtilSuite.scala index 7e4efb948b77f..8a606a1adc767 100644 --- a/connector/kafka-0-10-token-provider/src/test/scala/org/apache/spark/kafka010/KafkaTokenUtilSuite.scala +++ b/connector/kafka-0-10-token-provider/src/test/scala/org/apache/spark/kafka010/KafkaTokenUtilSuite.scala @@ -20,7 +20,7 @@ package org.apache.spark.kafka010 import java.{util => ju} import java.security.PrivilegedExceptionAction -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.io.Text import org.apache.hadoop.security.UserGroupInformation diff --git a/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/ConsumerStrategy.scala b/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/ConsumerStrategy.scala index 2468cc030ea5a..b96a2597f5ddd 100644 --- a/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/ConsumerStrategy.scala +++ b/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/ConsumerStrategy.scala @@ -20,7 +20,7 @@ package org.apache.spark.streaming.kafka010 import java.{lang => jl, util => ju} import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import 
org.apache.kafka.clients.consumer._ import org.apache.kafka.clients.consumer.internals.NoOpConsumerRebalanceListener diff --git a/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/DirectKafkaInputDStream.scala b/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/DirectKafkaInputDStream.scala index fb86111cb1876..971b3b347faf3 100644 --- a/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/DirectKafkaInputDStream.scala +++ b/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/DirectKafkaInputDStream.scala @@ -21,8 +21,8 @@ import java.{ util => ju } import java.util.concurrent.ConcurrentLinkedQueue import java.util.concurrent.atomic.AtomicReference -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.consumer._ import org.apache.kafka.common.TopicPartition diff --git a/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaDataConsumer.scala b/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaDataConsumer.scala index 09af5a0815147..8becbe421c6b3 100644 --- a/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaDataConsumer.scala +++ b/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/KafkaDataConsumer.scala @@ -20,7 +20,7 @@ package org.apache.spark.streaming.kafka010 import java.{util => ju} import java.time.Duration -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord, KafkaConsumer} import org.apache.kafka.common.{KafkaException, TopicPartition} diff --git a/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/LocationStrategy.scala b/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/LocationStrategy.scala index b4d9669f08387..f02bff06b3bb7 100644 --- a/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/LocationStrategy.scala +++ b/connector/kafka-0-10/src/main/scala/org/apache/spark/streaming/kafka010/LocationStrategy.scala @@ -19,7 +19,7 @@ package org.apache.spark.streaming.kafka010 import java.{ util => ju } -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.kafka.common.TopicPartition diff --git a/connector/kafka-0-10/src/test/java/org/apache/spark/streaming/kafka010/JavaConsumerStrategySuite.java b/connector/kafka-0-10/src/test/java/org/apache/spark/streaming/kafka010/JavaConsumerStrategySuite.java index 819d6f0f2e0cd..a248fb8674297 100644 --- a/connector/kafka-0-10/src/test/java/org/apache/spark/streaming/kafka010/JavaConsumerStrategySuite.java +++ b/connector/kafka-0-10/src/test/java/org/apache/spark/streaming/kafka010/JavaConsumerStrategySuite.java @@ -21,7 +21,7 @@ import java.util.*; import java.util.regex.Pattern; -import scala.collection.JavaConverters; +import scala.jdk.CollectionConverters; import org.apache.kafka.common.TopicPartition; @@ -36,22 +36,22 @@ public void testConsumerStrategyConstructors() { final Pattern pat = Pattern.compile("top.*"); final Collection<String> topics = Arrays.asList(topic1); final scala.collection.Iterable<String> sTopics = - JavaConverters.collectionAsScalaIterableConverter(topics).asScala(); + CollectionConverters.CollectionHasAsScala(topics).asScala(); final TopicPartition tp1 = new TopicPartition(topic1, 0); final TopicPartition tp2 = new TopicPartition(topic1, 1); final
Collection<TopicPartition> parts = Arrays.asList(tp1, tp2); final scala.collection.Iterable<TopicPartition> sParts = - JavaConverters.collectionAsScalaIterableConverter(parts).asScala(); + CollectionConverters.CollectionHasAsScala(parts).asScala(); final Map<String, Object> kafkaParams = new HashMap<>(); kafkaParams.put("bootstrap.servers", "not used"); final scala.collection.Map<String, Object> sKafkaParams = - JavaConverters.mapAsScalaMapConverter(kafkaParams).asScala(); + CollectionConverters.MapHasAsScala(kafkaParams).asScala(); final Map<TopicPartition, Long> offsets = new HashMap<>(); offsets.put(tp1, 23L); final Map<TopicPartition, Object> dummyOffsets = new HashMap<>(); dummyOffsets.putAll(offsets); final scala.collection.Map<TopicPartition, Object> sOffsets = - JavaConverters.mapAsScalaMap(dummyOffsets); + CollectionConverters.MapHasAsScala(dummyOffsets).asScala(); final ConsumerStrategy<String, String> sub1 = ConsumerStrategies.Subscribe(sTopics, sKafkaParams, sOffsets); diff --git a/connector/kafka-0-10/src/test/java/org/apache/spark/streaming/kafka010/JavaLocationStrategySuite.java b/connector/kafka-0-10/src/test/java/org/apache/spark/streaming/kafka010/JavaLocationStrategySuite.java index 41ccb0ebe7bfa..1a4ff99db5e83 100644 --- a/connector/kafka-0-10/src/test/java/org/apache/spark/streaming/kafka010/JavaLocationStrategySuite.java +++ b/connector/kafka-0-10/src/test/java/org/apache/spark/streaming/kafka010/JavaLocationStrategySuite.java @@ -20,7 +20,7 @@ import java.io.Serializable; import java.util.*; -import scala.collection.JavaConverters; +import scala.jdk.CollectionConverters; import org.apache.kafka.common.TopicPartition; @@ -38,7 +38,7 @@ public void testLocationStrategyConstructors() { hosts.put(tp1, "node1"); hosts.put(tp2, "node2"); final scala.collection.Map<TopicPartition, String> sHosts = - JavaConverters.mapAsScalaMapConverter(hosts).asScala(); + CollectionConverters.MapHasAsScala(hosts).asScala(); // make sure constructors can be called from java final LocationStrategy c1 = LocationStrategies.PreferConsistent(); diff --git a/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala b/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala index 2b7fef1e0fde3..978baaae4128c 100644 --- a/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala +++ b/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala @@ -24,8 +24,8 @@ import java.util.concurrent.ConcurrentHashMap import java.util.concurrent.ConcurrentLinkedQueue import java.util.concurrent.atomic.AtomicLong -import scala.collection.JavaConverters._ import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import scala.util.Random import org.apache.kafka.clients.consumer._ diff --git a/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaDataConsumerSuite.scala b/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaDataConsumerSuite.scala index c7712b1aaee02..0d1e13bc95265 100644 --- a/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaDataConsumerSuite.scala +++ b/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaDataConsumerSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.streaming.kafka010 import java.util.concurrent.{Executors, TimeUnit} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Random import org.apache.kafka.clients.consumer.ConsumerConfig._
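Java sources cannot rely on Scala implicits, so the two Java suites above spell the conversions out: the old converter factories (collectionAsScalaIterableConverter, mapAsScalaMapConverter, mapAsScalaMap) become explicit calls to the extension constructors on the scala.jdk.CollectionConverters object (CollectionHasAsScala, MapHasAsScala) followed by asScala(). This also normalizes the one bare mapAsScalaMap(dummyOffsets) call to the same wrapper style, and UnsafeShuffleWriter.java further down applies the identical pattern with IteratorHasAsScala. The explicit style works from Scala too; a sketch with a hypothetical jHosts value (scala.jdk.javaapi.CollectionConverters.asScala is an alternative aimed at Java callers, though this patch does not use it):

  import scala.jdk.CollectionConverters

  // Calling the extension constructor explicitly, as the Java suites do,
  // instead of importing CollectionConverters._ for the implicit form.
  val jHosts = new java.util.HashMap[String, String]()  // hypothetical example value
  jHosts.put("tp1", "node1")
  val sHosts: scala.collection.Map[String, String] =
    CollectionConverters.MapHasAsScala(jHosts).asScala

diff --git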
a/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaRDDSuite.scala b/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaRDDSuite.scala index 431b549f42570..735ec2f7b4484 100644 --- a/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaRDDSuite.scala +++ b/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaRDDSuite.scala @@ -20,8 +20,8 @@ package org.apache.spark.streaming.kafka010 import java.{ util => ju } import java.io.File -import scala.collection.JavaConverters._ import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import scala.util.Random import kafka.log.{CleanerConfig, LogCleaner, LogConfig, ProducerStateManagerConfig, UnifiedLog} diff --git a/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaTestUtils.scala b/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaTestUtils.scala index 1a7a6e9676a16..6a9ef52e990ea 100644 --- a/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaTestUtils.scala +++ b/connector/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaTestUtils.scala @@ -24,7 +24,7 @@ import java.util.{Map => JMap, Properties} import java.util.concurrent.{TimeoutException, TimeUnit} import scala.annotation.tailrec -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import kafka.api.Request diff --git a/connector/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisExampleUtils.scala b/connector/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisExampleUtils.scala index 2eebd6130d4da..737e5199e71a4 100644 --- a/connector/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisExampleUtils.scala +++ b/connector/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisExampleUtils.scala @@ -17,7 +17,7 @@ package org.apache.spark.examples.streaming -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.amazonaws.regions.RegionUtils import com.amazonaws.services.kinesis.AmazonKinesis diff --git a/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisBackedBlockRDD.scala b/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisBackedBlockRDD.scala index ab55d545770e9..d22496a84b581 100644 --- a/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisBackedBlockRDD.scala +++ b/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisBackedBlockRDD.scala @@ -19,7 +19,7 @@ package org.apache.spark.streaming.kinesis import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.util.control.NonFatal diff --git a/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisInputDStream.scala b/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisInputDStream.scala index c68af38649388..9e432eda6251b 100644 --- a/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisInputDStream.scala +++ b/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisInputDStream.scala @@ -17,7 +17,7 @@ package org.apache.spark.streaming.kinesis -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import 
scala.reflect.ClassTag import com.amazonaws.services.kinesis.clientlibrary.lib.worker.{InitialPositionInStream, KinesisClientLibConfiguration} diff --git a/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisReceiver.scala b/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisReceiver.scala index 6feb8f1b5598f..7824daea8319a 100644 --- a/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisReceiver.scala +++ b/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisReceiver.scala @@ -19,8 +19,8 @@ package org.apache.spark.streaming.kinesis import java.util.UUID import java.util.concurrent.ConcurrentHashMap -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import com.amazonaws.services.kinesis.clientlibrary.interfaces.{IRecordProcessor, IRecordProcessorCheckpointer, IRecordProcessorFactory} diff --git a/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisTestUtils.scala b/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisTestUtils.scala index 4dacc03c72719..50dfd50aa239f 100644 --- a/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisTestUtils.scala +++ b/connector/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisTestUtils.scala @@ -21,9 +21,9 @@ import java.nio.ByteBuffer import java.nio.charset.StandardCharsets import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Random, Success, Try} import com.amazonaws.auth.{AWSCredentials, DefaultAWSCredentialsProviderChain} diff --git a/connector/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisInputDStreamBuilderSuite.scala b/connector/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisInputDStreamBuilderSuite.scala index babdb15987a6c..ba04c0189e440 100644 --- a/connector/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisInputDStreamBuilderSuite.scala +++ b/connector/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisInputDStreamBuilderSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.streaming.kinesis import java.util.Calendar -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.amazonaws.services.kinesis.clientlibrary.lib.worker.{InitialPositionInStream, KinesisClientLibConfiguration} import com.amazonaws.services.kinesis.metrics.interfaces.MetricsLevel diff --git a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufDataToCatalyst.scala b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufDataToCatalyst.scala index 5c4a5ff068968..a239a627125dc 100644 --- a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufDataToCatalyst.scala +++ b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufDataToCatalyst.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.protobuf -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import com.google.protobuf.DynamicMessage diff --git a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufSerializer.scala 
b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufSerializer.scala index 611b753d02470..4684934a56583 100644 --- a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufSerializer.scala +++ b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/ProtobufSerializer.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.protobuf -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.protobuf.{Duration, DynamicMessage, Timestamp} import com.google.protobuf.Descriptors.{Descriptor, FieldDescriptor} diff --git a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/functions.scala b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/functions.scala index 6a33dfa1da1b8..91e87dee50482 100644 --- a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/functions.scala +++ b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/functions.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.protobuf -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.Experimental import org.apache.spark.sql.Column diff --git a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufUtils.scala b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufUtils.scala index 8b42ff802f78a..2388668f66afb 100644 --- a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufUtils.scala +++ b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufUtils.scala @@ -22,7 +22,7 @@ import java.io.FileNotFoundException import java.nio.file.NoSuchFileException import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import com.google.protobuf.{DescriptorProtos, Descriptors, InvalidProtocolBufferException, Message} diff --git a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/SchemaConverters.scala b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/SchemaConverters.scala index 33b7ef87744e0..aa3ac998a746b 100644 --- a/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/SchemaConverters.scala +++ b/connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/SchemaConverters.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.protobuf.utils -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.protobuf.Descriptors.{Descriptor, FieldDescriptor} diff --git a/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufFunctionsSuite.scala b/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufFunctionsSuite.scala index 51baf2052df8e..33dea4df181f8 100644 --- a/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufFunctionsSuite.scala +++ b/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufFunctionsSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.protobuf import java.sql.Timestamp import java.time.Duration -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.protobuf.{Any => AnyProto, ByteString, DynamicMessage} import org.json4s.StringInput diff --git a/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufTestBase.scala 
b/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufTestBase.scala index 9824a7786ce9a..e3add49f2b807 100644 --- a/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufTestBase.scala +++ b/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufTestBase.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.protobuf import java.io.File -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.protobuf.DescriptorProtos.FileDescriptorSet diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java index 9c54184105951..adea53689b547 100644 --- a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java +++ b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java @@ -28,7 +28,7 @@ import scala.Option; import scala.Product2; -import scala.collection.JavaConverters; +import scala.jdk.CollectionConverters; import scala.reflect.ClassTag; import scala.reflect.ClassTag$; @@ -166,7 +166,7 @@ public long getPeakMemoryUsedBytes() { */ @VisibleForTesting public void write(Iterator<Product2<K, V>> records) throws IOException { - write(JavaConverters.asScalaIteratorConverter(records).asScala()); + write(CollectionConverters.IteratorHasAsScala(records).asScala()); } @Override diff --git a/core/src/main/scala/org/apache/spark/BarrierTaskContext.scala b/core/src/main/scala/org/apache/spark/BarrierTaskContext.scala index ecc0c891ea161..ff14103641407 100644 --- a/core/src/main/scala/org/apache/spark/BarrierTaskContext.scala +++ b/core/src/main/scala/org/apache/spark/BarrierTaskContext.scala @@ -19,8 +19,8 @@ package org.apache.spark import java.util.{Properties, Timer, TimerTask} -import scala.collection.JavaConverters._ import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success => ScalaSuccess, Try} import org.apache.spark.annotation.{Experimental, Since} diff --git a/core/src/main/scala/org/apache/spark/ContextCleaner.scala b/core/src/main/scala/org/apache/spark/ContextCleaner.scala index a6fa28b8ae8ef..a1871cb231cfb 100644 --- a/core/src/main/scala/org/apache/spark/ContextCleaner.scala +++ b/core/src/main/scala/org/apache/spark/ContextCleaner.scala @@ -21,7 +21,7 @@ import java.lang.ref.{ReferenceQueue, WeakReference} import java.util.Collections import java.util.concurrent.{ConcurrentHashMap, ConcurrentLinkedQueue, ScheduledExecutorService, TimeUnit} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.broadcast.Broadcast import org.apache.spark.internal.Logging diff --git a/core/src/main/scala/org/apache/spark/FutureAction.scala b/core/src/main/scala/org/apache/spark/FutureAction.scala index 4bdcafce0d75a..9100d4ce041bf 100644 --- a/core/src/main/scala/org/apache/spark/FutureAction.scala +++ b/core/src/main/scala/org/apache/spark/FutureAction.scala @@ -255,7 +255,7 @@ private[spark] class JavaFutureActionWrapper[S, T](futureAction: FutureAction[S], converter: S => T) extends JavaFutureAction[T] { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ override def isCancelled: Boolean = futureAction.isCancelled diff --git a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala index 4e6c2213e1e7b..6d5c04635ad7a 100644 --- a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala +++
b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala @@ -23,10 +23,10 @@ import java.util.concurrent.{ConcurrentHashMap, LinkedBlockingQueue, ThreadPoolE import java.util.concurrent.locks.ReentrantReadWriteLock import scala.collection -import scala.collection.JavaConverters._ import scala.collection.mutable.{HashMap, ListBuffer, Map} import scala.concurrent.{ExecutionContext, Future} import scala.concurrent.duration.Duration +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.util.control.NonFatal diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala index 8c054d24b10d7..0953ffe5ff80b 100644 --- a/core/src/main/scala/org/apache/spark/SparkConf.scala +++ b/core/src/main/scala/org/apache/spark/SparkConf.scala @@ -20,8 +20,8 @@ package org.apache.spark import java.util.{Map => JMap} import java.util.concurrent.ConcurrentHashMap -import scala.collection.JavaConverters._ import scala.collection.mutable.LinkedHashSet +import scala.jdk.CollectionConverters._ import org.apache.avro.{Schema, SchemaNormalization} diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala index 640cfc2afaaaa..277ff6364d4f7 100644 --- a/core/src/main/scala/org/apache/spark/SparkContext.scala +++ b/core/src/main/scala/org/apache/spark/SparkContext.scala @@ -24,11 +24,11 @@ import java.util.concurrent.{ConcurrentHashMap, ConcurrentMap} import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger, AtomicReference} import javax.ws.rs.core.UriBuilder -import scala.collection.JavaConverters._ import scala.collection.Map import scala.collection.concurrent.{Map => ScalaConcurrentMap} import scala.collection.immutable import scala.collection.mutable.HashMap +import scala.jdk.CollectionConverters._ import scala.language.implicitConversions import scala.reflect.{classTag, ClassTag} import scala.util.control.NonFatal diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala index e404c9ee8b4cf..539b93fa29a33 100644 --- a/core/src/main/scala/org/apache/spark/SparkEnv.scala +++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala @@ -20,9 +20,9 @@ package org.apache.spark import java.io.File import java.util.Locale -import scala.collection.JavaConverters._ import scala.collection.concurrent import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.util.Properties import com.google.common.cache.CacheBuilder diff --git a/core/src/main/scala/org/apache/spark/SparkFileAlreadyExistsException.scala b/core/src/main/scala/org/apache/spark/SparkFileAlreadyExistsException.scala index 532fd04134eef..0e578f045452e 100644 --- a/core/src/main/scala/org/apache/spark/SparkFileAlreadyExistsException.scala +++ b/core/src/main/scala/org/apache/spark/SparkFileAlreadyExistsException.scala @@ -17,7 +17,7 @@ package org.apache.spark -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.FileAlreadyExistsException diff --git a/core/src/main/scala/org/apache/spark/TaskContextImpl.scala b/core/src/main/scala/org/apache/spark/TaskContextImpl.scala index 526627c28607d..4cc5f165794e4 100644 --- a/core/src/main/scala/org/apache/spark/TaskContextImpl.scala +++ b/core/src/main/scala/org/apache/spark/TaskContextImpl.scala @@ -20,8 +20,8 @@ package org.apache.spark import java.util.{Properties, Stack} import javax.annotation.concurrent.GuardedBy 
-import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.spark.executor.TaskMetrics import org.apache.spark.internal.{config, Logging} diff --git a/core/src/main/scala/org/apache/spark/TestUtils.scala b/core/src/main/scala/org/apache/spark/TestUtils.scala index 13ae6aca38b8c..f0dbfa9ad5a6f 100644 --- a/core/src/main/scala/org/apache/spark/TestUtils.scala +++ b/core/src/main/scala/org/apache/spark/TestUtils.scala @@ -31,10 +31,10 @@ import java.util.regex.Pattern import javax.net.ssl._ import javax.tools.{JavaFileObject, SimpleJavaFileObject, ToolProvider} -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.ArrayBuffer import scala.io.Source +import scala.jdk.CollectionConverters._ import scala.reflect.{classTag, ClassTag} import scala.sys.process.{Process, ProcessLogger} import scala.util.Try diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaHadoopRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaHadoopRDD.scala index 891bcddeac286..3e8911244c016 100644 --- a/core/src/main/scala/org/apache/spark/api/java/JavaHadoopRDD.scala +++ b/core/src/main/scala/org/apache/spark/api/java/JavaHadoopRDD.scala @@ -17,7 +17,7 @@ package org.apache.spark.api.java -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.hadoop.mapred.InputSplit diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaNewHadoopRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaNewHadoopRDD.scala index 0f49279f3e647..936e7b684a5be 100644 --- a/core/src/main/scala/org/apache/spark/api/java/JavaNewHadoopRDD.scala +++ b/core/src/main/scala/org/apache/spark/api/java/JavaNewHadoopRDD.scala @@ -17,7 +17,7 @@ package org.apache.spark.api.java -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.hadoop.mapreduce.InputSplit diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala index 6dd36309378cc..a41cbb058e9ee 100644 --- a/core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala +++ b/core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala @@ -21,7 +21,7 @@ import java.{lang => jl} import java.lang.{Iterable => JIterable} import java.util.{Comparator, List => JList} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.language.implicitConversions import scala.reflect.ClassTag diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala index c17c9a058de0e..ee963096c196b 100644 --- a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala +++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala @@ -21,7 +21,7 @@ import java.{lang => jl} import java.lang.{Iterable => JIterable} import java.util.{Comparator, Iterator => JIterator, List => JList, Map => JMap} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.hadoop.io.compress.CompressionCodec @@ -366,8 +366,7 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable { * * The iterator will consume as much memory as the largest partition in this RDD. 
*/ - def toLocalIterator(): JIterator[T] = asJavaIteratorConverter(rdd.toLocalIterator).asJava + def toLocalIterator(): JIterator[T] = rdd.toLocalIterator.asJava /** * Return an array that contains all of the elements in a specific partition of this RDD. diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala index 8c99e5622baa3..80d56e4fac352 100644 --- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala +++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala @@ -22,7 +22,7 @@ import java.util import java.util.{Map => JMap} import scala.annotation.varargs -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.language.implicitConversions import scala.reflect.ClassTag diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonHadoopUtil.scala b/core/src/main/scala/org/apache/spark/api/python/PythonHadoopUtil.scala index c798e363c45db..3f476772d65af 100644 --- a/core/src/main/scala/org/apache/spark/api/python/PythonHadoopUtil.scala +++ b/core/src/main/scala/org/apache/spark/api/python/PythonHadoopUtil.scala @@ -17,7 +17,7 @@ package org.apache.spark.api.python -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success, Try} import org.apache.hadoop.conf.Configuration diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala index a2f2d566db5a3..3eea0ebcdb2a6 100644 --- a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala +++ b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala @@ -22,9 +22,9 @@ import java.net._ import java.nio.charset.StandardCharsets import java.util.{ArrayList => JArrayList, List => JList, Map => JMap} -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.concurrent.duration.Duration +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.hadoop.conf.Configuration diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonRunner.scala b/core/src/main/scala/org/apache/spark/api/python/PythonRunner.scala index db95e6c2bd6e1..0768e6ffa1ceb 100644 --- a/core/src/main/scala/org/apache/spark/api/python/PythonRunner.scala +++ b/core/src/main/scala/org/apache/spark/api/python/PythonRunner.scala @@ -27,7 +27,7 @@ import java.nio.file.{Files => JavaFiles, Path} import java.util.concurrent.ConcurrentHashMap import java.util.concurrent.atomic.AtomicBoolean -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.spark._ diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala index 30ab838c4352b..0e61e38ff2b03 100644 --- a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala +++ b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala @@ -20,8 +20,8 @@ package org.apache.spark.api.python import java.io.File import java.util.{List => JList} -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.spark.{SparkContext, SparkEnv} import org.apache.spark.api.java.{JavaRDD, JavaSparkContext}
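Most call sites compile unchanged under the new import, but the JavaRDDLike#toLocalIterator hunk above is one of the few places the patch simplifies code as well: the explicit helper asJavaIteratorConverter(rdd.toLocalIterator).asJava collapses to rdd.toLocalIterator.asJava, because scala.jdk.CollectionConverters._ puts an asJava extension (via IteratorHasAsJava) directly on Scala iterators. A minimal sketch of that shape, with hypothetical example data:

  import java.util.{Iterator => JIterator}
  import scala.jdk.CollectionConverters._

  // Old style: asJavaIteratorConverter(it).asJava
  // New style: the IteratorHasAsJava extension supplies asJava directly.
  val it: Iterator[Int] = Iterator(1, 2, 3)  // hypothetical example data
  val jit: JIterator[Int] = it.asJava

diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala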
b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala index d0776eb2cc736..e7a49b7ee980b 100644 --- a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala +++ b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala @@ -24,8 +24,8 @@ import java.util.Arrays import java.util.concurrent.TimeUnit import javax.annotation.concurrent.GuardedBy -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark._ import org.apache.spark.errors.SparkCoreErrors diff --git a/core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala b/core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala index a2a7fb5c10096..4b6d0768005bc 100644 --- a/core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala +++ b/core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala @@ -19,8 +19,8 @@ package org.apache.spark.api.python import java.util.{ArrayList => JArrayList} -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.util.Failure import scala.util.Try diff --git a/core/src/main/scala/org/apache/spark/api/python/StreamingPythonRunner.scala b/core/src/main/scala/org/apache/spark/api/python/StreamingPythonRunner.scala index bd2a8a01cacec..2fb5d15bcfd44 100644 --- a/core/src/main/scala/org/apache/spark/api/python/StreamingPythonRunner.scala +++ b/core/src/main/scala/org/apache/spark/api/python/StreamingPythonRunner.scala @@ -19,7 +19,7 @@ package org.apache.spark.api.python import java.io.{BufferedInputStream, BufferedOutputStream, DataInputStream, DataOutputStream} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkEnv import org.apache.spark.internal.Logging diff --git a/core/src/main/scala/org/apache/spark/api/python/WriteInputFormatTestDataGenerator.scala b/core/src/main/scala/org/apache/spark/api/python/WriteInputFormatTestDataGenerator.scala index d30e9c5e2ce61..00bd550d6ac5e 100644 --- a/core/src/main/scala/org/apache/spark/api/python/WriteInputFormatTestDataGenerator.scala +++ b/core/src/main/scala/org/apache/spark/api/python/WriteInputFormatTestDataGenerator.scala @@ -21,7 +21,7 @@ import java.{util => ju} import java.io.{DataInput, DataOutput} import java.nio.charset.StandardCharsets -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.io._ import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat diff --git a/core/src/main/scala/org/apache/spark/api/r/RRDD.scala b/core/src/main/scala/org/apache/spark/api/r/RRDD.scala index 892e69bfce5ce..b60d90275cb70 100644 --- a/core/src/main/scala/org/apache/spark/api/r/RRDD.scala +++ b/core/src/main/scala/org/apache/spark/api/r/RRDD.scala @@ -21,7 +21,7 @@ import java.io.{File, OutputStream} import java.net.Socket import java.util.{Map => JMap} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.spark._ diff --git a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala index e211bd5749af3..0d35d1bd1f295 100644 --- a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala +++ b/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala @@ -22,7 +22,7 @@ import java.lang.ref.{Reference, SoftReference, WeakReference} import 
java.nio.ByteBuffer import java.util.zip.Adler32 -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.util.Random diff --git a/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleService.scala b/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleService.scala index 22b138e5d9881..466c1f2e14b17 100644 --- a/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleService.scala +++ b/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleService.scala @@ -20,7 +20,7 @@ package org.apache.spark.deploy import java.io.File import java.util.concurrent.CountDownLatch -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.{SecurityManager, SparkConf} import org.apache.spark.internal.{config, Logging} diff --git a/core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala b/core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala index c3cb6831e399f..15a402d3cc9df 100644 --- a/core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala +++ b/core/src/main/scala/org/apache/spark/deploy/PythonRunner.scala @@ -21,8 +21,8 @@ import java.io.File import java.net.URI import java.nio.file.Files -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.util.Try import org.apache.spark.{SparkConf, SparkUserAppException} diff --git a/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala b/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala index 7d356e8fc1c00..1a1a680c7faf5 100644 --- a/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala +++ b/core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala @@ -22,7 +22,7 @@ import java.util.jar.JarFile import java.util.logging.Level import java.util.zip.{ZipEntry, ZipOutputStream} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.io.{ByteStreams, Files} diff --git a/core/src/main/scala/org/apache/spark/deploy/RRunner.scala b/core/src/main/scala/org/apache/spark/deploy/RRunner.scala index b32f9ea3b4747..769c014ddff9d 100644 --- a/core/src/main/scala/org/apache/spark/deploy/RRunner.scala +++ b/core/src/main/scala/org/apache/spark/deploy/RRunner.scala @@ -20,7 +20,7 @@ package org.apache.spark.deploy import java.io._ import java.util.concurrent.{Semaphore, TimeUnit} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkCuratorUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkCuratorUtil.scala index b89ae1b35e693..5972f8ab86bed 100644 --- a/core/src/main/scala/org/apache/spark/deploy/SparkCuratorUtil.scala +++ b/core/src/main/scala/org/apache/spark/deploy/SparkCuratorUtil.scala @@ -17,7 +17,7 @@ package org.apache.spark.deploy -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.curator.framework.{CuratorFramework, CuratorFrameworkFactory} import org.apache.curator.retry.ExponentialBackoffRetry diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala index 5e4f7730fa810..78434f35a63c3 100644 --- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala +++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala @@ -23,9 +23,9 @@ import 
java.security.PrivilegedExceptionAction import java.text.DateFormat import java.util.{Date, Locale} -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.HashMap +import scala.jdk.CollectionConverters._ import scala.language.existentials import org.apache.hadoop.conf.Configuration diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala index 60253ed5fda1f..d5947649592ea 100644 --- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala +++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala @@ -28,8 +28,8 @@ import java.util.jar.JarInputStream import javax.ws.rs.core.UriBuilder import scala.annotation.tailrec -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.util.{Properties, Try} import org.apache.commons.lang3.StringUtils diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala index 867fc05cb8a12..c0a6b03d22fa2 100644 --- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala +++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala @@ -21,9 +21,9 @@ import java.io.{ByteArrayOutputStream, File, PrintStream} import java.nio.charset.StandardCharsets import java.util.{List => JList} -import scala.collection.JavaConverters._ import scala.collection.mutable.{ArrayBuffer, HashMap} import scala.io.Source +import scala.jdk.CollectionConverters._ import scala.util.Try import org.apache.spark.{SparkConf, SparkException, SparkUserAppException} diff --git a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala index 909f5ea937cee..9d25310167758 100644 --- a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala +++ b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala @@ -21,7 +21,7 @@ import java.util.concurrent.ExecutionException import javax.servlet.{DispatcherType, Filter, FilterChain, FilterConfig, ServletException, ServletRequest, ServletResponse} import javax.servlet.http.{HttpServletRequest, HttpServletResponse} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.codahale.metrics.{Counter, MetricRegistry, Timer} import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache, RemovalListener, RemovalNotification} diff --git a/core/src/main/scala/org/apache/spark/deploy/history/EventLogFileCompactor.scala b/core/src/main/scala/org/apache/spark/deploy/history/EventLogFileCompactor.scala index 27040e83533ff..07a873ac704dc 100644 --- a/core/src/main/scala/org/apache/spark/deploy/history/EventLogFileCompactor.scala +++ b/core/src/main/scala/org/apache/spark/deploy/history/EventLogFileCompactor.scala @@ -21,7 +21,7 @@ import java.io.IOException import java.net.URI import java.util.ServiceLoader -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{FileStatus, FileSystem, Path} diff --git a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala index 7b226137070f9..bc2168d172607 100644 --- 
a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala +++ b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala @@ -23,9 +23,9 @@ import java.util.{Date, NoSuchElementException, ServiceLoader} import java.util.concurrent.{ConcurrentHashMap, ExecutorService, TimeUnit} import java.util.zip.ZipOutputStream -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.io.{Codec, Source} +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import scala.xml.Node diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HybridStore.scala b/core/src/main/scala/org/apache/spark/deploy/history/HybridStore.scala index f89c41471c2d3..19ccbbb89b09a 100644 --- a/core/src/main/scala/org/apache/spark/deploy/history/HybridStore.scala +++ b/core/src/main/scala/org/apache/spark/deploy/history/HybridStore.scala @@ -21,7 +21,7 @@ import java.util.Collection import java.util.concurrent.ConcurrentHashMap import java.util.concurrent.atomic.AtomicBoolean -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.collect.Lists; diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala index ded816b992db8..d02f09e183596 100644 --- a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala +++ b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala @@ -19,7 +19,7 @@ package org.apache.spark.deploy.master import java.nio.ByteBuffer -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.curator.framework.CuratorFramework diff --git a/core/src/main/scala/org/apache/spark/deploy/security/HadoopFSDelegationTokenProvider.scala b/core/src/main/scala/org/apache/spark/deploy/security/HadoopFSDelegationTokenProvider.scala index 6ec281f5b4406..9242fe82d2495 100644 --- a/core/src/main/scala/org/apache/spark/deploy/security/HadoopFSDelegationTokenProvider.scala +++ b/core/src/main/scala/org/apache/spark/deploy/security/HadoopFSDelegationTokenProvider.scala @@ -17,7 +17,7 @@ package org.apache.spark.deploy.security -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Try import scala.util.control.NonFatal diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala index 8240bd6d2f438..9a4a037e35c66 100644 --- a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala +++ b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala @@ -19,8 +19,8 @@ package org.apache.spark.deploy.worker import java.io.{File, FileOutputStream, InputStream, IOException} -import scala.collection.JavaConverters._ import scala.collection.Map +import scala.jdk.CollectionConverters._ import org.apache.spark.SecurityManager import org.apache.spark.deploy.Command diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala index 910c27fc71044..e7fca402a8870 100644 --- a/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala +++ b/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala @@ -21,7 +21,7 @@ import java.io._ import java.net.URI import 
java.nio.charset.StandardCharsets -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.io.Files diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala index bf5d889a87cb0..5547593a28f5e 100644 --- a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala +++ b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala @@ -20,7 +20,7 @@ package org.apache.spark.deploy.worker import java.io._ import java.nio.charset.StandardCharsets -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.io.Files diff --git a/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala b/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala index 728ff01a21dbd..476618cf47ed1 100644 --- a/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala +++ b/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala @@ -20,7 +20,7 @@ package org.apache.spark.errors import java.io.{File, IOException} import java.util.concurrent.TimeoutException -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala index dfd749858fef9..6b1e07151b374 100644 --- a/core/src/main/scala/org/apache/spark/executor/Executor.scala +++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala @@ -29,10 +29,10 @@ import java.util.concurrent.locks.ReentrantLock import javax.annotation.concurrent.GuardedBy import javax.ws.rs.core.UriBuilder -import scala.collection.JavaConverters._ import scala.collection.immutable import scala.collection.mutable.{ArrayBuffer, HashMap, WrappedArray} import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import com.google.common.cache.{Cache, CacheBuilder, RemovalListener, RemovalNotification} diff --git a/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala index d5077346516dd..0df1b67295a64 100644 --- a/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala +++ b/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala @@ -19,7 +19,7 @@ package org.apache.spark.executor import java.util.concurrent.ThreadPoolExecutor -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.codahale.metrics.{Gauge, MetricRegistry} import org.apache.hadoop.fs.FileSystem diff --git a/core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala b/core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala index 78b39b0cbda68..dbfd02b7d3b34 100644 --- a/core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala +++ b/core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala @@ -19,8 +19,8 @@ package org.apache.spark.executor import java.util.concurrent.CopyOnWriteArrayList -import scala.collection.JavaConverters._ import scala.collection.mutable.{ArrayBuffer, LinkedHashMap} +import scala.jdk.CollectionConverters._ import org.apache.spark._ import org.apache.spark.annotation.DeveloperApi diff --git a/core/src/main/scala/org/apache/spark/input/PortableDataStream.scala b/core/src/main/scala/org/apache/spark/input/PortableDataStream.scala index 
57210da6a48eb..95ac87730e508 100644 --- a/core/src/main/scala/org/apache/spark/input/PortableDataStream.scala +++ b/core/src/main/scala/org/apache/spark/input/PortableDataStream.scala @@ -19,7 +19,7 @@ package org.apache.spark.input import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.io.{ByteStreams, Closeables} import org.apache.hadoop.conf.Configuration diff --git a/core/src/main/scala/org/apache/spark/input/WholeTextFileInputFormat.scala b/core/src/main/scala/org/apache/spark/input/WholeTextFileInputFormat.scala index 692deb7a3282f..d2954280a7117 100644 --- a/core/src/main/scala/org/apache/spark/input/WholeTextFileInputFormat.scala +++ b/core/src/main/scala/org/apache/spark/input/WholeTextFileInputFormat.scala @@ -17,7 +17,7 @@ package org.apache.spark.input -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path import org.apache.hadoop.io.Text diff --git a/core/src/main/scala/org/apache/spark/internal/plugin/PluginContainer.scala b/core/src/main/scala/org/apache/spark/internal/plugin/PluginContainer.scala index f78ec250f7173..261e016ce9bf0 100644 --- a/core/src/main/scala/org/apache/spark/internal/plugin/PluginContainer.scala +++ b/core/src/main/scala/org/apache/spark/internal/plugin/PluginContainer.scala @@ -17,7 +17,7 @@ package org.apache.spark.internal.plugin -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.{Either, Left, Right} import org.apache.spark.{SparkContext, SparkEnv, TaskFailedReason} diff --git a/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala b/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala index 4216b2627309e..93564a38f3f29 100644 --- a/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala +++ b/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala @@ -20,7 +20,7 @@ package org.apache.spark.launcher import java.io.File import java.util.{HashMap => JHashMap, List => JList, Map => JMap} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.deploy.Command diff --git a/core/src/main/scala/org/apache/spark/metrics/ExecutorMetricType.scala b/core/src/main/scala/org/apache/spark/metrics/ExecutorMetricType.scala index 1e80eb66dc520..50b7ddcb13ae1 100644 --- a/core/src/main/scala/org/apache/spark/metrics/ExecutorMetricType.scala +++ b/core/src/main/scala/org/apache/spark/metrics/ExecutorMetricType.scala @@ -19,8 +19,8 @@ package org.apache.spark.metrics import java.lang.management.{BufferPoolMXBean, ManagementFactory} import javax.management.ObjectName -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkEnv import org.apache.spark.executor.ProcfsMetricsGetter diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala index 4b53aad6fc48b..195c5b0f47f57 100644 --- a/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala +++ b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala @@ -20,8 +20,8 @@ package org.apache.spark.metrics import java.io.{FileInputStream, InputStream} import java.util.Properties -import scala.collection.JavaConverters._ import scala.collection.mutable +import 
scala.jdk.CollectionConverters._ import scala.util.matching.Regex import org.apache.spark.SparkConf diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/PrometheusServlet.scala b/core/src/main/scala/org/apache/spark/metrics/sink/PrometheusServlet.scala index c087ee7c000c3..2b672f89686e5 100644 --- a/core/src/main/scala/org/apache/spark/metrics/sink/PrometheusServlet.scala +++ b/core/src/main/scala/org/apache/spark/metrics/sink/PrometheusServlet.scala @@ -48,7 +48,7 @@ private[spark] class PrometheusServlet( } def getMetricsSnapshot(request: HttpServletRequest): String = { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val gaugesLabel = """{type="gauges"}""" val countersLabel = """{type="counters"}""" diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/StatsdReporter.scala b/core/src/main/scala/org/apache/spark/metrics/sink/StatsdReporter.scala index ba75aa1c65cc6..877f04b1adc01 100644 --- a/core/src/main/scala/org/apache/spark/metrics/sink/StatsdReporter.scala +++ b/core/src/main/scala/org/apache/spark/metrics/sink/StatsdReporter.scala @@ -23,7 +23,7 @@ import java.nio.charset.StandardCharsets.UTF_8 import java.util.SortedMap import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success, Try} import com.codahale.metrics._ diff --git a/core/src/main/scala/org/apache/spark/network/netty/NettyBlockRpcServer.scala b/core/src/main/scala/org/apache/spark/network/netty/NettyBlockRpcServer.scala index 16ad848a32648..aa0da153f7fa3 100644 --- a/core/src/main/scala/org/apache/spark/network/netty/NettyBlockRpcServer.scala +++ b/core/src/main/scala/org/apache/spark/network/netty/NettyBlockRpcServer.scala @@ -19,7 +19,7 @@ package org.apache.spark.network.netty import java.nio.ByteBuffer -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.spark.SparkException diff --git a/core/src/main/scala/org/apache/spark/network/netty/NettyBlockTransferService.scala b/core/src/main/scala/org/apache/spark/network/netty/NettyBlockTransferService.scala index 2bd7be8ebd9bd..f54383db4c0e5 100644 --- a/core/src/main/scala/org/apache/spark/network/netty/NettyBlockTransferService.scala +++ b/core/src/main/scala/org/apache/spark/network/netty/NettyBlockTransferService.scala @@ -21,8 +21,8 @@ import java.io.IOException import java.nio.ByteBuffer import java.util.{HashMap => JHashMap, Map => JMap} -import scala.collection.JavaConverters._ import scala.concurrent.{Future, Promise} +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.util.{Success, Try} diff --git a/core/src/main/scala/org/apache/spark/network/netty/SparkTransportConf.scala b/core/src/main/scala/org/apache/spark/network/netty/SparkTransportConf.scala index c9103045260f2..812d57ac67cb5 100644 --- a/core/src/main/scala/org/apache/spark/network/netty/SparkTransportConf.scala +++ b/core/src/main/scala/org/apache/spark/network/netty/SparkTransportConf.scala @@ -17,7 +17,7 @@ package org.apache.spark.network.netty -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkConf import org.apache.spark.network.util.{ConfigProvider, NettyUtils, TransportConf} diff --git a/core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala b/core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala index 119fdae531f22..4f9253a70c818 100644 --- 
a/core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala @@ -21,7 +21,7 @@ import java.io.{FileNotFoundException, IOException} import java.text.SimpleDateFormat import java.util.{Date, Locale} -import scala.collection.JavaConverters.asScalaBufferConverter +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.hadoop.conf.{Configurable, Configuration} diff --git a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala index 4dd296787a1bc..ba4402f5e88df 100644 --- a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala +++ b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala @@ -21,8 +21,8 @@ import java.nio.ByteBuffer import java.util.{HashMap => JHashMap} import scala.collection.{mutable, Map} -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus diff --git a/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala index 7e121e9a7ef2c..0359f6c6d3868 100644 --- a/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala @@ -26,10 +26,10 @@ import java.io.PrintWriter import java.util.StringTokenizer import java.util.concurrent.atomic.AtomicReference -import scala.collection.JavaConverters._ import scala.collection.Map import scala.collection.mutable.ArrayBuffer import scala.io.Source +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.spark.{Partition, TaskContext} diff --git a/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala index a9061b3fdc939..27dfdb4daa2c4 100644 --- a/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala @@ -19,8 +19,8 @@ package org.apache.spark.rdd import java.util.{HashMap => JHashMap} -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.spark.Dependency diff --git a/core/src/main/scala/org/apache/spark/resource/ExecutorResourceRequests.scala b/core/src/main/scala/org/apache/spark/resource/ExecutorResourceRequests.scala index 28ff79ce1f44d..37bafa355fc7f 100644 --- a/core/src/main/scala/org/apache/spark/resource/ExecutorResourceRequests.scala +++ b/core/src/main/scala/org/apache/spark/resource/ExecutorResourceRequests.scala @@ -20,7 +20,7 @@ package org.apache.spark.resource import java.util.{Map => JMap} import java.util.concurrent.ConcurrentHashMap -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.{Evolving, Since} import org.apache.spark.network.util.JavaUtils diff --git a/core/src/main/scala/org/apache/spark/resource/ResourceProfile.scala b/core/src/main/scala/org/apache/spark/resource/ResourceProfile.scala index 60c541f5b7e52..8a145223a434b 100644 --- a/core/src/main/scala/org/apache/spark/resource/ResourceProfile.scala +++ b/core/src/main/scala/org/apache/spark/resource/ResourceProfile.scala @@ -21,8 +21,8 @@ import java.util.{Map => JMap} import java.util.concurrent.atomic.AtomicInteger import 
javax.annotation.concurrent.GuardedBy -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark.{SparkConf, SparkContext, SparkEnv, SparkException} import org.apache.spark.annotation.{Evolving, Since} diff --git a/core/src/main/scala/org/apache/spark/resource/ResourceProfileBuilder.scala b/core/src/main/scala/org/apache/spark/resource/ResourceProfileBuilder.scala index 584ff32b4475a..4ada220b3cf82 100644 --- a/core/src/main/scala/org/apache/spark/resource/ResourceProfileBuilder.scala +++ b/core/src/main/scala/org/apache/spark/resource/ResourceProfileBuilder.scala @@ -20,7 +20,7 @@ package org.apache.spark.resource import java.util.{Map => JMap} import java.util.concurrent.ConcurrentHashMap -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.{Evolving, Since} diff --git a/core/src/main/scala/org/apache/spark/resource/TaskResourceRequests.scala b/core/src/main/scala/org/apache/spark/resource/TaskResourceRequests.scala index 1d5fc73a152f6..e623a186551b5 100644 --- a/core/src/main/scala/org/apache/spark/resource/TaskResourceRequests.scala +++ b/core/src/main/scala/org/apache/spark/resource/TaskResourceRequests.scala @@ -20,7 +20,7 @@ package org.apache.spark.resource import java.util.{Map => JMap} import java.util.concurrent.ConcurrentHashMap -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.{Evolving, Since} import org.apache.spark.resource.ResourceProfile._ diff --git a/core/src/main/scala/org/apache/spark/rpc/netty/Dispatcher.scala b/core/src/main/scala/org/apache/spark/rpc/netty/Dispatcher.scala index 14198743c4801..0e35842fece92 100644 --- a/core/src/main/scala/org/apache/spark/rpc/netty/Dispatcher.scala +++ b/core/src/main/scala/org/apache/spark/rpc/netty/Dispatcher.scala @@ -20,8 +20,8 @@ package org.apache.spark.rpc.netty import java.util.concurrent.{ConcurrentHashMap, ConcurrentMap, CountDownLatch} import javax.annotation.concurrent.GuardedBy -import scala.collection.JavaConverters._ import scala.concurrent.Promise +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.spark.{SparkEnv, SparkException} diff --git a/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala b/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala index b52a0f2f999dd..efd8fecb974e8 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala @@ -20,8 +20,8 @@ package org.apache.spark.scheduler import java.net.URI import java.util.Properties -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration diff --git a/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala index 5696ef13775b9..4b3fd580341b0 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala @@ -17,9 +17,9 @@ package org.apache.spark.scheduler -import scala.collection.JavaConverters._ import scala.collection.immutable.Set import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet} +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import 
org.apache.hadoop.mapred.{FileInputFormat, JobConf} diff --git a/core/src/main/scala/org/apache/spark/scheduler/LiveListenerBus.scala b/core/src/main/scala/org/apache/spark/scheduler/LiveListenerBus.scala index cb3fe41f71cda..ca12835c32bcd 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/LiveListenerBus.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/LiveListenerBus.scala @@ -21,8 +21,8 @@ import java.util.{List => JList} import java.util.concurrent._ import java.util.concurrent.atomic.AtomicBoolean -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.util.DynamicVariable diff --git a/core/src/main/scala/org/apache/spark/scheduler/Pool.scala b/core/src/main/scala/org/apache/spark/scheduler/Pool.scala index de4c9d39ddb51..97edbb08c7c09 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/Pool.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/Pool.scala @@ -19,8 +19,8 @@ package org.apache.spark.scheduler import java.util.concurrent.{ConcurrentHashMap, ConcurrentLinkedQueue} -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.spark.internal.Logging import org.apache.spark.scheduler.SchedulingMode.SchedulingMode diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala index 753736735e40c..6e6507782a49e 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala @@ -22,9 +22,9 @@ import java.nio.ByteBuffer import java.nio.charset.StandardCharsets import java.util.Properties -import scala.collection.JavaConverters._ import scala.collection.immutable import scala.collection.mutable.{ArrayBuffer, HashMap, Map} +import scala.jdk.CollectionConverters._ import org.apache.spark.{JobArtifactSet, JobArtifactState} import org.apache.spark.resource.ResourceInformation diff --git a/core/src/main/scala/org/apache/spark/scheduler/dynalloc/ExecutorMonitor.scala b/core/src/main/scala/org/apache/spark/scheduler/dynalloc/ExecutorMonitor.scala index 34878b8e56150..c389b0c988f4d 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/dynalloc/ExecutorMonitor.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/dynalloc/ExecutorMonitor.scala @@ -20,8 +20,8 @@ package org.apache.spark.scheduler.dynalloc import java.util.concurrent.{ConcurrentHashMap, TimeUnit} import java.util.concurrent.atomic.AtomicLong -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark._ import org.apache.spark.errors.SparkCoreErrors diff --git a/core/src/main/scala/org/apache/spark/security/CryptoStreamUtils.scala b/core/src/main/scala/org/apache/spark/security/CryptoStreamUtils.scala index 4ebb7b0defd7f..20a4147f7d809 100644 --- a/core/src/main/scala/org/apache/spark/security/CryptoStreamUtils.scala +++ b/core/src/main/scala/org/apache/spark/security/CryptoStreamUtils.scala @@ -24,7 +24,7 @@ import java.util.concurrent.TimeUnit import javax.crypto.KeyGenerator import javax.crypto.spec.{IvParameterSpec, SecretKeySpec} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.io.ByteStreams import org.apache.commons.crypto.random._ diff --git 
a/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala index f75942cbb879f..1aa07a79cc19b 100644 --- a/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala +++ b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala @@ -23,8 +23,8 @@ import java.nio.ByteBuffer import java.util.Locale import javax.annotation.Nullable -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.util.Properties import scala.util.control.NonFatal @@ -735,7 +735,7 @@ private class JavaIterableWrapperSerializer private object JavaIterableWrapperSerializer extends Logging { // The class returned by JavaConverters.asJava // (scala.collection.convert.Wrappers$IterableWrapper). - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val wrapperClass = Seq(1).asJava.getClass // Get the underlying method so we can use it to get the Scala collection for serialization. diff --git a/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleManager.scala b/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleManager.scala index 46aca07ce43f6..344020935f211 100644 --- a/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleManager.scala +++ b/core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleManager.scala @@ -19,7 +19,7 @@ package org.apache.spark.shuffle.sort import java.util.concurrent.ConcurrentHashMap -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark._ import org.apache.spark.internal.Logging diff --git a/core/src/main/scala/org/apache/spark/status/AppStatusListener.scala b/core/src/main/scala/org/apache/spark/status/AppStatusListener.scala index 15815e5539fda..0ae053d1ce1c0 100644 --- a/core/src/main/scala/org/apache/spark/status/AppStatusListener.scala +++ b/core/src/main/scala/org/apache/spark/status/AppStatusListener.scala @@ -20,9 +20,9 @@ package org.apache.spark.status import java.util.Date import java.util.concurrent.ConcurrentHashMap -import scala.collection.JavaConverters._ import scala.collection.mutable.{ArrayBuffer, HashMap} import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark._ import org.apache.spark.executor.{ExecutorMetrics, TaskMetrics} diff --git a/core/src/main/scala/org/apache/spark/status/AppStatusStore.scala b/core/src/main/scala/org/apache/spark/status/AppStatusStore.scala index eaa7b7b987347..03501d9d7407c 100644 --- a/core/src/main/scala/org/apache/spark/status/AppStatusStore.scala +++ b/core/src/main/scala/org/apache/spark/status/AppStatusStore.scala @@ -21,8 +21,8 @@ import java.io.File import java.io.IOException import java.util.{List => JList} -import scala.collection.JavaConverters._ import scala.collection.mutable.HashMap +import scala.jdk.CollectionConverters._ import org.apache.spark.{JobExecutionStatus, SparkConf, SparkContext} import org.apache.spark.internal.Logging diff --git a/core/src/main/scala/org/apache/spark/status/ElementTrackingStore.scala b/core/src/main/scala/org/apache/spark/status/ElementTrackingStore.scala index c276f4f2064b8..2bc8f4de63d2e 100644 --- a/core/src/main/scala/org/apache/spark/status/ElementTrackingStore.scala +++ b/core/src/main/scala/org/apache/spark/status/ElementTrackingStore.scala @@ -21,8 +21,8 @@ import java.util.Collection import java.util.concurrent.{ExecutorService, 
TimeUnit} import java.util.concurrent.atomic.AtomicBoolean -import scala.collection.JavaConverters._ import scala.collection.mutable.{HashMap, ListBuffer} +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkConf import org.apache.spark.internal.config.Status._ diff --git a/core/src/main/scala/org/apache/spark/status/KVUtils.scala b/core/src/main/scala/org/apache/spark/status/KVUtils.scala index 0dd40962309a4..14aa7db6e06b9 100644 --- a/core/src/main/scala/org/apache/spark/status/KVUtils.scala +++ b/core/src/main/scala/org/apache/spark/status/KVUtils.scala @@ -21,7 +21,7 @@ import java.io.File import java.nio.file.Files import scala.annotation.meta.getter -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.{classTag, ClassTag} import com.fasterxml.jackson.annotation.JsonInclude diff --git a/core/src/main/scala/org/apache/spark/status/LiveEntity.scala b/core/src/main/scala/org/apache/spark/status/LiveEntity.scala index ebea11fdca07b..eb51cb9f16158 100644 --- a/core/src/main/scala/org/apache/spark/status/LiveEntity.scala +++ b/core/src/main/scala/org/apache/spark/status/LiveEntity.scala @@ -20,9 +20,9 @@ package org.apache.spark.status import java.util.Date import java.util.concurrent.atomic.AtomicInteger -import scala.collection.JavaConverters._ import scala.collection.immutable.{HashSet, TreeSet} import scala.collection.mutable.HashMap +import scala.jdk.CollectionConverters._ import org.apache.spark.JobExecutionStatus import org.apache.spark.executor.{ExecutorMetrics, TaskMetrics} diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/StagesResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/StagesResource.scala index b23d0770a3601..e199dd94e610d 100644 --- a/core/src/main/scala/org/apache/spark/status/api/v1/StagesResource.scala +++ b/core/src/main/scala/org/apache/spark/status/api/v1/StagesResource.scala @@ -20,7 +20,7 @@ import java.util.{HashMap, List => JList, Locale} import javax.ws.rs.{NotFoundException => _, _} import javax.ws.rs.core.{Context, MediaType, MultivaluedMap, UriInfo} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.status.api.v1.TaskStatus._ import org.apache.spark.ui.UIUtils diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationEnvironmentInfoWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationEnvironmentInfoWrapperSerializer.scala index fb39d9aad2ac6..baedccd9b9223 100644 --- a/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationEnvironmentInfoWrapperSerializer.scala +++ b/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationEnvironmentInfoWrapperSerializer.scala @@ -17,7 +17,7 @@ package org.apache.spark.status.protobuf -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.resource.{ExecutorResourceRequest, TaskResourceRequest} import org.apache.spark.status.ApplicationEnvironmentInfoWrapper diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationInfoWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationInfoWrapperSerializer.scala index 6474f5eecf6f1..cf3159c7b481f 100644 --- a/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationInfoWrapperSerializer.scala +++ b/core/src/main/scala/org/apache/spark/status/protobuf/ApplicationInfoWrapperSerializer.scala @@ -19,7 +19,7 @@ package org.apache.spark.status.protobuf 
import java.util.Date -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.status.ApplicationInfoWrapper import org.apache.spark.status.api.v1.{ApplicationAttemptInfo, ApplicationInfo} diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorSummaryWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorSummaryWrapperSerializer.scala index 47849322217fe..97daf37995dd5 100644 --- a/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorSummaryWrapperSerializer.scala +++ b/core/src/main/scala/org/apache/spark/status/protobuf/ExecutorSummaryWrapperSerializer.scala @@ -19,7 +19,7 @@ package org.apache.spark.status.protobuf import java.util.Date -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.resource.ResourceInformation import org.apache.spark.status.ExecutorSummaryWrapper diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/JobDataWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/JobDataWrapperSerializer.scala index 11f1b7070cc3c..41d1fee160885 100644 --- a/core/src/main/scala/org/apache/spark/status/protobuf/JobDataWrapperSerializer.scala +++ b/core/src/main/scala/org/apache/spark/status/protobuf/JobDataWrapperSerializer.scala @@ -19,7 +19,7 @@ package org.apache.spark.status.protobuf import java.util.Date -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.status.JobDataWrapper import org.apache.spark.status.api.v1.JobData diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/KVStoreProtobufSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/KVStoreProtobufSerializer.scala index 16646f14726a9..d87c9e6d59a75 100644 --- a/core/src/main/scala/org/apache/spark/status/protobuf/KVStoreProtobufSerializer.scala +++ b/core/src/main/scala/org/apache/spark/status/protobuf/KVStoreProtobufSerializer.scala @@ -20,7 +20,7 @@ package org.apache.spark.status.protobuf import java.lang.reflect.ParameterizedType import java.util.ServiceLoader -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.status.KVUtils.KVStoreScalaSerializer diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/PoolDataSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/PoolDataSerializer.scala index a47308fc74abc..c3c28cf49f5da 100644 --- a/core/src/main/scala/org/apache/spark/status/protobuf/PoolDataSerializer.scala +++ b/core/src/main/scala/org/apache/spark/status/protobuf/PoolDataSerializer.scala @@ -17,7 +17,7 @@ package org.apache.spark.status.protobuf -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.status.PoolData import org.apache.spark.status.protobuf.Utils.{getStringField, setStringField} diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/ProcessSummaryWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/ProcessSummaryWrapperSerializer.scala index b21d6540738cd..600f7f57bcf1f 100644 --- a/core/src/main/scala/org/apache/spark/status/protobuf/ProcessSummaryWrapperSerializer.scala +++ b/core/src/main/scala/org/apache/spark/status/protobuf/ProcessSummaryWrapperSerializer.scala @@ -19,7 +19,7 @@ package org.apache.spark.status.protobuf import java.util.Date -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import 
org.apache.spark.status.ProcessSummaryWrapper import org.apache.spark.status.api.v1.ProcessSummary diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/RDDOperationGraphWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/RDDOperationGraphWrapperSerializer.scala index 6d24d64c43b7c..55e5d42b36eb0 100644 --- a/core/src/main/scala/org/apache/spark/status/protobuf/RDDOperationGraphWrapperSerializer.scala +++ b/core/src/main/scala/org/apache/spark/status/protobuf/RDDOperationGraphWrapperSerializer.scala @@ -17,7 +17,7 @@ package org.apache.spark.status.protobuf -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.rdd.DeterministicLevel import org.apache.spark.status.{RDDOperationClusterWrapper, RDDOperationGraphWrapper} diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/RDDStorageInfoWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/RDDStorageInfoWrapperSerializer.scala index f58ae0fb7f01a..a2410ad31475a 100644 --- a/core/src/main/scala/org/apache/spark/status/protobuf/RDDStorageInfoWrapperSerializer.scala +++ b/core/src/main/scala/org/apache/spark/status/protobuf/RDDStorageInfoWrapperSerializer.scala @@ -17,7 +17,7 @@ package org.apache.spark.status.protobuf -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.status.RDDStorageInfoWrapper import org.apache.spark.status.api.v1.{RDDDataDistribution, RDDPartitionInfo, RDDStorageInfo} diff --git a/core/src/main/scala/org/apache/spark/status/protobuf/StageDataWrapperSerializer.scala b/core/src/main/scala/org/apache/spark/status/protobuf/StageDataWrapperSerializer.scala index 68845d6d1b890..8793ca7a12c78 100644 --- a/core/src/main/scala/org/apache/spark/status/protobuf/StageDataWrapperSerializer.scala +++ b/core/src/main/scala/org/apache/spark/status/protobuf/StageDataWrapperSerializer.scala @@ -19,7 +19,7 @@ package org.apache.spark.status.protobuf import java.util.Date -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.commons.collections4.MapUtils diff --git a/core/src/main/scala/org/apache/spark/storage/BlockInfoManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockInfoManager.scala index 45ebb6eafa69f..8dccfbc5e2d80 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockInfoManager.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockInfoManager.scala @@ -22,8 +22,8 @@ import java.util.Collections import java.util.concurrent.ConcurrentHashMap import java.util.concurrent.locks.{Condition, Lock} -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import com.google.common.collect.{ConcurrentHashMultiset, ImmutableMultiset} diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala index 05d57c67576a5..81933744472ee 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala @@ -24,11 +24,11 @@ import java.nio.channels.Channels import java.util.Collections import java.util.concurrent.{CompletableFuture, ConcurrentHashMap, TimeUnit} -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.HashMap import scala.concurrent.{ExecutionContext, Future} import 
scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.util.{Failure, Random, Success, Try} import scala.util.control.NonFatal diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerDecommissioner.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerDecommissioner.scala index cbac3fd1a9945..686003e2c51dc 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockManagerDecommissioner.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerDecommissioner.scala @@ -20,8 +20,8 @@ package org.apache.spark.storage import java.io.IOException import java.util.concurrent.atomic.AtomicInteger -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.spark._ diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala index 19de4544bea32..81a6bb5d45c3e 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala @@ -21,9 +21,9 @@ import java.io.IOException import java.util.{HashMap => JHashMap} import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.concurrent.{ExecutionContext, Future, TimeoutException} +import scala.jdk.CollectionConverters._ import scala.util.Random import scala.util.control.NonFatal diff --git a/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala b/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala index b79286c3f113c..dafa67b091d71 100644 --- a/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala +++ b/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala @@ -21,9 +21,9 @@ import java.io.OutputStream import java.nio.ByteBuffer import java.util.LinkedHashMap -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.util.control.NonFatal diff --git a/core/src/main/scala/org/apache/spark/ui/GraphUIData.scala b/core/src/main/scala/org/apache/spark/ui/GraphUIData.scala index ab8757ff9d1f2..6ddc828bd806f 100644 --- a/core/src/main/scala/org/apache/spark/ui/GraphUIData.scala +++ b/core/src/main/scala/org/apache/spark/ui/GraphUIData.scala @@ -20,8 +20,8 @@ package org.apache.spark.ui import java.{util => ju} import java.lang.{Long => JLong} -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.xml.{Node, Unparsed} /** diff --git a/core/src/main/scala/org/apache/spark/ui/HttpSecurityFilter.scala b/core/src/main/scala/org/apache/spark/ui/HttpSecurityFilter.scala index bd818dce7e167..ec37cd6cc5e69 100644 --- a/core/src/main/scala/org/apache/spark/ui/HttpSecurityFilter.scala +++ b/core/src/main/scala/org/apache/spark/ui/HttpSecurityFilter.scala @@ -21,7 +21,7 @@ import java.util.{Enumeration, Map => JMap} import javax.servlet._ import javax.servlet.http.{HttpServletRequest, HttpServletRequestWrapper, HttpServletResponse} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.commons.text.StringEscapeUtils diff --git 
a/core/src/main/scala/org/apache/spark/ui/PagedTable.scala b/core/src/main/scala/org/apache/spark/ui/PagedTable.scala index 7155726ed427e..99052628f6e76 100644 --- a/core/src/main/scala/org/apache/spark/ui/PagedTable.scala +++ b/core/src/main/scala/org/apache/spark/ui/PagedTable.scala @@ -21,7 +21,7 @@ import java.net.{URLDecoder, URLEncoder} import java.nio.charset.StandardCharsets.UTF_8 import javax.servlet.http.HttpServletRequest -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.xml.{Node, Unparsed} import com.google.common.base.Splitter diff --git a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala index 8874563d76f46..ccddbd1c608dd 100644 --- a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala +++ b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala @@ -26,7 +26,7 @@ import java.util.{Date, Locale, TimeZone} import javax.servlet.http.HttpServletRequest import javax.ws.rs.core.{MediaType, MultivaluedMap, Response} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import scala.xml._ import scala.xml.transform.{RewriteRule, RuleTransformer} diff --git a/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala b/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala index bc55a44fc3c2e..074dab847265c 100644 --- a/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala +++ b/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala @@ -20,8 +20,8 @@ package org.apache.spark.util import java.io.Serializable import java.util.{PriorityQueue => JPriorityQueue} -import scala.collection.JavaConverters._ import scala.collection.mutable.Growable +import scala.jdk.CollectionConverters._ /** * Bounded priority queue. 
This class wraps the original PriorityQueue diff --git a/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala index fb2ce44edf55c..b522e684148a5 100644 --- a/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala +++ b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala @@ -21,8 +21,8 @@ import java.io.{ByteArrayInputStream, ByteArrayOutputStream} import java.lang.invoke.{MethodHandleInfo, SerializedLambda} import java.lang.reflect.{Field, Modifier} -import scala.collection.JavaConverters._ import scala.collection.mutable.{Map, Set, Stack} +import scala.jdk.CollectionConverters._ import org.apache.commons.lang3.{ClassUtils, JavaVersion, SystemUtils} import org.apache.xbean.asm9.{ClassReader, ClassVisitor, Handle, MethodVisitor, Type} diff --git a/core/src/main/scala/org/apache/spark/util/JsonProtocol.scala b/core/src/main/scala/org/apache/spark/util/JsonProtocol.scala index 025e5d5bac94b..8654b658809f9 100644 --- a/core/src/main/scala/org/apache/spark/util/JsonProtocol.scala +++ b/core/src/main/scala/org/apache/spark/util/JsonProtocol.scala @@ -19,8 +19,8 @@ package org.apache.spark.util import java.util.{Properties, UUID} -import scala.collection.JavaConverters._ import scala.collection.Map +import scala.jdk.CollectionConverters._ import com.fasterxml.jackson.core.JsonGenerator import com.fasterxml.jackson.databind.JsonNode diff --git a/core/src/main/scala/org/apache/spark/util/ListenerBus.scala b/core/src/main/scala/org/apache/spark/util/ListenerBus.scala index 3520fa870c91b..0e4495479e0f7 100644 --- a/core/src/main/scala/org/apache/spark/util/ListenerBus.scala +++ b/core/src/main/scala/org/apache/spark/util/ListenerBus.scala @@ -19,7 +19,7 @@ package org.apache.spark.util import java.util.concurrent.CopyOnWriteArrayList -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.util.control.NonFatal diff --git a/core/src/main/scala/org/apache/spark/util/SignalUtils.scala b/core/src/main/scala/org/apache/spark/util/SignalUtils.scala index 36ecb42352db1..775dc44fc1a13 100644 --- a/core/src/main/scala/org/apache/spark/util/SignalUtils.scala +++ b/core/src/main/scala/org/apache/spark/util/SignalUtils.scala @@ -19,7 +19,7 @@ package org.apache.spark.util import java.util.Collections -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.commons.lang3.SystemUtils import org.slf4j.Logger diff --git a/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala b/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala index 9c860061b5862..b0fb339465205 100644 --- a/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala +++ b/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala @@ -21,8 +21,8 @@ import java.util.Map.Entry import java.util.Set import java.util.concurrent.ConcurrentHashMap -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark.internal.Logging diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala index a85248e5f710e..2ec9a875d028b 100644 --- a/core/src/main/scala/org/apache/spark/util/Utils.scala +++ b/core/src/main/scala/org/apache/spark/util/Utils.scala @@ -34,10 +34,10 @@ import java.util.concurrent.TimeUnit.NANOSECONDS import java.util.zip.{GZIPInputStream, ZipInputStream} import 
scala.annotation.tailrec -import scala.collection.JavaConverters._ import scala.collection.Map import scala.collection.mutable.ArrayBuffer import scala.io.Source +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.util.{Failure, Success, Try} import scala.util.control.{ControlThrowable, NonFatal} diff --git a/core/src/main/scala/org/apache/spark/util/collection/Utils.scala b/core/src/main/scala/org/apache/spark/util/collection/Utils.scala index b2ced00e8d6c5..151d6c8268d48 100644 --- a/core/src/main/scala/org/apache/spark/util/collection/Utils.scala +++ b/core/src/main/scala/org/apache/spark/util/collection/Utils.scala @@ -19,8 +19,8 @@ package org.apache.spark.util.collection import java.util.Collections -import scala.collection.JavaConverters._ import scala.collection.immutable +import scala.jdk.CollectionConverters._ import com.google.common.collect.{Iterators => GuavaIterators, Ordering => GuavaOrdering} diff --git a/core/src/test/java/test/org/apache/spark/JavaAPISuite.java b/core/src/test/java/test/org/apache/spark/JavaAPISuite.java index 1c63800982a18..8d078f35b151f 100644 --- a/core/src/test/java/test/org/apache/spark/JavaAPISuite.java +++ b/core/src/test/java/test/org/apache/spark/JavaAPISuite.java @@ -42,7 +42,7 @@ import scala.Tuple2; import scala.Tuple3; import scala.Tuple4; -import scala.collection.JavaConverters; +import scala.jdk.CollectionConverters; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; @@ -1268,7 +1268,7 @@ public void combineByKey() { Partitioner defaultPartitioner = Partitioner.defaultPartitioner( combinedRDD.rdd(), - JavaConverters.collectionAsScalaIterableConverter( + CollectionConverters.CollectionHasAsScala( Collections.<RDD<?>>emptyList()).asScala().toSeq()); combinedRDD = originalRDD.keyBy(keyFunction) .combineByKey( diff --git a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala index 450ff01921a83..dde30aee82878 100644 --- a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala +++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala @@ -20,8 +20,8 @@ package org.apache.spark import java.util.{Collections => JCollections, HashSet => JHashSet} import java.util.concurrent.atomic.LongAdder -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.mockito.ArgumentMatchers.any import org.mockito.Mockito._ diff --git a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala index e4e6ec45a97ed..c1b9af37ce760 100644 --- a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala +++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala @@ -21,7 +21,7 @@ import java.io.{File, RandomAccessFile} import java.util.{Locale, Properties} import java.util.concurrent.{Callable, CyclicBarrier, Executors, ExecutorService} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.commons.io.FileUtils import org.apache.commons.io.filefilter.TrueFileFilter diff --git a/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala b/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala index 571110784818f..8c6a876e8c1b7 100644 --- a/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala +++ b/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark import java.io.File
-import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.commons.io.FileUtils import org.apache.commons.io.filefilter.TrueFileFilter diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala index 75e22e1418b4a..a2d41b92e0849 100644 --- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala +++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark import java.util.concurrent.{Executors, TimeUnit} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.{Random, Try} import com.esotericsoftware.kryo.Kryo diff --git a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala index 1163088c82aa8..cd77d89245f48 100644 --- a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala +++ b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala @@ -23,8 +23,8 @@ import java.nio.file.{Files, Path} import java.util.{Locale, TimeZone} import scala.annotation.tailrec -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.commons.io.FileUtils import org.apache.logging.log4j._ diff --git a/core/src/test/scala/org/apache/spark/ThreadAudit.scala b/core/src/test/scala/org/apache/spark/ThreadAudit.scala index cdf12e257e29c..538cf3c91c12c 100644 --- a/core/src/test/scala/org/apache/spark/ThreadAudit.scala +++ b/core/src/test/scala/org/apache/spark/ThreadAudit.scala @@ -17,7 +17,7 @@ package org.apache.spark -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.internal.Logging diff --git a/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala b/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala index aae5fb002e1e8..88ad5b3a7483f 100644 --- a/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala +++ b/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala @@ -23,8 +23,8 @@ import java.nio.charset.StandardCharsets import java.util import scala.annotation.tailrec -import scala.collection.JavaConverters._ import scala.concurrent.duration.Duration +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.io.{LongWritable, Text} diff --git a/core/src/test/scala/org/apache/spark/benchmark/Benchmarks.scala b/core/src/test/scala/org/apache/spark/benchmark/Benchmarks.scala index 9799eab113df2..a94646a926ddd 100644 --- a/core/src/test/scala/org/apache/spark/benchmark/Benchmarks.scala +++ b/core/src/test/scala/org/apache/spark/benchmark/Benchmarks.scala @@ -21,7 +21,7 @@ import java.lang.reflect.Modifier import java.nio.file.{FileSystems, Paths} import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Try import com.google.common.reflect.ClassPath diff --git a/core/src/test/scala/org/apache/spark/deploy/DecommissionWorkerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/DecommissionWorkerSuite.scala index fe9bce770f513..55d853537cde2 100644 --- a/core/src/test/scala/org/apache/spark/deploy/DecommissionWorkerSuite.scala +++ b/core/src/test/scala/org/apache/spark/deploy/DecommissionWorkerSuite.scala @@ -20,9 +20,9 @@ package org.apache.spark.deploy import java.util.concurrent.{ConcurrentHashMap, ConcurrentLinkedQueue} import 
java.util.concurrent.atomic.AtomicBoolean -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import org.scalatest.BeforeAndAfterEach import org.scalatest.concurrent.Eventually._ diff --git a/core/src/test/scala/org/apache/spark/deploy/ExternalShuffleServiceMetricsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/ExternalShuffleServiceMetricsSuite.scala index d0e16cb09357c..03d437a2cdf6e 100644 --- a/core/src/test/scala/org/apache/spark/deploy/ExternalShuffleServiceMetricsSuite.scala +++ b/core/src/test/scala/org/apache/spark/deploy/ExternalShuffleServiceMetricsSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.deploy -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite} import org.apache.spark.internal.config.{SHUFFLE_SERVICE_DB_ENABLED, SHUFFLE_SERVICE_ENABLED} diff --git a/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala index 57269d6259345..b9ee492ed1cb9 100644 --- a/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala +++ b/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala @@ -23,8 +23,8 @@ import java.util.jar.{JarFile, Manifest} import java.util.jar.Attributes.Name import java.util.zip.ZipFile -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.commons.io.FileUtils import org.scalatest.BeforeAndAfterEach diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala index db99a020bc9b2..f1c5165b457b0 100644 --- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala +++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala @@ -22,8 +22,8 @@ import java.net.URI import java.nio.charset.StandardCharsets import java.nio.file.{Files, Paths} -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.ivy.core.module.descriptor.MDArtifact import org.apache.ivy.core.settings.IvySettings diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala index 6322661f4afd2..050793842e587 100644 --- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala +++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala @@ -23,8 +23,8 @@ import java.util.zip.ZipInputStream import javax.servlet._ import javax.servlet.http.{HttpServletRequest, HttpServletRequestWrapper, HttpServletResponse} -import scala.collection.JavaConverters._ import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import com.google.common.io.{ByteStreams, Files} import org.apache.commons.io.{FileUtils, IOUtils} diff --git a/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala b/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala index 474d5e55f79b1..afe8ca45a4842 100644 --- a/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala +++ b/core/src/test/scala/org/apache/spark/deploy/master/MasterSuite.scala @@ -21,11 +21,11 @@ import java.util.Date import 
java.util.concurrent.{ConcurrentLinkedQueue, CountDownLatch, TimeUnit} import java.util.concurrent.atomic.AtomicInteger -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.{HashMap, HashSet} import scala.concurrent.duration._ import scala.io.Source +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.json4s._ diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigReaderSuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigReaderSuite.scala index be57cc34e450b..cbec17be79ccf 100644 --- a/core/src/test/scala/org/apache/spark/internal/config/ConfigReaderSuite.scala +++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigReaderSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.internal.config -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkFunSuite diff --git a/core/src/test/scala/org/apache/spark/internal/plugin/PluginContainerSuite.scala b/core/src/test/scala/org/apache/spark/internal/plugin/PluginContainerSuite.scala index e7959c8f74206..4bfa624fbd4a4 100644 --- a/core/src/test/scala/org/apache/spark/internal/plugin/PluginContainerSuite.scala +++ b/core/src/test/scala/org/apache/spark/internal/plugin/PluginContainerSuite.scala @@ -22,8 +22,8 @@ import java.nio.charset.StandardCharsets import java.util.{Map => JMap} import java.util.concurrent.atomic.AtomicInteger -import scala.collection.JavaConverters._ import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import com.codahale.metrics.Gauge import com.google.common.io.Files diff --git a/core/src/test/scala/org/apache/spark/metrics/sink/GraphiteSinkSuite.scala b/core/src/test/scala/org/apache/spark/metrics/sink/GraphiteSinkSuite.scala index eabede303e09d..0416854e19d09 100644 --- a/core/src/test/scala/org/apache/spark/metrics/sink/GraphiteSinkSuite.scala +++ b/core/src/test/scala/org/apache/spark/metrics/sink/GraphiteSinkSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.metrics.sink import java.util.Properties -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.codahale.metrics._ diff --git a/core/src/test/scala/org/apache/spark/metrics/sink/PrometheusServletSuite.scala b/core/src/test/scala/org/apache/spark/metrics/sink/PrometheusServletSuite.scala index 56e864fea7a93..c794eccee425f 100644 --- a/core/src/test/scala/org/apache/spark/metrics/sink/PrometheusServletSuite.scala +++ b/core/src/test/scala/org/apache/spark/metrics/sink/PrometheusServletSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.metrics.sink import java.util.Properties -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.codahale.metrics.{Counter, Gauge, MetricRegistry} import org.scalatest.PrivateMethodTester diff --git a/core/src/test/scala/org/apache/spark/metrics/sink/StatsdSinkSuite.scala b/core/src/test/scala/org/apache/spark/metrics/sink/StatsdSinkSuite.scala index 28bf40e8c93ad..41cb52f92c975 100644 --- a/core/src/test/scala/org/apache/spark/metrics/sink/StatsdSinkSuite.scala +++ b/core/src/test/scala/org/apache/spark/metrics/sink/StatsdSinkSuite.scala @@ -22,7 +22,7 @@ import java.nio.charset.StandardCharsets.UTF_8 import java.util.Properties import java.util.concurrent.TimeUnit._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.codahale.metrics._ diff --git 
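
The pattern in the hunks above repeats across the whole patch: on Scala 2.13 the deprecated scala.collection.JavaConverters._ wildcard import can be swapped for scala.jdk.CollectionConverters._ with no change to call sites, because both supply the same asScala/asJava extension methods. The import also moves a few lines in many hunks, since Spark's checked import ordering sorts alphabetically within the scala group (scala.jdk lands after scala.collection, scala.concurrent, and scala.io). A minimal sketch of the unchanged call sites, illustrative only and not taken from the patch:

    import scala.jdk.CollectionConverters._  // was: import scala.collection.JavaConverters._

    // Call sites compile unchanged under either import:
    val javaList: java.util.List[Int] = Seq(1, 2, 3).asJava
    val scalaMap: Map[String, Int] =
      new java.util.HashMap[String, Int]().asScala.toMap
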
a/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala index 5000011b3c5ee..4e4eafbf0e4e0 100644 --- a/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala +++ b/core/src/test/scala/org/apache/spark/rdd/PipedRDDSuite.scala @@ -19,10 +19,10 @@ package org.apache.spark.rdd import java.io.File -import scala.collection.JavaConverters._ import scala.collection.Map import scala.concurrent.duration._ import scala.io.Codec +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path import org.apache.hadoop.io.{LongWritable, Text} diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDCleanerSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDCleanerSuite.scala index cfd646999eb6b..35dcf0f762a3f 100644 --- a/core/src/test/scala/org/apache/spark/rdd/RDDCleanerSuite.scala +++ b/core/src/test/scala/org/apache/spark/rdd/RDDCleanerSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.rdd import java.io.File -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.commons.io.FileUtils import org.apache.commons.io.filefilter.TrueFileFilter diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala index 726da70623eea..02ffc23dbf126 100644 --- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala +++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala @@ -21,9 +21,9 @@ import java.io.{File, IOException, ObjectInputStream, ObjectOutputStream} import java.lang.management.ManagementFactory import java.util.concurrent.atomic.AtomicInteger -import scala.collection.JavaConverters._ import scala.collection.mutable.{ArrayBuffer, HashMap} import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import com.esotericsoftware.kryo.KryoException diff --git a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala index 6e5eb77322013..a88be983b804f 100644 --- a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala +++ b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala @@ -22,10 +22,10 @@ import java.nio.charset.StandardCharsets.UTF_8 import java.util.UUID import java.util.concurrent.{ConcurrentLinkedQueue, CountDownLatch, TimeUnit} -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.concurrent.Await import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import com.google.common.io.Files import org.mockito.ArgumentMatchers.any diff --git a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala index e351f8b95bbb0..7bb8f49e6bff7 100644 --- a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala +++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala @@ -22,8 +22,8 @@ import java.util.concurrent.{CountDownLatch, Delayed, ScheduledFuture, TimeUnit} import java.util.concurrent.atomic.{AtomicBoolean, AtomicLong, AtomicReference} import scala.annotation.meta.param -import scala.collection.JavaConverters._ import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet, Map} +import scala.jdk.CollectionConverters._ import scala.language.reflectiveCalls import scala.util.control.NonFatal diff --git a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala 
b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala index dca915e0a97ac..fbef1da3f0563 100644 --- a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala +++ b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala @@ -20,8 +20,8 @@ package org.apache.spark.scheduler import java.io.{Externalizable, ObjectInput, ObjectOutput} import java.util.concurrent.Semaphore -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.mockito.Mockito import org.scalatest.matchers.must.Matchers diff --git a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala index 9c941979e4e26..57d59672b7b5d 100644 --- a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala +++ b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala @@ -21,10 +21,10 @@ import java.io.{ByteArrayInputStream, ByteArrayOutputStream, EOFException} import java.nio.ByteBuffer import java.util.concurrent.Executors -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.concurrent.{ExecutionContext, Future} import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import com.esotericsoftware.kryo.{Kryo, KryoException} diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerDecommissionIntegrationSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerDecommissionIntegrationSuite.scala index d9d2e6102f120..5ab9f644be6be 100644 --- a/core/src/test/scala/org/apache/spark/storage/BlockManagerDecommissionIntegrationSuite.scala +++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerDecommissionIntegrationSuite.scala @@ -19,9 +19,9 @@ package org.apache.spark.storage import java.util.concurrent.{ConcurrentHashMap, ConcurrentLinkedQueue, Semaphore, TimeUnit} -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import org.scalatest.concurrent.Eventually diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerInfoSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerInfoSuite.scala index 85f012aece3b4..19368d1ad250a 100644 --- a/core/src/test/scala/org/apache/spark/storage/BlockManagerInfoSuite.scala +++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerInfoSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.storage -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkFunSuite diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala index dcb69f812a7db..a350c324e286d 100644 --- a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala +++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala @@ -22,11 +22,11 @@ import java.nio.ByteBuffer import java.nio.file.Files import java.util.concurrent.ThreadLocalRandom -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.ArrayBuffer import scala.concurrent.{Future, TimeoutException} import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import scala.language.implicitConversions import scala.reflect.ClassTag import scala.reflect.classTag diff 
--git a/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala b/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala index af37a72c9e3f8..cf883567fe240 100644 --- a/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala +++ b/core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala @@ -136,7 +136,7 @@ class ShuffleBlockFetcherIteratorSuite extends SparkFunSuite with PrivateMethodT when(blockManager.hostLocalDirManager).thenReturn(Some(hostLocalDirManager)) when(mockExternalBlockStoreClient.getHostLocalDirs(any(), any(), any(), any())) .thenAnswer { invocation => - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ invocation.getArgument[CompletableFuture[java.util.Map[String, Array[String]]]](3) .complete(hostLocalDirs.asJava) } diff --git a/core/src/test/scala/org/apache/spark/ui/HttpSecurityFilterSuite.scala b/core/src/test/scala/org/apache/spark/ui/HttpSecurityFilterSuite.scala index c435852a46707..e69359a6573a6 100644 --- a/core/src/test/scala/org/apache/spark/ui/HttpSecurityFilterSuite.scala +++ b/core/src/test/scala/org/apache/spark/ui/HttpSecurityFilterSuite.scala @@ -21,7 +21,7 @@ import java.util.UUID import javax.servlet.FilterChain import javax.servlet.http.{HttpServletRequest, HttpServletResponse} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.mockito.ArgumentCaptor import org.mockito.ArgumentMatchers.{any, eq => meq} diff --git a/core/src/test/scala/org/apache/spark/util/EventLoopSuite.scala b/core/src/test/scala/org/apache/spark/util/EventLoopSuite.scala index 45aad3f82f35d..d69347ce3dd57 100644 --- a/core/src/test/scala/org/apache/spark/util/EventLoopSuite.scala +++ b/core/src/test/scala/org/apache/spark/util/EventLoopSuite.scala @@ -19,8 +19,8 @@ package org.apache.spark.util import java.util.concurrent.{ConcurrentLinkedQueue, CountDownLatch} -import scala.collection.JavaConverters._ import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits} import org.scalatest.concurrent.Eventually._ diff --git a/core/src/test/scala/org/apache/spark/util/FileAppenderSuite.scala b/core/src/test/scala/org/apache/spark/util/FileAppenderSuite.scala index ef46983afb4b1..c377f2495d05d 100644 --- a/core/src/test/scala/org/apache/spark/util/FileAppenderSuite.scala +++ b/core/src/test/scala/org/apache/spark/util/FileAppenderSuite.scala @@ -340,7 +340,7 @@ class FileAppenderSuite extends SparkFunSuite with BeforeAndAfter { // Make sure no IOException errors have been logged as a result of appender closing gracefully verify(mockAppender, atLeast(0)).append(loggingEventCaptor.capture) - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ loggingEventCaptor.getAllValues.asScala.foreach { loggingEvent => assert(loggingEvent.getThrown === null || !loggingEvent.getThrown.isInstanceOf[IOException]) diff --git a/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala index e8d41c4d46e21..1a8170bf0f683 100644 --- a/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala +++ b/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.util import java.util.Properties -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import 
scala.language.implicitConversions import com.fasterxml.jackson.databind.{JsonNode, ObjectMapper} diff --git a/core/src/test/scala/org/apache/spark/util/MutableURLClassLoaderSuite.scala b/core/src/test/scala/org/apache/spark/util/MutableURLClassLoaderSuite.scala index 9435b5acd2224..82c11a3c7330a 100644 --- a/core/src/test/scala/org/apache/spark/util/MutableURLClassLoaderSuite.scala +++ b/core/src/test/scala/org/apache/spark/util/MutableURLClassLoaderSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.util import java.net.URLClassLoader -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.scalatest.matchers.must.Matchers import org.scalatest.matchers.should.Matchers._ diff --git a/docs/streaming-kinesis-integration.md b/docs/streaming-kinesis-integration.md index ed19ddcc9b087..f27a45a69fecf 100644 --- a/docs/streaming-kinesis-integration.md +++ b/docs/streaming-kinesis-integration.md @@ -142,7 +142,7 @@ A Kinesis stream can be set up at one of the valid Kinesis endpoints with 1 or m import org.apache.spark.streaming.kinesis.KinesisInitialPositions; import com.amazonaws.services.kinesis.clientlibrary.lib.worker.KinesisClientLibConfiguration; import com.amazonaws.services.kinesis.metrics.interfaces.MetricsLevel; - import scala.collection.JavaConverters; + import scala.jdk.CollectionConverters; KinesisInputDStream kinesisStream = KinesisInputDStream.builder() .streamingContext(streamingContext) diff --git a/examples/src/main/scala/org/apache/spark/examples/DriverSubmissionTest.scala b/examples/src/main/scala/org/apache/spark/examples/DriverSubmissionTest.scala index f1d63fbcfb879..97d67ff599d5e 100644 --- a/examples/src/main/scala/org/apache/spark/examples/DriverSubmissionTest.scala +++ b/examples/src/main/scala/org/apache/spark/examples/DriverSubmissionTest.scala @@ -18,7 +18,7 @@ // scalastyle:off println package org.apache.spark.examples -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.util.Utils diff --git a/examples/src/main/scala/org/apache/spark/examples/pythonconverters/AvroConverters.scala b/examples/src/main/scala/org/apache/spark/examples/pythonconverters/AvroConverters.scala index 6bd96346ca238..61d4b28b5c13d 100644 --- a/examples/src/main/scala/org/apache/spark/examples/pythonconverters/AvroConverters.scala +++ b/examples/src/main/scala/org/apache/spark/examples/pythonconverters/AvroConverters.scala @@ -19,7 +19,7 @@ package org.apache.spark.examples.pythonconverters import java.util.{Collection => JCollection, Map => JMap} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.avro.Schema import org.apache.avro.Schema.Type._ diff --git a/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Vectors.scala b/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Vectors.scala index 66d905a0cc178..985f67fc3c3b4 100644 --- a/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Vectors.scala +++ b/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Vectors.scala @@ -21,8 +21,8 @@ import java.lang.{Double => JavaDouble, Integer => JavaInteger, Iterable => Java import java.util import scala.annotation.varargs -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import breeze.linalg.{DenseVector => BDV, SparseVector => BSV, Vector => BV} diff --git a/mllib/src/main/scala/org/apache/spark/ml/Pipeline.scala b/mllib/src/main/scala/org/apache/spark/ml/Pipeline.scala index 
0d2ede5e66805..f35c6e07a3ce2 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/Pipeline.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/Pipeline.scala @@ -19,8 +19,8 @@ package org.apache.spark.ml import java.{util => ju} -import scala.collection.JavaConverters._ import scala.collection.mutable.ListBuffer +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path import org.json4s._ diff --git a/mllib/src/main/scala/org/apache/spark/ml/feature/VectorIndexer.scala b/mllib/src/main/scala/org/apache/spark/ml/feature/VectorIndexer.scala index f36e98046afa6..cf1751f86f963 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/feature/VectorIndexer.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/feature/VectorIndexer.scala @@ -20,7 +20,7 @@ package org.apache.spark.ml.feature import java.lang.{Double => JDouble, Integer => JInt} import java.util.{Map => JMap, NoSuchElementException} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path diff --git a/mllib/src/main/scala/org/apache/spark/ml/image/ImageSchema.scala b/mllib/src/main/scala/org/apache/spark/ml/image/ImageSchema.scala index f69f5336f53c0..e172573312dda 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/image/ImageSchema.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/image/ImageSchema.scala @@ -22,7 +22,7 @@ import java.awt.color.ColorSpace import java.io.ByteArrayInputStream import javax.imageio.ImageIO -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.Since import org.apache.spark.sql.Row diff --git a/mllib/src/main/scala/org/apache/spark/ml/param/params.scala b/mllib/src/main/scala/org/apache/spark/ml/param/params.scala index b818be30583c0..ef1adea592492 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/param/params.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/param/params.scala @@ -22,8 +22,8 @@ import java.util.{List => JList} import java.util.NoSuchElementException import scala.annotation.varargs -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.json4s._ import org.json4s.jackson.JsonMethods._ diff --git a/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala b/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala index d5a7d58480bba..9e562f26abfb8 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala @@ -463,7 +463,7 @@ class ALSModel private[ml] ( num: Int, blockSize: Int): DataFrame = { import srcFactors.sparkSession.implicits._ - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val ratingColumn = "rating" val recommendColumn = "recommendations" diff --git a/mllib/src/main/scala/org/apache/spark/ml/stat/Correlation.scala b/mllib/src/main/scala/org/apache/spark/ml/stat/Correlation.scala index bab178b85d5ff..414141691006b 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/stat/Correlation.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/stat/Correlation.scala @@ -17,7 +17,7 @@ package org.apache.spark.ml.stat -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.Since import org.apache.spark.ml.linalg.{SQLDataTypes, Vector} diff --git a/mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala 
b/mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala index e04a8c1389b0e..4e9daddaebe66 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/tuning/CrossValidator.scala @@ -19,9 +19,9 @@ package org.apache.spark.ml.tuning import java.util.{List => JList, Locale} -import scala.collection.JavaConverters._ import scala.concurrent.Future import scala.concurrent.duration.Duration +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path import org.json4s.DefaultFormats diff --git a/mllib/src/main/scala/org/apache/spark/ml/tuning/TrainValidationSplit.scala b/mllib/src/main/scala/org/apache/spark/ml/tuning/TrainValidationSplit.scala index 4a6d5164aa0a3..32a17c11c56a8 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/tuning/TrainValidationSplit.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/tuning/TrainValidationSplit.scala @@ -19,9 +19,9 @@ package org.apache.spark.ml.tuning import java.util.{List => JList, Locale} -import scala.collection.JavaConverters._ import scala.concurrent.Future import scala.concurrent.duration.Duration +import scala.jdk.CollectionConverters._ import scala.language.existentials import org.apache.hadoop.fs.Path diff --git a/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala b/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala index 5e38b0aba95cd..2083a07e2cb5a 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/util/ReadWrite.scala @@ -20,8 +20,8 @@ package org.apache.spark.ml.util import java.io.IOException import java.util.{Locale, ServiceLoader} -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success, Try} import org.apache.hadoop.fs.Path diff --git a/mllib/src/main/scala/org/apache/spark/mllib/api/python/GaussianMixtureModelWrapper.scala b/mllib/src/main/scala/org/apache/spark/mllib/api/python/GaussianMixtureModelWrapper.scala index 364d5eea08ce4..b567c10d9e649 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/api/python/GaussianMixtureModelWrapper.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/api/python/GaussianMixtureModelWrapper.scala @@ -17,7 +17,7 @@ package org.apache.spark.mllib.api.python -import scala.collection.JavaConverters +import scala.jdk.CollectionConverters import org.apache.spark.SparkContext import org.apache.spark.mllib.clustering.GaussianMixtureModel diff --git a/mllib/src/main/scala/org/apache/spark/mllib/api/python/LDAModelWrapper.scala b/mllib/src/main/scala/org/apache/spark/mllib/api/python/LDAModelWrapper.scala index 63282eee6e656..8e9c7aa47f46b 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/api/python/LDAModelWrapper.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/api/python/LDAModelWrapper.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.mllib.api.python -import scala.collection.JavaConverters +import scala.jdk.CollectionConverters import org.apache.spark.SparkContext import org.apache.spark.mllib.clustering.LDAModel diff --git a/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala b/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala index 56aaaa31a9ed4..c895c64fc2310 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/api/python/PythonMLLibAPI.scala @@ -22,7 +22,7 @@ 
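
The two Python wrapper files above import the bare object (import scala.jdk.CollectionConverters, no underscore) rather than the wildcard. Unlike the wildcard case this is not a purely mechanical rename: the old object carried named methods such as seqAsJavaList and mapAsScalaMap, while scala.jdk.CollectionConverters defines only the asScala/asJava extension methods, so any explicit JavaConverters.xxx call sites must move to the extension style or to scala.jdk.javaapi.CollectionConverters. A hedged sketch of both rewrites (the Seq parameter and helper names are hypothetical):

    import scala.jdk.CollectionConverters._

    // Old style: JavaConverters.seqAsJavaList(words)
    def toJavaList(words: Seq[String]): java.util.List[String] = words.asJava

    // Equivalent without implicit enrichment, usable where implicits are awkward:
    def toJavaListExplicit(words: Seq[String]): java.util.List[String] =
      scala.jdk.javaapi.CollectionConverters.asJava(words)
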
import java.nio.{ByteBuffer, ByteOrder} import java.nio.charset.StandardCharsets import java.util.{ArrayList => JArrayList, List => JList, Map => JMap} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import net.razorvine.pickle._ diff --git a/mllib/src/main/scala/org/apache/spark/mllib/api/python/Word2VecModelWrapper.scala b/mllib/src/main/scala/org/apache/spark/mllib/api/python/Word2VecModelWrapper.scala index 4d6520d0b2ee0..20b4bfff0f4fe 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/api/python/Word2VecModelWrapper.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/api/python/Word2VecModelWrapper.scala @@ -19,7 +19,7 @@ package org.apache.spark.mllib.api.python import java.util.{List => JList, Map => JMap} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkContext import org.apache.spark.api.java.JavaRDD diff --git a/mllib/src/main/scala/org/apache/spark/mllib/classification/NaiveBayes.scala b/mllib/src/main/scala/org/apache/spark/mllib/classification/NaiveBayes.scala index caa44d51b00fe..3d36b82708612 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/classification/NaiveBayes.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/classification/NaiveBayes.scala @@ -19,7 +19,7 @@ package org.apache.spark.mllib.classification import java.lang.{Iterable => JIterable} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ diff --git a/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala b/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala index 64b352157caf7..1e8a271592bb9 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala @@ -17,7 +17,7 @@ package org.apache.spark.mllib.clustering -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.json4s._ import org.json4s.JsonDSL._ diff --git a/mllib/src/main/scala/org/apache/spark/mllib/evaluation/RankingMetrics.scala b/mllib/src/main/scala/org/apache/spark/mllib/evaluation/RankingMetrics.scala index a3316d8a8fa41..641f55bb05f89 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/evaluation/RankingMetrics.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/evaluation/RankingMetrics.scala @@ -19,7 +19,7 @@ package org.apache.spark.mllib.evaluation import java.{lang => jl} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.spark.annotation.Since diff --git a/mllib/src/main/scala/org/apache/spark/mllib/feature/HashingTF.scala b/mllib/src/main/scala/org/apache/spark/mllib/feature/HashingTF.scala index 90f6f203cb40e..f9e37c01e9738 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/feature/HashingTF.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/feature/HashingTF.scala @@ -19,8 +19,8 @@ package org.apache.spark.mllib.feature import java.lang.{Iterable => JavaIterable} -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkException import org.apache.spark.annotation.Since diff --git a/mllib/src/main/scala/org/apache/spark/mllib/feature/Word2Vec.scala b/mllib/src/main/scala/org/apache/spark/mllib/feature/Word2Vec.scala index 
97f277d53ca9d..d6a493a011b99 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/feature/Word2Vec.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/feature/Word2Vec.scala @@ -19,8 +19,8 @@ package org.apache.spark.mllib.feature import java.lang.{Iterable => JavaIterable} -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import com.google.common.collect.{Ordering => GuavaOrdering} import org.json4s.DefaultFormats diff --git a/mllib/src/main/scala/org/apache/spark/mllib/fpm/AssociationRules.scala b/mllib/src/main/scala/org/apache/spark/mllib/fpm/AssociationRules.scala index 606e2f2f212ca..06c7754691953 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/fpm/AssociationRules.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/fpm/AssociationRules.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.mllib.fpm -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.spark.annotation.Since diff --git a/mllib/src/main/scala/org/apache/spark/mllib/fpm/FPGrowth.scala b/mllib/src/main/scala/org/apache/spark/mllib/fpm/FPGrowth.scala index ecdc28dea37fd..c74ad6b5c1aed 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/fpm/FPGrowth.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/fpm/FPGrowth.scala @@ -20,8 +20,8 @@ package org.apache.spark.mllib.fpm import java.{util => ju} import java.lang.{Iterable => JavaIterable} -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.reflect.runtime.universe._ diff --git a/mllib/src/main/scala/org/apache/spark/mllib/fpm/PrefixSpan.scala b/mllib/src/main/scala/org/apache/spark/mllib/fpm/PrefixSpan.scala index 7c023bcfa72a4..f2b7151feb16f 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/fpm/PrefixSpan.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/fpm/PrefixSpan.scala @@ -20,8 +20,8 @@ package org.apache.spark.mllib.fpm import java.{lang => jl, util => ju} import java.util.concurrent.atomic.AtomicInteger -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.reflect.runtime.universe._ diff --git a/mllib/src/main/scala/org/apache/spark/mllib/linalg/Vectors.scala b/mllib/src/main/scala/org/apache/spark/mllib/linalg/Vectors.scala index a93f37799419e..25899fd3ebbc8 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/linalg/Vectors.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/linalg/Vectors.scala @@ -21,7 +21,7 @@ import java.lang.{Double => JavaDouble, Integer => JavaInteger, Iterable => Java import java.util import scala.annotation.varargs -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.language.implicitConversions import breeze.linalg.{DenseVector => BDV, SparseVector => BSV, Vector => BV} diff --git a/mllib/src/main/scala/org/apache/spark/mllib/recommendation/MatrixFactorizationModel.scala b/mllib/src/main/scala/org/apache/spark/mllib/recommendation/MatrixFactorizationModel.scala index 3276513213f5d..0cf0a94bb3bf2 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/recommendation/MatrixFactorizationModel.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/recommendation/MatrixFactorizationModel.scala @@ -295,7 +295,7 @@ object MatrixFactorizationModel extends Loader[MatrixFactorizationModel] { srcFeatures: 
RDD[(Int, Array[Double])], dstFeatures: RDD[(Int, Array[Double])], num: Int): RDD[(Int, Array[(Int, Double)])] = { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val srcBlocks = blockify(srcFeatures) val dstBlocks = blockify(dstFeatures) diff --git a/mllib/src/main/scala/org/apache/spark/mllib/regression/IsotonicRegression.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/IsotonicRegression.scala index 12a78ef4ec140..ba115c278eb44 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/regression/IsotonicRegression.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/IsotonicRegression.scala @@ -20,8 +20,8 @@ import java.io.Serializable import java.lang.{Double => JDouble} import java.util.Arrays.binarySearch -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.json4s._ import org.json4s.JsonDSL._ diff --git a/mllib/src/main/scala/org/apache/spark/mllib/tree/DecisionTree.scala b/mllib/src/main/scala/org/apache/spark/mllib/tree/DecisionTree.scala index e5aece779826d..dbac7c4f44bd8 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/tree/DecisionTree.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/tree/DecisionTree.scala @@ -17,7 +17,7 @@ package org.apache.spark.mllib.tree -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.Since import org.apache.spark.api.java.JavaRDD diff --git a/mllib/src/main/scala/org/apache/spark/mllib/tree/RandomForest.scala b/mllib/src/main/scala/org/apache/spark/mllib/tree/RandomForest.scala index 4f0c51e293319..00caa1dfef02d 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/tree/RandomForest.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/tree/RandomForest.scala @@ -17,7 +17,7 @@ package org.apache.spark.mllib.tree -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Try import org.apache.spark.annotation.Since diff --git a/mllib/src/main/scala/org/apache/spark/mllib/tree/configuration/Strategy.scala b/mllib/src/main/scala/org/apache/spark/mllib/tree/configuration/Strategy.scala index 0f6c7033687fa..200d10130eed7 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/tree/configuration/Strategy.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/tree/configuration/Strategy.scala @@ -18,7 +18,7 @@ package org.apache.spark.mllib.tree.configuration import scala.beans.BeanProperty -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.Since import org.apache.spark.mllib.tree.configuration.Algo._ diff --git a/mllib/src/main/scala/org/apache/spark/mllib/util/LinearDataGenerator.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/LinearDataGenerator.scala index 1ad8d93a445a2..687ecf7fb7224 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/util/LinearDataGenerator.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/util/LinearDataGenerator.scala @@ -17,7 +17,7 @@ package org.apache.spark.mllib.util -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Random import org.apache.spark.SparkContext diff --git a/mllib/src/test/java/org/apache/spark/ml/classification/JavaOneVsRestSuite.java b/mllib/src/test/java/org/apache/spark/ml/classification/JavaOneVsRestSuite.java index 62888b85a0758..7caee72f6ed04 100644 --- 
a/mllib/src/test/java/org/apache/spark/ml/classification/JavaOneVsRestSuite.java +++ b/mllib/src/test/java/org/apache/spark/ml/classification/JavaOneVsRestSuite.java @@ -20,7 +20,7 @@ import java.io.IOException; import java.util.List; -import scala.collection.JavaConverters; +import scala.jdk.CollectionConverters; import org.junit.Assert; import org.junit.Test; @@ -51,7 +51,7 @@ public void setUp() throws IOException { double[] xMean = {5.843, 3.057, 3.758, 1.199}; double[] xVariance = {0.6856, 0.1899, 3.116, 0.581}; - List points = JavaConverters.seqAsJavaListConverter( + List points = CollectionConverters.SeqHasAsJava( generateMultinomialLogisticInput(coefficients, xMean, xVariance, true, nPoints, 42) ).asJava(); datasetRDD = jsc.parallelize(points, 2); diff --git a/mllib/src/test/scala/org/apache/spark/ml/PipelineSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/PipelineSuite.scala index e6025a5a53ca6..e9c08f0e45c2d 100644 --- a/mllib/src/test/scala/org/apache/spark/ml/PipelineSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/ml/PipelineSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.ml -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path import org.mockito.ArgumentMatchers.{any, eq => meq} diff --git a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala index 15f2e63bc8516..afc57a3776ea0 100644 --- a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.ml.classification -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Random import scala.util.control.Breaks._ diff --git a/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala index f852a4243eabe..6800441f37b78 100644 --- a/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala @@ -20,9 +20,9 @@ package org.apache.spark.ml.recommendation import java.io.File import java.util.Random -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.{ArrayBuffer, WrappedArray} +import scala.jdk.CollectionConverters._ import org.apache.commons.io.FileUtils import org.apache.commons.io.filefilter.TrueFileFilter diff --git a/mllib/src/test/scala/org/apache/spark/ml/regression/LinearRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/regression/LinearRegressionSuite.scala index e4535f30328d8..9e4be2a964ad2 100644 --- a/mllib/src/test/scala/org/apache/spark/ml/regression/LinearRegressionSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/ml/regression/LinearRegressionSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.ml.regression -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Random import org.dmg.pmml.{OpType, PMML} diff --git a/mllib/src/test/scala/org/apache/spark/ml/tree/impl/TreeTests.scala b/mllib/src/test/scala/org/apache/spark/ml/tree/impl/TreeTests.scala index c9d1f833bb91a..a8a85391b1cb2 100644 --- a/mllib/src/test/scala/org/apache/spark/ml/tree/impl/TreeTests.scala +++ 
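
For Java sources such as the test above, the implicit extension classes surface as static forwarders on the object, which is why JavaConverters.seqAsJavaListConverter(...).asJava() becomes CollectionConverters.SeqHasAsJava(...).asJava(). Scala 2.13 also ships scala.jdk.javaapi.CollectionConverters, whose plain non-implicit asJava/asScala overloads are the variant documented for Java callers. A minimal Scala-side sketch of that API, with illustrative values not taken from the patch:

    import scala.jdk.javaapi.{CollectionConverters => JavaApiConverters}

    // Plain method calls, no implicits required -- equally reachable from Java:
    val jList: java.util.List[String] = JavaApiConverters.asJava(Seq("a", "b"))
    val sBuf: scala.collection.mutable.Buffer[String] = JavaApiConverters.asScala(jList)
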
b/mllib/src/test/scala/org/apache/spark/ml/tree/impl/TreeTests.scala @@ -17,7 +17,7 @@ package org.apache.spark.ml.tree.impl -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.{SparkContext, SparkFunSuite} import org.apache.spark.api.java.JavaRDD diff --git a/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala index 584e7555eb0d9..c4621c9a00477 100644 --- a/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.mllib.classification -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Random import scala.util.control.Breaks._ diff --git a/mllib/src/test/scala/org/apache/spark/mllib/classification/SVMSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/classification/SVMSuite.scala index 007b8ae6e1a6a..eacfeb7621d51 100644 --- a/mllib/src/test/scala/org/apache/spark/mllib/classification/SVMSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/mllib/classification/SVMSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.mllib.classification -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Random import breeze.linalg.{DenseVector => BDV} diff --git a/mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala index edea67e524aad..91b547506aed3 100644 --- a/mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/mllib/optimization/GradientDescentSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.mllib.optimization -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Random import org.scalatest.matchers.must.Matchers diff --git a/mllib/src/test/scala/org/apache/spark/mllib/recommendation/ALSSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/recommendation/ALSSuite.scala index 9be87db873dad..e5d5fbd33c159 100644 --- a/mllib/src/test/scala/org/apache/spark/mllib/recommendation/ALSSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/mllib/recommendation/ALSSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.mllib.recommendation -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.math.abs import scala.util.Random diff --git a/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala index 83d77a0a791e0..9a2356f980008 100644 --- a/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.mllib.tree -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkFunSuite import org.apache.spark.ml.tree.impl.DecisionTreeMetadata diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index 400ee8c5f28ab..7c12bb63b6f88 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -22,7 +22,7 @@ import java.util.Locale import scala.io.Source import scala.util.Properties 
-import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable.ListBuffer import sbt._ diff --git a/python/pyspark/context.py b/python/pyspark/context.py index 04bcb07dd8d64..5e5391d2e60c5 100644 --- a/python/pyspark/context.py +++ b/python/pyspark/context.py @@ -1908,7 +1908,7 @@ def listFiles(self) -> List[str]: :meth:`SparkContext.addFile` """ return list( - self._jvm.scala.collection.JavaConverters.seqAsJavaList( # type: ignore[union-attr] + self._jvm.scala.jdk.javaapi.CollectionConverters.asJava( # type: ignore[union-attr] self._jsc.sc().listFiles() ) ) @@ -2036,7 +2036,7 @@ def listArchives(self) -> List[str]: :meth:`SparkContext.addArchive` """ return list( - self._jvm.scala.collection.JavaConverters.seqAsJavaList( # type: ignore[union-attr] + self._jvm.scala.jdk.javaapi.CollectionConverters.asJava( # type: ignore[union-attr] self._jsc.sc().listArchives() ) ) diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesUtils.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesUtils.scala index 0b9b1f85fb432..cbd12282278fc 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesUtils.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesUtils.scala @@ -21,7 +21,7 @@ import java.net.URI import java.security.SecureRandom import java.util.{Collections, UUID} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.{Container, ContainerBuilder, ContainerStateRunning, ContainerStateTerminated, ContainerStateWaiting, ContainerStatus, EnvVar, EnvVarBuilder, EnvVarSourceBuilder, HasMetadata, OwnerReferenceBuilder, Pod, PodBuilder, Quantity} import io.fabric8.kubernetes.client.KubernetesClient diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/BasicDriverFeatureStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/BasicDriverFeatureStep.scala index 11a21bb68a6bd..31dd029c27fbf 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/BasicDriverFeatureStep.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/BasicDriverFeatureStep.scala @@ -18,8 +18,8 @@ package org.apache.spark.deploy.k8s.features import javax.ws.rs.core.UriBuilder -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model._ diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/BasicExecutorFeatureStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/BasicExecutorFeatureStep.scala index f3e5cad8c9e2a..febd8dcfa75f1 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/BasicExecutorFeatureStep.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/BasicExecutorFeatureStep.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.features -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model._ diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverCommandFeatureStep.scala
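
The PySpark hunks above rely on the same javaapi object: the old named method seqAsJavaList does not exist on scala.jdk.CollectionConverters, so the Py4J gateway call targets scala.jdk.javaapi.CollectionConverters.asJava, whose plain overloads Py4J can invoke directly. A sketch of what that call resolves to on the JVM side (the helper name is hypothetical; SparkContext.listFiles returns Seq[String], per the hunk):

    import scala.jdk.javaapi.CollectionConverters

    // JVM-side equivalent of the Py4J call in context.py:
    def listFilesForPython(sc: org.apache.spark.SparkContext): java.util.List[String] =
      CollectionConverters.asJava(sc.listFiles())
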
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverCommandFeatureStep.scala index 455712cec1f69..c351211dd97d5 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverCommandFeatureStep.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverCommandFeatureStep.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.features -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.ContainerBuilder diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverKubernetesCredentialsFeatureStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverKubernetesCredentialsFeatureStep.scala index 62cbf0b91f8d1..462d70dee5345 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverKubernetesCredentialsFeatureStep.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverKubernetesCredentialsFeatureStep.scala @@ -19,7 +19,7 @@ package org.apache.spark.deploy.k8s.features import java.io.File import java.nio.charset.StandardCharsets -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.io.{BaseEncoding, Files} import io.fabric8.kubernetes.api.model.{ContainerBuilder, HasMetadata, PodBuilder, Secret, SecretBuilder} diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverServiceFeatureStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverServiceFeatureStep.scala index 37dfe8ec07a4c..cba4f442371c9 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverServiceFeatureStep.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverServiceFeatureStep.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.features -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.{HasMetadata, ServiceBuilder} diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/EnvSecretsFeatureStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/EnvSecretsFeatureStep.scala index 222e19c5e20f1..3f16d3fd384a6 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/EnvSecretsFeatureStep.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/EnvSecretsFeatureStep.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.features -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.{ContainerBuilder, EnvVarBuilder} diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/HadoopConfDriverFeatureStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/HadoopConfDriverFeatureStep.scala index 45a5b8d7dae93..409be81598d42 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/HadoopConfDriverFeatureStep.scala +++ 
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/HadoopConfDriverFeatureStep.scala @@ -19,7 +19,7 @@ package org.apache.spark.deploy.k8s.features import java.io.File import java.nio.charset.StandardCharsets -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.io.Files import io.fabric8.kubernetes.api.model._ diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/KerberosConfDriverFeatureStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/KerberosConfDriverFeatureStep.scala index cb0a059e5047d..82bda88892d04 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/KerberosConfDriverFeatureStep.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/KerberosConfDriverFeatureStep.scala @@ -19,7 +19,7 @@ package org.apache.spark.deploy.k8s.features import java.io.File import java.nio.charset.StandardCharsets -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.io.Files import io.fabric8.kubernetes.api.model._ diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/LocalDirsFeatureStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/LocalDirsFeatureStep.scala index 31ba63980f91a..6396b99d5d7f2 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/LocalDirsFeatureStep.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/LocalDirsFeatureStep.scala @@ -18,7 +18,7 @@ package org.apache.spark.deploy.k8s.features import java.util.UUID -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model._ diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/MountVolumesFeatureStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/MountVolumesFeatureStep.scala index 78dd6ec21ed34..eed076ca877bc 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/MountVolumesFeatureStep.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/MountVolumesFeatureStep.scala @@ -16,8 +16,8 @@ */ package org.apache.spark.deploy.k8s.features -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model._ diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/K8sSubmitOps.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/K8sSubmitOps.scala index 9db55c30a39fc..17704b908558e 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/K8sSubmitOps.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/K8sSubmitOps.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.submit -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import K8SSparkSubmitOperation.getGracePeriod import io.fabric8.kubernetes.api.model.Pod diff --git 
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientApplication.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientApplication.scala index eb6dc9d1dcc4b..389ba182fe153 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientApplication.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientApplication.scala @@ -16,8 +16,8 @@ */ package org.apache.spark.deploy.k8s.submit -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.util.control.Breaks._ import scala.util.control.NonFatal diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtils.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtils.scala index dc52babc5f32e..a699801dca199 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtils.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtils.scala @@ -21,9 +21,9 @@ import java.io.{File, StringWriter} import java.nio.charset.MalformedInputException import java.util.Properties -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.io.{Codec, Source} +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.{ConfigMap, ConfigMapBuilder, KeyToPath} diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocator.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocator.scala index 25970e918ec42..e6c2171058d63 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocator.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocator.scala @@ -20,8 +20,8 @@ import java.time.Instant import java.util.concurrent.{ConcurrentHashMap, TimeUnit} import java.util.concurrent.atomic.AtomicInteger -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import io.fabric8.kubernetes.api.model.{HasMetadata, PersistentVolumeClaim, Pod, PodBuilder} diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsLifecycleManager.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsLifecycleManager.scala index 5d91070bcab20..49bfde98bb81f 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsLifecycleManager.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsLifecycleManager.scala @@ -19,8 +19,8 @@ package org.apache.spark.scheduler.cluster.k8s import java.util.concurrent.TimeUnit import java.util.function.UnaryOperator -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import com.google.common.cache.CacheBuilder import io.fabric8.kubernetes.api.model.{Pod, PodBuilder} diff --git 
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsPollingSnapshotSource.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsPollingSnapshotSource.scala index 73c3d23dd7934..4ed34ec3e4c00 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsPollingSnapshotSource.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsPollingSnapshotSource.scala @@ -18,7 +18,7 @@ package org.apache.spark.scheduler.cluster.k8s import java.util.concurrent.{Future, ScheduledExecutorService, TimeUnit} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.primitives.UnsignedLong import io.fabric8.kubernetes.api.model.ListOptionsBuilder diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsSnapshot.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsSnapshot.scala index ff47c17148f35..1d987cc7569c1 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsSnapshot.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsSnapshot.scala @@ -18,7 +18,7 @@ package org.apache.spark.scheduler.cluster.k8s import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.ContainerStateTerminated import io.fabric8.kubernetes.api.model.Pod diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsSnapshotsStoreImpl.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsSnapshotsStoreImpl.scala index 49ab1d32486d2..2a90310c0b26c 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsSnapshotsStoreImpl.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsSnapshotsStoreImpl.scala @@ -22,8 +22,8 @@ import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.locks.ReentrantLock import javax.annotation.concurrent.GuardedBy -import scala.collection.JavaConverters._ import scala.concurrent.duration.FiniteDuration +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import io.fabric8.kubernetes.api.model.Pod diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorRollPlugin.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorRollPlugin.scala index e1a9a1f7abe28..9f5acf2e7e45b 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorRollPlugin.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorRollPlugin.scala @@ -20,7 +20,7 @@ import java.lang.Math.sqrt import java.util.{Map => JMap} import java.util.concurrent.{ScheduledExecutorService, TimeUnit} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkContext import org.apache.spark.api.plugin.{DriverPlugin, ExecutorPlugin, PluginContext, SparkPlugin} diff --git 
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/StatefulSetPodsAllocator.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/StatefulSetPodsAllocator.scala index ad9e1c94a4bd4..0285ae396d6e6 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/StatefulSetPodsAllocator.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/StatefulSetPodsAllocator.scala @@ -18,8 +18,8 @@ package org.apache.spark.scheduler.cluster.k8s import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.{PersistentVolumeClaim, PersistentVolumeClaimBuilder, PodSpec, PodSpecBuilder, PodTemplateSpec} diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/KubernetesUtilsSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/KubernetesUtilsSuite.scala index 2259ba99e6a59..be10d21aa527b 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/KubernetesUtilsSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/KubernetesUtilsSuite.scala @@ -20,7 +20,7 @@ package org.apache.spark.deploy.k8s import java.io.File import java.nio.charset.StandardCharsets -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.{ContainerBuilder, EnvVarBuilder, EnvVarSourceBuilder, PodBuilder} import org.apache.commons.io.FileUtils diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/PodBuilderSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/PodBuilderSuite.scala index 7da4e2c59bb25..947db5dd41c1a 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/PodBuilderSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/PodBuilderSuite.scala @@ -18,7 +18,7 @@ package org.apache.spark.deploy.k8s import java.io.File -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.{Config => _, _} import io.fabric8.kubernetes.client.KubernetesClient diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/SecretVolumeUtils.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/SecretVolumeUtils.scala index 16780584a674a..9270781581f76 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/SecretVolumeUtils.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/SecretVolumeUtils.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.{Container, Pod} diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicDriverFeatureStepSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicDriverFeatureStepSuite.scala index 9eb27a37fbabc..a9149b6cbf14f 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicDriverFeatureStepSuite.scala +++ 
b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicDriverFeatureStepSuite.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.features -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.{ContainerPort, ContainerPortBuilder, LocalObjectReferenceBuilder, Quantity} import org.apache.hadoop.fs.{LocalFileSystem, Path} diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicExecutorFeatureStepSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicExecutorFeatureStepSuite.scala index 32897014931cf..b9d5681907a69 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicExecutorFeatureStepSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicExecutorFeatureStepSuite.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.features -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.net.InternetDomainName import io.fabric8.kubernetes.api.model._ diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/DriverCommandFeatureStepSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/DriverCommandFeatureStepSuite.scala index c92bf803ec551..4c38989955b80 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/DriverCommandFeatureStepSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/DriverCommandFeatureStepSuite.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.features -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.{SparkConf, SparkFunSuite} import org.apache.spark.deploy.k8s._ diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/DriverKubernetesCredentialsFeatureStepSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/DriverKubernetesCredentialsFeatureStepSuite.scala index 7d8e9296a6cb5..f1dd8b94f17ff 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/DriverKubernetesCredentialsFeatureStepSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/DriverKubernetesCredentialsFeatureStepSuite.scala @@ -18,7 +18,7 @@ package org.apache.spark.deploy.k8s.features import java.io.File -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.base.Charsets import com.google.common.io.{BaseEncoding, Files} diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/DriverServiceFeatureStepSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/DriverServiceFeatureStepSuite.scala index 609c80f27c3da..06d322c9d19b5 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/DriverServiceFeatureStepSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/DriverServiceFeatureStepSuite.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.features -import 
scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.net.InternetDomainName import io.fabric8.kubernetes.api.model.Service diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/EnvSecretsFeatureStepSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/EnvSecretsFeatureStepSuite.scala index 0455526111067..f86607e0958b7 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/EnvSecretsFeatureStepSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/EnvSecretsFeatureStepSuite.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.features -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkFunSuite import org.apache.spark.deploy.k8s._ diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/HadoopConfDriverFeatureStepSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/HadoopConfDriverFeatureStepSuite.scala index c078e69b8a14b..8f21b95236a9c 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/HadoopConfDriverFeatureStepSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/HadoopConfDriverFeatureStepSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.deploy.k8s.features import java.io.File import java.nio.charset.StandardCharsets.UTF_8 -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.io.Files import io.fabric8.kubernetes.api.model.ConfigMap diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/KerberosConfDriverFeatureStepSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/KerberosConfDriverFeatureStepSuite.scala index 094fcb39782f4..163d87643abd3 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/KerberosConfDriverFeatureStepSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/KerberosConfDriverFeatureStepSuite.scala @@ -20,7 +20,7 @@ import java.io.File import java.nio.charset.StandardCharsets.UTF_8 import java.security.PrivilegedExceptionAction -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.io.Files import io.fabric8.kubernetes.api.model.{ConfigMap, Secret} diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/KubernetesFeaturesTestUtils.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/KubernetesFeaturesTestUtils.scala index 284887f0bddcb..83ec70b64e206 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/KubernetesFeaturesTestUtils.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/KubernetesFeaturesTestUtils.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.features -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import io.fabric8.kubernetes.api.model.{Container, HasMetadata, PodBuilder, SecretBuilder} diff --git 
a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/MountVolumesFeatureStepSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/MountVolumesFeatureStepSuite.scala index 468d1dde9fb6d..41cb46bfa5a2e 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/MountVolumesFeatureStepSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/MountVolumesFeatureStepSuite.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.features -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkFunSuite import org.apache.spark.deploy.k8s._ diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala index a813b3a876f87..de5244be95e28 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala @@ -20,7 +20,7 @@ import java.io.File import java.nio.charset.StandardCharsets import java.nio.file.Files -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model._ import io.fabric8.kubernetes.api.model.apiextensions.v1.{CustomResourceDefinition, CustomResourceDefinitionBuilder} diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/K8sSubmitOpSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/K8sSubmitOpSuite.scala index 3d30fb320d641..95a76b98b227b 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/K8sSubmitOpSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/K8sSubmitOpSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.deploy.k8s.submit import java.io.PrintStream import java.util.Arrays -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model._ import io.fabric8.kubernetes.client.{KubernetesClient, PropagationPolicyConfigurable} diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtilsSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtilsSuite.scala index 739274eac2d5d..5ed9cc5e03b18 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtilsSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientUtilsSuite.scala @@ -22,7 +22,7 @@ import java.nio.charset.StandardCharsets import java.nio.file.Files import java.util.UUID -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.ConfigMapBuilder import org.scalatest.BeforeAndAfter diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorLifecycleTestUtils.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorLifecycleTestUtils.scala index de9da0de7da2f..09b3d5a230b79 100644 --- 
a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorLifecycleTestUtils.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorLifecycleTestUtils.scala @@ -18,7 +18,7 @@ package org.apache.spark.scheduler.cluster.k8s import java.time.Instant -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model._ diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocatorSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocatorSuite.scala index 350a09f0218ba..113e36ee327a3 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocatorSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocatorSuite.scala @@ -20,8 +20,8 @@ import java.time.Instant import java.time.temporal.ChronoUnit.MILLIS import java.util.concurrent.atomic.AtomicInteger -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model._ import io.fabric8.kubernetes.client.{KubernetesClient, KubernetesClientException} diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/BasicTestsSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/BasicTestsSuite.scala index 66f2ae4924c09..992fe7c97ff1a 100644 --- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/BasicTestsSuite.scala +++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/BasicTestsSuite.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.integrationtest -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.Pod import org.scalatest.concurrent.{Eventually, PatienceConfiguration} diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/ClientModeTestsSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/ClientModeTestsSuite.scala index e71d7c47d8de0..456943a965847 100644 --- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/ClientModeTestsSuite.scala +++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/ClientModeTestsSuite.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.integrationtest -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.{PodBuilder, ServiceBuilder} import org.scalatest.concurrent.Eventually diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/DecommissionSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/DecommissionSuite.scala index 743de91b08fa3..1b9b5310c2ee2 100644 --- 
a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/DecommissionSuite.scala +++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/DecommissionSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.deploy.k8s.integrationtest import java.io.File import java.nio.charset.StandardCharsets -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.io.Files import io.fabric8.kubernetes.api.model.Pod diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/DepsTestsSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/DepsTestsSuite.scala index 8b94a65264a0f..e4650479b2ce1 100644 --- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/DepsTestsSuite.scala +++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/DepsTestsSuite.scala @@ -20,7 +20,7 @@ import java.io.File import java.net.URL import java.nio.file.Files -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.amazonaws.auth.BasicAWSCredentials import com.amazonaws.services.s3.AmazonS3Client diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala index 33485f2d6bc51..c3b6cc1d3893c 100644 --- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala +++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala @@ -20,7 +20,7 @@ import java.io.File import java.nio.file.{Path, Paths} import java.util.UUID -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.base.Charsets import com.google.common.io.Files diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala index 4aba11bdb9d8f..4d14593932c45 100644 --- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala +++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala @@ -19,7 +19,7 @@ package org.apache.spark.deploy.k8s.integrationtest import java.nio.file.{Path, Paths} import java.util.UUID -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable import io.fabric8.kubernetes.api.model.NamespaceBuilder diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala index 1d373f3f8066e..7e61e9339f5f7 100644 --- 
a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala +++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.deploy.k8s.integrationtest -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model._ import io.fabric8.kubernetes.api.model.storage.StorageClassBuilder diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/ProcessUtils.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/ProcessUtils.scala index e259979ad0329..a4e9ce5e3415c 100644 --- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/ProcessUtils.scala +++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/ProcessUtils.scala @@ -19,7 +19,7 @@ package org.apache.spark.deploy.k8s.integrationtest import java.nio.charset.StandardCharsets import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable.ArrayBuffer import scala.io.Source diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/SecretsTestsSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/SecretsTestsSuite.scala index 1d999e701af27..3e076ea16fdc4 100644 --- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/SecretsTestsSuite.scala +++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/SecretsTestsSuite.scala @@ -18,7 +18,7 @@ package org.apache.spark.deploy.k8s.integrationtest import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.api.model.{Pod, SecretBuilder} import org.apache.commons.codec.binary.Base64 diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/Utils.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/Utils.scala index 7df8df0fb79ed..6aeefe94d779c 100644 --- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/Utils.scala +++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/Utils.scala @@ -21,7 +21,7 @@ import java.nio.file.{Files, Path} import java.util.concurrent.CountDownLatch import java.util.zip.{ZipEntry, ZipOutputStream} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import io.fabric8.kubernetes.client.dsl.ExecListener import io.fabric8.kubernetes.client.dsl.ExecListener.Response diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/VolcanoTestsSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/VolcanoTestsSuite.scala index 35da48f61b366..b8e0246d0a621 100644 --- 
a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/VolcanoTestsSuite.scala +++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/VolcanoTestsSuite.scala @@ -20,7 +20,7 @@ import java.io.{File, FileInputStream} import java.time.Instant import java.util.UUID -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable // scalastyle:off executioncontextglobal import scala.concurrent.ExecutionContext.Implicits.global diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/MesosExternalShuffleService.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/MesosExternalShuffleService.scala index 5da1b190ed8d5..e6870feb92863 100644 --- a/resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/MesosExternalShuffleService.scala +++ b/resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/MesosExternalShuffleService.scala @@ -20,7 +20,7 @@ package org.apache.spark.deploy.mesos import java.nio.ByteBuffer import java.util.concurrent.{ConcurrentHashMap, TimeUnit} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.{SecurityManager, SparkConf} import org.apache.spark.deploy.ExternalShuffleService diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala index b023cf1fa4bb2..366f6d0bf392c 100644 --- a/resource-managers/mesos/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala +++ b/resource-managers/mesos/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala @@ -19,7 +19,7 @@ package org.apache.spark.executor import java.nio.ByteBuffer -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.mesos.{Executor => MesosExecutor, ExecutorDriver, MesosExecutorDriver} import org.apache.mesos.Protos.{TaskStatus => MesosTaskStatus, _} diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterPersistenceEngine.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterPersistenceEngine.scala index 123412f21e2a2..fa5977c8f8440 100644 --- a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterPersistenceEngine.scala +++ b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterPersistenceEngine.scala @@ -17,7 +17,7 @@ package org.apache.spark.scheduler.cluster.mesos -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.curator.framework.CuratorFramework import org.apache.zookeeper.CreateMode diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala index 16cffd03135df..aed08d13e445e 100644 --- a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala +++ b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala @@ -20,10 +20,10 @@ package org.apache.spark.scheduler.cluster.mesos import java.io.File import java.util.{Collections, Date, 
List => JList} -import scala.collection.JavaConverters._ import scala.collection.immutable import scala.collection.mutable import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.mesos.{Scheduler, SchedulerDriver} import org.apache.mesos.Protos.{SlaveID => AgentID, TaskState => MesosTaskState, _} diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala index e5a6a5f1ef166..29548de8b90eb 100644 --- a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala +++ b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala @@ -23,9 +23,9 @@ import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.{AtomicBoolean, AtomicLong} import java.util.concurrent.locks.ReentrantLock -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.concurrent.Future +import scala.jdk.CollectionConverters._ import org.apache.mesos.Protos.{SlaveID => AgentID, TaskInfo => MesosTaskInfo, _} import org.apache.mesos.SchedulerDriver diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackend.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackend.scala index 92099e06fd996..21df567c8062f 100644 --- a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackend.scala +++ b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackend.scala @@ -20,8 +20,8 @@ package org.apache.spark.scheduler.cluster.mesos import java.io.File import java.util.{ArrayList => JArrayList, Collections, List => JList} -import scala.collection.JavaConverters._ import scala.collection.mutable.{HashMap, HashSet} +import scala.jdk.CollectionConverters._ import org.apache.mesos.Protos.{ExecutorInfo => MesosExecutorInfo, SlaveID => AgentID, TaskInfo => MesosTaskInfo, _} diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosProtoUtils.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosProtoUtils.scala index fea01c7068c9a..6fd9ad1fa0887 100644 --- a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosProtoUtils.scala +++ b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosProtoUtils.scala @@ -17,7 +17,7 @@ package org.apache.spark.scheduler.cluster.mesos -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.mesos.Protos diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala index 524b1d514fafe..3702c3f1cf26f 100644 --- a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala +++ b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala @@ -22,8 +22,8 @@ import java.nio.charset.StandardCharsets import java.util.{List => JList} import 
java.util.concurrent.CountDownLatch -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import com.google.common.base.Splitter diff --git a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala index 102dd4b76d237..8e791d4faf52f 100644 --- a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala +++ b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala @@ -20,7 +20,7 @@ package org.apache.spark.scheduler.cluster.mesos import java.util.{Collection, Collections, Date} import java.util.concurrent.atomic.AtomicLong -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.mesos.Protos.{TaskState => MesosTaskState, _} import org.apache.mesos.Protos.Value.{Scalar, Type} diff --git a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackendSuite.scala b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackendSuite.scala index 2b7272a490376..8d0616ad055fe 100644 --- a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackendSuite.scala +++ b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackendSuite.scala @@ -19,8 +19,8 @@ package org.apache.spark.scheduler.cluster.mesos import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import org.apache.mesos.{Protos, Scheduler, SchedulerDriver} import org.apache.mesos.Protos._ diff --git a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala index c44f22faa419c..1b417f5b30b94 100644 --- a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala +++ b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosFineGrainedSchedulerBackendSuite.scala @@ -23,10 +23,10 @@ import java.util.Collection import java.util.Collections import java.util.Properties -import scala.collection.JavaConverters._ import scala.collection.immutable import scala.collection.mutable import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.mesos.{Protos, Scheduler, SchedulerDriver} import org.apache.mesos.Protos._ diff --git a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtilsSuite.scala b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtilsSuite.scala index f2efe86c3f9ec..f209c607935f8 100644 --- a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtilsSuite.scala +++ b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtilsSuite.scala @@ -20,7 +20,7 @@ package org.apache.spark.scheduler.cluster.mesos import 
java.io.{File, FileNotFoundException} import java.nio.charset.StandardCharsets.UTF_8 -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.io.Files import org.apache.mesos.Protos.{FrameworkInfo, Resource, Value} diff --git a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/Utils.scala b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/Utils.scala index 0a2c0cef31ecd..ccbdc85931851 100644 --- a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/Utils.scala +++ b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/Utils.scala @@ -19,7 +19,7 @@ package org.apache.spark.scheduler.cluster.mesos import java.util.Collections -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.mesos.Protos._ import org.apache.mesos.Protos.Value.{Range => MesosRange, Ranges, Scalar} diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala index deb44c3b59115..b2091499fd618 100644 --- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala +++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala @@ -25,9 +25,9 @@ import java.nio.file.{Files, Paths} import java.util.{Collections, Locale, Properties, UUID} import java.util.zip.{ZipEntry, ZipOutputStream} -import scala.collection.JavaConverters._ import scala.collection.immutable.{Map => IMap} import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet, ListBuffer, Map} +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import com.google.common.base.Objects diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala index e3fcf5472f54d..d4c24e8a39f72 100644 --- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala +++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala @@ -20,9 +20,9 @@ package org.apache.spark.deploy.yarn import java.nio.ByteBuffer import java.util.Collections -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.{HashMap, ListBuffer} +import scala.jdk.CollectionConverters._ import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.module.scala.DefaultScalaModule diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/LocalityPreferredContainerPlacementStrategy.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/LocalityPreferredContainerPlacementStrategy.scala index 7ac5beac76e20..d200a378a38e5 100644 --- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/LocalityPreferredContainerPlacementStrategy.scala +++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/LocalityPreferredContainerPlacementStrategy.scala @@ -17,8 +17,8 @@ package org.apache.spark.deploy.yarn -import scala.collection.JavaConverters._ import scala.collection.mutable.{ArrayBuffer, HashMap, Set} +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.yarn.api.records.ContainerId diff --git 
a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/SparkRackResolver.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/SparkRackResolver.scala index f3e8a9f0ceed6..e0d66af348e29 100644 --- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/SparkRackResolver.scala +++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/SparkRackResolver.scala @@ -17,8 +17,8 @@ package org.apache.spark.deploy.yarn -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import com.google.common.base.Strings import org.apache.hadoop.conf.Configuration diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala index 19c06f957318b..448353d3a5cdf 100644 --- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala +++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala @@ -23,9 +23,9 @@ import java.util.concurrent.ConcurrentHashMap import java.util.concurrent.atomic.AtomicInteger import javax.annotation.concurrent.GuardedBy -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet} +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocatorNodeHealthTracker.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocatorNodeHealthTracker.scala index 87828db1d993a..e778254e699bd 100644 --- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocatorNodeHealthTracker.scala +++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocatorNodeHealthTracker.scala @@ -16,8 +16,8 @@ */ package org.apache.spark.deploy.yarn -import scala.collection.JavaConverters._ import scala.collection.mutable.HashMap +import scala.jdk.CollectionConverters._ import org.apache.hadoop.yarn.client.api.AMRMClient import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnRMClient.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnRMClient.scala index 842611807db4d..a94614a5414d6 100644 --- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnRMClient.scala +++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnRMClient.scala @@ -17,7 +17,7 @@ package org.apache.spark.deploy.yarn -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.yarn.api.records._ diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/BaseYarnClusterSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/BaseYarnClusterSuite.scala index d166c50a1ddb2..f0177541accc1 100644 --- a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/BaseYarnClusterSuite.scala +++ b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/BaseYarnClusterSuite.scala @@ -22,8 +22,8 @@ import java.nio.charset.StandardCharsets import java.util.Properties import java.util.concurrent.TimeUnit -import 
scala.collection.JavaConverters._ import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import com.google.common.io.Files import org.apache.hadoop.yarn.conf.YarnConfiguration diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala index ce4389fd268d7..ac946b514fa53 100644 --- a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala +++ b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala @@ -23,8 +23,8 @@ import java.nio.file.Paths import java.util.Properties import java.util.concurrent.ConcurrentHashMap -import scala.collection.JavaConverters._ import scala.collection.mutable.{HashMap => MutableHashMap} +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{FileStatus, FileSystem, Path, PathFilter} diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ResourceRequestTestHelper.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ResourceRequestTestHelper.scala index 826bded50f84e..f09503c81d978 100644 --- a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ResourceRequestTestHelper.scala +++ b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/ResourceRequestTestHelper.scala @@ -17,7 +17,7 @@ package org.apache.spark.deploy.yarn -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.yarn.api.records.ResourceTypeInfo import org.apache.hadoop.yarn.util.resource.ResourceUtils diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala index f6f2e1b11d58b..4aada0f3a8dac 100644 --- a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala +++ b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala @@ -21,8 +21,8 @@ import java.util import java.util.Collections import java.util.concurrent.atomic.AtomicInteger -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.hadoop.net.{Node, NodeBase} import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceMetricsSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceMetricsSuite.scala index f040594f39c8d..b04aa0aa533b8 100644 --- a/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceMetricsSuite.scala +++ b/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceMetricsSuite.scala @@ -16,8 +16,8 @@ */ package org.apache.spark.network.yarn -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.hadoop.metrics2.{MetricsInfo, MetricsRecordBuilder} import org.mockito.ArgumentMatchers.{any, anyDouble, anyInt, anyLong} diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala index 552cc98311e8f..2ef22e376a6a1 100644 --- 
a/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala +++ b/resource-managers/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala @@ -24,9 +24,9 @@ import java.nio.file.attribute.PosixFilePermission._ import java.util.EnumSet import scala.annotation.tailrec -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import com.codahale.metrics.MetricSet import com.fasterxml.jackson.databind.ObjectMapper diff --git a/scalastyle-config.xml b/scalastyle-config.xml index 74e8480deaff7..7e1ef9082cc84 100644 --- a/scalastyle-config.xml +++ b/scalastyle-config.xml @@ -255,7 +255,7 @@ This file is divided into 3 sections: <parameter name="regex">JavaConversions</parameter> <customMessage>Instead of importing implicits in scala.collection.JavaConversions._, import - scala.collection.JavaConverters._ and use .asScala / .asJava methods + scala.jdk.CollectionConverters._ and use .asScala / .asJava methods </customMessage> diff --git a/sql/api/src/main/scala/org/apache/spark/sql/AnalysisException.scala b/sql/api/src/main/scala/org/apache/spark/sql/AnalysisException.scala index e1cb02a362bef..a043df9b42c82 100644 --- a/sql/api/src/main/scala/org/apache/spark/sql/AnalysisException.scala +++ b/sql/api/src/main/scala/org/apache/spark/sql/AnalysisException.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.{QueryContext, SparkThrowable, SparkThrowableHelper} import org.apache.spark.annotation.Stable diff --git a/sql/api/src/main/scala/org/apache/spark/sql/Row.scala b/sql/api/src/main/scala/org/apache/spark/sql/Row.scala index 94b446bf326d9..ae6a7a1e31eca 100644 --- a/sql/api/src/main/scala/org/apache/spark/sql/Row.scala +++ b/sql/api/src/main/scala/org/apache/spark/sql/Row.scala @@ -21,8 +21,8 @@ import java.sql.{Date, Timestamp} import java.time.{Instant, LocalDate} import java.util.Base64 -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.util.hashing.MurmurHash3 import org.json4s.{JArray, JBool, JDecimal, JDouble, JField, JLong, JNull, JObject, JString} diff --git a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala index 191ccc5254404..874303d849d7c 100644 --- a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala +++ b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala @@ -21,7 +21,7 @@ import java.lang.reflect.{ParameterizedType, Type, TypeVariable} import java.util.{List => JList, Map => JMap} import javax.annotation.Nonnull -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.commons.lang3.reflect.{TypeUtils => JavaTypeUtils} diff --git a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala index 97027604fd838..b30c6fa29e829 100644 --- a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala +++ b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala @@ -18,7 +18,7 @@ package org.apache.spark.sql.catalyst.parser import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ 
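The scalastyle rule above captures the whole migration: call sites keep using the explicit .asScala / .asJava extension methods, and only the import changes, from the deprecated scala.collection.JavaConverters._ to its Scala 2.13 home, scala.jdk.CollectionConverters._ (which is also why the import line moves down a group in each hunk: scala.jdk sorts after scala.collection.* under the import-ordering convention visible throughout this patch). As a minimal illustrative sketch, not taken from the patch itself, the usage on Scala 2.13 looks like this:

  import java.util.{ArrayList => JArrayList, List => JList}
  // New home of the converters; scala.collection.JavaConverters._ is deprecated in 2.13.
  import scala.jdk.CollectionConverters._

  object ConvertersExample {
    def main(args: Array[String]): Unit = {
      val jList: JList[String] = new JArrayList[String]()
      jList.add("driver")
      jList.add("executor")

      // Java -> Scala: .asScala wraps the Java list in a mutable.Buffer view.
      val buf: scala.collection.mutable.Buffer[String] = jList.asScala

      // Scala -> Java: .asJava wraps a Scala Seq in a java.util.List view.
      val back: JList[String] = Seq("pod", "container").asJava

      println(buf.mkString(", "))
      println(back)
    }
  }

Because the extension methods are name-compatible, the Scala hunks in this patch generally need to touch only the import line; the .asScala / .asJava call sites are unchanged.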
import org.antlr.v4.runtime.Token import org.antlr.v4.runtime.tree.ParseTree diff --git a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/parsers.scala b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/parsers.scala index c3a051be89bcc..8e4d2ab16158a 100644 --- a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/parsers.scala +++ b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/parsers.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.catalyst.parser -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.antlr.v4.runtime._ import org.antlr.v4.runtime.atn.PredictionMode diff --git a/sql/api/src/main/scala/org/apache/spark/sql/types/StructType.scala b/sql/api/src/main/scala/org/apache/spark/sql/types/StructType.scala index 8edc7cf370b7d..0b547ec553969 100644 --- a/sql/api/src/main/scala/org/apache/spark/sql/types/StructType.scala +++ b/sql/api/src/main/scala/org/apache/spark/sql/types/StructType.scala @@ -525,7 +525,7 @@ object StructType extends AbstractDataType { def apply(fields: Seq[StructField]): StructType = StructType(fields.toArray) def apply(fields: java.util.List[StructField]): StructType = { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ StructType(fields.asScala.toArray) } diff --git a/sql/api/src/main/scala/org/apache/spark/sql/util/ArrowUtils.scala b/sql/api/src/main/scala/org/apache/spark/sql/util/ArrowUtils.scala index a3d8858b82771..4a0b215c81c17 100644 --- a/sql/api/src/main/scala/org/apache/spark/sql/util/ArrowUtils.scala +++ b/sql/api/src/main/scala/org/apache/spark/sql/util/ArrowUtils.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.util import java.util.concurrent.atomic.AtomicInteger -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.arrow.memory.RootAllocator import org.apache.arrow.vector.complex.MapVector diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Expressions.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Expressions.java index 6aed8896e9f58..9f09ef8a8be71 100644 --- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Expressions.java +++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/Expressions.java @@ -20,7 +20,7 @@ import java.util.Arrays; import org.apache.spark.annotation.Evolving; -import scala.collection.JavaConverters; +import scala.jdk.CollectionConverters; import org.apache.spark.sql.types.DataType; @@ -45,7 +45,7 @@ private Expressions() { */ public static Transform apply(String name, Expression... 
args) { return LogicalExpressions.apply(name, - JavaConverters.asScalaBuffer(Arrays.asList(args)).toSeq()); + CollectionConverters.CollectionHasAsScala(Arrays.asList(args)).asScala().toSeq()); } /** diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/QueryPlanningTracker.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/QueryPlanningTracker.scala index c42bf7e9aed3c..2e14c09bc8193 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/QueryPlanningTracker.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/QueryPlanningTracker.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.catalyst -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan import org.apache.spark.util.BoundedPriorityQueue diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala index fe61cc81359e4..cfed1761e334c 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala @@ -20,9 +20,9 @@ package org.apache.spark.sql.catalyst.expressions.codegen import java.io.ByteArrayInputStream import scala.annotation.tailrec -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import com.google.common.util.concurrent.{ExecutionError, UncheckedExecutionException} diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala index beb07259384a3..80c7367633323 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala @@ -19,9 +19,9 @@ package org.apache.spark.sql.catalyst.expressions.objects import java.lang.reflect.{Method, Modifier} -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.{Builder, WrappedArray} +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.util.Try diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala index cf76d0cb65d3a..5ebfdd919b8a6 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala @@ -20,8 +20,8 @@ package org.apache.spark.sql.catalyst.expressions import java.util.Locale import java.util.regex.{Matcher, MatchResult, Pattern, PatternSyntaxException} -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.commons.text.StringEscapeUtils diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index 10628bb306a3b..c2bc6e9eb65a4 100644 --- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -20,8 +20,8 @@ package org.apache.spark.sql.catalyst.parser import java.util.Locale import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ import scala.collection.mutable.{ArrayBuffer, Set} +import scala.jdk.CollectionConverters._ import org.antlr.v4.runtime.{ParserRuleContext, Token} import org.antlr.v4.runtime.misc.Interval diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/QueryExecutionMetering.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/QueryExecutionMetering.scala index b5a5e239b68ba..4b7b0079f072e 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/QueryExecutionMetering.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/QueryExecutionMetering.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.catalyst.rules -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.util.concurrent.AtomicLongMap diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala index d5d7f17bfe341..39ce5e77e28ad 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.trees import java.util.UUID import scala.collection.{mutable, Map} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.commons.lang3.ClassUtils diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/GenericArrayData.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/GenericArrayData.scala index e566e659db2cb..bdf8d36321e64 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/GenericArrayData.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/GenericArrayData.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.catalyst.util -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.types.{DataType, Decimal} diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/StaxXmlParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/StaxXmlParser.scala index e8dbc85f29597..71022ba281c7f 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/StaxXmlParser.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/StaxXmlParser.scala @@ -23,8 +23,8 @@ import javax.xml.stream.events._ import javax.xml.transform.stream.StreamSource import javax.xml.validation.Schema -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import scala.xml.SAXException diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/StaxXmlParserUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/StaxXmlParserUtils.scala index 2bfc6ef40516f..ad1c0b729c5dd 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/StaxXmlParserUtils.scala +++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/StaxXmlParserUtils.scala @@ -22,7 +22,7 @@ import javax.xml.stream.{EventFilter, XMLEventReader, XMLInputFactory, XMLStream import javax.xml.stream.events._ import scala.annotation.tailrec -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ private[sql] object StaxXmlParserUtils { diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/XSDToSchema.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/XSDToSchema.scala index ecd601832e1fc..6c3958f5dd1a6 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/XSDToSchema.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/XSDToSchema.scala @@ -20,7 +20,7 @@ import java.io.{File, FileInputStream, InputStreamReader, StringReader} import java.nio.charset.StandardCharsets import java.nio.file.Path -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.ws.commons.schema._ import org.apache.ws.commons.schema.constants.Constants diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/XmlInferSchema.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/XmlInferSchema.scala index 43d1b8fdb86be..3eabf4525b4ec 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/XmlInferSchema.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/xml/XmlInferSchema.scala @@ -23,8 +23,8 @@ import javax.xml.transform.stream.StreamSource import javax.xml.validation.Schema import scala.annotation.tailrec -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.spark.rdd.RDD diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala index be569b1de9dbc..72cb1e58c7ef7 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.connector.catalog import java.util import java.util.Collections -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.catalyst.analysis.{AsOfTimestamp, AsOfVersion, NamedRelation, NoSuchDatabaseException, NoSuchFunctionException, NoSuchNamespaceException, NoSuchTableException, TimeTravelSpec} diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/V1Table.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/V1Table.scala index da201e816497c..d96b06789c402 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/V1Table.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/V1Table.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql.connector.catalog import java.util -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogTableType} import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.TableIdentifierHelper diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/execution/arrow/ArrowWriter.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/execution/arrow/ArrowWriter.scala index 1b05786e0c7de..eb18e42f034fe 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/execution/arrow/ArrowWriter.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/execution/arrow/ArrowWriter.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.arrow -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.arrow.vector._ import org.apache.arrow.vector.complex._ diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Implicits.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Implicits.scala index bb55eb0f41f97..795778f986953 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Implicits.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Implicits.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.datasources.v2 -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.analysis.{PartitionSpec, ResolvedPartitionSpec, UnresolvedPartitionSpec} import org.apache.spark.sql.catalyst.expressions.{AttributeReference, MetadataAttribute} diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index 43eb0756d8dd4..a24943bc397ce 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -23,8 +23,8 @@ import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicReference import java.util.zip.Deflater -import scala.collection.JavaConverters._ import scala.collection.immutable +import scala.jdk.CollectionConverters._ import scala.util.Try import scala.util.control.NonFatal import scala.util.matching.Regex diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SQLKeywordSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SQLKeywordSuite.scala index 74f7277f90ea6..8806431ab4395 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SQLKeywordSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SQLKeywordSuite.scala @@ -20,8 +20,8 @@ package org.apache.spark.sql.catalyst import java.io.File import java.nio.file.Files -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkFunSuite import org.apache.spark.sql.catalyst.plans.SQLHelper diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala index ffc12a2b9810c..e338a5ebab24f 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.analysis import java.util.TimeZone -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.reflect.runtime.universe.TypeTag diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TableLookupCacheSuite.scala 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TableLookupCacheSuite.scala index 399799983fdb6..2c4215e70287a 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TableLookupCacheSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TableLookupCacheSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.analysis import java.io.File -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.mockito.ArgumentMatchers.any import org.mockito.Mockito._ diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala index de85d6fe0b748..9635408435b92 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql.catalyst.expressions import java.sql.{Date, Timestamp} -import scala.collection.JavaConverters._ import scala.collection.mutable.WrappedArray +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.reflect.runtime.universe.TypeTag import scala.util.Random diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ComputeCurrentTimeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ComputeCurrentTimeSuite.scala index 86461522f7469..955deeb59023d 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ComputeCurrentTimeSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ComputeCurrentTimeSuite.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql.catalyst.optimizer import java.time.{LocalDateTime, ZoneId} -import scala.collection.JavaConverters.mapAsScalaMap import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.dsl.plans._ import org.apache.spark.sql.catalyst.expressions.{Alias, CurrentDate, CurrentTimestamp, CurrentTimeZone, InSubquery, ListQuery, Literal, LocalTimestamp, Now} @@ -105,7 +105,7 @@ class ComputeCurrentTimeSuite extends PlanTest { } test("analyzer should use consistent timestamps for different timezones") { - val localTimestamps = mapAsScalaMap(ZoneId.SHORT_IDS) + val localTimestamps = ZoneId.SHORT_IDS.asScala .map { case (zoneId, _) => Alias(LocalTimestamp(Some(zoneId)), zoneId)() }.toSeq val input = Project(localTimestamps, LocalRelation()) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ParserUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ParserUtilsSuite.scala index f0e1c243320f8..d9f3067d30e51 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ParserUtilsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ParserUtilsSuite.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.catalyst.parser -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.antlr.v4.runtime.{CharStreams, CommonTokenStream, ParserRuleContext} diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeTestUtils.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeTestUtils.scala index 537f8d1f80609..41c87dd804be1 100644 --- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeTestUtils.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeTestUtils.scala @@ -21,7 +21,7 @@ import java.time.{LocalDate, LocalDateTime, LocalTime, ZoneId, ZoneOffset} import java.util.TimeZone import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.util.DateTimeConstants._ import org.apache.spark.sql.catalyst.util.DateTimeUtils.getZoneId diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/CatalogManagerSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/CatalogManagerSuite.scala index bfff3ee855e6d..fc78eef0ff1b8 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/CatalogManagerSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/CatalogManagerSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.connector.catalog import java.net.URI -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkFunSuite import org.apache.spark.sql.catalyst.analysis.{EmptyFunctionRegistry, FakeV2SessionCatalog, NoSuchNamespaceException} diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/CatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/CatalogSuite.scala index 6be50f36c848a..e79fff7479b9e 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/CatalogSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/CatalogSuite.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.connector.catalog import java.util import java.util.Collections -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkFunSuite import org.apache.spark.sql.catalyst.InternalRow diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/EnumTypeSetBenchmark.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/EnumTypeSetBenchmark.scala index d8bc3ed28dd4e..5c3a0d239ee69 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/EnumTypeSetBenchmark.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/EnumTypeSetBenchmark.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.connector.catalog import java.util import java.util.Collections -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.benchmark.{Benchmark, BenchmarkBase} import org.apache.spark.sql.connector.catalog.TableCapability._ diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryCatalog.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryCatalog.scala index aa9914d760f2a..8d8d2317f0986 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryCatalog.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryCatalog.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.connector.catalog import java.util import java.util.concurrent.ConcurrentHashMap -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.analysis.{NoSuchFunctionException, NoSuchNamespaceException} import 
org.apache.spark.sql.connector.catalog.functions.UnboundFunction diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryPartitionTable.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryPartitionTable.scala index 7280d6a5b0776..ad55af81a3647 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryPartitionTable.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryPartitionTable.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.connector.catalog import java.util import java.util.concurrent.ConcurrentHashMap -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.{NoSuchPartitionException, PartitionsAlreadyExistException} diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryTableCatalog.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryTableCatalog.scala index a6da7308a25bf..f1f84fbfeb9f9 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryTableCatalog.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryTableCatalog.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.connector.catalog import java.util import java.util.concurrent.ConcurrentHashMap -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.analysis.{NamespaceAlreadyExistsException, NonEmptyNamespaceException, NoSuchNamespaceException, NoSuchTableException, TableAlreadyExistsException} import org.apache.spark.sql.connector.distributions.{Distribution, Distributions} diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/SupportsPartitionManagementSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/SupportsPartitionManagementSuite.scala index 501f363d7dc6b..55f676b0c440b 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/SupportsPartitionManagementSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/SupportsPartitionManagementSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.connector.catalog import java.util -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.{SparkFunSuite, SparkUnsupportedOperationException} import org.apache.spark.sql.catalyst.InternalRow diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/util/CaseInsensitiveStringMapSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/util/CaseInsensitiveStringMapSuite.scala index 0accb471cada3..a097e62f033bd 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/util/CaseInsensitiveStringMapSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/util/CaseInsensitiveStringMapSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.util import java.util -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkFunSuite diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedParquetRecordReader.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedParquetRecordReader.java index 0f0455c0d8eed..3b594ec3a7935 100644 --- 
a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedParquetRecordReader.java +++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedParquetRecordReader.java @@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.util.ResolveDefaultColumns; import scala.Option; -import scala.collection.JavaConverters; +import scala.jdk.CollectionConverters; import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.mapreduce.InputSplit; @@ -359,7 +359,7 @@ public boolean nextBatch() throws IOException { private void initializeInternal() throws IOException, UnsupportedOperationException { missingColumns = new HashSet<>(); - for (ParquetColumn column : JavaConverters.seqAsJavaList(parquetColumn.children())) { + for (ParquetColumn column : CollectionConverters.SeqHasAsJava(parquetColumn.children()).asJava()) { checkColumn(column); } } @@ -369,7 +369,7 @@ private void initializeInternal() throws IOException, UnsupportedOperationExcept * conforms to the type of the file schema. */ private void checkColumn(ParquetColumn column) throws IOException { - String[] path = JavaConverters.seqAsJavaList(column.path()).toArray(new String[0]); + String[] path = CollectionConverters.SeqHasAsJava(column.path()).asJava().toArray(new String[0]); if (containsPath(fileSchema, path)) { if (column.isPrimitive()) { ColumnDescriptor desc = column.descriptor().get(); @@ -378,7 +378,7 @@ private void checkColumn(ParquetColumn column) throws IOException { throw new UnsupportedOperationException("Schema evolution not supported."); } } else { - for (ParquetColumn childColumn : JavaConverters.seqAsJavaList(column.children())) { + for (ParquetColumn childColumn : CollectionConverters.SeqHasAsJava(column.children()).asJava()) { checkColumn(childColumn); } } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Column.scala b/sql/core/src/main/scala/org/apache/spark/sql/Column.scala index bb326119ab49c..9bb35a8b0b3d1 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/Column.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/Column.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.Stable import org.apache.spark.internal.Logging diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameNaFunctions.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameNaFunctions.scala index 91da789cd774b..df70b2b7d3084 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameNaFunctions.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameNaFunctions.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql import java.{lang => jl} import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.Stable import org.apache.spark.sql.catalyst.expressions._ diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala index c57c96f9c073f..bc62003b251e1 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql import java.util.{Locale, Properties} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.Partition import org.apache.spark.annotation.Stable 
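Note on the pattern above: for Scala sources the migration is a pure import swap, since scala.jdk.CollectionConverters exposes the same asScala/asJava extension methods that the deprecated scala.collection.JavaConverters did. Java call sites such as VectorizedParquetRecordReader.java cannot use Scala implicits, so they invoke the implicit-class wrappers explicitly (e.g. CollectionConverters.SeqHasAsJava(...).asJava()); the standard library also ships scala.jdk.javaapi.CollectionConverters with plain static conversion methods for Java callers. A minimal Scala sketch of the recurring swap, with an illustrative javaList value (not taken from this diff):

import java.util.Arrays

import scala.jdk.CollectionConverters._

object ConvertersSketch {
  def main(args: Array[String]): Unit = {
    val javaList: java.util.List[String] = Arrays.asList("a", "b", "c")

    // Scala 2.13 spelling of the old JavaConverters.asScalaBuffer(javaList).toSeq
    val scalaSeq: Seq[String] = javaList.asScala.toSeq

    // Round trip back; replaces the old seqAsJavaList(scalaSeq)
    val backToJava: java.util.List[String] = scalaSeq.asJava

    println(scalaSeq)
    println(backToJava)
  }
}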
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameStatFunctions.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameStatFunctions.scala index 7511c21fa76df..9d4f83c53a354 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameStatFunctions.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameStatFunctions.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql import java.{lang => jl, util => ju} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.Stable import org.apache.spark.sql.catalyst.InternalRow diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala index 76424faca28de..127fae14cb104 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql import java.util.{Locale, Properties} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.Stable import org.apache.spark.sql.catalyst.TableIdentifier diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriterV2.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriterV2.scala index 7ca9c7ef71d67..b68a13ba21590 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriterV2.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriterV2.scala @@ -17,8 +17,8 @@ package org.apache.spark.sql -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.Experimental import org.apache.spark.sql.catalyst.analysis.{CannotReplaceMissingTableException, NoSuchTableException, TableAlreadyExistsException, UnresolvedIdentifier, UnresolvedRelation} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala index f07496e643048..f2250216a8ef0 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala @@ -20,8 +20,8 @@ package org.apache.spark.sql import java.io.{ByteArrayOutputStream, CharArrayWriter, DataOutputStream} import scala.annotation.varargs -import scala.collection.JavaConverters._ import scala.collection.mutable.{ArrayBuffer, HashSet} +import scala.jdk.CollectionConverters._ import scala.reflect.runtime.universe.TypeTag import scala.util.control.NonFatal diff --git a/sql/core/src/main/scala/org/apache/spark/sql/KeyValueGroupedDataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/KeyValueGroupedDataset.scala index 4c2ccb27eab20..2239a128a2c45 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/KeyValueGroupedDataset.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/KeyValueGroupedDataset.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.api.java.function._ import org.apache.spark.sql.catalyst.encoders.{encoderFor, ExpressionEncoder} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Observation.scala b/sql/core/src/main/scala/org/apache/spark/sql/Observation.scala index df2c99dde5fdb..270631047c2d2 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/Observation.scala +++ 
b/sql/core/src/main/scala/org/apache/spark/sql/Observation.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql import java.util.UUID -import scala.collection.JavaConverters +import scala.jdk.CollectionConverters import org.apache.spark.sql.execution.QueryExecution import org.apache.spark.sql.util.QueryExecutionListener @@ -109,9 +109,9 @@ class Observation(val name: String) { */ @throws[InterruptedException] def getAsJava: java.util.Map[String, AnyRef] = { - JavaConverters.mapAsJavaMap( + CollectionConverters.MapHasAsJava( get.map { case (key, value) => (key, value.asInstanceOf[Object])} - ) + ).asJava } private def register(sparkSession: SparkSession): Unit = { diff --git a/sql/core/src/main/scala/org/apache/spark/sql/RelationalGroupedDataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/RelationalGroupedDataset.scala index f67f2b2cfde20..27093ced7514d 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/RelationalGroupedDataset.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/RelationalGroupedDataset.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkRuntimeException import org.apache.spark.annotation.Stable diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala index 971cf9194d0ec..d446e9a15b590 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala @@ -22,7 +22,7 @@ import java.util.{ServiceLoader, UUID} import java.util.concurrent.TimeUnit._ import java.util.concurrent.atomic.{AtomicBoolean, AtomicReference} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.runtime.universe.TypeTag import scala.util.control.NonFatal diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala index a76465ff42add..5f72dc59105eb 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.api.r import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream} import java.util.{Locale, Map => JMap} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.matching.Regex import org.apache.spark.TaskContext diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala index 13b199948e0f2..676be7fe41cbc 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.catalog -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.Stable import org.apache.spark.sql.{AnalysisException, DataFrame, Dataset} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/BaseScriptTransformationExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/BaseScriptTransformationExec.scala index 99d59901d581c..91042b59677bf 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/BaseScriptTransformationExec.scala +++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/BaseScriptTransformationExec.scala @@ -21,7 +21,7 @@ import java.io.{BufferedReader, File, InputStream, InputStreamReader, OutputStre import java.nio.charset.StandardCharsets import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.hadoop.conf.Configuration diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/ColumnarEvaluatorFactory.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/ColumnarEvaluatorFactory.scala index 960d4b74a1b95..95e7e509d7ba0 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/ColumnarEvaluatorFactory.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/ColumnarEvaluatorFactory.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.{PartitionEvaluator, PartitionEvaluatorFactory, TaskContext} import org.apache.spark.sql.catalyst.InternalRow diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala index dfe3c67e18b1f..1cc2147725355 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala @@ -21,7 +21,7 @@ import java.time.ZoneOffset import java.util.{Locale, TimeZone} import javax.ws.rs.core.UriBuilder -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.antlr.v4.runtime.{ParserRuleContext, Token} import org.antlr.v4.runtime.tree.TerminalNode diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/AdaptiveSparkPlanExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/AdaptiveSparkPlanExec.scala index 36895b17aa847..36c2160f52282 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/AdaptiveSparkPlanExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/AdaptiveSparkPlanExec.scala @@ -20,10 +20,10 @@ package org.apache.spark.sql.execution.adaptive import java.util import java.util.concurrent.LinkedBlockingQueue -import scala.collection.JavaConverters._ import scala.collection.concurrent.TrieMap import scala.collection.mutable import scala.concurrent.ExecutionContext +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.spark.SparkException diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/arrow/ArrowConverters.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/arrow/ArrowConverters.scala index 86dd7984b5859..c9733a25af90e 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/arrow/ArrowConverters.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/arrow/ArrowConverters.scala @@ -20,8 +20,8 @@ package org.apache.spark.sql.execution.arrow import java.io.{ByteArrayInputStream, ByteArrayOutputStream, FileInputStream, OutputStream} import java.nio.channels.{Channels, ReadableByteChannel} -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.arrow.flatbuf.MessageHeader import org.apache.arrow.memory.BufferAllocator diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala index c21f330be0647..1a6eab27d08cc 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.command -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.spark.rdd.RDD diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala index 88e940ffdc78d..f67e23f21e033 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql.execution.command import java.net.{URI, URISyntaxException} -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.hadoop.fs.{FileContext, FsConstants, Path} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala index b1129dad6b1e5..c701fe2dbcb25 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.datasources import java.util.{Locale, ServiceConfigurationError, ServiceLoader} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success, Try} import org.apache.hadoop.conf.Configuration diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala index a72e4c339af8c..ace22ade91595 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceUtils.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.datasources import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path import org.json4s.NoTypeHints diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FallBackFileSourceV2.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FallBackFileSourceV2.scala index 2e1ae9fe3aefa..0bd3b6c2bf083 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FallBackFileSourceV2.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FallBackFileSourceV2.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.datasources -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.SparkSession import org.apache.spark.sql.catalyst.plans.logical.{InsertIntoStatement, LogicalPlan} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileStatusCache.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileStatusCache.scala index b5d800f02862e..e1fdb9570732a 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileStatusCache.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileStatusCache.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.execution.datasources import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicBoolean -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.cache._ import org.apache.hadoop.fs.{FileStatus, Path} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/DriverRegistry.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/DriverRegistry.scala index 3444d03beff5d..421fa4ddace4b 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/DriverRegistry.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/DriverRegistry.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql.execution.datasources.jdbc import java.sql.{Driver, DriverManager} -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark.internal.Logging import org.apache.spark.util.Utils diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala index e241951abe392..53b09179cc3a1 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala @@ -228,7 +228,7 @@ private[jdbc] class JDBCRDD( val part = thePart.asInstanceOf[JDBCPartition] conn = getConnection(part.idx) val dialect = JdbcDialects.get(url) - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ dialect.beforeFetch(conn, options.asProperties.asScala.toMap) // This executes a generic SQL statement (or PL/SQL block) before reading diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala index b7019c1dcbe53..65346aa49d89f 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala @@ -23,8 +23,8 @@ import java.util import java.util.Locale import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.util.Try import scala.util.control.NonFatal diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/BasicConnectionProvider.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/BasicConnectionProvider.scala index aff91c5ff0f77..369cf59e0599d 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/BasicConnectionProvider.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/BasicConnectionProvider.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.execution.datasources.jdbc.connection import java.sql.{Connection, Driver} import 
java.util.Properties -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.internal.Logging import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/SecureConnectionProvider.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/SecureConnectionProvider.scala index 71c20e11f9274..108ad731de2d2 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/SecureConnectionProvider.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/SecureConnectionProvider.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.execution.datasources.jdbc.connection import java.sql.{Connection, Driver} import javax.security.auth.login.{AppConfigurationEntry, Configuration} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.internal.Logging import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcUtils.scala index 89a9ba6ec7aa9..d6d42e74da1cd 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcUtils.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcUtils.scala @@ -20,8 +20,8 @@ package org.apache.spark.sql.execution.datasources.orc import java.nio.charset.StandardCharsets.UTF_8 import java.util.Locale -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{FileStatus, Path} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala index c131ad2cf3173..df367766501d4 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala @@ -17,8 +17,8 @@ package org.apache.spark.sql.execution.datasources.parquet -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Try} import org.apache.hadoop.conf.Configuration diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilters.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilters.scala index 5899b6621ad8e..c5360c9a04f20 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilters.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilters.scala @@ -25,7 +25,7 @@ import java.time.{Duration, Instant, LocalDate, Period} import java.util.HashSet import java.util.Locale -import scala.collection.JavaConverters.asScalaBufferConverter +import scala.jdk.CollectionConverters._ import org.apache.parquet.filter2.predicate._ import org.apache.parquet.filter2.predicate.SparkFilterApi._ diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetReadSupport.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetReadSupport.scala index 025ee3dd034ae..29d447daa65b7 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetReadSupport.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetReadSupport.scala @@ -21,7 +21,7 @@ import java.time.ZoneId import java.util import java.util.{Locale, Map => JMap, UUID} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.parquet.hadoop.api.{InitContext, ReadSupport} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala index e257be3d189aa..936339e091d8f 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala @@ -21,8 +21,8 @@ import java.math.{BigDecimal, BigInteger} import java.nio.ByteOrder import java.time.{ZoneId, ZoneOffset} -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.parquet.column.Dictionary import org.apache.parquet.io.ColumnIOFactory diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexUtil.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexUtil.scala index a5d8494cfa77c..c9a1a4fb46e45 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexUtil.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexUtil.scala @@ -18,7 +18,7 @@ package org.apache.spark.sql.execution.datasources.parquet import java.io.IOException -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.mapreduce.{InputSplit, RecordReader, TaskAttemptContext} import org.apache.parquet.column.page.PageReadStore diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala index e546816dd1db4..dfa4e607500ef 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetWriteSupport.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.execution.datasources.parquet import java.nio.{ByteBuffer, ByteOrder} import java.util -import scala.collection.JavaConverters.mapAsJavaMapConverter +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.parquet.column.ParquetProperties diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/AddPartitionExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/AddPartitionExec.scala index e77240a5d8e54..3a8347188e578 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/AddPartitionExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/AddPartitionExec.scala @@ -17,7 +17,7 @@ package 
org.apache.spark.sql.execution.datasources.v2 -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.{PartitionsAlreadyExistException, ResolvedPartitionSpec} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateIndexExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateIndexExec.scala index 8dac673733492..63c8dc6517b9e 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateIndexExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateIndexExec.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.datasources.v2 import java.util -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.IndexAlreadyExistsException diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateNamespaceExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateNamespaceExec.scala index dba84d2385aec..c90a1a2fab1e5 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateNamespaceExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateNamespaceExec.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.datasources.v2 -import scala.collection.JavaConverters.mapAsJavaMapConverter +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateTableExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateTableExec.scala index 550578443283f..5f3ed7a5bc76c 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateTableExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateTableExec.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.datasources.v2 -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Utils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Utils.scala index c906e42a9b9e0..574bc869de4c4 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Utils.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Utils.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.datasources.v2 import java.util.regex.Pattern -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.fasterxml.jackson.databind.ObjectMapper diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeNamespaceExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeNamespaceExec.scala index 7f9a62f42ddcf..125952566d7e8 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeNamespaceExec.scala +++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeNamespaceExec.scala @@ -17,8 +17,8 @@ package org.apache.spark.sql.execution.datasources.v2 -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.expressions.Attribute diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeTableExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeTableExec.scala index 8b0098f14fedc..9cade86829e6f 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeTableExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeTableExec.scala @@ -17,8 +17,8 @@ package org.apache.spark.sql.execution.datasources.v2 -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.catalog.CatalogTableType diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileDataSourceV2.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileDataSourceV2.scala index 3cb1a74417db1..19c9b3d8bb338 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileDataSourceV2.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileDataSourceV2.scala @@ -18,7 +18,7 @@ package org.apache.spark.sql.execution.datasources.v2 import java.util -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.module.scala.DefaultScalaModule diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileTable.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileTable.scala index 91e6ef70c760c..3b806bc9b487d 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileTable.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileTable.scala @@ -18,7 +18,7 @@ package org.apache.spark.sql.execution.datasources.v2 import java.util -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.{FileStatus, Path} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileWrite.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileWrite.scala index e5f064fcf6ed4..068870511ea55 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileWrite.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileWrite.scala @@ -18,7 +18,7 @@ package org.apache.spark.sql.execution.datasources.v2 import java.util.UUID -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ReplaceTableExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ReplaceTableExec.scala index 55d97577d5781..104d8a706efb7 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ReplaceTableExec.scala +++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ReplaceTableExec.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.datasources.v2 -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.NoSuchTableException diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala index 6fa51ed63bd46..8f9a925ed4bed 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala @@ -17,8 +17,8 @@ package org.apache.spark.sql.execution.datasources.v2 -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.ResolvedTable diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala index 61b8e91fd348e..3a957e0433932 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala @@ -31,7 +31,7 @@ case class ShowTablePropertiesExec( propertyKey: Option[String]) extends LeafV2CommandExec { override protected def run(): Seq[InternalRow] = { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ // The reserved properties are accessible through DESCRIBE val properties = conf.redactOptions(catalogTable.properties.asScala.toMap) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/TextBasedFileScan.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/TextBasedFileScan.scala index f24fb95acb922..e11adb186c841 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/TextBasedFileScan.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/TextBasedFileScan.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.execution.datasources.v2 -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path import org.apache.hadoop.io.compress.CompressionCodecFactory diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala index a7062a9a596c3..a43f42a87d18a 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala @@ -20,8 +20,8 @@ package org.apache.spark.sql.execution.datasources.v2 import java.net.URI import java.util -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.{FunctionIdentifier, SQLConfHelper, TableIdentifier} import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchTableException, TableAlreadyExistsException} 
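The ShowTablePropertiesExec hunk above keeps its method-local import and only repoints it at the new package; catalogTable.properties is a java.util.Map, and the existing .asScala.toMap converts it to an immutable Scala Map before redaction. A minimal sketch of that idiom, with a hypothetical props map standing in for the table properties:

import java.util.{HashMap => JHashMap}

import scala.jdk.CollectionConverters._

object PropertiesSketch {
  def main(args: Array[String]): Unit = {
    val props = new JHashMap[String, String]()
    props.put("provider", "parquet")
    props.put("owner", "spark")

    // .asScala is a live mutable wrapper over the Java map; .toMap copies
    // it into an immutable Map, so later mutation of `props` cannot leak
    // into the snapshot.
    val snapshot: Map[String, String] = props.asScala.toMap

    props.put("owner", "someone-else")
    println(snapshot("owner")) // still "spark"
  }
}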
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala index 4a9b85450a176..a9e34ff2e1cf0 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.datasources.v2 -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.{SparkEnv, SparkException, TaskContext} import org.apache.spark.internal.Logging diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/csv/CSVScan.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/csv/CSVScan.scala index 734f8165aff3d..b193e5199690d 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/csv/CSVScan.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/csv/CSVScan.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.execution.datasources.v2.csv -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/csv/CSVTable.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/csv/CSVTable.scala index 5ec2a34a32876..0000caaa8928b 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/csv/CSVTable.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/csv/CSVTable.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.execution.datasources.v2.csv -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.FileStatus diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTable.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTable.scala index 0a184116a0f5a..b12f55ea93792 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTable.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTable.scala @@ -18,7 +18,7 @@ package org.apache.spark.sql.execution.datasources.v2.jdbc import java.util -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.SparkSession import org.apache.spark.sql.connector.catalog._ diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala index 1bc2d43d7b8ee..0084abb392ef9 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql.execution.datasources.v2.jdbc import java.sql.SQLException import java.util -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark.internal.Logging import org.apache.spark.sql.catalyst.analysis.NoSuchFunctionException diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/json/JsonScan.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/json/JsonScan.scala index c9a3a6f5e7f2d..ff7273f2870b2 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/json/JsonScan.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/json/JsonScan.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.execution.datasources.v2.json -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/json/JsonTable.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/json/JsonTable.scala index 52168007aaa18..c567e87e7d767 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/json/JsonTable.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/json/JsonTable.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.execution.datasources.v2.json -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.FileStatus diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcScan.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcScan.scala index 072ab26774e52..87fc6e19254ea 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcScan.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcScan.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.execution.datasources.v2.orc -import scala.collection.JavaConverters.mapAsScalaMapConverter +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcScanBuilder.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcScanBuilder.scala index c7c720ffeead4..2ab1a2a1e210c 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcScanBuilder.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcScanBuilder.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.datasources.v2.orc -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.SparkSession import org.apache.spark.sql.connector.expressions.aggregate.Aggregation diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcTable.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcTable.scala index 9cc4525badd81..ca4b83b3c58f1 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcTable.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcTable.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.execution.datasources.v2.orc -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.FileStatus diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetScan.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetScan.scala index 0e77b419ff524..f7fa4b7cb82a6 100644 --- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetScan.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetScan.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.execution.datasources.v2.parquet -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetScanBuilder.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetScanBuilder.scala index 7987372abdec1..ae98f3b10301f 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetScanBuilder.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetScanBuilder.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.datasources.v2.parquet -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.SparkSession import org.apache.spark.sql.catalyst.util.RebaseDateTime.RebaseSpec diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetTable.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetTable.scala index c8bb4b2eb221e..e593ad7d0c0cd 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetTable.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetTable.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.execution.datasources.v2.parquet -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.FileStatus diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/text/TextScan.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/text/TextScan.scala index c7b0fec34b4e4..761d88b5431fa 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/text/TextScan.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/text/TextScan.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.execution.datasources.v2.text -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/debug/package.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/debug/package.scala index b4bb6aba15d85..0b97c8ebab815 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/debug/package.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/debug/package.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.execution import java.util.Collections -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.spark.broadcast.Broadcast diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/objects.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/objects.scala index 0ae699240ca08..10878c8106902 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/objects.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/objects.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.execution import java.io.{ByteArrayOutputStream, 
DataOutputStream} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.language.existentials import org.apache.spark.api.java.function.MapFunction diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ApplyInPandasWithStatePythonRunner.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ApplyInPandasWithStatePythonRunner.scala index 9fde181407966..e5763b9f230b1 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ApplyInPandasWithStatePythonRunner.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ApplyInPandasWithStatePythonRunner.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.python import java.io._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.arrow.vector.VectorSchemaRoot import org.apache.arrow.vector.ipc.ArrowStreamWriter diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ApplyInPandasWithStateWriter.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ApplyInPandasWithStateWriter.scala index 60a228ddd73a6..6c9c7e1179b60 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ApplyInPandasWithStateWriter.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ApplyInPandasWithStateWriter.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.python -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.arrow.vector.{FieldVector, VectorSchemaRoot} import org.apache.arrow.vector.ipc.ArrowStreamWriter diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ArrowEvalPythonExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ArrowEvalPythonExec.scala index bd91da3bc0fe8..a6937b7bf89ca 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ArrowEvalPythonExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ArrowEvalPythonExec.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.python -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.{JobArtifactSet, TaskContext} import org.apache.spark.api.python.ChainedPythonFunctions diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ArrowEvalPythonUDTFExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ArrowEvalPythonUDTFExec.scala index 9d5bac0c6007a..9e210bf5241bb 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ArrowEvalPythonUDTFExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/ArrowEvalPythonUDTFExec.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.python -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.{JobArtifactSet, TaskContext} import org.apache.spark.sql.catalyst.InternalRow diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/BatchEvalPythonExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/BatchEvalPythonExec.scala index a0e7789b28173..04d71c6c0153c 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/BatchEvalPythonExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/BatchEvalPythonExec.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.python -import scala.collection.JavaConverters._ 
+import scala.jdk.CollectionConverters._ import net.razorvine.pickle.{Pickler, Unpickler} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/BatchEvalPythonUDTFExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/BatchEvalPythonUDTFExec.scala index 342e072319453..4ad9bfe3ec580 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/BatchEvalPythonUDTFExec.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/BatchEvalPythonUDTFExec.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.python import java.io.DataOutputStream -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import net.razorvine.pickle.Unpickler diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/EvaluatePython.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/EvaluatePython.scala index 6664acf957263..48db2560da944 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/EvaluatePython.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/EvaluatePython.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.execution.python import java.io.OutputStream import java.nio.charset.StandardCharsets -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import net.razorvine.pickle.{IObjectPickler, Opcodes, Pickler} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/MapInBatchEvaluatorFactory.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/MapInBatchEvaluatorFactory.scala index 6f501e1411ac0..316c543ea807e 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/MapInBatchEvaluatorFactory.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/MapInBatchEvaluatorFactory.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.python -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.{PartitionEvaluator, PartitionEvaluatorFactory, TaskContext} import org.apache.spark.api.python.ChainedPythonFunctions diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/PandasGroupUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/PandasGroupUtils.scala index 078876664062d..3f66e61c99663 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/PandasGroupUtils.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/PandasGroupUtils.scala @@ -17,8 +17,8 @@ package org.apache.spark.sql.execution.python -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.spark.TaskContext import org.apache.spark.api.python.BasePythonRunner diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/PythonArrowOutput.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/PythonArrowOutput.scala index 8f99325e4e08c..e7d4aa9f04607 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/PythonArrowOutput.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/PythonArrowOutput.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.python import java.io.DataInputStream import java.util.concurrent.atomic.AtomicBoolean -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.arrow.vector.VectorSchemaRoot import 
org.apache.arrow.vector.ipc.ArrowStreamReader diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/UserDefinedPythonFunction.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/UserDefinedPythonFunction.scala index 2beefedc9467e..6400f7978afd9 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/UserDefinedPythonFunction.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/UserDefinedPythonFunction.scala @@ -23,8 +23,8 @@ import java.nio.channels.SelectionKey import java.nio.charset.StandardCharsets import java.util.HashMap -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import net.razorvine.pickle.Pickler diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/WindowInPandasEvaluatorFactory.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/WindowInPandasEvaluatorFactory.scala index cf9f8c22ea082..d5577c9e22f3b 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/python/WindowInPandasEvaluatorFactory.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/python/WindowInPandasEvaluatorFactory.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql.execution.python import java.io.File -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.spark.{JobArtifactSet, PartitionEvaluator, PartitionEvaluatorFactory, SparkEnv, TaskContext} import org.apache.spark.api.python.{ChainedPythonFunctions, PythonEvalType} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/r/ArrowRRunner.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/r/ArrowRRunner.scala index 69faa4c8fecc7..819fd1bd297f8 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/r/ArrowRRunner.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/r/ArrowRRunner.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.execution.r import java.io._ import java.nio.channels.Channels -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.arrow.vector.VectorSchemaRoot import org.apache.arrow.vector.ipc.{ArrowStreamReader, ArrowStreamWriter} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/AsyncCommitLog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/AsyncCommitLog.scala index 495f2f7ac0bff..686e0bb868865 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/AsyncCommitLog.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/AsyncCommitLog.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.execution.streaming import java.io.OutputStream import java.util.concurrent.{CompletableFuture, ConcurrentLinkedDeque, ThreadPoolExecutor} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.SparkSession import org.apache.spark.sql.errors.QueryExecutionErrors diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/AsyncOffsetSeqLog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/AsyncOffsetSeqLog.scala index 240a64ec7b0e4..a89a9132e03e0 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/AsyncOffsetSeqLog.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/AsyncOffsetSeqLog.scala @@ -21,7 +21,7 @@ 
import java.io.OutputStream import java.util.concurrent._ import java.util.concurrent.atomic.AtomicLong -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.SparkSession import org.apache.spark.sql.errors.QueryExecutionErrors diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/HDFSMetadataLog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/HDFSMetadataLog.scala index 2b0172bb9555c..c7b0695700917 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/HDFSMetadataLog.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/HDFSMetadataLog.scala @@ -21,7 +21,7 @@ import java.io._ import java.nio.charset.StandardCharsets import java.util.{Collections, LinkedHashMap => JLinkedHashMap} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.commons.io.IOUtils diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/ProgressReporter.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/ProgressReporter.scala index c0bd94e7d6cd5..702d3ea09b643 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/ProgressReporter.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/ProgressReporter.scala @@ -20,8 +20,8 @@ package org.apache.spark.sql.execution.streaming import java.text.SimpleDateFormat import java.util.{Date, Optional, UUID} -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark.internal.Logging import org.apache.spark.sql.{Row, SparkSession} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StreamExecution.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StreamExecution.scala index 936de41af76ff..d9cab2470c9d1 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StreamExecution.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StreamExecution.scala @@ -24,8 +24,8 @@ import java.util.concurrent.{CountDownLatch, ExecutionException, TimeoutExceptio import java.util.concurrent.atomic.AtomicReference import java.util.concurrent.locks.ReentrantLock -import scala.collection.JavaConverters._ import scala.collection.mutable.{Map => MutableMap} +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import com.google.common.util.concurrent.UncheckedExecutionException diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/HDFSBackedStateStoreMap.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/HDFSBackedStateStoreMap.scala index 9a0b6a733d051..a79a856b0ec5f 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/HDFSBackedStateStoreMap.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/HDFSBackedStateStoreMap.scala @@ -17,8 +17,8 @@ package org.apache.spark.sql.execution.streaming.state -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.expressions.{BoundReference, UnsafeProjection, UnsafeRow} import org.apache.spark.sql.types.{StructField, StructType} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/HDFSBackedStateStoreProvider.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/HDFSBackedStateStoreProvider.scala index 66832400aa147..d2cd5d3de36a1 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/HDFSBackedStateStoreProvider.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/HDFSBackedStateStoreProvider.scala @@ -22,8 +22,8 @@ import java.util import java.util.Locale import java.util.concurrent.atomic.LongAdder -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import com.google.common.io.ByteStreams diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/RocksDBFileManager.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/RocksDBFileManager.scala index ed04472b62c64..05fd845accbab 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/RocksDBFileManager.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/RocksDBFileManager.scala @@ -24,8 +24,8 @@ import java.util.UUID import java.util.concurrent.ConcurrentHashMap import java.util.zip.{ZipEntry, ZipOutputStream} -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import com.fasterxml.jackson.annotation.JsonInclude.Include import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/statefulOperators.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/statefulOperators.scala index 67d89c7f40fb2..77645378f22ab 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/statefulOperators.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/statefulOperators.scala @@ -20,8 +20,8 @@ package org.apache.spark.sql.execution.streaming import java.util.UUID import java.util.concurrent.TimeUnit._ -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkContext import org.apache.spark.rdd.RDD diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListener.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListener.scala index 3fafc399dd828..7e31d40e51196 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListener.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListener.scala @@ -20,8 +20,8 @@ import java.util.{Arrays, Date, NoSuchElementException} import java.util.concurrent.ConcurrentHashMap import java.util.concurrent.atomic.AtomicInteger -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.spark.{JobExecutionStatus, SparkConf} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala index 2a7ed263c7480..5a66d63898c53 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql import java.util.Collections -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.runtime.universe.TypeTag import 
scala.util.Try diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/H2Dialect.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/H2Dialect.scala index c246b50f4e156..43888d0ffedda 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/H2Dialect.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/H2Dialect.scala @@ -22,7 +22,7 @@ import java.util import java.util.Locale import java.util.concurrent.ConcurrentHashMap -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.commons.lang3.StringUtils diff --git a/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala b/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala index c7b3473bbb62d..e572cb268749c 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.streaming import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.Evolving import org.apache.spark.internal.Logging diff --git a/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala b/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala index b7bafeca5461e..c53ecc56bb6ea 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.streaming import java.util.Locale import java.util.concurrent.TimeoutException -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path diff --git a/sql/core/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryException.scala b/sql/core/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryException.scala index b90dfe7238cda..738c79769bb88 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryException.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryException.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.streaming -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.{SparkThrowable, SparkThrowableHelper} import org.apache.spark.annotation.Evolving diff --git a/sql/core/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryManager.scala b/sql/core/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryManager.scala index 20254dec3d874..225f9d1f19a55 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryManager.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryManager.scala @@ -21,8 +21,8 @@ import java.util.UUID import java.util.concurrent.{TimeoutException, TimeUnit} import javax.annotation.concurrent.GuardedBy -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.Evolving import org.apache.spark.internal.Logging diff --git a/sql/core/src/main/scala/org/apache/spark/sql/streaming/progress.scala b/sql/core/src/main/scala/org/apache/spark/sql/streaming/progress.scala index 0219ecd52badd..117daea7d1971 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/streaming/progress.scala +++ 
b/sql/core/src/main/scala/org/apache/spark/sql/streaming/progress.scala @@ -21,7 +21,7 @@ import java.{util => ju} import java.lang.{Long => JLong} import java.util.UUID -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} diff --git a/sql/core/src/main/scala/org/apache/spark/sql/streaming/ui/StreamingQueryStatisticsPage.scala b/sql/core/src/main/scala/org/apache/spark/sql/streaming/ui/StreamingQueryStatisticsPage.scala index e13ac4e487c95..8b02ddf3cdff2 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/streaming/ui/StreamingQueryStatisticsPage.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/streaming/ui/StreamingQueryStatisticsPage.scala @@ -22,7 +22,7 @@ import java.lang.{Long => JLong} import java.util.Locale import javax.servlet.http.HttpServletRequest -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.xml.{Node, NodeBuffer, Unparsed} import org.apache.spark.internal.Logging diff --git a/sql/core/src/main/scala/org/apache/spark/sql/util/QueryExecutionListener.scala b/sql/core/src/main/scala/org/apache/spark/sql/util/QueryExecutionListener.scala index 45482f12f3c7f..309853abbd39a 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/util/QueryExecutionListener.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/util/QueryExecutionListener.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.util -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.annotation.DeveloperApi import org.apache.spark.internal.Logging diff --git a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SQLExecutionUIDataSerializer.scala b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SQLExecutionUIDataSerializer.scala index afac9b67ed7ac..3e27cdfcf154a 100644 --- a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SQLExecutionUIDataSerializer.scala +++ b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SQLExecutionUIDataSerializer.scala @@ -19,7 +19,7 @@ package org.apache.spark.status.protobuf.sql import java.util.Date -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.execution.ui.SQLExecutionUIData import org.apache.spark.status.protobuf.{JobExecutionStatusSerializer, ProtobufSerDe, StoreTypes} diff --git a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SparkPlanGraphWrapperSerializer.scala b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SparkPlanGraphWrapperSerializer.scala index 1788d7a532ca3..172f7f5418f2f 100644 --- a/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SparkPlanGraphWrapperSerializer.scala +++ b/sql/core/src/main/scala/org/apache/spark/status/protobuf/sql/SparkPlanGraphWrapperSerializer.scala @@ -17,7 +17,7 @@ package org.apache.spark.status.protobuf.sql -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.execution.ui.{SparkPlanGraphClusterWrapper, SparkPlanGraphEdge, SparkPlanGraphNode, SparkPlanGraphNodeWrapper, SparkPlanGraphWrapper} import org.apache.spark.status.protobuf.ProtobufSerDe diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java index 1342e5cc38db5..45906793f9285 100644 --- 
a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java +++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java @@ -24,7 +24,7 @@ import java.math.BigInteger; import java.math.BigDecimal; -import scala.collection.JavaConverters; +import scala.jdk.javaapi.CollectionConverters; import scala.collection.Seq; import com.google.common.collect.ImmutableMap; @@ -210,7 +210,7 @@ void validateDataFrameWithBeans(Bean bean, Dataset df) { Seq outputBuffer = (Seq) first.getJavaMap(2).get("hello"); Assert.assertArrayEquals( bean.getC().get("hello"), - Ints.toArray(JavaConverters.seqAsJavaListConverter(outputBuffer).asJava())); + Ints.toArray(CollectionConverters.asJava(outputBuffer))); Seq d = first.getAs(3); Assert.assertEquals(bean.getD().size(), d.length()); for (int i = 0; i < d.length(); i++) { diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaHigherOrderFunctionsSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaHigherOrderFunctionsSuite.java index de0acc295b5ea..148574e10c67b 100644 --- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaHigherOrderFunctionsSuite.java +++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaHigherOrderFunctionsSuite.java @@ -22,7 +22,7 @@ import java.util.List; import static java.util.stream.Collectors.toList; -import static scala.collection.JavaConverters.mapAsScalaMap; +import static scala.jdk.javaapi.CollectionConverters.asScala; import org.junit.After; import org.junit.Assert; @@ -222,7 +222,7 @@ public void testTransformKeys() throws Exception { checkAnswer( mapDf.select(transform_keys(col("x"), (k, v) -> k.plus(v))), toRows( - mapAsScalaMap(new HashMap() {{ + asScala(new HashMap() {{ put(2, 1); put(4, 2); }}), @@ -236,7 +236,7 @@ public void testTransformValues() throws Exception { checkAnswer( mapDf.select(transform_values(col("x"), (k, v) -> k.plus(v))), toRows( - mapAsScalaMap(new HashMap() {{ + asScala(new HashMap() {{ put(1, 2); put(2, 4); }}), @@ -250,7 +250,7 @@ public void testMapFilter() throws Exception { checkAnswer( mapDf.select(map_filter(col("x"), (k, v) -> lit(false))), toRows( - mapAsScalaMap(new HashMap()), + asScala(new HashMap()), null ) ); @@ -261,7 +261,7 @@ public void testMapZipWith() throws Exception { checkAnswer( mapDf.select(map_zip_with(col("x"), col("x"), (k, v1, v2) -> lit(false))), toRows( - mapAsScalaMap(new HashMap() {{ + asScala(new HashMap() {{ put(1, false); put(2, false); }}), diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ComplexTypesSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ComplexTypesSuite.scala index ff6a80dd968be..5dbb4b598b8c0 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/ComplexTypesSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/ComplexTypesSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.expressions.CreateNamedStruct import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CsvFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CsvFunctionsSuite.scala index 77b9b38085263..3b5f0947f7011 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/CsvFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/CsvFunctionsSuite.scala @@ -21,7 +21,7 @@ import
java.text.SimpleDateFormat import java.time.{Duration, LocalDateTime, Period} import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkException import org.apache.spark.sql.functions._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala index c4a1eaf5af408..280eb095dc753 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper import org.apache.spark.sql.functions._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameJoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameJoinSuite.scala index 56e9520fdaba3..a923d1fd8c91d 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameJoinSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameJoinSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.plans.{Inner, InnerLike, LeftOuter, RightOuter} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameNaFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameNaFunctionsSuite.scala index fb4bad5b9fa9e..ea0e9a3c79512 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameNaFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameNaFunctionsSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkUnsupportedOperationException import org.apache.spark.sql.internal.SQLConf diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWriterV2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWriterV2Suite.scala index f58f798b8dec9..a12f9790a7c24 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWriterV2Suite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWriterV2Suite.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql import java.sql.Timestamp -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.scalatest.BeforeAndAfter diff --git a/sql/core/src/test/scala/org/apache/spark/sql/IntegratedUDFTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/IntegratedUDFTestUtils.scala index 86804ceed4f88..ef4606b70caed 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/IntegratedUDFTestUtils.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/IntegratedUDFTestUtils.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql import java.nio.charset.StandardCharsets import java.nio.file.{Files, Paths} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Try import org.scalatest.Assertions._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala index 14f1fb27906a1..45b80d81e7360 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala +++ 
b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql import java.util.Locale -import scala.collection.JavaConverters._ import scala.collection.mutable.ListBuffer +import scala.jdk.CollectionConverters._ import org.mockito.Mockito._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala index a76e102fe913f..b7b34129a959f 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala @@ -21,7 +21,7 @@ import java.text.SimpleDateFormat import java.time.{Duration, LocalDateTime, Period} import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.commons.lang3.exception.ExceptionUtils diff --git a/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala index c2c333a998b43..b5ae9c7f35200 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql import java.util.TimeZone -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.scalatest.Assertions diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala index d76193a17c533..0e7c294e3fa23 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path import org.apache.logging.log4j.Level diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala index 93369299fda3b..ef7bdc2b079ef 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/TPCDSQueryTestSuite.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql import java.io.File import java.nio.file.{Files, Paths} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.{SparkConf, SparkContext} import org.apache.spark.sql.catalyst.util.{fileToString, resourceToString, stringToFile} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala index ca60e3212e686..39ecf14bed1f2 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.connector -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkException import org.apache.spark.sql.AnalysisException diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala index 8e0580bf644fe..2855d7b06f52e 100644 --- 
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala @@ -21,8 +21,8 @@ import java.sql.Timestamp import java.time.{Duration, LocalDate, Period} import java.util.Locale -import scala.collection.JavaConverters._ import scala.concurrent.duration.MICROSECONDS +import scala.jdk.CollectionConverters._ import org.apache.spark.{SparkException, SparkUnsupportedOperationException} import org.apache.spark.sql._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/SimpleWritableDataSource.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/SimpleWritableDataSource.scala index 64c893ed74fdb..235a8ff3869bd 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/SimpleWritableDataSource.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/SimpleWritableDataSource.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.connector import java.io.{BufferedReader, InputStreamReader, IOException} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{FileSystem, Path} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/TestV2SessionCatalogBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/TestV2SessionCatalogBase.scala index 46586c622db79..1396ef82925af 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/TestV2SessionCatalogBase.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/TestV2SessionCatalogBase.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.connector import java.util.concurrent.ConcurrentHashMap import java.util.concurrent.atomic.AtomicBoolean -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.catalyst.catalog.CatalogTableType import org.apache.spark.sql.connector.catalog.{CatalogV2Util, Column, DelegatingCatalogExtension, Identifier, Table, TableCatalog, V1Table} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/V1WriteFallbackSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/V1WriteFallbackSuite.scala index a111242be6465..377e1e2b084c6 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/V1WriteFallbackSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/V1WriteFallbackSuite.scala @@ -17,8 +17,8 @@ package org.apache.spark.sql.connector -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.scalatest.BeforeAndAfter diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ExternalAppendOnlyUnsafeRowArraySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ExternalAppendOnlyUnsafeRowArraySuite.scala index f140d867481ed..b9e7367d54df1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ExternalAppendOnlyUnsafeRowArraySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ExternalAppendOnlyUnsafeRowArraySuite.scala @@ -20,8 +20,8 @@ package org.apache.spark.sql.execution import java.util import java.util.ConcurrentModificationException -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.spark._ import org.apache.spark.memory.MemoryTestingUtils diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala index 73d0bd19bf671..6563a7698e2e9 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest, Row} import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala index d6a3b74ee4c68..d3f3118664daa 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkThrowable import org.apache.spark.internal.config.ConfigEntry diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/DataSourceReadBenchmark.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/DataSourceReadBenchmark.scala index d439ac4324327..62bd85d7568de 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/DataSourceReadBenchmark.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/DataSourceReadBenchmark.scala @@ -18,7 +18,7 @@ package org.apache.spark.sql.execution.benchmark import java.io.File -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Random import org.apache.parquet.column.ParquetProperties diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/CachedBatchSerializerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/CachedBatchSerializerSuite.scala index 645dc870d27bd..604ee1f7ace9c 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/CachedBatchSerializerSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/CachedBatchSerializerSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.columnar -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkConf import org.apache.spark.rdd.RDD diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CreateNamespaceSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CreateNamespaceSuiteBase.scala index e90469c29a59f..7c5df7f51d6b9 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CreateNamespaceSuiteBase.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CreateNamespaceSuiteBase.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.command -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index f0a5f14bc4099..0692a794f5d5b 100644 --- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -1495,7 +1495,7 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase { } test("SPARK-18009 calling toLocalIterator on commands") { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val df = sql("show databases") val rows: Seq[Row] = df.toLocalIterator().asScala.toSeq assert(rows.length > 0) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableSetLocationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableSetLocationSuite.scala index 0ac35452b60a5..13f6b8d5b33bb 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableSetLocationSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableSetLocationSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.command.v2 -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.connector.catalog.{Identifier, Table} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileMetadataStructSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileMetadataStructSuite.scala index c782104f4f9bf..9f2d202299557 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileMetadataStructSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileMetadataStructSuite.scala @@ -118,7 +118,7 @@ class FileMetadataStructSuite extends QueryTest with SharedSparkSession { Seq("json", "parquet").foreach { testFileFormat => test(s"metadata struct ($testFileFormat): " + testName) { withTempDir { dir => - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ // 1. 
create df0 and df1 and save under /data/f0 and /data/f1 val df0 = spark.createDataFrame(data0.asJava, fileSchema) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/binaryfile/BinaryFileFormatSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/binaryfile/BinaryFileFormatSuite.scala index 0b6fdef4f74ef..3dbb48903c5a2 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/binaryfile/BinaryFileFormatSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/binaryfile/BinaryFileFormatSuite.scala @@ -21,7 +21,7 @@ import java.io.{File, IOException} import java.nio.file.{Files, StandardOpenOption} import java.sql.Timestamp -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.io.{ByteStreams, Closeables} import org.apache.hadoop.fs.{FileStatus, FileSystem, GlobFilter, Path} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala index 3bd45ca0dcdb3..7655635fc62f5 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala @@ -26,7 +26,7 @@ import java.time.{Duration, Instant, LocalDate, LocalDateTime, Period} import java.util.Locale import java.util.zip.GZIPOutputStream -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Properties import com.univocity.parsers.common.TextParsingException @@ -1852,7 +1852,7 @@ abstract class CSVSuite test("SPARK-24244: Select a subset of all columns") { withTempPath { path => - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val schema = new StructType() .add("f1", IntegerType).add("f2", IntegerType).add("f3", IntegerType) .add("f4", IntegerType).add("f5", IntegerType).add("f6", IntegerType) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuiteBase.scala index f42b17abf31bc..4c3c2b4de3443 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuiteBase.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuiteBase.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.execution.datasources.jdbc.connection import java.sql.{Driver, DriverManager} import javax.security.auth.login.Configuration -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.scalatest.BeforeAndAfterEach diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcFilterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcFilterSuite.scala index dfd3283234330..9aa15d4f515ab 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcFilterSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcFilterSuite.scala @@ -22,7 +22,7 @@ import java.nio.charset.StandardCharsets import java.sql.{Date, Timestamp} import java.time.{Duration, LocalDateTime, Period} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ 
import org.apache.hadoop.hive.ql.io.sarg.{PredicateLeaf, SearchArgument, SearchArgumentImpl} import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory.newBuilder diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcV1FilterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcV1FilterSuite.scala index b5043dbfce606..6ca9f6cd525fa 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcV1FilterSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcV1FilterSuite.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.execution.datasources.orc -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentImpl diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetAvroCompatibilitySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetAvroCompatibilitySuite.scala index 0d7731251861f..25414bfc299a7 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetAvroCompatibilitySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetAvroCompatibilitySuite.scala @@ -21,7 +21,7 @@ import java.nio.ByteBuffer import java.nio.charset.StandardCharsets import java.util.{List => JList, Map => JMap} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.avro.Schema import org.apache.avro.generic.IndexedRecord diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCompatibilityTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCompatibilityTest.scala index a43a856d16ac7..f8a9a2dba9e96 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCompatibilityTest.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCompatibilityTest.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.datasources.parquet -import scala.collection.JavaConverters.{collectionAsScalaIterableConverter, mapAsJavaMapConverter, seqAsJavaListConverter} +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{Path, PathFilter} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCompressionCodecPrecedenceSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCompressionCodecPrecedenceSuite.scala index ac0aad16f1eba..7e1a9becd23f7 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCompressionCodecPrecedenceSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCompressionCodecPrecedenceSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.datasources.parquet import java.io.File -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path import org.apache.parquet.hadoop.ParquetOutputFormat diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetEncodingSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetEncodingSuite.scala index 07e2849ce6f19..65b067e915bde 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetEncodingSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetEncodingSuite.scala @@ -20,7 +20,7 @@ import java.math.BigDecimal import java.sql.{Date, Timestamp} import java.time.{Duration, Period} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path import org.apache.parquet.column.{Encoding, ParquetProperties} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFieldIdIOSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFieldIdIOSuite.scala index 5e01d3f447c96..f5e854a79979b 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFieldIdIOSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFieldIdIOSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.datasources.parquet -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkException import org.apache.spark.sql.{QueryTest, Row} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFieldIdSchemaSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFieldIdSchemaSuite.scala index b3babdd3a0cff..d3aad531ed7a1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFieldIdSchemaSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFieldIdSchemaSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.execution.datasources.parquet -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.parquet.schema.{MessageType, MessageTypeParser} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala index 4f8a9e3971664..95a45e52bfb49 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala @@ -20,8 +20,8 @@ package org.apache.spark.sql.execution.datasources.parquet import java.time.LocalDateTime import java.util.Locale -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.reflect.runtime.universe.TypeTag diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexSuite.scala index 27c2a2148fd3b..24dd98da82580 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexSuite.scala @@ -18,7 +18,7 @@ package org.apache.spark.sql.execution.datasources.parquet import java.io.File -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import 
org.apache.hadoop.fs.Path import org.apache.parquet.column.ParquetProperties._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetTest.scala index 1558e9733523d..d108803d43e44 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetTest.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetTest.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.datasources.parquet import java.io.File -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.reflect.runtime.universe.TypeTag diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorizedSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorizedSuite.scala index 123992a1a86b4..91a8a38928224 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorizedSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorizedSuite.scala @@ -620,7 +620,7 @@ class ParquetVectorizedSuite extends QueryTest with ParquetTest with SharedSpark readStore: PageReadStore, expected: Seq[Row], batchSize: Int = NUM_VALUES): Unit = { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val recordReader = new VectorizedParquetRecordReader( DateTimeUtils.getZoneId("EST"), diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/FileTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/FileTableSuite.scala index 8f001e0e4d668..4160516deece5 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/FileTableSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/FileTableSuite.scala @@ -16,7 +16,7 @@ */ package org.apache.spark.sql.execution.datasources.v2 -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.FileStatus diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala index 8f5996438e202..c43658eacabc2 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala @@ -21,7 +21,7 @@ import java.net.URI import java.util import java.util.Collections -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path import org.scalatest.BeforeAndAfter diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlSuite.scala index b03d65219d75c..7c0e8c6785fad 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlSuite.scala @@ -21,9 +21,9 @@ import java.nio.file.{Files, Path, Paths} import java.sql.{Date, Timestamp} import java.util.TimeZone -import scala.collection.JavaConverters._ import scala.collection.mutable 
import scala.io.Source +import scala.jdk.CollectionConverters._ import org.apache.commons.lang3.exception.ExceptionUtils import org.apache.hadoop.conf.Configuration diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/parsers/StaxXmlParserUtilsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/parsers/StaxXmlParserUtilsSuite.scala index c239cfb92f8da..2642b13293f4d 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/parsers/StaxXmlParserUtilsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/parsers/StaxXmlParserUtilsSuite.scala @@ -20,7 +20,7 @@ import java.io.StringReader import javax.xml.stream.{XMLInputFactory, XMLStreamConstants} import javax.xml.stream.events.Attribute -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.scalatest.BeforeAndAfterAll diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/python/BatchEvalPythonExecSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/python/BatchEvalPythonExecSuite.scala index 9c8031bbad6b4..39142cf256017 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/python/BatchEvalPythonExecSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/python/BatchEvalPythonExecSuite.scala @@ -17,8 +17,8 @@ package org.apache.spark.sql.execution.python -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.spark.api.python.{PythonEvalType, SimplePythonFunction} import org.apache.spark.sql.catalyst.FunctionIdentifier diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/RateStreamProviderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/RateStreamProviderSuite.scala index 730611f8f35b3..363189a3b3661 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/RateStreamProviderSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/RateStreamProviderSuite.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql.execution.streaming.sources import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.spark.{SparkException, SparkRuntimeException} import org.apache.spark.sql.Row diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/TextSocketStreamSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/TextSocketStreamSuite.scala index 0ebaa47fb8c18..03388724ef585 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/TextSocketStreamSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/TextSocketStreamSuite.scala @@ -25,7 +25,7 @@ import java.sql.Timestamp import java.util.concurrent.LinkedBlockingQueue import java.util.concurrent.TimeUnit._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.internal.Logging import org.apache.spark.sql.AnalysisException diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/MemoryStateStore.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/MemoryStateStore.scala index e52ccd0fcfc39..4f11f1edd7e0e 100644 --- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/MemoryStateStore.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/MemoryStateStore.scala @@ -22,7 +22,7 @@ import java.util.concurrent.ConcurrentHashMap import org.apache.spark.sql.catalyst.expressions.UnsafeRow class MemoryStateStore extends StateStore() { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ private val map = new ConcurrentHashMap[UnsafeRow, UnsafeRow] override def iterator(): Iterator[UnsafeRowPair] = { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBStateStoreIntegrationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBStateStoreIntegrationSuite.scala index 2eb7d98bea828..3cd29d40f82b7 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBStateStoreIntegrationSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBStateStoreIntegrationSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.streaming.state import java.io.File -import scala.collection.JavaConverters +import scala.jdk.CollectionConverters import org.scalatest.time.{Minute, Span} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreSuite.scala index e6d2f63267fda..067a1a32a9b8e 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/StateStoreSuite.scala @@ -23,8 +23,8 @@ import java.util import java.util.UUID import java.util.concurrent.atomic.AtomicBoolean -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.util.Random import org.apache.commons.io.FileUtils diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/UISeleniumSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/UISeleniumSuite.scala index 30124a5988e2f..f80c456b4bac1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/UISeleniumSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/UISeleniumSuite.scala @@ -17,8 +17,8 @@ package org.apache.spark.sql.execution.ui -import scala.collection.JavaConverters._ import scala.concurrent.duration.DurationInt +import scala.jdk.CollectionConverters._ import org.apache.commons.text.StringEscapeUtils.escapeJava import org.apache.commons.text.translate.EntityArrays._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/vectorized/ColumnarBatchSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/vectorized/ColumnarBatchSuite.scala index b34af3c8c633c..ec95257b0ee4c 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/vectorized/ColumnarBatchSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/vectorized/ColumnarBatchSuite.scala @@ -24,8 +24,8 @@ import java.sql.{Date, Timestamp} import java.time.LocalDateTime import java.util -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.language.implicitConversions import scala.util.Random diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala index 1b7a92b719915..7babb8587ad71 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala @@ -22,7 +22,7 @@ import java.sql.{Date, DriverManager, Timestamp} import java.time.{Instant, LocalDate, LocalDateTime} import java.util.{Calendar, GregorianCalendar, Properties, TimeZone} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Random import org.mockito.ArgumentMatchers._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala index 486255f1a3b19..b913a397eba83 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala @@ -21,8 +21,8 @@ import java.sql.{Date, DriverManager, Timestamp} import java.time.{Instant, LocalDate} import java.util.Properties -import scala.collection.JavaConverters.propertiesAsScalaMapConverter import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.scalatest.BeforeAndAfter diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/ExternalCommandRunnerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/ExternalCommandRunnerSuite.scala index 55fb3eb8ade35..cbf63cd30545d 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/sources/ExternalCommandRunnerSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/ExternalCommandRunnerSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.sources -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.sql.{QueryTest, Row} import org.apache.spark.sql.connector.ExternalCommandRunner diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala index 75f440caefc34..5b24703344773 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala @@ -21,8 +21,8 @@ import java.io.{File, IOException} import java.nio.file.Files import java.util.Locale -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.{FileStatus, Path, RawLocalFileSystem} import org.apache.hadoop.mapreduce.JobContext diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/ReportSinkMetricsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/ReportSinkMetricsSuite.scala index f8537d4edc588..c417693b5d7a6 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/ReportSinkMetricsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/ReportSinkMetricsSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.streaming -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.internal.Logging import org.apache.spark.sql._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala index 861e4e83ceff0..a0f385343ec23 100644 --- 
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala @@ -268,7 +268,7 @@ class StreamingQueryListenerSuite extends StreamTest with BeforeAndAfter { test("QueryProgressEvent serialization") { def testSerialization(event: QueryProgressEvent): Unit = { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val json = JsonProtocol.sparkEventToJsonString(event) val newEvent = JsonProtocol.sparkEventFromJson(json).asInstanceOf[QueryProgressEvent] assert(newEvent.progress.json === event.progress.json) // json as a proxy for equality diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryStatusAndProgressSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryStatusAndProgressSuite.scala index 28134ec9d9144..4fa49064faa73 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryStatusAndProgressSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryStatusAndProgressSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.streaming import java.util.UUID -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.json4s.jackson.JsonMethods._ import org.scalatest.concurrent.Eventually diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/EpochCoordinatorSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/EpochCoordinatorSuite.scala index 0e1c9b9c4ba46..02c0dbb5ab98e 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/EpochCoordinatorSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/EpochCoordinatorSuite.scala @@ -292,7 +292,7 @@ class EpochCoordinatorSuite val exceptionCaptor = ArgumentCaptor.forClass(classOf[Throwable]); verify(query, atLeastOnce()).stopInNewThread(exceptionCaptor.capture()) - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val throwable = exceptionCaptor.getAllValues.asScala.find(_.getMessage === msg) assert(throwable != null, "Stream stopped with an exception but expected message is missing") } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala index 17348fe2dcbb5..66e07e6e9c1b9 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala @@ -21,7 +21,7 @@ import java.io.File import java.util.{Locale, Random} import java.util.concurrent.ConcurrentLinkedQueue -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala index dd55fcfe42cac..fab5c49da8e41 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala @@ -22,8 +22,8 @@ import java.net.URI import java.nio.file.Files import java.util.{Locale, UUID} -import scala.collection.JavaConverters._ import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import 
scala.language.implicitConversions import scala.util.control.NonFatal diff --git a/sql/core/src/test/scala/org/apache/spark/status/protobuf/sql/KVStoreProtobufSerializerSuite.scala b/sql/core/src/test/scala/org/apache/spark/status/protobuf/sql/KVStoreProtobufSerializerSuite.scala index 16f5897d2b89e..3f3a6925409cd 100644 --- a/sql/core/src/test/scala/org/apache/spark/status/protobuf/sql/KVStoreProtobufSerializerSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/status/protobuf/sql/KVStoreProtobufSerializerSuite.scala @@ -20,7 +20,7 @@ package org.apache.spark.status.protobuf.sql import java.lang.{Long => JLong} import java.util.UUID -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkFunSuite import org.apache.spark.sql.Row diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/RowSetUtils.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/RowSetUtils.scala index 9625021f392cb..94046adca0d8f 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/RowSetUtils.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/RowSetUtils.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.hive.thriftserver import java.nio.ByteBuffer -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.language.implicitConversions import org.apache.hive.service.rpc.thrift._ diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala index a9b46739fa665..155861a94525d 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala @@ -21,7 +21,7 @@ import java.security.PrivilegedExceptionAction import java.util.{Collections, Map => JMap} import java.util.concurrent.{Executors, RejectedExecutionException, TimeUnit} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.hadoop.hive.shims.Utils diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala index a455d50f12bc5..5dd8caf3f221d 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.hive.thriftserver import java.util.regex.Pattern -import scala.collection.JavaConverters.seqAsJavaListConverter +import scala.jdk.CollectionConverters._ import org.apache.hadoop.hive.ql.security.authorization.plugin.{HiveOperationType, HivePrivilegeObject} import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType @@ -87,7 +87,7 @@ private[hive] class SparkGetColumnsOperation( }.toMap if (isAuthV2Enabled) { - val privObjs = seqAsJavaListConverter(getPrivObjs(db2Tabs)).asJava + val privObjs = getPrivObjs(db2Tabs).asJava authorizeMetaGets(HiveOperationType.GET_COLUMNS, privObjs, 
cmdStr) } diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetFunctionsOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetFunctionsOperation.scala index 352528e26e318..b35d506a43b24 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetFunctionsOperation.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetFunctionsOperation.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.hive.thriftserver import java.sql.DatabaseMetaData -import scala.collection.JavaConverters.seqAsJavaListConverter +import scala.jdk.CollectionConverters._ import org.apache.hadoop.hive.ql.security.authorization.plugin.{HiveOperationType, HivePrivilegeObjectUtils} import org.apache.hive.service.cli._ diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTablesOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTablesOperation.scala index bddf5eb82012f..06c402e0d096b 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTablesOperation.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTablesOperation.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.hive.thriftserver import java.util.{List => JList} import java.util.regex.Pattern -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.hive.ql.security.authorization.plugin.{HiveOperationType, HivePrivilegeObjectUtils} import org.apache.hive.service.cli._ diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala index 2302482bb7267..ceba74e94bf5e 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala @@ -22,7 +22,7 @@ import java.nio.charset.StandardCharsets.UTF_8 import java.util.{ArrayList => JArrayList, List => JList, Locale} import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import jline.console.ConsoleReader import jline.console.completer.{ArgumentCompleter, Completer, StringsCompleter} diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala index 2522389b5aa1c..7262bc22dc429 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala @@ -21,7 +21,7 @@ import java.io.IOException import java.util.{List => JList} import javax.security.auth.login.LoginException -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.hadoop.hive.conf.HiveConf diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala index
8ae65a5860824..4834956f478d0 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.hive.thriftserver import java.util.{ArrayList => JArrayList, Arrays, List => JList} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.commons.lang3.exception.ExceptionUtils import org.apache.hadoop.hive.metastore.api.{FieldSchema, Schema} diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/ui/HiveThriftServer2Listener.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/ui/HiveThriftServer2Listener.scala index 5ccc72c7782a9..31f30f3d97ea4 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/ui/HiveThriftServer2Listener.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/ui/HiveThriftServer2Listener.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql.hive.thriftserver.ui import java.util.concurrent.ConcurrentHashMap -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.hive.service.server.HiveServer2 diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala index d3a9a9f08411c..4588cf39d1f29 100644 --- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala +++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala @@ -23,10 +23,10 @@ import java.sql.Timestamp import java.util.Date import java.util.concurrent.CountDownLatch -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer import scala.concurrent.Promise import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.hive.cli.CliSessionState import org.apache.hadoop.hive.conf.HiveConf.ConfVars diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveSessionImplSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveSessionImplSuite.scala index 7c42348f74453..6f89fbfb788bc 100644 --- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveSessionImplSuite.scala +++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveSessionImplSuite.scala @@ -18,8 +18,8 @@ package org.apache.spark.sql.hive.thriftserver import java.lang.reflect.InvocationTargetException -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.hadoop.hive.conf.HiveConf import org.apache.hive.service.cli.OperationHandle diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala index b421a94a06c40..db72218366f7d 100644 --- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala +++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala @@ -23,12 +23,12 @@ import 
java.nio.charset.StandardCharsets import java.sql.{Date, DriverManager, SQLException, Statement} import java.util.{Locale, UUID} -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.ArrayBuffer import scala.concurrent.{ExecutionContext, Future, Promise} import scala.concurrent.duration._ import scala.io.Source +import scala.jdk.CollectionConverters._ import scala.util.Try import com.google.common.io.Files diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SharedThriftServer.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SharedThriftServer.scala index 104c5797d433a..b82b914e45b5b 100644 --- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SharedThriftServer.scala +++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SharedThriftServer.scala @@ -21,8 +21,8 @@ import java.io.File import java.sql.{DriverManager, ResultSet, Statement} import java.util -import scala.collection.JavaConverters._ import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import scala.util.Try import org.apache.hadoop.hive.conf.HiveConf.ConfVars diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala index 707532f2bad38..849f6f15189c6 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.hive import java.lang.reflect.{ParameterizedType, Type, WildcardType} import java.time.Duration -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.{io => hadoopIo} import org.apache.hadoop.hive.common.`type`.{HiveChar, HiveDecimal, HiveIntervalDayTime, HiveIntervalYearMonth, HiveVarchar} diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala index 0b9c55f6083a5..34683da55ccc9 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.hive import java.rmi.server.UID -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import com.google.common.base.Objects import org.apache.avro.Schema diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala index a01246520f32b..4f1993b30aab0 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala @@ -22,8 +22,8 @@ import java.net.URL import java.util.Locale import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ import scala.collection.mutable.HashMap +import scala.jdk.CollectionConverters._ import scala.util.Try import org.apache.hadoop.conf.Configuration diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala index cd1d236dd36c9..bf54ebad8f71b 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala @@ -19,7 +19,7 @@ package 
org.apache.spark.sql.hive import java.util.Properties -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{Path, PathFilter} diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala index f3d7d7e66a537..6d76c4aca9433 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala @@ -24,9 +24,9 @@ import java.nio.charset.StandardCharsets.UTF_8 import java.util.{HashMap => JHashMap, Locale, Map => JMap} import java.util.concurrent.TimeUnit._ -import scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala index 338498d3d48a5..5320c6fc2e60f 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala @@ -23,7 +23,7 @@ import java.net.URI import java.util.{ArrayList => JArrayList, List => JList, Locale, Map => JMap, Set => JSet} import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.hadoop.fs.Path diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveFileFormat.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveFileFormat.scala index 29734c4de3441..fa21be0c65146 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveFileFormat.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveFileFormat.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.hive.execution -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.{FileStatus, Path} import org.apache.hadoop.hive.ql.exec.Utilities diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveOptions.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveOptions.scala index 7b51618772edc..044a515fbdc9d 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveOptions.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveOptions.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.hive.execution import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.hive.ql.plan.TableDesc import org.apache.orc.OrcConf.COMPRESS diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveScriptTransformationExec.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveScriptTransformationExec.scala index beb5583d81a60..0fcc43e5c3919 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveScriptTransformationExec.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveScriptTransformationExec.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.hive.execution import java.io._ import java.util.Properties -import scala.collection.JavaConverters._ 
+import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.hadoop.conf.Configuration diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScanExec.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScanExec.scala index 63e7d28c42ad9..d2d84e095110c 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScanExec.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveTableScanExec.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.hive.execution -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.hive.ql.io.{DelegateSymlinkTextInputFormat, SymlinkTextInputFormat} diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFEvaluators.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFEvaluators.scala index 094f8ba7a0f89..2ebe724f399a7 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFEvaluators.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFEvaluators.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.hive -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.hive.ql.exec.{FunctionRegistry, UDF} import org.apache.hadoop.hive.ql.udf.{UDFType => HiveUDFType} diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala index 01684f52ab82b..9452209e3184e 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql.hive import java.nio.ByteBuffer -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import org.apache.hadoop.hive.ql.exec._ import org.apache.hadoop.hive.ql.udf.generic._ diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala index a9314397dcf67..3cf6fcbc65ace 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileFormat.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql.hive.orc import java.util.Properties -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import com.esotericsoftware.kryo.Kryo diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala index 398f4d2efbbf4..6669fbdfbdea1 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.hive import java.io.File import java.util.Locale -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.fs.Path import org.apache.orc.OrcConf.COMPRESS diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveUserDefinedTypeSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveUserDefinedTypeSuite.scala index d0af8dc7ae49f..d058aa0f4be59 100644 --- 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveUserDefinedTypeSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveUserDefinedTypeSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.hive -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.hive.ql.udf.generic.GenericUDF import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, StandardListObjectInspector} diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDAFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDAFSuite.scala index cbf0c4f6de570..4aadd710b42a7 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDAFSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDAFSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.hive.execution -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.hive.ql.udf.UDAFPercentile import org.apache.hadoop.hive.ql.udf.generic.{AbstractGenericUDAFResolver, GenericUDAFEvaluator, GenericUDAFMax} diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala index d12ebae0f5fc7..1eff35c63a8a7 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala @@ -20,7 +20,7 @@ package org.apache.spark.sql.hive.execution import java.io.{DataInput, DataOutput, File, PrintWriter} import java.util.{ArrayList, Arrays, Properties} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.hive.ql.exec.UDF diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/PruningSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/PruningSuite.scala index 407e121df5780..04acf76255ad7 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/PruningSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/PruningSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.hive.execution -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.scalatest.BeforeAndAfter diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala index 9284b35fb3e35..a9861dafda723 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala @@ -20,8 +20,8 @@ package org.apache.spark.sql.hive.test import java.io.File import java.net.URI -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala index 51141212f9ecb..bac5d50bb692e 100644 --- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala +++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala @@ -20,7 +20,7 @@ package org.apache.spark.streaming.api.java 
import java.{lang => jl} import java.util.{List => JList} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.language.implicitConversions import scala.reflect.ClassTag diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala index 650d8c7f4d1a7..e5798825aad07 100644 --- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala +++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala @@ -21,7 +21,7 @@ import java.{lang => jl} import java.lang.{Iterable => JIterable} import java.util.{List => JList} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.language.implicitConversions import scala.reflect.ClassTag diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala index be23478605fdb..1ce6495db1573 100644 --- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala +++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala @@ -22,7 +22,7 @@ import java.lang.{Boolean => JBoolean} import java.util.{List => JList, Map => JMap} import scala.annotation.varargs -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.hadoop.conf.Configuration diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingListenerWrapper.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingListenerWrapper.scala index 7555e2f57fccb..af884914ad8f1 100644 --- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingListenerWrapper.scala +++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingListenerWrapper.scala @@ -17,7 +17,7 @@ package org.apache.spark.streaming.api.java -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.streaming.scheduler._ diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala index 7a8e3f1d2ccf4..8a936ba3ac739 100644 --- a/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala +++ b/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala @@ -21,7 +21,7 @@ import java.io.{ObjectInputStream, ObjectOutputStream} import java.lang.reflect.Proxy import java.util.{ArrayList => JArrayList, List => JList} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.language.existentials import py4j.Py4JException diff --git a/streaming/src/main/scala/org/apache/spark/streaming/receiver/Receiver.scala b/streaming/src/main/scala/org/apache/spark/streaming/receiver/Receiver.scala index dde074c7e324b..b5230aceb6bba 100644 --- a/streaming/src/main/scala/org/apache/spark/streaming/receiver/Receiver.scala +++ b/streaming/src/main/scala/org/apache/spark/streaming/receiver/Receiver.scala @@ -19,8 +19,8 @@ package org.apache.spark.streaming.receiver import java.nio.ByteBuffer -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import 
org.apache.spark.annotation.DeveloperApi import org.apache.spark.storage.StorageLevel diff --git a/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverSupervisorImpl.scala b/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverSupervisorImpl.scala index 948b5f6ecf1f9..daaf7ed7eb2b6 100644 --- a/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverSupervisorImpl.scala +++ b/streaming/src/main/scala/org/apache/spark/streaming/receiver/ReceiverSupervisorImpl.scala @@ -21,8 +21,8 @@ import java.nio.ByteBuffer import java.util.concurrent.ConcurrentLinkedQueue import java.util.concurrent.atomic.AtomicLong -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import com.google.common.base.Throwables import org.apache.hadoop.conf.Configuration diff --git a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobScheduler.scala b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobScheduler.scala index a6d8dccd7e722..13d10ec28987e 100644 --- a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobScheduler.scala +++ b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/JobScheduler.scala @@ -19,7 +19,7 @@ package org.apache.spark.streaming.scheduler import java.util.concurrent.{ConcurrentHashMap, TimeUnit} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Failure import org.apache.spark.ExecutorAllocationClient diff --git a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceivedBlockTracker.scala b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceivedBlockTracker.scala index 4ac1c62822e7a..a14112e473823 100644 --- a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceivedBlockTracker.scala +++ b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/ReceivedBlockTracker.scala @@ -19,8 +19,8 @@ package org.apache.spark.streaming.scheduler import java.nio.ByteBuffer -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.hadoop.conf.Configuration diff --git a/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingJobProgressListener.scala b/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingJobProgressListener.scala index da351ecf1889c..9abf018584ce2 100644 --- a/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingJobProgressListener.scala +++ b/streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingJobProgressListener.scala @@ -20,8 +20,8 @@ package org.apache.spark.streaming.ui import java.util.{LinkedHashMap, Map => JMap, Properties} import java.util.concurrent.ConcurrentLinkedQueue -import scala.collection.JavaConverters._ import scala.collection.mutable.{HashMap, Queue} +import scala.jdk.CollectionConverters._ import org.apache.spark.scheduler._ import org.apache.spark.streaming.{StreamingConf, StreamingContext, Time} diff --git a/streaming/src/main/scala/org/apache/spark/streaming/util/BatchedWriteAheadLog.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/BatchedWriteAheadLog.scala index d0a3517af70b9..a73cde1f99aa9 100644 --- a/streaming/src/main/scala/org/apache/spark/streaming/util/BatchedWriteAheadLog.scala +++ b/streaming/src/main/scala/org/apache/spark/streaming/util/BatchedWriteAheadLog.scala @@ -22,10 +22,10 @@ import java.util.{Iterator => JIterator} 
import java.util.concurrent.LinkedBlockingQueue import java.util.concurrent.atomic.AtomicBoolean -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer import scala.concurrent.Promise import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import scala.util.control.NonFatal import org.apache.spark.SparkConf diff --git a/streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLog.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLog.scala index d1f9dfb791355..87d3cb6faa734 100644 --- a/streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLog.scala +++ b/streaming/src/main/scala/org/apache/spark/streaming/util/FileBasedWriteAheadLog.scala @@ -21,11 +21,11 @@ import java.nio.ByteBuffer import java.util.{Iterator => JIterator} import java.util.concurrent.RejectedExecutionException -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer import scala.collection.parallel.ExecutionContextTaskSupport import scala.collection.parallel.immutable.ParVector import scala.concurrent.{Await, ExecutionContext, Future} +import scala.jdk.CollectionConverters._ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path diff --git a/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala index 238ef1e2367a0..6757cb74ec610 100644 --- a/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala +++ b/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala @@ -21,7 +21,7 @@ import java.io._ import java.nio.charset.StandardCharsets import java.util.concurrent.ConcurrentLinkedQueue -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import com.google.common.io.Files diff --git a/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala index 174c3ca379363..e00651fe91028 100644 --- a/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala +++ b/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala @@ -23,8 +23,8 @@ import java.nio.charset.StandardCharsets import java.util.concurrent._ import java.util.concurrent.atomic.AtomicInteger -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import com.google.common.io.Files import org.apache.commons.io.IOUtils diff --git a/streaming/src/test/scala/org/apache/spark/streaming/JavaTestUtils.scala b/streaming/src/test/scala/org/apache/spark/streaming/JavaTestUtils.scala index 42a5aaba5178f..6b84bbadbd000 100644 --- a/streaming/src/test/scala/org/apache/spark/streaming/JavaTestUtils.scala +++ b/streaming/src/test/scala/org/apache/spark/streaming/JavaTestUtils.scala @@ -19,7 +19,7 @@ package org.apache.spark.streaming import java.util.{List => JList} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.apache.spark.api.java.JavaRDDLike diff --git a/streaming/src/test/scala/org/apache/spark/streaming/MapWithStateSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/MapWithStateSuite.scala index 3ffaa62bd75ac..09048c8f70b08 100644 --- a/streaming/src/test/scala/org/apache/spark/streaming/MapWithStateSuite.scala +++ 
b/streaming/src/test/scala/org/apache/spark/streaming/MapWithStateSuite.scala @@ -20,7 +20,7 @@ package org.apache.spark.streaming import java.io.File import java.util.concurrent.ConcurrentLinkedQueue -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.scalatest.PrivateMethodTester._ diff --git a/streaming/src/test/scala/org/apache/spark/streaming/MasterFailureTest.scala b/streaming/src/test/scala/org/apache/spark/streaming/MasterFailureTest.scala index 4b26212ec307a..c1af91ae63e76 100644 --- a/streaming/src/test/scala/org/apache/spark/streaming/MasterFailureTest.scala +++ b/streaming/src/test/scala/org/apache/spark/streaming/MasterFailureTest.scala @@ -22,8 +22,8 @@ import java.nio.charset.StandardCharsets import java.util.UUID import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.util.Random diff --git a/streaming/src/test/scala/org/apache/spark/streaming/StreamingListenerSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/StreamingListenerSuite.scala index 08121a38dc5d5..2ec4b5b20fafe 100644 --- a/streaming/src/test/scala/org/apache/spark/streaming/StreamingListenerSuite.scala +++ b/streaming/src/test/scala/org/apache/spark/streaming/StreamingListenerSuite.scala @@ -19,12 +19,12 @@ package org.apache.spark.streaming import java.util.concurrent.ConcurrentLinkedQueue -import scala.collection.JavaConverters._ import scala.collection.mutable.HashMap // scalastyle:off executioncontextglobal import scala.concurrent.ExecutionContext.Implicits.global // scalastyle:on executioncontextglobal import scala.concurrent.Future +import scala.jdk.CollectionConverters._ import org.mockito.Mockito.{mock, reset, verifyNoMoreInteractions} import org.scalatest.concurrent.Eventually._ diff --git a/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala b/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala index 5836478937dd3..55e4a4692c982 100644 --- a/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala +++ b/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala @@ -20,7 +20,7 @@ package org.apache.spark.streaming import java.io.{IOException, ObjectInputStream} import java.util.concurrent.{ConcurrentLinkedQueue, TimeUnit} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import org.scalatest.BeforeAndAfterEach diff --git a/streaming/src/test/scala/org/apache/spark/streaming/api/java/JavaStreamingListenerWrapperSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/api/java/JavaStreamingListenerWrapperSuite.scala index cfd4323531bdb..3124f150893c3 100644 --- a/streaming/src/test/scala/org/apache/spark/streaming/api/java/JavaStreamingListenerWrapperSuite.scala +++ b/streaming/src/test/scala/org/apache/spark/streaming/api/java/JavaStreamingListenerWrapperSuite.scala @@ -17,7 +17,7 @@ package org.apache.spark.streaming.api.java -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import org.apache.spark.SparkFunSuite import org.apache.spark.streaming.Time diff --git a/streaming/src/test/scala/org/apache/spark/streaming/receiver/BlockGeneratorSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/receiver/BlockGeneratorSuite.scala index 31456b0b95b18..a0b162a0e5706 100644 --- 
a/streaming/src/test/scala/org/apache/spark/streaming/receiver/BlockGeneratorSuite.scala +++ b/streaming/src/test/scala/org/apache/spark/streaming/receiver/BlockGeneratorSuite.scala @@ -19,8 +19,8 @@ package org.apache.spark.streaming.receiver import java.util.concurrent.ConcurrentLinkedQueue -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.jdk.CollectionConverters._ import org.scalatest.BeforeAndAfter import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits} diff --git a/streaming/src/test/scala/org/apache/spark/streaming/util/RecurringTimerSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/util/RecurringTimerSuite.scala index a11dac4d41caa..b9b172219e364 100644 --- a/streaming/src/test/scala/org/apache/spark/streaming/util/RecurringTimerSuite.scala +++ b/streaming/src/test/scala/org/apache/spark/streaming/util/RecurringTimerSuite.scala @@ -19,8 +19,8 @@ package org.apache.spark.streaming.util import java.util.concurrent.ConcurrentLinkedQueue -import scala.collection.JavaConverters._ import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import org.scalatest.PrivateMethodTester import org.scalatest.concurrent.Eventually._ diff --git a/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala index 4d23230e2ea4e..97f37cb1f03da 100644 --- a/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala +++ b/streaming/src/test/scala/org/apache/spark/streaming/util/WriteAheadLogSuite.scala @@ -22,10 +22,10 @@ import java.util.{Iterator => JIterator} import java.util.concurrent.{CountDownLatch, RejectedExecutionException, ThreadPoolExecutor, TimeUnit} import java.util.concurrent.atomic.AtomicInteger -import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer import scala.concurrent._ import scala.concurrent.duration._ +import scala.jdk.CollectionConverters._ import scala.language.implicitConversions import org.apache.commons.lang3.{JavaVersion, SystemUtils}
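// Illustrative sketch (not part of this patch, assuming Scala 2.13): the
// method-style converters such as seqAsJavaListConverter live only in the
// deprecated scala.collection.JavaConverters object, while
// scala.jdk.CollectionConverters exposes implicit extension classes
// (SeqHasAsJava, MapHasAsScala, ...) instead. Files that imported converters
// by name, e.g. SparkGetColumnsOperation above, therefore switch to the
// wildcard import and plain extension syntax.
import scala.jdk.CollectionConverters._

object ExtensionSyntaxSketch {
  // before: seqAsJavaListConverter(objs).asJava
  // after:  objs.asJava
  def toJavaList(objs: Seq[String]): java.util.List[String] = objs.asJava
}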