@@ -19,8 +19,8 @@ package org.apache.spark
 
 import java.net.URL
 
-import scala.collection.JavaConverters._
 import scala.collection.immutable.Map
+import scala.jdk.CollectionConverters._
 
 import com.fasterxml.jackson.annotation.JsonIgnore
 import com.fasterxml.jackson.core.`type`.TypeReference
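Every hunk in this pull request is the same mechanical change: the deprecated `scala.collection.JavaConverters._` wildcard import is swapped for `scala.jdk.CollectionConverters._`, its replacement since Scala 2.13. Call sites stay untouched because both imports bring the same `asScala`/`asJava` extension methods into scope. As a minimal sketch (not code from this PR; the object name is made up), the following compiles identically under either import:

```scala
import scala.jdk.CollectionConverters._  // Scala 2.13+ home of the Java/Scala collection converters

object ConvertersSketch {
  def main(args: Array[String]): Unit = {
    // Java -> Scala: .asScala behaves exactly as it did under scala.collection.JavaConverters
    val javaList = java.util.Arrays.asList("a", "b", "c")
    val scalaSeq: Seq[String] = javaList.asScala.toSeq

    // Scala -> Java: .asJava is likewise unchanged
    val javaMap: java.util.Map[String, Int] = Map("a" -> 1, "b" -> 2).asJava

    println(scalaSeq)
    println(javaMap)
  }
}
```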
@@ -22,7 +22,7 @@ import java.sql.{SQLException, SQLFeatureNotSupportedException}
 import java.time.DateTimeException
 import java.util.ConcurrentModificationException
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 class SparkException(
     message: String,
@@ -17,7 +17,7 @@
 
 package org.apache.spark
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.util.JsonUtils.toJsonString
 import org.apache.spark.util.SparkClassUtils
@@ -17,7 +17,7 @@
 
 package org.apache.spark.internal
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.logging.log4j.{Level, LogManager}
 import org.apache.logging.log4j.core.{Filter, LifeCycle, LogEvent, Logger => Log4jLogger, LoggerContext}
@@ -20,7 +20,7 @@ package org.apache.spark.sql.avro
 import java.math.BigDecimal
 import java.nio.ByteBuffer
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.avro.{LogicalTypes, Schema, SchemaBuilder}
 import org.apache.avro.Conversions.DecimalConversion
@@ -19,7 +19,7 @@ package org.apache.spark.sql.avro
 
 import java.io.{IOException, OutputStream}
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.avro.Schema
 import org.apache.avro.generic.GenericRecord
@@ -19,7 +19,7 @@ package org.apache.spark.sql.avro
 
 import java.nio.ByteBuffer
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.avro.Conversions.DecimalConversion
 import org.apache.avro.LogicalTypes
@@ -19,7 +19,7 @@ package org.apache.spark.sql.avro
 import java.io.{FileNotFoundException, IOException}
 import java.util.Locale
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.avro.Schema
 import org.apache.avro.file.{DataFileReader, FileReader}
@@ -19,8 +19,8 @@ package org.apache.spark.sql.avro
 
 import java.util.Locale
 
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 
 import org.apache.avro.{LogicalTypes, Schema, SchemaBuilder}
 import org.apache.avro.LogicalTypes.{Date, Decimal, LocalTimestampMicros, LocalTimestampMillis, TimestampMicros, TimestampMillis}
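Where a file also imports `scala.collection.mutable` (as in the hunk above), that import stays and only the converters line moves: the new `scala.jdk` import sorts after `scala.collection.mutable`, and `.asScala` on JDK collections still produces mutable wrappers. A hypothetical illustration (not from this PR):

```scala
import scala.collection.mutable
import scala.jdk.CollectionConverters._

object MutableViewSketch {
  def main(args: Array[String]): Unit = {
    val javaList = new java.util.ArrayList[String]()
    javaList.add("x")

    // .asScala on a java.util.List yields a mutable.Buffer view backed by the original Java list
    val buf: mutable.Buffer[String] = javaList.asScala
    buf += "y"

    println(javaList)  // prints [x, y]: mutation through the Scala view is visible on the Java side
  }
}
```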
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.avro
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.sql.Column
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql.v2.avro
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.hadoop.fs.Path
 
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql.v2.avro
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.hadoop.fs.FileStatus
 
@@ -19,7 +19,7 @@ package org.apache.spark.sql.avro
 
 import java.io.ByteArrayOutputStream
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.avro.{Schema, SchemaBuilder}
 import org.apache.avro.generic.{GenericDatumWriter, GenericRecord, GenericRecordBuilder}
@@ -23,7 +23,7 @@ import java.nio.file.{Files, Paths, StandardCopyOption}
 import java.sql.{Date, Timestamp}
 import java.util.UUID
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.avro.{AvroTypeException, Schema, SchemaBuilder}
 import org.apache.avro.Schema.{Field, Type}
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.connect.proto
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 
 import java.util.Locale
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.connect.proto.{NAReplace, Relation}
 import org.apache.spark.connect.proto.Expression.{Literal => GLiteral}
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 
 import java.util.Properties
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.annotation.Stable
 import org.apache.spark.connect.proto.Parse.ParseFormat
@@ -20,7 +20,7 @@ package org.apache.spark.sql
 import java.{lang => jl, util => ju}
 import java.io.ByteArrayInputStream
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.SparkException
 import org.apache.spark.connect.proto.{Relation, StatSampleBy}
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 
 import java.util.{Locale, Properties}
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.annotation.Stable
 import org.apache.spark.connect.proto
@@ -17,8 +17,8 @@
 
 package org.apache.spark.sql
 
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.connect.proto
@@ -18,8 +18,8 @@ package org.apache.spark.sql
 
 import java.util.{Collections, Locale}
 
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import scala.reflect.ClassTag
 import scala.reflect.runtime.universe.TypeTag
 import scala.util.control.NonFatal
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 
 import java.util.Arrays
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.language.existentials
 
 import org.apache.spark.api.java.function._
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 
 import java.util.Locale
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.connect.proto
 
@@ -21,7 +21,7 @@ import java.net.URI
 import java.util.concurrent.TimeUnit._
 import java.util.concurrent.atomic.{AtomicLong, AtomicReference}
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.reflect.runtime.universe.TypeTag
 
 import com.google.common.cache.{CacheBuilder, CacheLoader}
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.avro
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.sql.Column
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.catalog
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.sql.{AnalysisException, DataFrame, Dataset}
 import org.apache.spark.sql.types.StructType
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql.expressions
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.reflect.runtime.universe.TypeTag
 import scala.util.control.NonFatal
 
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.expressions
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.annotation.Stable
 import org.apache.spark.connect.proto
@@ -18,7 +18,7 @@ package org.apache.spark.sql
 
 import java.util.Collections
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.reflect.runtime.universe.{typeTag, TypeTag}
 
 import org.apache.spark.connect.proto
@@ -20,7 +20,7 @@ import java.io.FileNotFoundException
 import java.nio.file.{Files, NoSuchFileException, Paths}
 import java.util.Collections
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.util.control.NonFatal
 
 import org.apache.spark.annotation.Experimental
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.streaming
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.annotation.Evolving
 import org.apache.spark.connect.proto.Read.DataSource
@@ -20,7 +20,7 @@ package org.apache.spark.sql.streaming
 import java.util.Locale
 import java.util.concurrent.TimeoutException
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import com.google.protobuf.ByteString
 
@@ -20,7 +20,7 @@ package org.apache.spark.sql.streaming
 import java.util.UUID
 import java.util.concurrent.TimeoutException
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.annotation.Evolving
 import org.apache.spark.connect.proto.Command
@@ -20,7 +20,7 @@ package org.apache.spark.sql.streaming
 import java.util.UUID
 import java.util.concurrent.{ConcurrentHashMap, ConcurrentMap}
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import com.google.protobuf.ByteString
 
@@ -21,7 +21,7 @@ import java.{util => ju}
 import java.lang.{Long => JLong}
 import java.util.UUID
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.util.control.NonFatal
 
 import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
@@ -21,8 +21,8 @@ import java.nio.file.Files
 import java.time.DateTimeException
 import java.util.Properties
 
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 
 import org.apache.commons.io.FileUtils
 import org.apache.commons.io.output.TeeOutputStream
@@ -18,7 +18,7 @@ package org.apache.spark.sql
 
 import java.io.ByteArrayOutputStream
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.sql.{functions => fn}
 import org.apache.spark.sql.test.ConnectFunSuite
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.sql.internal.SqlApiConf
 import org.apache.spark.sql.test.{QueryTest, SQLHelper}
@@ -18,7 +18,7 @@ package org.apache.spark.sql
 
 import java.util.Collections
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.sql.avro.{functions => avroFn}
 import org.apache.spark.sql.functions._
@@ -20,8 +20,8 @@ import java.nio.file.{Files, Path}
 import java.util.{Collections, Properties}
 import java.util.concurrent.atomic.AtomicLong
 
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 import scala.util.{Failure, Success, Try}
 
 import com.google.protobuf.util.JsonFormat
@@ -21,7 +21,7 @@ import java.util.{Iterator => JIterator}
 import java.util.Arrays
 import java.util.concurrent.atomic.AtomicLong
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.api.java.function._
 import org.apache.spark.sql.api.java.UDF2
@@ -20,7 +20,7 @@ import java.io.InputStream
 import java.nio.file.{Files, Path, Paths}
 import java.util.concurrent.TimeUnit
 
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 
 import com.google.protobuf.ByteString
 import io.grpc.{ManagedChannel, Server}
@@ -19,8 +19,8 @@ package org.apache.spark.sql.connect.client
 import java.util.UUID
 import java.util.concurrent.TimeUnit
 
-import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.jdk.CollectionConverters._
 
 import io.grpc.{CallOptions, Channel, ClientCall, ClientInterceptor, MethodDescriptor, Server, Status, StatusRuntimeException}
 import io.grpc.netty.NettyServerBuilder