Skip to content

Commit

Permalink
Clean-up FileWrite
Browse files — browse the repository at this point in the history
  • Author: Steve Vaughan Jr, committed Apr 2, 2024
1 parent 0ba1ea7 commit 420b107
Showing 1 changed file with 1 addition and 4 deletions.
Original file line number | Diff line number | Diff line change
Expand Up @@ -18,7 +18,6 @@ package org.apache.spark.sql.execution.datasources.v2

import java.util.UUID

import scala.collection.mutable
import scala.jdk.CollectionConverters._

import org.apache.hadoop.conf.Configuration
Expand All @@ -33,7 +32,7 @@ import org.apache.spark.sql.catalyst.types.DataTypeUtils.toAttributes
import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateTimeUtils}
import org.apache.spark.sql.connector.write.{BatchWrite, LogicalWriteInfo, Write}
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.execution.datasources.{BasicWriteJobStatsTracker, DataSource, OutputWriterFactory, PartitionTaskStats, WriteJobDescription}
import org.apache.spark.sql.execution.datasources.{BasicWriteJobStatsTracker, DataSource, OutputWriterFactory, WriteJobDescription}
import org.apache.spark.sql.execution.metric.SQLMetric
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{DataType, StructType}
Expand Down Expand Up @@ -125,8 +124,6 @@ trait FileWrite extends Write {
prepareWrite(sparkSession.sessionState.conf, job, caseInsensitiveOptions, schema)
val allColumns = toAttributes(schema)
val metrics: Map[String, SQLMetric] = BasicWriteJobStatsTracker.metrics
val partitionMetrics: mutable.Map[String, PartitionTaskStats]
= BasicWriteJobStatsTracker.partitionMetrics
val serializableHadoopConf = new SerializableConfiguration(hadoopConf)
val statsTracker = new BasicWriteJobStatsTracker(serializableHadoopConf, metrics)
// TODO: after partitioning is supported in V2:
Expand Down

0 comments on commit 420b107

Please sign in to comment.