Commit 7cf6a6f

[SPARK-31257][SPARK-33561][SQL][FOLLOWUP] Fix Scala 2.13 compilation

### What changes were proposed in this pull request?

This PR is a follow-up to fix Scala 2.13 compilation.

### Why are the changes needed?

To support Scala 2.13 in Apache Spark 3.1.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Pass the GitHub Action Scala 2.13 compilation job.

Closes #30502 from dongjoon-hyun/SPARK-31257.

Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent 9643eab · commit 7cf6a6f

3 files changed (+4, -3 lines)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala

Lines changed: 2 additions & 1 deletion

@@ -2946,7 +2946,8 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
     val location = visitLocationSpecList(ctx.locationSpec())
     val (cleanedOptions, newLocation) = cleanTableOptions(ctx, options, location)
     val comment = visitCommentSpecList(ctx.commentSpec())
-    val serdeInfo = getSerdeInfo(ctx.rowFormat.asScala, ctx.createFileFormat.asScala, ctx)
+    val serdeInfo =
+      getSerdeInfo(ctx.rowFormat.asScala.toSeq, ctx.createFileFormat.asScala.toSeq, ctx)
     (partTransforms, partCols, bucketSpec, cleanedProperties, cleanedOptions, newLocation, comment,
       serdeInfo)
   }
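
For context on this hunk (and the identical `.asScala.toSeq` change in SparkSqlParser.scala below): in Scala 2.13 the default `Seq` alias points at `scala.collection.immutable.Seq`, so the `mutable.Buffer` that `.asScala` produces from an ANTLR `java.util.List` no longer conforms to a `Seq` parameter without an explicit `.toSeq`. A minimal standalone sketch of the pattern follows; the object, method, and data are illustrative rather than Spark code, and the import uses the 2.13 name `scala.jdk.CollectionConverters`:

```scala
import java.util.{ArrayList => JArrayList}
import scala.jdk.CollectionConverters._

object AsScalaToSeqSketch {
  // Under Scala 2.13 the unqualified `Seq` is scala.collection.immutable.Seq,
  // so the mutable.Buffer that `.asScala` yields for a java.util.List is not
  // accepted here without conversion.
  def describe(names: Seq[String]): String = names.mkString(", ")

  def main(args: Array[String]): Unit = {
    val javaList = new JArrayList[String]()
    javaList.add("rowFormat")
    javaList.add("createFileFormat")

    // `.asScala` wraps the Java list in a mutable.Buffer view; `.toSeq` copies
    // it into an immutable Seq, satisfying the 2.13 signature (2.12 accepted
    // the Buffer directly because `Seq` was collection.Seq there).
    println(describe(javaList.asScala.toSeq))
  }
}
```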

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala

Lines changed: 1 addition & 1 deletion

@@ -440,7 +440,7 @@ class SparkSqlAstBuilder extends AstBuilder {
     val location = visitLocationSpecList(ctx.locationSpec())
     // TODO: Do not skip serde check for CREATE TABLE LIKE.
     val serdeInfo = getSerdeInfo(
-      ctx.rowFormat.asScala, ctx.createFileFormat.asScala, ctx, skipCheck = true)
+      ctx.rowFormat.asScala.toSeq, ctx.createFileFormat.asScala.toSeq, ctx, skipCheck = true)
     if (provider.isDefined && serdeInfo.isDefined) {
       operationNotAllowed(s"CREATE TABLE LIKE ... USING ... ${serdeInfo.get.describe}", ctx)
     }

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala

Lines changed: 1 addition & 1 deletion

@@ -114,7 +114,7 @@ class V2SessionCatalog(catalog: SessionCatalog)
   private def toOptions(properties: Map[String, String]): Map[String, String] = {
     properties.filterKeys(_.startsWith(TableCatalog.OPTION_PREFIX)).map {
       case (key, value) => key.drop(TableCatalog.OPTION_PREFIX.length) -> value
-    }
+    }.toMap
   }

   override def alterTable(
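
For context on the V2SessionCatalog hunk: in Scala 2.13, `filterKeys` on a strict `Map` is deprecated and returns a `MapView`, and mapping over that view yields another view rather than a `Map`, so the declared `Map[String, String]` return type no longer type-checks without a final `.toMap`. A minimal sketch of the same shape, with a hypothetical `OptionPrefix` standing in for `TableCatalog.OPTION_PREFIX`:

```scala
object ToOptionsSketch {
  private val OptionPrefix = "option."  // stand-in for TableCatalog.OPTION_PREFIX

  // Scala 2.13: `filterKeys` returns a MapView (and is deprecated on strict
  // Maps), and the subsequent `map` produces a View[(String, String)];
  // `.toMap` materializes it back into a strict Map so the declared return
  // type is satisfied. On 2.12 the chain already produced a Map, so `.toMap`
  // is just a harmless copy there.
  def toOptions(properties: Map[String, String]): Map[String, String] = {
    properties.filterKeys(_.startsWith(OptionPrefix)).map {
      case (key, value) => key.drop(OptionPrefix.length) -> value
    }.toMap
  }

  def main(args: Array[String]): Unit = {
    val props = Map("option.path" -> "/tmp/t", "owner" -> "spark")
    println(toOptions(props))  // prints: Map(path -> /tmp/t)
  }
}
```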
