
Commit 45d40d9

petermaxlee authored and cloud-fan committed
[SPARK-17150][SQL] Support SQL generation for inline tables
## What changes were proposed in this pull request?

This patch adds support for SQL generation for inline tables. With this, it would be possible to create a view that depends on inline tables.

## How was this patch tested?

Added a test case in LogicalPlanToSQLSuite.

Author: petermaxlee <[email protected]>

Closes apache#14709 from petermaxlee/SPARK-17150.
1 parent ba1737c commit 45d40d9
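The sketch below illustrates the capability described above. It is not part of the patch: it assumes a Hive-enabled SparkSession bound to `spark`, and the view name `inline_view` is made up for the example.

// Hedged sketch, not part of this commit. The view definition below contains an
// inline table (a LocalRelation in the analyzed plan); per the PR description,
// this change is what makes it possible to create a view that depends on one,
// since the view's SQL text can now be regenerated.
spark.sql(
  """CREATE VIEW inline_view AS
    |SELECT * FROM VALUES ("one", 1), ("two", 2), ("three", null) AS data(a, b)
  """.stripMargin)

spark.table("inline_view").show()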

4 files changed: +30 -2 lines changed


sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LocalRelation.scala

Lines changed: 15 additions & 2 deletions
@@ -18,8 +18,9 @@
 package org.apache.spark.sql.catalyst.plans.logical
 
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.catalyst.{analysis, CatalystTypeConverters, InternalRow}
-import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
+import org.apache.spark.sql.catalyst.analysis
+import org.apache.spark.sql.catalyst.expressions.{Attribute, Literal}
 import org.apache.spark.sql.types.{StructField, StructType}
 
 object LocalRelation {
@@ -75,4 +76,16 @@ case class LocalRelation(output: Seq[Attribute], data: Seq[InternalRow] = Nil)
 
   override lazy val statistics =
     Statistics(sizeInBytes = output.map(_.dataType.defaultSize).sum * data.length)
+
+  def toSQL(inlineTableName: String): String = {
+    require(data.nonEmpty)
+    val types = output.map(_.dataType)
+    val rows = data.map { row =>
+      val cells = row.toSeq(types).zip(types).map { case (v, tpe) => Literal(v, tpe).sql }
+      cells.mkString("(", ", ", ")")
+    }
+    "VALUES " + rows.mkString(", ") +
+      " AS " + inlineTableName +
+      output.map(_.name).mkString("(", ", ", ")")
+  }
 }
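For orientation, here is a hedged usage sketch of the new `toSQL` method. It is not from the patch; the attribute names and the alias `tbl` are illustrative.

// Illustrative sketch: build a small LocalRelation by hand and render it as an
// inline table. The alias passed to toSQL is chosen by the caller; in this
// commit, SQLBuilder passes a generated subquery name.
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
import org.apache.spark.sql.types.{IntegerType, StringType}

val attrs = Seq(
  AttributeReference("a", StringType)(),
  AttributeReference("b", IntegerType)())

val relation = LocalRelation.fromExternalRows(attrs, Seq(Row("one", 1), Row("two", 2)))

// Expected shape of the result: VALUES ("one", 1), ("two", 2) AS tbl(a, b)
relation.toSQL("tbl")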

sql/core/src/main/scala/org/apache/spark/sql/catalyst/SQLBuilder.scala

Lines changed: 3 additions & 0 deletions
@@ -205,6 +205,9 @@ class SQLBuilder private (
     case p: ScriptTransformation =>
       scriptTransformationToSQL(p)
 
+    case p: LocalRelation =>
+      p.toSQL(newSubqueryName())
+
     case OneRowRelation =>
       ""
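A hedged end-to-end sketch of how the new case gets exercised; it is not part of the patch, assumes a Hive-enabled SparkSession named `spark`, and the constructor usage is assumed to mirror the existing test utilities in LogicalPlanToSQLSuite.

// Sketch only: regenerate SQL text for a query over an inline table.
// SQLBuilder is built from the resolved (analyzed) logical plan of the query.
import org.apache.spark.sql.catalyst.SQLBuilder

val df = spark.sql(
  """select * from values ("one", 1), ("two", 2), ("three", null) as data(a, b) where b > 1""")

// The new LocalRelation case renders the inline table as
// VALUES (...) AS gen_subquery_N(...), which the builder then wraps in subqueries.
val regenerated: String = new SQLBuilder(df.queryExecution.analyzed).toSQL
println(regenerated)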

sql/hive/src/test/resources/sqlgen/inline_tables.sql

Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+-- This file is automatically generated by LogicalPlanToSQLSuite.
+select * from values ("one", 1), ("two", 2), ("three", null) as data(a, b) where b > 1
+--------------------------------------------------------------------------------
+SELECT `gen_attr_0` AS `a`, `gen_attr_1` AS `b` FROM (SELECT `gen_attr_0`, `gen_attr_1` FROM (VALUES ("one", 1), ("two", 2), ("three", CAST(NULL AS INT)) AS gen_subquery_0(gen_attr_0, gen_attr_1)) AS data WHERE (`gen_attr_1` > 1)) AS data

sql/hive/src/test/scala/org/apache/spark/sql/catalyst/LogicalPlanToSQLSuite.scala

Lines changed: 8 additions & 0 deletions
@@ -1102,4 +1102,12 @@ class LogicalPlanToSQLSuite extends SQLBuilderTest with SQLTestUtils {
       checkSQL("select * from orc_t", "select_orc_table")
     }
   }
+
+  test("inline tables") {
+    checkSQL(
+      """
+        |select * from values ("one", 1), ("two", 2), ("three", null) as data(a, b) where b > 1
+      """.stripMargin,
+      "inline_tables")
+  }
 }
