Skip to content

Commit 9ddd515

Browse files
committed
Move fromDDL to StructType
1 parent 842ca77 commit 9ddd515

File tree

4 files changed

+8
-9
lines changed

4 files changed

+8
-9
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala

Lines changed: 0 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -24,7 +24,6 @@ import org.json4s.jackson.JsonMethods._
2424

2525
import org.apache.spark.annotation.InterfaceStability
2626
import org.apache.spark.sql.catalyst.expressions.Expression
27-
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
2827
import org.apache.spark.util.Utils
2928

3029
/**
@@ -104,12 +103,6 @@ object DataType {
104103

105104
def fromJson(json: String): DataType = parseDataType(parse(json))
106105

107-
/**
108-
* Creates StructType for a given DDL-formatted string, which is a comma separated list of field
109-
* definitions, e.g., a INT, b STRING.
110-
*/
111-
def fromDDL(ddl: String): StructType = CatalystSqlParser.parseTableSchema(ddl)
112-
113106
private val nonDecimalNameToType = {
114107
Seq(NullType, DateType, TimestampType, BinaryType, IntegerType, BooleanType, LongType,
115108
DoubleType, FloatType, ShortType, ByteType, StringType, CalendarIntervalType)

sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala

Lines changed: 6 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -417,6 +417,12 @@ object StructType extends AbstractDataType {
417417
}
418418
}
419419

420+
/**
421+
* Creates StructType for a given DDL-formatted string, which is a comma separated list of field
422+
* definitions, e.g., a INT, b STRING.
423+
*/
424+
def fromDDL(ddl: String): StructType = CatalystSqlParser.parseTableSchema(ddl)
425+
420426
def apply(fields: Seq[StructField]): StructType = StructType(fields.toArray)
421427

422428
def apply(fields: java.util.List[StructField]): StructType = {

sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -177,7 +177,7 @@ class DataTypeSuite extends SparkFunSuite {
177177

178178
def checkDataTypeFromDDL(dataType: DataType, ignoreNullability: Boolean = false): Unit = {
179179
test(s"from DDL - $dataType") {
180-
val parsed = DataType.fromDDL(s"a ${dataType.sql}")
180+
val parsed = StructType.fromDDL(s"a ${dataType.sql}")
181181
val expected = new StructType().add("a", dataType)
182182
if (!ignoreNullability) {
183183
assert(parsed === expected)

sql/core/src/main/scala/org/apache/spark/sql/functions.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -3067,7 +3067,7 @@ object functions {
30673067
val dataType = try {
30683068
DataType.fromJson(schema)
30693069
} catch {
3070-
case NonFatal(_) => DataType.fromDDL(schema)
3070+
case NonFatal(_) => StructType.fromDDL(schema)
30713071
}
30723072
from_json(e, dataType, options)
30733073
}

0 commit comments

Comments (0)