Skip to content

Commit 3ba2515

Browse files
committed
Address comments
1 parent 98287e4 commit 3ba2515

File tree

5 files changed

+28
-18
lines changed

5 files changed

+28
-18
lines changed

sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/ExpressionInfo.java

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -105,9 +105,8 @@ public ExpressionInfo(
105105
String deprecated) {
106106
assert name != null;
107107
assert arguments != null;
108-
assert arguments.isEmpty() || arguments.startsWith("\n Arguments:\n");
109108
assert examples != null;
110-
assert examples.isEmpty() || examples.startsWith("\n Examples:\n");
109+
assert examples.isEmpty() || examples.contains(" Examples:");
111110
assert note != null;
112111
assert group != null;
113112
assert since != null;
@@ -143,9 +142,9 @@ public ExpressionInfo(
143142
"got [" + group + "].");
144143
}
145144
if (!since.isEmpty()) {
146-
if (!since.matches("[0-9]+\\.[0-9]+\\.[0-9]+")) {
145+
if (Integer.parseInt(since.split("\\.")[0]) < 0) {
147146
throw new IllegalArgumentException("'since' is malformed in the expression [" +
148-
this.name + "]. It should follow the MAJOR.MINOR.PATCH pattern; however, " +
147+
this.name + "]. It should not start with a negative number; however, " +
149148
"got [" + since + "].");
150149
}
151150
this.extended += "\n Since: " + since + "\n";

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -663,10 +663,10 @@ object FunctionRegistry {
663663
val clazz = scala.reflect.classTag[Cast].runtimeClass
664664
val usage = "_FUNC_(expr) - Casts the value `expr` to the target data type `_FUNC_`."
665665
val examples = s"""
666-
Examples:
667-
> SELECT _FUNC_($exampleIn);
668-
$exampleOut
669-
"""
666+
| Examples:
667+
| > SELECT _FUNC_($exampleIn);
668+
| $exampleOut
669+
| """.stripMargin
670670
val expressionInfo = new ExpressionInfo(clazz.getCanonicalName, null, name, usage, "",
671671
examples, "", "", "2.0.1", "")
672672
(name, (expressionInfo, builder))

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -350,10 +350,10 @@ object CreateStruct {
350350
"_FUNC_(col1, col2, col3, ...) - Creates a struct with the given field values.",
351351
"",
352352
"""
353-
Examples:
354-
> SELECT _FUNC_(1, 2, 3);
355-
{"col1":1,"col2":2,"col3":3}
356-
""",
353+
| Examples:
354+
| > SELECT _FUNC_(1, 2, 3);
355+
| {"col1":1,"col2":2,"col3":3}
356+
| """.stripMargin,
357357
"",
358358
"",
359359
"1.4.0",

sql/core/src/test/resources/sql-functions/sql-expression-schema.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
<!-- Automatically generated by ExpressionsSchemaSuite -->
22
## Summary
33
- Number of queries: 339
4-
- Number of expressions that missing example: 0
5-
- Expressions missing examples:
4+
- Number of expressions that missing example: 1
5+
- Expressions missing examples: window
66
## Schema of Built-in Functions
77
| Class name | Function name or alias | Query example | Output schema |
88
| ---------- | ---------------------- | ------------- | ------------- |
@@ -272,7 +272,7 @@
272272
| org.apache.spark.sql.catalyst.expressions.Subtract | - | SELECT 2 - 1 | struct<(2 - 1):int> |
273273
| org.apache.spark.sql.catalyst.expressions.Tan | tan | SELECT tan(0) | struct<TAN(CAST(0 AS DOUBLE)):double> |
274274
| org.apache.spark.sql.catalyst.expressions.Tanh | tanh | SELECT tanh(0) | struct<TANH(CAST(0 AS DOUBLE)):double> |
275-
| org.apache.spark.sql.catalyst.expressions.TimeWindow | window | SELECT window(timestamp('1970-01-01 12:00'), '10 minute', '5 minute') | struct<window:struct<start:timestamp,end:timestamp>> |
275+
| org.apache.spark.sql.catalyst.expressions.TimeWindow | window | N/A | N/A |
276276
| org.apache.spark.sql.catalyst.expressions.ToDegrees | degrees | SELECT degrees(3.141592653589793) | struct<DEGREES(CAST(3.141592653589793 AS DOUBLE)):double> |
277277
| org.apache.spark.sql.catalyst.expressions.ToRadians | radians | SELECT radians(180) | struct<RADIANS(CAST(180 AS DOUBLE)):double> |
278278
| org.apache.spark.sql.catalyst.expressions.ToUTCTimestamp | to_utc_timestamp | SELECT to_utc_timestamp('2016-08-31', 'Asia/Seoul') | struct<to_utc_timestamp(CAST(2016-08-31 AS TIMESTAMP), Asia/Seoul):timestamp> |

sql/core/src/test/scala/org/apache/spark/sql/expressions/ExpressionInfoSuite.scala

Lines changed: 14 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -115,8 +115,14 @@ class ExpressionInfoSuite extends SparkFunSuite with SharedSparkSession {
115115
if (!ignoreSet.contains(info.getClassName)) {
116116
withClue(s"Function '${info.getName}', Expression class '${info.getClassName}'") {
117117
assert(info.getUsage.nonEmpty)
118-
assert(info.getExamples.nonEmpty)
119-
assert(info.getSince.nonEmpty)
118+
assert(info.getExamples.startsWith("\n Examples:\n"))
119+
assert(info.getExamples.endsWith("\n "))
120+
assert(info.getSince.matches("[0-9]+\\.[0-9]+\\.[0-9]+"))
121+
122+
if (info.getArguments.nonEmpty) {
123+
assert(info.getArguments.startsWith("\n Arguments:\n"))
124+
assert(info.getArguments.endsWith("\n "))
125+
}
120126
}
121127
}
122128
}
@@ -148,7 +154,12 @@ class ExpressionInfoSuite extends SparkFunSuite with SharedSparkSession {
148154
"org.apache.spark.sql.catalyst.expressions.Uuid",
149155
// The example calls methods that return unstable results.
150156
"org.apache.spark.sql.catalyst.expressions.CallMethodViaReflection",
151-
"org.apache.spark.sql.catalyst.expressions.SparkVersion")
157+
"org.apache.spark.sql.catalyst.expressions.SparkVersion",
158+
"org.apache.spark.sql.catalyst.expressions.MonotonicallyIncreasingID",
159+
"org.apache.spark.sql.catalyst.expressions.SparkPartitionID",
160+
"org.apache.spark.sql.catalyst.expressions.InputFileName",
161+
"org.apache.spark.sql.catalyst.expressions.InputFileBlockStart",
162+
"org.apache.spark.sql.catalyst.expressions.InputFileBlockLength")
152163

153164
val parFuncs = new ParVector(spark.sessionState.functionRegistry.listFunction().toVector)
154165
parFuncs.foreach { funcId =>

0 commit comments

Comments (0)