Commit 4a2c375

hvanhovell authored and rxin committed
[SPARK-17084][SQL] Rename ParserUtils.assert to validate
## What changes were proposed in this pull request?

This PR renames `ParserUtils.assert` to `ParserUtils.validate`. The rename reflects that this method is used to check requirements on the parsed input, not to check whether the program is in an invalid state.

## How was this patch tested?

Simple rename; compilation is sufficient.

Author: Herman van Hovell <[email protected]>

Closes apache#14665 from hvanhovell/SPARK-17084.
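For context on the distinction the description draws: Scala's built-in `assert` (from `Predef`) signals an internal invariant violation with an `AssertionError` and can even be elided by the compiler, whereas this parser helper rejects invalid user input with a `ParseException`. The following minimal, self-contained sketch illustrates that behaviour; the `ParseException` stand-in and the `ValidateSketch` object are illustrative names, not Spark's actual classes:

```scala
// Sketch only: a stand-in for Spark's ParseException, representing a
// user-facing error about bad input (illustrative, not the real class).
class ParseException(message: String) extends RuntimeException(message)

object ValidateSketch {
  // Requirement check on user input: always evaluated, never elided by the compiler.
  def validate(condition: => Boolean, message: String): Unit = {
    if (!condition) throw new ParseException(message)
  }

  def main(args: Array[String]): Unit = {
    val fraction = 1.5
    try {
      validate(fraction >= 0.0 && fraction <= 1.0,
        s"Sampling fraction ($fraction) must be on interval [0, 1]")
    } catch {
      case e: ParseException => println(s"Rejected input: ${e.getMessage}")
    }
  }
}
```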
1 parent e28a8c5 commit 4a2c375

File tree

3 files changed (+11, -12 lines)


sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala

Lines changed: 7 additions & 7 deletions
@@ -132,7 +132,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
     // Build the insert clauses.
     val inserts = ctx.multiInsertQueryBody.asScala.map {
       body =>
-        assert(body.querySpecification.fromClause == null,
+        validate(body.querySpecification.fromClause == null,
           "Multi-Insert queries cannot have a FROM clause in their individual SELECT statements",
           body)

@@ -596,7 +596,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
         // function takes X PERCENT as the input and the range of X is [0, 100], we need to
         // adjust the fraction.
         val eps = RandomSampler.roundingEpsilon
-        assert(fraction >= 0.0 - eps && fraction <= 1.0 + eps,
+        validate(fraction >= 0.0 - eps && fraction <= 1.0 + eps,
           s"Sampling fraction ($fraction) must be on interval [0, 1]",
           ctx)
         Sample(0.0, fraction, withReplacement = false, (math.random * 1000).toInt, query)(true)
@@ -664,7 +664,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
     // Get the backing expressions.
     val expressions = ctx.expression.asScala.map { eCtx =>
       val e = expression(eCtx)
-      assert(e.foldable, "All expressions in an inline table must be constants.", eCtx)
+      validate(e.foldable, "All expressions in an inline table must be constants.", eCtx)
       e
     }

@@ -686,7 +686,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
     val baseAttributes = structType.toAttributes.map(_.withNullability(true))
     val attributes = if (ctx.identifierList != null) {
       val aliases = visitIdentifierList(ctx.identifierList)
-      assert(aliases.size == baseAttributes.size,
+      validate(aliases.size == baseAttributes.size,
         "Number of aliases must match the number of fields in an inline table.", ctx)
       baseAttributes.zip(aliases).map(p => p._1.withName(p._2))
     } else {
@@ -1094,7 +1094,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
     // We currently only allow foldable integers.
     def value: Int = {
       val e = expression(ctx.expression)
-      assert(e.resolved && e.foldable && e.dataType == IntegerType,
+      validate(e.resolved && e.foldable && e.dataType == IntegerType,
         "Frame bound value must be a constant integer.",
         ctx)
       e.eval().asInstanceOf[Int]
@@ -1347,7 +1347,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
    */
   override def visitInterval(ctx: IntervalContext): Literal = withOrigin(ctx) {
     val intervals = ctx.intervalField.asScala.map(visitIntervalField)
-    assert(intervals.nonEmpty, "at least one time unit should be given for interval literal", ctx)
+    validate(intervals.nonEmpty, "at least one time unit should be given for interval literal", ctx)
     Literal(intervals.reduce(_.add(_)))
   }

@@ -1374,7 +1374,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
         case (from, Some(t)) =>
           throw new ParseException(s"Intervals FROM $from TO $t are not supported.", ctx)
       }
-      assert(interval != null, "No interval can be constructed", ctx)
+      validate(interval != null, "No interval can be constructed", ctx)
       interval
     } catch {
       // Handle Exceptions thrown by CalendarInterval

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala

Lines changed: 2 additions & 2 deletions
@@ -77,8 +77,8 @@ object ParserUtils {
     Origin(Option(token.getLine), Option(token.getCharPositionInLine))
   }

-  /** Assert if a condition holds. If it doesn't throw a parse exception. */
-  def assert(f: => Boolean, message: String, ctx: ParserRuleContext): Unit = {
+  /** Validate the condition. If it doesn't throw a parse exception. */
+  def validate(f: => Boolean, message: String, ctx: ParserRuleContext): Unit = {
     if (!f) {
       throw new ParseException(message, ctx)
     }
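Beyond the diff itself, a hedged usage sketch of how a failed `validate` surfaces to callers: the check raises a `ParseException` while the SQL text is being parsed. The snippet below is meant for a spark-shell session or script, and the table name `t` and app name are illustrative; it relies on the sampling-fraction check shown in the AstBuilder hunk above:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.parser.ParseException

val spark = SparkSession.builder().master("local[1]").appName("validate-demo").getOrCreate()
spark.range(10).createOrReplaceTempView("t")

try {
  // A sampling fraction above 1 should fail the validate call in AstBuilder.withSample.
  spark.sql("SELECT * FROM t TABLESAMPLE (150 PERCENT)")
} catch {
  // The error message should state that the fraction must be on interval [0, 1].
  case e: ParseException => println(e.getMessage)
}
```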

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala

Lines changed: 2 additions & 3 deletions
@@ -18,7 +18,6 @@
 package org.apache.spark.sql.execution

 import scala.collection.JavaConverters._
-import scala.util.Try

 import org.antlr.v4.runtime.{ParserRuleContext, Token}
 import org.antlr.v4.runtime.tree.TerminalNode
@@ -799,7 +798,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
   }

   /**
-   * Create an [[AlterTableDiscoverPartitionsCommand]] command
+   * Create an [[AlterTableRecoverPartitionsCommand]] command
    *
    * For example:
    * {{{
@@ -1182,7 +1181,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
         entry("mapkey.delim", ctx.keysTerminatedBy) ++
         Option(ctx.linesSeparatedBy).toSeq.map { token =>
           val value = string(token)
-          assert(
+          validate(
             value == "\n",
             s"LINES TERMINATED BY only supports newline '\\n' right now: $value",
             ctx)
