From 87f4ed0a5782717c776f118ebff0a1d06641447a Mon Sep 17 00:00:00 2001 From: Gengliang Wang Date: Thu, 5 Sep 2019 17:11:07 +0800 Subject: [PATCH 1/7] Add spark.sql.ansi.enabled --- docs/sql-keywords.md | 8 ++-- .../sql/catalyst/CatalystTypeConverters.scala | 2 +- .../sql/catalyst/SerializerBuildHelper.scala | 2 +- .../catalyst/analysis/DecimalPrecision.scala | 2 +- .../sql/catalyst/encoders/RowEncoder.scala | 2 +- .../spark/sql/catalyst/expressions/Cast.scala | 4 +- .../catalyst/expressions/aggregate/Sum.scala | 2 +- .../sql/catalyst/expressions/arithmetic.scala | 4 +- .../expressions/decimalExpressions.scala | 2 +- .../sql/catalyst/parser/AstBuilder.scala | 2 +- .../sql/catalyst/parser/ParseDriver.scala | 4 +- .../apache/spark/sql/internal/SQLConf.scala | 41 +++++-------------- .../encoders/ExpressionEncoderSuite.scala | 8 ++-- .../catalyst/encoders/RowEncoderSuite.scala | 4 +- .../ArithmeticExpressionSuite.scala | 24 +++++------ .../sql/catalyst/expressions/CastSuite.scala | 12 +++--- .../expressions/DecimalExpressionSuite.scala | 4 +- .../catalyst/expressions/ScalaUDFSuite.scala | 4 +- .../parser/ExpressionParserSuite.scala | 10 ++--- .../parser/TableIdentifierParserSuite.scala | 2 +- .../org/apache/spark/sql/DataFrameSuite.scala | 6 +-- .../apache/spark/sql/SQLQueryTestSuite.scala | 2 +- .../ThriftServerQueryTestSuite.scala | 2 +- 23 files changed, 66 insertions(+), 87 deletions(-) diff --git a/docs/sql-keywords.md b/docs/sql-keywords.md index 16ce35d55ed7b..c55e589aa8c3f 100644 --- a/docs/sql-keywords.md +++ b/docs/sql-keywords.md @@ -19,15 +19,15 @@ license: | limitations under the License. --- -When `spark.sql.parser.ansi.enabled` is true, Spark SQL has two kinds of keywords: +When `spark.sql.ansi.enabled` is true, Spark SQL has two kinds of keywords: * Reserved keywords: Keywords that are reserved and can't be used as identifiers for table, view, column, function, alias, etc. * Non-reserved keywords: Keywords that have a special meaning only in particular contexts and can be used as identifiers in other contexts. For example, `SELECT 1 WEEK` is an interval literal, but WEEK can be used as identifiers in other places. -When `spark.sql.parser.ansi.enabled` is false, Spark SQL has two kinds of keywords: -* Non-reserved keywords: Same definition as the one when `spark.sql.parser.ansi.enabled=true`. +When `spark.sql.ansi.enabled` is false, Spark SQL has two kinds of keywords: +* Non-reserved keywords: Same definition as the one when `spark.sql.ansi.enabled=true`. * Strict-non-reserved keywords: A strict version of non-reserved keywords, which can not be used as table alias. -By default `spark.sql.parser.ansi.enabled` is false. +By default `spark.sql.ansi.enabled` is false. Below is a list of all the keywords in Spark SQL. 
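As a quick illustration of the keyword rules described in the doc change above (a hedged sketch: it assumes WEEK stays non-reserved and SELECT is on the reserved list in ANSI mode; the full keyword table in sql-keywords.md is authoritative):

SET spark.sql.ansi.enabled=true;
SELECT 1 WEEK;                 -- an interval literal: WEEK acts as an interval unit here
CREATE TABLE select (a INT);   -- expected to fail: SELECT is a reserved keyword
SET spark.sql.ansi.enabled=false;
SELECT 1 WEEK;                 -- the literal 1 aliased as `WEEK`
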
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala index 488252aa0c7b5..34d2f45e715e9 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala @@ -344,7 +344,7 @@ object CatalystTypeConverters { private class DecimalConverter(dataType: DecimalType) extends CatalystTypeConverter[Any, JavaBigDecimal, Decimal] { - private val nullOnOverflow = SQLConf.get.decimalOperationsNullOnOverflow + private val nullOnOverflow = !SQLConf.get.ansiEnabled override def toCatalystImpl(scalaValue: Any): Decimal = { val decimal = scalaValue match { diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SerializerBuildHelper.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SerializerBuildHelper.scala index 75c278e781140..026ff6f2983fb 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SerializerBuildHelper.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SerializerBuildHelper.scala @@ -26,7 +26,7 @@ import org.apache.spark.unsafe.types.UTF8String object SerializerBuildHelper { - private def nullOnOverflow: Boolean = SQLConf.get.decimalOperationsNullOnOverflow + private def nullOnOverflow: Boolean = !SQLConf.get.ansiEnabled def createSerializerForBoolean(inputObject: Expression): Expression = { Invoke(inputObject, "booleanValue", BooleanType) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecision.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecision.scala index 856c2ed828002..a64befecb68d6 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecision.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecision.scala @@ -82,7 +82,7 @@ object DecimalPrecision extends TypeCoercionRule { PromotePrecision(Cast(e, dataType)) } - private def nullOnOverflow: Boolean = SQLConf.get.decimalOperationsNullOnOverflow + private def nullOnOverflow: Boolean = !SQLConf.get.ansiEnabled override protected def coerceTypes(plan: LogicalPlan): LogicalPlan = plan resolveOperators { // fix decimal precision for expressions diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/RowEncoder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/RowEncoder.scala index afe8a23f8f150..765018f07d87a 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/RowEncoder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/RowEncoder.scala @@ -114,7 +114,7 @@ object RowEncoder { d, "fromDecimal", inputObject :: Nil, - returnNullable = false), d, SQLConf.get.decimalOperationsNullOnOverflow) + returnNullable = false), d, !SQLConf.get.ansiEnabled) case StringType => createSerializerForString(inputObject) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala index d1943f02f85e5..1f89b64628a62 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala @@ -289,7 +289,7 @@ case class Cast(child: Expression, dataType: DataType, 
timeZoneId: Option[String private lazy val dateFormatter = DateFormatter() private lazy val timestampFormatter = TimestampFormatter.getFractionFormatter(zoneId) - private val failOnIntegralTypeOverflow = SQLConf.get.failOnIntegralTypeOverflow + private val failOnIntegralTypeOverflow = SQLConf.get.ansiEnabled // UDFToString private[this] def castToString(from: DataType): Any => Any = from match { @@ -600,7 +600,7 @@ case class Cast(child: Expression, dataType: DataType, timeZoneId: Option[String b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b).toByte } - private val nullOnOverflow = SQLConf.get.decimalOperationsNullOnOverflow + private val nullOnOverflow = !SQLConf.get.ansiEnabled /** * Change the precision / scale in a given decimal to those set in `decimalType` (if any), diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Sum.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Sum.scala index d04fe9249d064..c2ab8adfaef67 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Sum.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Sum.scala @@ -91,7 +91,7 @@ case class Sum(child: Expression) extends DeclarativeAggregate with ImplicitCast } override lazy val evaluateExpression: Expression = resultType match { - case d: DecimalType => CheckOverflow(sum, d, SQLConf.get.decimalOperationsNullOnOverflow) + case d: DecimalType => CheckOverflow(sum, d, !SQLConf.get.ansiEnabled) case _ => sum } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala index 2ed82d99fe2bb..e4276e33acbd2 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala @@ -35,7 +35,7 @@ import org.apache.spark.unsafe.types.CalendarInterval """) case class UnaryMinus(child: Expression) extends UnaryExpression with ExpectsInputTypes with NullIntolerant { - private val checkOverflow = SQLConf.get.failOnIntegralTypeOverflow + private val checkOverflow = SQLConf.get.ansiEnabled override def inputTypes: Seq[AbstractDataType] = Seq(TypeCollection.NumericAndInterval) @@ -136,7 +136,7 @@ case class Abs(child: Expression) abstract class BinaryArithmetic extends BinaryOperator with NullIntolerant { - protected val checkOverflow = SQLConf.get.failOnIntegralTypeOverflow + protected val checkOverflow = SQLConf.get.ansiEnabled override def dataType: DataType = left.dataType diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala index b5b712cda8ea3..7b2489e682661 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala @@ -47,7 +47,7 @@ case class UnscaledValue(child: Expression) extends UnaryExpression { */ case class MakeDecimal(child: Expression, precision: Int, scale: Int) extends UnaryExpression { - private val nullOnOverflow = SQLConf.get.decimalOperationsNullOnOverflow + private val nullOnOverflow = !SQLConf.get.ansiEnabled override def dataType: DataType = DecimalType(precision, scale) override def nullable: Boolean = 
child.nullable || nullOnOverflow diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index 9335be5b239b6..c334d09b691ea 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -1363,7 +1363,7 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging } override def visitCurrentDatetime(ctx: CurrentDatetimeContext): Expression = withOrigin(ctx) { - if (conf.ansiParserEnabled) { + if (conf.ansiEnabled) { ctx.name.getType match { case SqlBaseParser.CURRENT_DATE => CurrentDate() diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala index 932e795f1d0bc..85998e33140d0 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala @@ -92,7 +92,7 @@ abstract class AbstractSqlParser extends ParserInterface with Logging { lexer.removeErrorListeners() lexer.addErrorListener(ParseErrorListener) lexer.legacy_setops_precedence_enbled = SQLConf.get.setOpsPrecedenceEnforced - lexer.ansi = SQLConf.get.ansiParserEnabled + lexer.ansi = SQLConf.get.ansiEnabled val tokenStream = new CommonTokenStream(lexer) val parser = new SqlBaseParser(tokenStream) @@ -100,7 +100,7 @@ abstract class AbstractSqlParser extends ParserInterface with Logging { parser.removeErrorListeners() parser.addErrorListener(ParseErrorListener) parser.legacy_setops_precedence_enbled = SQLConf.get.setOpsPrecedenceEnforced - parser.ansi = SQLConf.get.ansiParserEnabled + parser.ansi = SQLConf.get.ansiEnabled try { try { diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index 4f3e39ad49afe..bbd731dbbf9c7 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -411,12 +411,6 @@ object SQLConf { .booleanConf .createWithDefault(true) - val ANSI_SQL_PARSER = - buildConf("spark.sql.parser.ansi.enabled") - .doc("When true, tries to conform to ANSI SQL syntax.") - .booleanConf - .createWithDefault(false) - val ESCAPED_STRING_LITERALS = buildConf("spark.sql.parser.escapedStringLiterals") .internal() .doc("When true, string literals (including regex patterns) remain escaped in our SQL " + @@ -1557,16 +1551,6 @@ object SQLConf { .booleanConf .createWithDefault(true) - val DECIMAL_OPERATIONS_NULL_ON_OVERFLOW = - buildConf("spark.sql.decimalOperations.nullOnOverflow") - .internal() - .doc("When true (default), if an overflow on a decimal occurs, then NULL is returned. " + - "Spark's older versions and Hive behave in this way. 
If turned to false, SQL ANSI 2011 " + - "specification will be followed instead: an arithmetic exception is thrown, as most " + - "of the SQL databases do.") - .booleanConf - .createWithDefault(true) - val LITERAL_PICK_MINIMUM_PRECISION = buildConf("spark.sql.legacy.literal.pickMinimumPrecision") .internal() @@ -1723,6 +1707,14 @@ object SQLConf { .checkValues(StoreAssignmentPolicy.values.map(_.toString)) .createOptional + val ANSI_ENABLED = buildConf("spark.sql.ansi.enabled") + .doc("When true, tries to conform to the ANSI SQL specification. For example, Spark will " + + "throw an runtime exception if an overflow occurs in any operation on integral/decimal " + + "field. For another example, Spark will forbid using the reserved keywords of ANSI SQL " + + "as identifiers in the SQL parser.") + .booleanConf + .createWithDefault(false) + val SORT_BEFORE_REPARTITION = buildConf("spark.sql.execution.sortBeforeRepartition") .internal() @@ -1886,15 +1878,6 @@ object SQLConf { .booleanConf .createWithDefault(false) - val FAIL_ON_INTEGRAL_TYPE_OVERFLOW = - buildConf("spark.sql.failOnIntegralTypeOverflow") - .doc("If it is set to true, all operations on integral fields throw an " + - "exception if an overflow occurs. If it is false (default), in case of overflow a wrong " + - "result is returned.") - .internal() - .booleanConf - .createWithDefault(false) - val LEGACY_HAVING_WITHOUT_GROUP_BY_AS_WHERE = buildConf("spark.sql.legacy.parser.havingWithoutGroupByAsWhere") .internal() @@ -2195,8 +2178,6 @@ class SQLConf extends Serializable with Logging { def constraintPropagationEnabled: Boolean = getConf(CONSTRAINT_PROPAGATION_ENABLED) - def ansiParserEnabled: Boolean = getConf(ANSI_SQL_PARSER) - def escapedStringLiterals: Boolean = getConf(ESCAPED_STRING_LITERALS) def fileCompressionFactor: Double = getConf(FILE_COMPRESSION_FACTOR) @@ -2418,10 +2399,6 @@ class SQLConf extends Serializable with Logging { def decimalOperationsAllowPrecisionLoss: Boolean = getConf(DECIMAL_OPERATIONS_ALLOW_PREC_LOSS) - def decimalOperationsNullOnOverflow: Boolean = getConf(DECIMAL_OPERATIONS_NULL_ON_OVERFLOW) - - def failOnIntegralTypeOverflow: Boolean = getConf(FAIL_ON_INTEGRAL_TYPE_OVERFLOW) - def literalPickMinimumPrecision: Boolean = getConf(LITERAL_PICK_MINIMUM_PRECISION) def continuousStreamingEpochBacklogQueueSize: Int = @@ -2454,6 +2431,8 @@ class SQLConf extends Serializable with Logging { def storeAssignmentPolicy: Option[StoreAssignmentPolicy.Value] = getConf(STORE_ASSIGNMENT_POLICY).map(StoreAssignmentPolicy.withName) + def ansiEnabled: Boolean = getConf(ANSI_ENABLED) + def nestedSchemaPruningEnabled: Boolean = getConf(NESTED_SCHEMA_PRUNING_ENABLED) def serializerNestedSchemaPruningEnabled: Boolean = diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala index 9380c7e3f5f72..d2acfb9686700 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala @@ -432,16 +432,16 @@ class ExpressionEncoderSuite extends CodegenInterpretedPlanTest with AnalysisTes } private def testOverflowingBigNumeric[T: TypeTag](bigNumeric: T, testName: String): Unit = { - Seq(true, false).foreach { allowNullOnOverflow => + Seq(true, false).foreach { ansiEnabled => testAndVerifyNotLeakingReflectionObjects( - s"overflowing $testName, 
allowNullOnOverflow=$allowNullOnOverflow") { + s"overflowing $testName, ansiEnabled=$ansiEnabled") { withSQLConf( - SQLConf.DECIMAL_OPERATIONS_NULL_ON_OVERFLOW.key -> allowNullOnOverflow.toString + SQLConf.ANSI_ENABLED.key -> ansiEnabled.toString ) { // Need to construct Encoder here rather than implicitly resolving it // so that SQLConf changes are respected. val encoder = ExpressionEncoder[T]() - if (allowNullOnOverflow) { + if (!ansiEnabled) { val convertedBack = encoder.resolveAndBind().fromRow(encoder.toRow(bigNumeric)) assert(convertedBack === null) } else { diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala index 5d21e4a2a83ca..1a1cab823d4f3 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala @@ -169,7 +169,7 @@ class RowEncoderSuite extends CodegenInterpretedPlanTest { } private def testDecimalOverflow(schema: StructType, row: Row): Unit = { - withSQLConf(SQLConf.DECIMAL_OPERATIONS_NULL_ON_OVERFLOW.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { val encoder = RowEncoder(schema).resolveAndBind() intercept[Exception] { encoder.toRow(row) @@ -182,7 +182,7 @@ class RowEncoderSuite extends CodegenInterpretedPlanTest { } } - withSQLConf(SQLConf.DECIMAL_OPERATIONS_NULL_ON_OVERFLOW.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { val encoder = RowEncoder(schema).resolveAndBind() assert(encoder.fromRow(encoder.toRow(row)).get(0) == null) } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala index 729e2f529b97f..ad8b1a1673679 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala @@ -61,7 +61,7 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper checkEvaluation(Add(positiveLongLit, negativeLongLit), -1L) Seq("true", "false").foreach { checkOverflow => - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> checkOverflow) { + withSQLConf(SQLConf.ANSI_ENABLED.key -> checkOverflow) { DataTypeTestUtils.numericAndInterval.foreach { tpe => checkConsistencyBetweenInterpretedAndCodegenAllowingException(Add, tpe, tpe) } @@ -80,7 +80,7 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper checkEvaluation(UnaryMinus(Literal(Int.MinValue)), Int.MinValue) checkEvaluation(UnaryMinus(Literal(Short.MinValue)), Short.MinValue) checkEvaluation(UnaryMinus(Literal(Byte.MinValue)), Byte.MinValue) - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { checkExceptionInExpression[ArithmeticException]( UnaryMinus(Literal(Long.MinValue)), "overflow") checkExceptionInExpression[ArithmeticException]( @@ -122,7 +122,7 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper checkEvaluation(Subtract(positiveLongLit, negativeLongLit), positiveLong - negativeLong) Seq("true", "false").foreach { checkOverflow => - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> checkOverflow) { + 
withSQLConf(SQLConf.ANSI_ENABLED.key -> checkOverflow) { DataTypeTestUtils.numericAndInterval.foreach { tpe => checkConsistencyBetweenInterpretedAndCodegenAllowingException(Subtract, tpe, tpe) } @@ -144,7 +144,7 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper checkEvaluation(Multiply(positiveLongLit, negativeLongLit), positiveLong * negativeLong) Seq("true", "false").foreach { checkOverflow => - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> checkOverflow) { + withSQLConf(SQLConf.ANSI_ENABLED.key -> checkOverflow) { DataTypeTestUtils.numericTypeWithoutDecimal.foreach { tpe => checkConsistencyBetweenInterpretedAndCodegenAllowingException(Multiply, tpe, tpe) } @@ -445,12 +445,12 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper val e4 = Add(minLongLiteral, minLongLiteral) val e5 = Subtract(minLongLiteral, maxLongLiteral) val e6 = Multiply(minLongLiteral, minLongLiteral) - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { Seq(e1, e2, e3, e4, e5, e6).foreach { e => checkExceptionInExpression[ArithmeticException](e, "overflow") } } - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { checkEvaluation(e1, Long.MinValue) checkEvaluation(e2, Long.MinValue) checkEvaluation(e3, -2L) @@ -469,12 +469,12 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper val e4 = Add(minIntLiteral, minIntLiteral) val e5 = Subtract(minIntLiteral, maxIntLiteral) val e6 = Multiply(minIntLiteral, minIntLiteral) - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { Seq(e1, e2, e3, e4, e5, e6).foreach { e => checkExceptionInExpression[ArithmeticException](e, "overflow") } } - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { checkEvaluation(e1, Int.MinValue) checkEvaluation(e2, Int.MinValue) checkEvaluation(e3, -2) @@ -493,12 +493,12 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper val e4 = Add(minShortLiteral, minShortLiteral) val e5 = Subtract(minShortLiteral, maxShortLiteral) val e6 = Multiply(minShortLiteral, minShortLiteral) - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { Seq(e1, e2, e3, e4, e5, e6).foreach { e => checkExceptionInExpression[ArithmeticException](e, "overflow") } } - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { checkEvaluation(e1, Short.MinValue) checkEvaluation(e2, Short.MinValue) checkEvaluation(e3, (-2).toShort) @@ -517,12 +517,12 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper val e4 = Add(minByteLiteral, minByteLiteral) val e5 = Subtract(minByteLiteral, maxByteLiteral) val e6 = Multiply(minByteLiteral, minByteLiteral) - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { Seq(e1, e2, e3, e4, e5, e6).foreach { e => checkExceptionInExpression[ArithmeticException](e, "overflow") } } - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { checkEvaluation(e1, Byte.MinValue) checkEvaluation(e2, Byte.MinValue) checkEvaluation(e3, (-2).toByte) diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala index 7a781295a7957..ffb14e2838687 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala @@ -1044,14 +1044,14 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper { } test("SPARK-28470: Cast should honor nullOnOverflow property") { - withSQLConf(SQLConf.DECIMAL_OPERATIONS_NULL_ON_OVERFLOW.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { checkEvaluation(Cast(Literal("134.12"), DecimalType(3, 2)), null) checkEvaluation( Cast(Literal(Timestamp.valueOf("2019-07-25 22:04:36")), DecimalType(3, 2)), null) checkEvaluation(Cast(Literal(BigDecimal(134.12)), DecimalType(3, 2)), null) checkEvaluation(Cast(Literal(134.12), DecimalType(3, 2)), null) } - withSQLConf(SQLConf.DECIMAL_OPERATIONS_NULL_ON_OVERFLOW.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { checkExceptionInExpression[ArithmeticException]( Cast(Literal("134.12"), DecimalType(3, 2)), "cannot be represented") checkExceptionInExpression[ArithmeticException]( @@ -1117,7 +1117,7 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper { } test("Cast to byte with option FAIL_ON_INTEGER_OVERFLOW enabled") { - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { testIntMaxAndMin(ByteType) Seq(Byte.MaxValue + 1, Byte.MinValue - 1).foreach { value => checkExceptionInExpression[ArithmeticException](cast(value, ByteType), "overflow") @@ -1142,7 +1142,7 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper { } test("Cast to short with option FAIL_ON_INTEGER_OVERFLOW enabled") { - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { testIntMaxAndMin(ShortType) Seq(Short.MaxValue + 1, Short.MinValue - 1).foreach { value => checkExceptionInExpression[ArithmeticException](cast(value, ShortType), "overflow") @@ -1167,7 +1167,7 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper { } test("Cast to int with option FAIL_ON_INTEGER_OVERFLOW enabled") { - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { testIntMaxAndMin(IntegerType) testLongMaxAndMin(IntegerType) @@ -1184,7 +1184,7 @@ class CastSuite extends SparkFunSuite with ExpressionEvalHelper { } test("Cast to long with option FAIL_ON_INTEGER_OVERFLOW enabled") { - withSQLConf(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { testLongMaxAndMin(LongType) Seq(Long.MaxValue, 0, Long.MinValue).foreach { value => diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DecimalExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DecimalExpressionSuite.scala index fc5e8dc5ee7f1..36bc3db580400 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DecimalExpressionSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DecimalExpressionSuite.scala @@ -32,7 +32,7 @@ class DecimalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper { } test("MakeDecimal") { - withSQLConf(SQLConf.DECIMAL_OPERATIONS_NULL_ON_OVERFLOW.key -> 
"true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { checkEvaluation(MakeDecimal(Literal(101L), 3, 1), Decimal("10.1")) checkEvaluation(MakeDecimal(Literal.create(null, LongType), 3, 1), null) val overflowExpr = MakeDecimal(Literal.create(1000L, LongType), 3, 1) @@ -41,7 +41,7 @@ class DecimalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper { evaluateWithoutCodegen(overflowExpr, null) checkEvaluationWithUnsafeProjection(overflowExpr, null) } - withSQLConf(SQLConf.DECIMAL_OPERATIONS_NULL_ON_OVERFLOW.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { checkEvaluation(MakeDecimal(Literal(101L), 3, 1), Decimal("10.1")) checkEvaluation(MakeDecimal(Literal.create(null, LongType), 3, 1), null) val overflowExpr = MakeDecimal(Literal.create(1000L, LongType), 3, 1) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala index 981ef57c051fd..c5ffc381b58e2 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala @@ -57,7 +57,7 @@ class ScalaUDFSuite extends SparkFunSuite with ExpressionEvalHelper { } test("SPARK-28369: honor nullOnOverflow config for ScalaUDF") { - withSQLConf(SQLConf.DECIMAL_OPERATIONS_NULL_ON_OVERFLOW.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { val udf = ScalaUDF( (a: java.math.BigDecimal) => a.multiply(new java.math.BigDecimal(100)), DecimalType.SYSTEM_DEFAULT, @@ -69,7 +69,7 @@ class ScalaUDFSuite extends SparkFunSuite with ExpressionEvalHelper { } assert(e2.getCause.isInstanceOf[ArithmeticException]) } - withSQLConf(SQLConf.DECIMAL_OPERATIONS_NULL_ON_OVERFLOW.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { val udf = ScalaUDF( (a: java.math.BigDecimal) => a.multiply(new java.math.BigDecimal(100)), DecimalType.SYSTEM_DEFAULT, diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala index ee89980d3eedd..5da2bf059758d 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala @@ -605,7 +605,7 @@ class ExpressionParserSuite extends AnalysisTest { assertEqual(s"interval $intervalValue", expected) // SPARK-23264 Support interval values without INTERVAL clauses if ANSI SQL enabled - withSQLConf(SQLConf.ANSI_SQL_PARSER.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { assertEqual(intervalValue, expected) } } @@ -682,12 +682,12 @@ class ExpressionParserSuite extends AnalysisTest { test("SPARK-23264 Interval Compatibility tests") { def checkIntervals(intervalValue: String, expected: Literal): Unit = { - withSQLConf(SQLConf.ANSI_SQL_PARSER.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { assertEqual(intervalValue, expected) } // Compatibility tests: If ANSI SQL disabled, `intervalValue` should be parsed as an alias - withSQLConf(SQLConf.ANSI_SQL_PARSER.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { val aliases = defaultParser.parseExpression(intervalValue).collect { case a @ Alias(_: Literal, name) if intervalUnits.exists { unit => name.startsWith(unit) } => a @@ -785,12 +785,12 
@@ class ExpressionParserSuite extends AnalysisTest { } test("current date/timestamp braceless expressions") { - withSQLConf(SQLConf.ANSI_SQL_PARSER.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { assertEqual("current_date", CurrentDate()) assertEqual("current_timestamp", CurrentTimestamp()) } - withSQLConf(SQLConf.ANSI_SQL_PARSER.key -> "false") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") { assertEqual("current_date", UnresolvedAttribute.quoted("current_date")) assertEqual("current_timestamp", UnresolvedAttribute.quoted("current_timestamp")) } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala index 81f0bd024a80e..a9216174804d0 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala @@ -658,7 +658,7 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { } test("table identifier - reserved/non-reserved keywords if ANSI mode enabled") { - withSQLConf(SQLConf.ANSI_SQL_PARSER.key -> "true") { + withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") { reservedKeywordsInAnsiMode.foreach { keyword => val errMsg = intercept[ParseException] { parseTableIdentifier(keyword) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala index b4ddfecaee469..6a2eac9e7e21c 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala @@ -161,10 +161,10 @@ class DataFrameSuite extends QueryTest with SharedSparkSession { DecimalData(BigDecimal("1"* 20 + ".123"), BigDecimal("1"* 20 + ".123")) :: DecimalData(BigDecimal("9"* 20 + ".123"), BigDecimal("9"* 20 + ".123")) :: Nil).toDF() - Seq(true, false).foreach { nullOnOverflow => - withSQLConf((SQLConf.DECIMAL_OPERATIONS_NULL_ON_OVERFLOW.key, nullOnOverflow.toString)) { + Seq(true, false).foreach { ansiEnabled => + withSQLConf((SQLConf.ANSI_ENABLED.key, ansiEnabled.toString)) { val structDf = largeDecimals.select("a").agg(sum("a")) - if (nullOnOverflow) { + if (!ansiEnabled) { checkAnswer(structDf, Row(null)) } else { val e = intercept[SparkException] { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala index cc92d6556387d..10e2177d3ede0 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala @@ -308,7 +308,7 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession { localSparkSession.udf.register("vol", (s: String) => s) // PostgreSQL enabled cartesian product by default. localSparkSession.conf.set(SQLConf.CROSS_JOINS_ENABLED.key, true) - localSparkSession.conf.set(SQLConf.ANSI_SQL_PARSER.key, true) + localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, true) localSparkSession.conf.set(SQLConf.PREFER_INTEGRAL_DIVISION.key, true) localSparkSession.conf.set(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key, true) // Propagate the SQL conf FAIL_ON_INTEGRAL_TYPE_OVERFLOW to executor. 
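The parser test changes above amount to behavior along these lines (a rough sketch of what the tests assert; how the non-ANSI attribute resolves depends on the surrounding query):

SET spark.sql.ansi.enabled=true;
SELECT current_date;     -- parsed as the CURRENT_DATE function, parentheses optional
SET spark.sql.ansi.enabled=false;
SELECT current_date;     -- parsed as an unresolved attribute named `current_date`
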
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala index 1f7b3feae47b5..381b8f2324ca6 100644 --- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala +++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala @@ -110,7 +110,7 @@ class ThriftServerQueryTestSuite extends SQLQueryTestSuite { case _: PgSQLTest => // PostgreSQL enabled cartesian product by default. statement.execute(s"SET ${SQLConf.CROSS_JOINS_ENABLED.key} = true") - statement.execute(s"SET ${SQLConf.ANSI_SQL_PARSER.key} = true") + statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = true") statement.execute(s"SET ${SQLConf.PREFER_INTEGRAL_DIVISION.key} = true") case _ => } From e41fca3b1289b9fc84369ca6914fec824ef4183c Mon Sep 17 00:00:00 2001 From: Gengliang Wang Date: Thu, 5 Sep 2019 17:20:35 +0800 Subject: [PATCH 2/7] update comments --- .../org/apache/spark/sql/catalyst/expressions/Cast.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala index 1f89b64628a62..5001b4350d778 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala @@ -606,7 +606,7 @@ case class Cast(child: Expression, dataType: DataType, timeZoneId: Option[String * Change the precision / scale in a given decimal to those set in `decimalType` (if any), * modifying `value` in-place and returning it if successful. If an overflow occurs, it * either returns null or throws an exception according to the value set for - * `spark.sql.decimalOperations.nullOnOverflow`. + * `spark.sql.ansi.enabled`. * * NOTE: this modifies `value` in-place, so don't call it on external data. */ @@ -625,7 +625,7 @@ case class Cast(child: Expression, dataType: DataType, timeZoneId: Option[String /** * Create new `Decimal` with precision and scale given in `decimalType` (if any). - * If overflow occurs, if `spark.sql.decimalOperations.nullOnOverflow` is true, null is returned; + * If overflow occurs, if `spark.sql.ansi.enabled` is false, null is returned; * otherwise, an `ArithmeticException` is thrown. 
*/ private[this] def toPrecision(value: Decimal, decimalType: DecimalType): Decimal = From 63643dfd454a52396567afb6dc577905f7e3ae15 Mon Sep 17 00:00:00 2001 From: Gengliang Wang Date: Thu, 5 Sep 2019 18:20:01 +0800 Subject: [PATCH 3/7] fix sql files --- .../src/test/resources/sql-tests/inputs/ansi/interval.sql | 4 ++-- .../sql-tests/inputs/decimalArithmeticOperations.sql | 5 ++++- sql/core/src/test/resources/sql-tests/inputs/pgSQL/text.sql | 6 +++--- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/interval.sql b/sql/core/src/test/resources/sql-tests/inputs/ansi/interval.sql index f2f4b02c8634b..4a831855a091a 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/ansi/interval.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/ansi/interval.sql @@ -1,5 +1,5 @@ -- Turns on ANSI mode -SET spark.sql.parser.ansi.enabled=true; +SET spark.sql.ansi.enabled=true; select '1' second, @@ -185,4 +185,4 @@ select date '2012-01-01' + interval (a + 1) day from t; select date '2012-01-01' + (a + 1) day from t; -- Turns off ANSI mode -SET spark.sql.parser.ansi.enabled=false; +SET spark.sql.ansi.enabled=false; diff --git a/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql b/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql index 35f2be46cd130..0b90779b8ba5f 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql @@ -84,7 +84,7 @@ select 123456789123456789.1234567890 * 1.123456789123456789; select 12345678912345.123456789123 / 0.000000012345678; -- throw an exception instead of returning NULL, according to SQL ANSI 2011 -set spark.sql.decimalOperations.nullOnOverflow=false; +set spark.sql.parser.ansi.enabled=true; -- test operations between decimals and constants select id, a*10, b/10 from decimals_test order by id; @@ -108,3 +108,6 @@ select 123456789123456789.1234567890 * 1.123456789123456789; select 12345678912345.123456789123 / 0.000000012345678; drop table decimals_test; + +-- Turns off ANSI mode +set spark.sql.parser.ansi.enabled=false; diff --git a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/text.sql b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/text.sql index 04d3acc145e95..7abf903bc6bee 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/text.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/text.sql @@ -45,10 +45,10 @@ select concat_ws('',10,20,null,30); select concat_ws(NULL,10,20,null,30) is null; select reverse('abcde'); -- [SPARK-28036] Built-in udf left/right has inconsistent behavior --- [SPARK-28479] Parser error when enabling ANSI mode -set spark.sql.parser.ansi.enabled=false; +-- [SPARK-28479][SPARK-28989] Parser error when enabling ANSI mode +set spark.sql.ansi.enabled=false; select i, left('ahoj', i), right('ahoj', i) from range(-5, 6) t(i) order by i; -set spark.sql.parser.ansi.enabled=true; +set spark.sql.ansi.enabled=true; -- [SPARK-28037] Add built-in String Functions: quote_literal -- select quote_literal(''); -- select quote_literal('abc'''); From da014fb01051ecf20c76b13ae41fadd5ef60febd Mon Sep 17 00:00:00 2001 From: Gengliang Wang Date: Fri, 13 Sep 2019 23:52:24 +0800 Subject: [PATCH 4/7] update sql/sql.out --- .../sql-tests/inputs/decimalArithmeticOperations.sql | 4 ++-- .../src/test/resources/sql-tests/inputs/pgSQL/text.sql | 2 +- .../resources/sql-tests/results/ansi/interval.sql.out | 8 ++++---- 
.../sql-tests/results/decimalArithmeticOperations.sql.out | 4 ++-- .../test/resources/sql-tests/results/pgSQL/text.sql.out | 8 ++++---- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql b/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql index 0b90779b8ba5f..1db4e6fcbf566 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql @@ -84,7 +84,7 @@ select 123456789123456789.1234567890 * 1.123456789123456789; select 12345678912345.123456789123 / 0.000000012345678; -- throw an exception instead of returning NULL, according to SQL ANSI 2011 -set spark.sql.parser.ansi.enabled=true; +set spark.sql.ansi.enabled=true; -- test operations between decimals and constants select id, a*10, b/10 from decimals_test order by id; @@ -110,4 +110,4 @@ select 12345678912345.123456789123 / 0.000000012345678; drop table decimals_test; -- Turns off ANSI mode -set spark.sql.parser.ansi.enabled=false; +set spark.sql.ansi.enabled=false; diff --git a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/text.sql b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/text.sql index 7abf903bc6bee..77261a06a0638 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/text.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/text.sql @@ -134,4 +134,4 @@ select format_string('>>%1$10s<<', 'Hello'); -- select format_string('>>%2$*1$L<<', NULL, 'Hello'); -- select format_string('>>%2$*1$L<<', 0, 'Hello'); -DROP TABLE TEXT_TBL; +DROP TABLE TEXT_TBL; \ No newline at end of file diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index 13f72614f5778..43ad3c3f539f1 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -3,11 +3,11 @@ -- !query 0 -SET spark.sql.parser.ansi.enabled=true +SET spark.sql.ansi.enabled=true -- !query 0 schema struct -- !query 0 output -spark.sql.parser.ansi.enabled true +spark.sql.ansi.enabled true -- !query 1 @@ -432,8 +432,8 @@ select date '2012-01-01' + (a + 1) day from t -- !query 34 -SET spark.sql.parser.ansi.enabled=false +SET spark.sql.ansi.enabled=false -- !query 34 schema struct -- !query 34 output -spark.sql.parser.ansi.enabled false +spark.sql.ansi.enabled false diff --git a/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out index 217233bfad378..00e139d90f488 100644 --- a/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/decimalArithmeticOperations.sql.out @@ -328,11 +328,11 @@ NULL -- !query 39 -set spark.sql.decimalOperations.nullOnOverflow=false +set spark.sql.ansi.enabled=true -- !query 39 schema struct -- !query 39 output -spark.sql.decimalOperations.nullOnOverflow false +spark.sql.ansi.enabled true -- !query 40 diff --git a/sql/core/src/test/resources/sql-tests/results/pgSQL/text.sql.out b/sql/core/src/test/resources/sql-tests/results/pgSQL/text.sql.out index 352b0232e8945..2e1d639974ec6 100644 --- a/sql/core/src/test/resources/sql-tests/results/pgSQL/text.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/pgSQL/text.sql.out @@ -151,11 +151,11 
@@ edcba -- !query 18 -set spark.sql.parser.ansi.enabled=false +set spark.sql.ansi.enabled=false -- !query 18 schema struct -- !query 18 output -spark.sql.parser.ansi.enabled false +spark.sql.ansi.enabled false -- !query 19 @@ -177,11 +177,11 @@ struct -- !query 20 -set spark.sql.parser.ansi.enabled=true +set spark.sql.ansi.enabled=true -- !query 20 schema struct -- !query 20 output -spark.sql.parser.ansi.enabled true +spark.sql.ansi.enabled true -- !query 21 From 7c32fee85c93954fa671aebc333e4b30976c8469 Mon Sep 17 00:00:00 2001 From: Gengliang Wang Date: Wed, 18 Sep 2019 19:01:12 +0800 Subject: [PATCH 5/7] update --- .../main/scala/org/apache/spark/sql/internal/SQLConf.scala | 6 +++--- .../sql-tests/inputs/decimalArithmeticOperations.sql | 3 --- .../test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala | 6 ++---- 3 files changed, 5 insertions(+), 10 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index bbd731dbbf9c7..a11b862aba513 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -1708,10 +1708,10 @@ object SQLConf { .createOptional val ANSI_ENABLED = buildConf("spark.sql.ansi.enabled") - .doc("When true, tries to conform to the ANSI SQL specification. For example, Spark will " + + .doc("When true, Spark tries to conform to the ANSI SQL specification: 1. Spark will " + "throw an runtime exception if an overflow occurs in any operation on integral/decimal " + - "field. For another example, Spark will forbid using the reserved keywords of ANSI SQL " + - "as identifiers in the SQL parser.") + "field. 2. Spark will forbid using the reserved keywords of ANSI SQL as identifiers in " + + "the SQL parser.") .booleanConf .createWithDefault(false) diff --git a/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql b/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql index 1db4e6fcbf566..a946123f709c7 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/decimalArithmeticOperations.sql @@ -108,6 +108,3 @@ select 123456789123456789.1234567890 * 1.123456789123456789; select 12345678912345.123456789123 / 0.000000012345678; drop table decimals_test; - --- Turns off ANSI mode -set spark.sql.ansi.enabled=false; diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala index 10e2177d3ede0..abecaa7a9b16a 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala @@ -310,11 +310,9 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession { localSparkSession.conf.set(SQLConf.CROSS_JOINS_ENABLED.key, true) localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, true) localSparkSession.conf.set(SQLConf.PREFER_INTEGRAL_DIVISION.key, true) - localSparkSession.conf.set(SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key, true) - // Propagate the SQL conf FAIL_ON_INTEGRAL_TYPE_OVERFLOW to executor. + // Propagate the SQL conf ANSI_ENABLED to executor. // TODO: remove this after SPARK-29122 is resolved. 
- localSparkSession.sparkContext.setLocalProperty( - SQLConf.FAIL_ON_INTEGRAL_TYPE_OVERFLOW.key, "true") + localSparkSession.sparkContext.setLocalProperty(SQLConf.ANSI_ENABLED.key, "true") case _ => } From 342ce1cb432a41e22224aa36d1a2a5695526ebe4 Mon Sep 17 00:00:00 2001 From: Gengliang Wang Date: Wed, 18 Sep 2019 19:36:46 +0800 Subject: [PATCH 6/7] revise --- sql/core/src/test/resources/sql-tests/inputs/pgSQL/text.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/text.sql b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/text.sql index 77261a06a0638..7abf903bc6bee 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/pgSQL/text.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/pgSQL/text.sql @@ -134,4 +134,4 @@ select format_string('>>%1$10s<<', 'Hello'); -- select format_string('>>%2$*1$L<<', NULL, 'Hello'); -- select format_string('>>%2$*1$L<<', 0, 'Hello'); -DROP TABLE TEXT_TBL; \ No newline at end of file +DROP TABLE TEXT_TBL; From 3a0745595d2aa1964bc23bc1415b90a4c7d52d26 Mon Sep 17 00:00:00 2001 From: Xiao Li Date: Wed, 18 Sep 2019 22:29:08 -0700 Subject: [PATCH 7/7] a typo --- .../src/main/scala/org/apache/spark/sql/internal/SQLConf.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index a11b862aba513..0ec661fc16c88 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -1709,7 +1709,7 @@ object SQLConf { val ANSI_ENABLED = buildConf("spark.sql.ansi.enabled") .doc("When true, Spark tries to conform to the ANSI SQL specification: 1. Spark will " + - "throw an runtime exception if an overflow occurs in any operation on integral/decimal " + + "throw a runtime exception if an overflow occurs in any operation on integral/decimal " + "field. 2. Spark will forbid using the reserved keywords of ANSI SQL as identifiers in " + "the SQL parser.") .booleanConf
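
End to end, the runtime behavior the new flag switches can be sketched as follows (based on the test expectations in this series; exact error messages may differ):

SET spark.sql.ansi.enabled=false;        -- the default
SELECT 2147483647 + 1;                   -- integral overflow silently wraps to -2147483648
SELECT CAST('134.12' AS DECIMAL(3, 2));  -- decimal overflow returns NULL
SET spark.sql.ansi.enabled=true;
SELECT 2147483647 + 1;                   -- throws an ArithmeticException on overflow
SELECT CAST('134.12' AS DECIMAL(3, 2));  -- throws instead of returning NULL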