diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/PostgreSQLDialect.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/PostgreSQLDialect.scala
index e7f0e571804d..c87fce0f3efb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/PostgreSQLDialect.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/PostgreSQLDialect.scala
@@ -19,15 +19,15 @@ package org.apache.spark.sql.catalyst.analysis
 
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.catalyst.expressions.Cast
-import org.apache.spark.sql.catalyst.expressions.postgreSQL.PostgreCastToBoolean
+import org.apache.spark.sql.catalyst.expressions.postgreSQL.{PostgreCastToBoolean, PostgreCastToFloat}
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.types.{BooleanType, StringType}
+import org.apache.spark.sql.types.{BooleanType, FloatType, StringType}
 
 object PostgreSQLDialect {
   val postgreSQLDialectRules: List[Rule[LogicalPlan]] =
-    CastToBoolean ::
+    CastToBoolean :: CastToFloat ::
       Nil
 
   object CastToBoolean extends Rule[LogicalPlan] with Logging {
@@ -46,4 +46,21 @@ object PostgreSQLDialect {
       }
     }
   }
+
+  object CastToFloat extends Rule[LogicalPlan] with Logging {
+    override def apply(plan: LogicalPlan): LogicalPlan = {
+      // The SQL configuration `spark.sql.dialect` can be changed in runtime.
+      // To make sure the configuration is effective, we have to check it during rule execution.
+      val conf = SQLConf.get
+      if (conf.usePostgreSQLDialect) {
+        plan.transformExpressions {
+          case Cast(child, dataType, timeZoneId)
+            if child.dataType != FloatType && dataType == FloatType =>
+            PostgreCastToFloat(child, timeZoneId)
+        }
+      } else {
+        plan
+      }
+    }
+  }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index f3b58fa3137b..ced9bfb3e1c2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -666,7 +666,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
   }
 
   // FloatConverter
-  private[this] def castToFloat(from: DataType): Any => Any = from match {
+  protected[this] def castToFloat(from: DataType): Any => Any = from match {
     case StringType =>
       buildCast[UTF8String](_, s => {
         val floatStr = s.toString
@@ -1451,7 +1451,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
     (c, evPrim, evNull) => code"$evPrim = (long) $c;"
   }
 
-  private[this] def castToFloatCode(from: DataType, ctx: CodegenContext): CastFunction = {
+  protected[this] def castToFloatCode(from: DataType, ctx: CodegenContext): CastFunction = {
     from match {
       case StringType =>
         val floatStr = ctx.freshVariable("floatStr", StringType)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/postgreSQL/PostgreCastToFloat.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/postgreSQL/PostgreCastToFloat.scala
new file mode 100644
index 000000000000..fd958bddb554
--- /dev/null
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/postgreSQL/PostgreCastToFloat.scala
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.catalyst.expressions.postgreSQL
+
+import java.util.Locale
+
+import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
+import org.apache.spark.sql.catalyst.expressions.{CastBase, Expression, TimeZoneAwareExpression}
+import org.apache.spark.sql.catalyst.expressions.codegen.Block._
+import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext
+import org.apache.spark.sql.types._
+import org.apache.spark.unsafe.types.UTF8String
+
+/**
+ * A cast to `FloatType` with PostgreSQL semantics for string input: besides regular float
+ * literals it accepts the special values `inf`, `+inf`, `infinity`, `+infinity`, `-inf`,
+ * `-infinity` and `nan` (case-insensitive), and it throws `AnalysisException` for any other
+ * malformed string instead of evaluating to null.
+ */
+case class PostgreCastToFloat(child: Expression, timeZoneId: Option[String])
+  extends CastBase {
+
+  override def dataType: DataType = FloatType
+
+  override def toString: String = s"PostgreCastToFloat($child as ${dataType.simpleString})"
+
+  override def nullable: Boolean = child.nullable
+
+  // The PostgreSQL dialect has its own failure semantics; the ANSI flag must never be consulted.
+  override protected def ansiEnabled =
+    throw new UnsupportedOperationException("PostgreSQL dialect doesn't support ansi mode")
+
+  override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
+    copy(timeZoneId = Option(timeZoneId))
+
+  // Strings, integral/fractional numerics (including decimals) and NULL can be cast to float.
+  override def checkInputDataTypes(): TypeCheckResult = child.dataType match {
+    case StringType | LongType | IntegerType | NullType |
+        ShortType | DoubleType | ByteType =>
+      TypeCheckResult.TypeCheckSuccess
+    case _: DecimalType => TypeCheckResult.TypeCheckSuccess
+    case _ =>
+      TypeCheckResult.TypeCheckFailure(s"cannot cast type ${child.dataType} to float")
+  }
+
+  override def castToFloat(from: DataType): Any => Any = from match {
+    case StringType =>
+      buildCast[UTF8String](_, s => {
+        val floatStr = s.toString
+        try floatStr.toFloat catch {
+          case _: NumberFormatException =>
+            PostgreCastToFloat.processFloatingPointSpecialLiterals(floatStr, isFloat = true)
+        }
+      })
+    // DecimalType is a NumericType, so this branch also covers decimals.
+    case _: NumericType =>
+      super.castToFloat(from)
+  }
+
+  override def castToFloatCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
+    case StringType =>
+      val floatStr = ctx.freshVariable("floatStr", StringType)
+      // The helper lives in the companion object so the generated Java can reach it through the
+      // static forwarder on the `PostgreCastToFloat` class. It either returns the boxed float for
+      // a special literal or throws AnalysisException itself, hence no null check is needed.
+      (c, evPrim, evNull) =>
+        code"""
+          final String $floatStr = $c.toString();
+          try {
+            $evPrim = Float.valueOf($floatStr);
+          } catch (java.lang.NumberFormatException e) {
+            $evPrim = ((Float) org.apache.spark.sql.catalyst.expressions.postgreSQL.PostgreCastToFloat.processFloatingPointSpecialLiterals($floatStr, true)).floatValue();
+          }
+        """
+    case _: NumericType =>
+      super.castToFloatCode(from, ctx)
+  }
+
+  override def sql: String = s"CAST(${child.sql} AS ${dataType.sql})"
+}
+
+object PostgreCastToFloat {
+
+  /**
+   * Maps PostgreSQL's special floating point literals (`inf`, `infinity`, `nan`, ...) onto the
+   * corresponding `Float`/`Double` value, or throws `AnalysisException` when `v` is not one of
+   * them, mirroring PostgreSQL's "invalid input syntax" error.
+   */
+  def processFloatingPointSpecialLiterals(v: String, isFloat: Boolean): Any = {
+    v.trim.toLowerCase(Locale.ROOT) match {
+      case "inf" | "+inf" | "infinity" | "+infinity" =>
+        if (isFloat) Float.PositiveInfinity else Double.PositiveInfinity
+      case "-inf" | "-infinity" =>
+        if (isFloat) Float.NegativeInfinity else Double.NegativeInfinity
+      case "nan" =>
+        if (isFloat) Float.NaN else Double.NaN
+      case _ =>
+        throw new AnalysisException(s"invalid input syntax for type double precision: $v")
+    }
+  }
+}