Skip to content
Closed
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -262,6 +262,7 @@ object FunctionRegistry {
expression[Tan]("tan"),
expression[Cot]("cot"),
expression[Tanh]("tanh"),
expression[Truncate]("truncate"),

expression[Add]("+"),
expression[Subtract]("-"),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.util.NumberConverter
import org.apache.spark.sql.catalyst.util.{MathUtils, NumberConverter}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

Expand Down Expand Up @@ -1245,3 +1245,80 @@ case class BRound(child: Expression, scale: Expression)
with Serializable with ImplicitCastInputTypes {
def this(child: Expression) = this(child, Literal(0))
}

/**
 * The number truncated to scale decimal places.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(number, scale) - Returns number truncated to scale decimal places. " +
    "If scale is omitted, then number is truncated to 0 places. " +
    "scale can be negative to truncate (make zero) scale digits left of the decimal point.",
  examples = """
    Examples:
      > SELECT _FUNC_(1234567891.1234567891, 4);
       1234567891.1234
      > SELECT _FUNC_(1234567891.1234567891, -4);
       1234560000
      > SELECT _FUNC_(1234567891.1234567891);
       1234567891
  """)
// scalastyle:on line.size.limit
case class Truncate(number: Expression, scale: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {

  // Single-argument form: truncate to 0 decimal places.
  def this(number: Expression) = this(number, Literal(0))

  override def left: Expression = number
  override def right: Expression = scale

  // The number may be double/float/decimal; the scale must be an integer.
  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(DoubleType, FloatType, DecimalType), IntegerType)

  // Same as RoundBase: only a foldable scale is supported, because the scale is
  // evaluated once up front (see scaleV) and reused for every row.
  override def checkInputDataTypes(): TypeCheckResult = {
    super.checkInputDataTypes() match {
      case TypeCheckSuccess =>
        if (scale.foldable) {
          TypeCheckSuccess
        } else {
          TypeCheckFailure("Only foldable Expression is allowed for scale arguments")
        }
      case f => f
    }
  }

  override def dataType: DataType = left.dataType
  override def nullable: Boolean = true
  override def prettyName: String = "truncate"

  // Scale is evaluated once; checkInputDataTypes guarantees it is foldable.
  private lazy val scaleV: Any = scale.eval(EmptyRow)
  private lazy val _scale: Int = scaleV.asInstanceOf[Int]

  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    number.dataType match {
      case DoubleType => MathUtils.trunc(input1.asInstanceOf[Double], _scale)
      case FloatType => MathUtils.trunc(input1.asInstanceOf[Float], _scale)
      case DecimalType.Fixed(_, _) => MathUtils.trunc(input1.asInstanceOf[Decimal], _scale)
    }
  }

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Fully-qualified Java name of the MathUtils singleton for generated code.
    val mu = MathUtils.getClass.getName.stripSuffix("$")

    val javaType = CodeGenerator.javaType(dataType)
    if (scaleV == null) { // if scale is null, no need to eval its child at all
      ev.copy(code = code"""
        boolean ${ev.isNull} = true;
        $javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};""")
    } else {
      val d = number.genCode(ctx)
      ev.copy(code = code"""
        ${d.code}
        boolean ${ev.isNull} = ${d.isNull};
        $javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
        if (!${ev.isNull}) {
          ${ev.value} = $mu.trunc(${d.value}, ${_scale});
        }""")
    }
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.util

import java.math.{BigDecimal => JBigDecimal}

import org.apache.spark.sql.types.Decimal

/**
 * Math helpers shared by SQL expressions (currently the `truncate` function).
 */
object MathUtils {

  /**
   * Returns the double input truncated to `scale` decimal places.
   */
  def trunc(input: Double, scale: Int): Double = {
    trunc(JBigDecimal.valueOf(input), scale).toDouble
  }

  /**
   * Returns the float input truncated to `scale` decimal places.
   */
  def trunc(input: Float, scale: Int): Float = {
    trunc(JBigDecimal.valueOf(input), scale).toFloat
  }

  /**
   * Returns the decimal input truncated to `scale` decimal places.
   */
  def trunc(input: Decimal, scale: Int): Decimal = {
    trunc(input.toJavaBigDecimal, scale)
  }

  /**
   * Returns the `java.math.BigDecimal` input truncated to `scale` decimal places.
   *
   * A positive scale keeps `scale` digits after the decimal point; a zero scale
   * drops the fraction; a negative scale zeroes out `|scale|` digits to the left
   * of the decimal point.
   */
  def trunc(input: JBigDecimal, scale: Int): Decimal = {
    // Copy from (https://github.com/apache/hive/blob/release-2.3.0-rc0
    // /ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java#L471-L487)
    // pow = 10^|scale|, used to shift the decimal point before dropping digits.
    val pow = if (scale >= 0) {
      JBigDecimal.valueOf(Math.pow(10, scale))
    } else {
      JBigDecimal.valueOf(Math.pow(10, Math.abs(scale)))
    }

    // NOTE(review): longValue() silently wraps for magnitudes beyond Long range;
    // this mirrors the Hive implementation — confirm that is acceptable here.
    val truncatedValue = if (scale > 0) {
      val longValue = input.multiply(pow).longValue()
      JBigDecimal.valueOf(longValue).divide(pow)
    } else if (scale == 0) {
      JBigDecimal.valueOf(input.longValue())
    } else {
      val longValue = input.divide(pow).longValue()
      JBigDecimal.valueOf(longValue).multiply(pow)
    }

    Decimal(truncatedValue)
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -644,4 +644,56 @@ class MathExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
checkEvaluation(BRound(-0.35, 1), -0.4)
checkEvaluation(BRound(-35, -1), -40)
}

test("Truncate number") {
  // The scale argument must be foldable; a non-foldable scale is rejected.
  assert(Truncate(Literal.create(123.123, DoubleType),
    NonFoldableLiteral.create(1, IntegerType)).checkInputDataTypes().isFailure)
  assert(Truncate(Literal.create(123.123, DoubleType),
    Literal.create(1, IntegerType)).checkInputDataTypes().isSuccess)

  // Evaluate Truncate over a DoubleType input and compare to the expected value.
  def testDouble(input: Any, scale: Any, expected: Any): Unit = {
    checkEvaluation(Truncate(Literal.create(input, DoubleType),
      Literal.create(scale, IntegerType)),
      expected)
  }

  // Evaluate Truncate over a FloatType input and compare to the expected value.
  def testFloat(input: Any, scale: Any, expected: Any): Unit = {
    checkEvaluation(Truncate(Literal.create(input, FloatType),
      Literal.create(scale, IntegerType)),
      expected)
  }

  // Evaluate Truncate over a DecimalType input and compare to the expected value.
  def testDecimal(input: Any, scale: Any, expected: Any): Unit = {
    checkEvaluation(Truncate(Literal.create(input, DecimalType.DoubleDecimal),
      Literal.create(scale, IntegerType)),
      expected)
  }

  testDouble(1234567891.1234567891D, 4, 1234567891.1234D)
  testDouble(1234567891.1234567891D, -4, 1234560000D)
  testDouble(1234567891.1234567891D, 0, 1234567891D)
  testDouble(0.123D, -1, 0D)
  testDouble(0.123D, 0, 0D)
  testDouble(null, null, null)
  testDouble(null, 0, null)
  testDouble(1D, null, null)

  testFloat(1234567891.1234567891F, 4, 1234567891.1234F)
  testFloat(1234567891.1234567891F, -4, 1234560000F)
  testFloat(1234567891.1234567891F, 0, 1234567891F)
  testFloat(0.123F, -1, 0F)
  testFloat(0.123F, 0, 0F)
  testFloat(null, null, null)
  testFloat(null, 0, null)
  // Was `1D`: a Double literal is the wrong input type for FloatType.
  testFloat(1F, null, null)

  testDecimal(Decimal(1234567891.1234567891), 4, Decimal(1234567891.1234))
  testDecimal(Decimal(1234567891.1234567891), -4, Decimal(1234560000))
  testDecimal(Decimal(1234567891.1234567891), 0, Decimal(1234567891))
  testDecimal(Decimal(0.123), -1, Decimal(0))
  testDecimal(Decimal(0.123), 0, Decimal(0))
  testDecimal(null, null, null)
  testDecimal(null, 0, null)
  // Was `1D`: a Double literal is the wrong input type for DecimalType.
  testDecimal(Decimal(1), null, null)
}
}
20 changes: 20 additions & 0 deletions sql/core/src/main/scala/org/apache/spark/sql/functions.scala
Original file line number Diff line number Diff line change
Expand Up @@ -2214,6 +2214,26 @@ object functions {
*/
def radians(columnName: String): Column = radians(Column(columnName))

/**
 * Returns the value of the column `e` truncated to 0 decimal places.
 *
 * @group math_funcs
 * @since 2.5.0
 */
def truncate(e: Column): Column = truncate(e, 0)

/**
 * Returns the value of column `e` truncated to the unit specified by the scale.
 * If scale is omitted, then the value of column `e` is truncated to 0 places.
 * Scale can be negative to truncate (make zero) scale digits left of the decimal point.
 *
 * @group math_funcs
 * @since 2.5.0
 */
def truncate(e: Column, scale: Int): Column = withExpr {
  Truncate(e.expr, Literal(scale))
}


//////////////////////////////////////////////////////////////////////////////////////////////
// Misc functions
//////////////////////////////////////////////////////////////////////////////////////////////
Expand Down
7 changes: 7 additions & 0 deletions sql/core/src/test/resources/sql-tests/inputs/operators.sql
Original file line number Diff line number Diff line change
Expand Up @@ -96,3 +96,10 @@ select positive('-1.11'), positive(-1.11), negative('-1.11'), negative(-1.11);
-- pmod
select pmod(-7, 2), pmod(0, 2), pmod(7, 0), pmod(7, null), pmod(null, 2), pmod(null, null);
select pmod(cast(3.13 as decimal), cast(0 as decimal)), pmod(cast(2 as smallint), cast(0 as smallint));

-- truncate
-- positive, zero and negative scale on a plain decimal literal
select truncate(1234567891.1234567891, -4), truncate(1234567891.1234567891, 0), truncate(1234567891.1234567891, 4);
-- same scales after an explicit cast to decimal (fraction already dropped by the cast)
select truncate(cast(1234567891.1234567891 as decimal), -4), truncate(cast(1234567891.1234567891 as decimal), 0), truncate(cast(1234567891.1234567891 as decimal), 4);
-- long input: implicitly cast to double by the function's input types
select truncate(cast(1234567891.1234567891 as long), -4), truncate(cast(1234567891.1234567891 as long), 0), truncate(cast(1234567891.1234567891 as long), 4);
-- non-integer scale: implicitly cast to int
select truncate(cast(1234567891.1234567891 as long), 9.03);
-- single-argument form: defaults to scale 0
select truncate(cast(1234567891.1234567891 as double)), truncate(cast(1234567891.1234567891 as float)), truncate(cast(1234567891.1234567891 as decimal));
42 changes: 41 additions & 1 deletion sql/core/src/test/resources/sql-tests/results/operators.sql.out
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
-- Number of queries: 59
-- Number of queries: 64


-- !query 0
Expand Down Expand Up @@ -484,3 +484,43 @@ select pmod(cast(3.13 as decimal), cast(0 as decimal)), pmod(cast(2 as smallint)
struct<pmod(CAST(3.13 AS DECIMAL(10,0)), CAST(0 AS DECIMAL(10,0))):decimal(10,0),pmod(CAST(2 AS SMALLINT), CAST(0 AS SMALLINT)):smallint>
-- !query 58 output
NULL NULL


-- !query 59
select truncate(1234567891.1234567891, -4), truncate(1234567891.1234567891, 0), truncate(1234567891.1234567891, 4)
-- !query 59 schema
struct<truncate(1234567891.1234567891, -4):decimal(20,10),truncate(1234567891.1234567891, 0):decimal(20,10),truncate(1234567891.1234567891, 4):decimal(20,10)>
-- !query 59 output
1234560000 1234567891 1234567891.1234


-- !query 60
select truncate(cast(1234567891.1234567891 as decimal), -4), truncate(cast(1234567891.1234567891 as decimal), 0), truncate(cast(1234567891.1234567891 as decimal), 4)
-- !query 60 schema
struct<truncate(CAST(1234567891.1234567891 AS DECIMAL(10,0)), -4):decimal(10,0),truncate(CAST(1234567891.1234567891 AS DECIMAL(10,0)), 0):decimal(10,0),truncate(CAST(1234567891.1234567891 AS DECIMAL(10,0)), 4):decimal(10,0)>
-- !query 60 output
1234560000 1234567891 1234567891


-- !query 61
select truncate(cast(1234567891.1234567891 as long), -4), truncate(cast(1234567891.1234567891 as long), 0), truncate(cast(1234567891.1234567891 as long), 4)
-- !query 61 schema
struct<truncate(CAST(CAST(1234567891.1234567891 AS BIGINT) AS DOUBLE), -4):double,truncate(CAST(CAST(1234567891.1234567891 AS BIGINT) AS DOUBLE), 0):double,truncate(CAST(CAST(1234567891.1234567891 AS BIGINT) AS DOUBLE), 4):double>
-- !query 61 output
1.23456E9 1.234567891E9 1.234567891E9


-- !query 62
select truncate(cast(1234567891.1234567891 as long), 9.03)
-- !query 62 schema
struct<truncate(CAST(CAST(1234567891.1234567891 AS BIGINT) AS DOUBLE), CAST(9.03 AS INT)):double>
-- !query 62 output
1.234567891E9


-- !query 63
select truncate(cast(1234567891.1234567891 as double)), truncate(cast(1234567891.1234567891 as float)), truncate(cast(1234567891.1234567891 as decimal))
-- !query 63 schema
struct<truncate(CAST(1234567891.1234567891 AS DOUBLE), 0):double,truncate(CAST(1234567891.1234567891 AS FLOAT), 0):float,truncate(CAST(1234567891.1234567891 AS DECIMAL(10,0)), 0):decimal(10,0)>
-- !query 63 output
1.234567891E9 1.23456794E9 1234567891