-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-23939][SQL] Add transform_keys function #22013
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 13 commits
0a19cc4
12e0259
5806ac4
150a6a5
9f6a8ab
6526630
f7fd231
1cbaf0c
bb52630
621213d
5db526b
e5d9b05
fb885f4
58b60b2
2f4943f
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -497,6 +497,62 @@ case class ArrayAggregate( | |
| override def prettyName: String = "aggregate" | ||
| } | ||
|
|
||
/**
 * Transforms the keys of a map by applying the given lambda function to every
 * (key, value) entry. Values are carried over to the result unchanged.
 *
 * The lambda receives both the key and the value; its result becomes the new key.
 * Evaluating the lambda to null raises a RuntimeException, since map keys must
 * never be null.
 */
@ExpressionDescription(
  usage = "_FUNC_(expr, func) - Transforms elements in a map using the function.",
  examples = """
    Examples:
      > SELECT _FUNC_(map_from_arrays(array(1, 2, 3), array(1, 2, 3)), (k, v) -> k + 1);
       map(2 -> 1, 3 -> 2, 4 -> 3)
      > SELECT _FUNC_(map_from_arrays(array(1, 2, 3), array(1, 2, 3)), (k, v) -> k + v);
       map(2 -> 1, 4 -> 2, 6 -> 3)
  """,
  since = "2.4.0")
case class TransformKeys(
    argument: Expression,
    function: Expression)
  extends MapBasedSimpleHigherOrderFunction with CodegenFallback {

  // Null only when the input map itself is null; the lambda cannot introduce
  // nullability because a null key is rejected at evaluation time.
  override def nullable: Boolean = argument.nullable

  @transient lazy val MapType(keyType, valueType, valueContainsNull) = argument.dataType

  // The key type follows the lambda's result type; value type/nullability are unchanged.
  override def dataType: DataType = MapType(function.dataType, valueType, valueContainsNull)

  override def bind(f: (Expression, Seq[(DataType, Boolean)]) => LambdaFunction): TransformKeys = {
    // Map keys are never null, hence `false` for the key variable's nullability.
    copy(function = f(function, (keyType, false) :: (valueType, valueContainsNull) :: Nil))
  }

  @transient lazy val LambdaFunction(
    _, (keyVar: NamedLambdaVariable) :: (valueVar: NamedLambdaVariable) :: Nil, _) = function

  override def nullSafeEval(inputRow: InternalRow, argumentValue: Any): Any = {
    val map = argumentValue.asInstanceOf[MapData]
    val f = functionForEval

    val resultKeys = new GenericArrayData(new Array[Any](map.numElements))
    var i = 0
    while (i < map.numElements) {
      keyVar.value.set(map.keyArray().get(i, keyVar.dataType))
      valueVar.value.set(map.valueArray().get(i, valueVar.dataType))
      val result = f.eval(inputRow)
      if (result == null) {
        // Fail fast instead of producing a corrupt map with a null key.
        throw new RuntimeException("Cannot use null as map key!")
      }
      resultKeys.update(i, result)
      i += 1
    }
    // NOTE(review): transformed keys may collide (e.g. k -> k % 2); no
    // deduplication is performed here — confirm intended duplicate-key semantics.
    new ArrayBasedMapData(resultKeys, map.valueArray())
  }

  override def prettyName: String = "transform_keys"
}
|
|
||
| /** | ||
| * Merges two given maps into a single map by applying function to the pair of values with | ||
| * the same key. | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -2302,6 +2302,100 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSQLContext { | |
| assert(ex5.getMessage.contains("function map_zip_with does not support ordering on type map")) | ||
| } | ||
|
|
||
| test("transform keys function - primitive data types") { | ||
| val dfExample1 = Seq( | ||
| Map[Int, Int](1 -> 1, 9 -> 9, 8 -> 8, 7 -> 7) | ||
| ).toDF("i") | ||
|
|
||
| val dfExample2 = Seq( | ||
| Map[Int, Double](1 -> 1.0, 2 -> 1.40, 3 -> 1.70) | ||
| ).toDF("j") | ||
|
|
||
| val dfExample3 = Seq( | ||
| Map[Int, Boolean](25 -> true, 26 -> false) | ||
| ).toDF("x") | ||
|
|
||
| val dfExample4 = Seq( | ||
| Map[Array[Int], Boolean](Array(1, 2) -> false) | ||
| ).toDF("y") | ||
|
|
||
|
|
||
| def testMapOfPrimitiveTypesCombination(): Unit = { | ||
| checkAnswer(dfExample1.selectExpr("transform_keys(i, (k, v) -> k + v)"), | ||
| Seq(Row(Map(2 -> 1, 18 -> 9, 16 -> 8, 14 -> 7)))) | ||
|
|
||
| checkAnswer(dfExample2.selectExpr("transform_keys(j, " + | ||
| "(k, v) -> map_from_arrays(ARRAY(1, 2, 3), ARRAY('one', 'two', 'three'))[k])"), | ||
| Seq(Row(Map("one" -> 1.0, "two" -> 1.4, "three" -> 1.7)))) | ||
|
|
||
| checkAnswer(dfExample2.selectExpr("transform_keys(j, (k, v) -> CAST(v * 2 AS BIGINT) + k)"), | ||
| Seq(Row(Map(3 -> 1.0, 4 -> 1.4, 6 -> 1.7)))) | ||
|
|
||
| checkAnswer(dfExample2.selectExpr("transform_keys(j, (k, v) -> k + v)"), | ||
| Seq(Row(Map(2.0 -> 1.0, 3.4 -> 1.4, 4.7 -> 1.7)))) | ||
|
|
||
| checkAnswer(dfExample3.selectExpr("transform_keys(x, (k, v) -> k % 2 = 0 OR v)"), | ||
| Seq(Row(Map(true -> true, true -> false)))) | ||
|
|
||
| checkAnswer(dfExample3.selectExpr("transform_keys(x, (k, v) -> if(v, 2 * k, 3 * k))"), | ||
| Seq(Row(Map(50 -> true, 78 -> false)))) | ||
|
|
||
| checkAnswer(dfExample3.selectExpr("transform_keys(x, (k, v) -> if(v, 2 * k, 3 * k))"), | ||
| Seq(Row(Map(50 -> true, 78 -> false)))) | ||
|
|
||
| checkAnswer(dfExample4.selectExpr("transform_keys(y, (k, v) -> array_contains(k, 3) AND v)"), | ||
| Seq(Row(Map(false -> false)))) | ||
| } | ||
| // Test with local relation, the Project will be evaluated without codegen | ||
| testMapOfPrimitiveTypesCombination() | ||
| dfExample1.cache() | ||
| dfExample2.cache() | ||
| dfExample3.cache() | ||
| dfExample4.cache() | ||
| // Test with cached relation, the Project will be evaluated with codegen | ||
| testMapOfPrimitiveTypesCombination() | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Do we have do that if the expression implements |
||
| } | ||
|
|
||
| test("transform keys function - Invalid lambda functions and exceptions") { | ||
|
|
||
| val dfExample1 = Seq( | ||
| Map[String, String]("a" -> null) | ||
| ).toDF("i") | ||
|
|
||
| val dfExample2 = Seq( | ||
| Seq(1, 2, 3, 4) | ||
| ).toDF("j") | ||
|
|
||
| def testInvalidLambdaFunctions(): Unit = { | ||
| val ex1 = intercept[AnalysisException] { | ||
| dfExample1.selectExpr("transform_keys(i, k -> k)") | ||
| } | ||
| assert(ex1.getMessage.contains("The number of lambda function arguments '1' does not match")) | ||
|
|
||
| val ex2 = intercept[AnalysisException] { | ||
| dfExample1.selectExpr("transform_keys(i, (k, v, x) -> k + 1)") | ||
| } | ||
| assert(ex2.getMessage.contains( | ||
| "The number of lambda function arguments '3' does not match")) | ||
|
|
||
| val ex3 = intercept[RuntimeException] { | ||
| dfExample1.selectExpr("transform_keys(i, (k, v) -> v)").show() | ||
| } | ||
| assert(ex3.getMessage.contains("Cannot use null as map key!")) | ||
|
||
|
|
||
| val ex4 = intercept[AnalysisException] { | ||
| dfExample2.selectExpr("transform_keys(j, (k, v) -> k + 1)") | ||
| } | ||
| assert(ex4.getMessage.contains( | ||
| "data type mismatch: argument 1 requires map type")) | ||
| } | ||
|
|
||
| testInvalidLambdaFunctions() | ||
| dfExample1.cache() | ||
| dfExample2.cache() | ||
| testInvalidLambdaFunctions() | ||
|
||
| } | ||
|
|
||
| private def assertValuesDoNotChangeAfterCoalesceOrUnion(v: Column): Unit = { | ||
| import DataFrameFunctionsSuite.CodegenFallbackExpr | ||
| for ((codegenFallback, wholeStage) <- Seq((true, false), (false, false), (false, true))) { | ||
|
|
||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.

I think this can be moved to `SimpleHigherOrderFunction`.

There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.

Makes sense.
Let's have wrap-up prs for higher-order functions after the remaining 2 prs are merged.