5 changes: 5 additions & 0 deletions core/src/main/resources/error/error-classes.json
@@ -484,6 +484,11 @@
"Failed to execute user defined function (<functionName>: (<signature>) => <result>)"
]
    },
+   "FAILED_FUNCTION_CALL" : {
+     "message" : [
+       "Failed preparing of the function <funcName> for call. Please, double check function's arguments."
+     ]
+   },
"FAILED_RENAME_PATH" : {
"message" : [
"Failed to rename <sourcePath> to <targetPath> as destination already exists"
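
Error classes in error-classes.json are message templates: each `<param>` placeholder is filled from the `messageParameters` supplied at the throw site. Below is a minimal sketch of that substitution — illustrative only, not Spark's actual `SparkThrowableHelper` implementation:

```scala
// Minimal sketch (not Spark's implementation) of how an entry like
// FAILED_FUNCTION_CALL turns into a user-facing message.
object ErrorTemplateDemo extends App {
  // Template copied from the FAILED_FUNCTION_CALL entry above.
  val template =
    "Failed preparing of the function <funcName> for call. " +
      "Please, double check function's arguments."

  // Replace each <param> placeholder with its value from the map.
  def render(template: String, params: Map[String, String]): String =
    params.foldLeft(template) { case (msg, (key, value)) =>
      msg.replace(s"<$key>", value)
    }

  println(render(template, Map("funcName" -> "`my_udf`")))
  // Failed preparing of the function `my_udf` for call. ...
}
```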
@@ -23,7 +23,6 @@ import javax.annotation.concurrent.GuardedBy
import scala.collection.mutable
import scala.reflect.ClassTag

-import org.apache.spark.SparkThrowable
import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.FunctionIdentifier
@@ -132,11 +131,7 @@ object FunctionRegistryBase {
} catch {
// the exception is an invocation exception. To get a meaningful message, we need the
// cause.
-      case e: Exception =>
-        throw e.getCause match {
-          case ae: SparkThrowable => ae
-          case _ => new AnalysisException(e.getCause.getMessage)
-        }
+      case e: Exception => throw QueryCompilationErrors.funcBuildError(name, e)
}
} else {
// Otherwise, find a constructor method that matches the number of arguments, and use that.
@@ -153,7 +148,7 @@
} catch {
// the exception is an invocation exception. To get a meaningful message, we need the
// cause.
-      case e: Exception => throw new AnalysisException(e.getCause.getMessage)
+      case e: Exception => throw QueryCompilationErrors.funcBuildError(name, e)
}
}
}
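
The "invocation exception" comments above refer to reflective construction: the function builder is instantiated through a constructor looked up by reflection, so any failure surfaces as a `java.lang.reflect.InvocationTargetException` whose own message is useless. A standalone sketch (not Spark code) of why the meaningful message lives on `getCause`:

```scala
import java.lang.reflect.InvocationTargetException

// Stand-in for a function builder whose constructor rejects its arguments.
class Fails {
  throw new IllegalArgumentException("bad argument")
}

object InvocationDemo extends App {
  try {
    classOf[Fails].getConstructor().newInstance()
  } catch {
    case e: InvocationTargetException =>
      // The reflective wrapper's own message is null; the real message
      // lives on the cause -- hence the getCause unwrapping above.
      println(e.getCause.getMessage) // prints: bad argument
  }
}
```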
@@ -21,7 +21,7 @@ import scala.collection.mutable

import org.apache.hadoop.fs.Path

-import org.apache.spark.SparkThrowableHelper
+import org.apache.spark.{SparkThrowable, SparkThrowableHelper}
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.{FunctionIdentifier, QualifiedTableName, TableIdentifier}
import org.apache.spark.sql.catalyst.analysis.{CannotReplaceMissingTableException, FunctionAlreadyExistsException, NamespaceAlreadyExistsException, NoSuchFunctionException, NoSuchNamespaceException, NoSuchPartitionException, NoSuchTableException, ResolvedTable, Star, TableAlreadyExistsException, UnresolvedRegex}
@@ -3393,4 +3393,15 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
"unsupported" -> unsupported.toString,
"class" -> unsupported.getClass.toString))
}

+  def funcBuildError(funcName: String, cause: Exception): Throwable = {
+    cause.getCause match {
+      case st: SparkThrowable with Throwable => st
@MaxGekk (Member, Author) commented on Nov 21, 2022:
If a Spark exception (a SparkThrowable) happens during preparation of a function call, we just propagate it to users as is; otherwise (something we didn't catch) we wrap it in AnalysisException(errorClass = FAILED_FUNCTION_CALL).

+      case other =>
+        new AnalysisException(
+          errorClass = "FAILED_FUNCTION_CALL",
+          messageParameters = Map("funcName" -> toSQLId(funcName)),
+          cause = Option(other))
+    }
+  }
}
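
A self-contained analogue of the dispatch in `funcBuildError`, with the Spark types replaced by stand-ins — `SparkThrowableLike` and `WrappedError` are invented for this sketch, not Spark's actual API:

```scala
object BuildErrorDemo {
  // Stand-ins invented for this sketch; not Spark's actual types.
  trait SparkThrowableLike
  class SparkError(msg: String) extends Exception(msg) with SparkThrowableLike
  class WrappedError(funcName: String, cause: Throwable)
    extends Exception(s"[FAILED_FUNCTION_CALL] function: `$funcName`", cause)

  def buildError(funcName: String, e: Exception): Throwable =
    e.getCause match {
      // A Spark-style error raised during preparation is propagated as is.
      case st: SparkThrowableLike with Throwable => st
      // Anything else is wrapped, keeping the original as the cause.
      case other => new WrappedError(funcName, other)
    }

  def main(args: Array[String]): Unit = {
    assert(buildError("f", new Exception(new SparkError("boom")))
      .isInstanceOf[SparkError])
    assert(buildError("f", new Exception(new RuntimeException("boom")))
      .isInstanceOf[WrappedError])
  }
}
```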
@@ -1609,7 +1609,21 @@ select to_binary('abc', 1)
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1100",
+  "messageParameters" : {
+    "argName" : "format",
+    "funcName" : "to_binary",
+    "requiredType" : "string"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 26,
+    "fragment" : "to_binary('abc', 1)"
+  } ]
+}
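
With this change the golden files record errors in a structured JSON form instead of a flat message. Judging by the numbers above, `startIndex` and `stopIndex` in `queryContext` are 1-based, inclusive character offsets into the original query, so the fragment can be recovered directly — a sketch under that assumption:

```scala
// Sketch: recovering "fragment" from a queryContext entry, assuming (as
// the numbers above suggest) 1-based, inclusive character offsets.
object FragmentDemo extends App {
  val query = "select to_binary('abc', 1)"
  val (startIndex, stopIndex) = (8, 26) // values from the JSON above
  val fragment = query.substring(startIndex - 1, stopIndex)
  assert(fragment == "to_binary('abc', 1)")
  println(fragment)
}
```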


93 changes: 74 additions & 19 deletions sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
@@ -21,7 +21,19 @@ select from_csv('1', 1)
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-The expression '1' is not a valid schema string.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1092",
+  "messageParameters" : {
+    "expr" : "1"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 23,
+    "fragment" : "from_csv('1', 1)"
+  } ]
+}


-- !query
@@ -30,20 +42,21 @@ select from_csv('1', 'a InvalidType')
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Cannot parse the data type:
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': extra input 'InvalidType'(line 1, pos 2)
-
-== SQL ==
-a InvalidType
---^^^
-
-Failed fallback parsing:
-DataType invalidtype is not supported.(line 1, pos 2)
-
-== SQL ==
-a InvalidType
---^^^
-; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1227",
+  "messageParameters" : {
+    "e1" : "\n[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': extra input 'InvalidType'(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n",
+    "e2" : "\nDataType invalidtype is not supported.(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n",
+    "msg" : "Cannot parse the data type: "
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 37,
+    "fragment" : "from_csv('1', 'a InvalidType')"
+  } ]
+}
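
Note that the old multi-line message does not disappear here: it is carried inside the JSON as the escaped string parameters `e1` and `e2`. Unescaped and joined with the `msg` prefix, they reproduce the pre-migration text, as this small illustration shows:

```scala
// The e1/e2 parameters above carry the old multi-line parser errors as
// escaped JSON strings; printing them restores the original layout.
object EscapedParamDemo extends App {
  val msg = "Cannot parse the data type: "
  val e1 = "\n[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': " +
    "extra input 'InvalidType'(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n"
  println(msg + e1) // restores the original multi-line message
}
```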


-- !query
@@ -52,7 +65,16 @@ select from_csv('1', 'a INT', named_struct('mode', 'PERMISSIVE'))
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Must use a map() function for options.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1096",
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 65,
+    "fragment" : "from_csv('1', 'a INT', named_struct('mode', 'PERMISSIVE'))"
+  } ]
+}


-- !query
@@ -61,7 +83,19 @@ select from_csv('1', 'a INT', map('mode', 1))
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-A type of keys and values in map() must be string, but got map<string,int>.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1095",
+  "messageParameters" : {
+    "map" : "map<string,int>"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 45,
+    "fragment" : "from_csv('1', 'a INT', map('mode', 1))"
+  } ]
+}


-- !query
@@ -187,7 +221,16 @@ select to_csv(named_struct('a', 1, 'b', 2), named_struct('mode', 'PERMISSIVE'))
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Must use a map() function for options.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1096",
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 79,
+    "fragment" : "to_csv(named_struct('a', 1, 'b', 2), named_struct('mode', 'PERMISSIVE'))"
+  } ]
+}


-- !query
@@ -196,4 +239,16 @@ select to_csv(named_struct('a', 1, 'b', 2), map('mode', 1))
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-A type of keys and values in map() must be string, but got map<string,int>.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1095",
+  "messageParameters" : {
+    "map" : "map<string,int>"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "to_csv(named_struct('a', 1, 'b', 2), map('mode', 1))"
+  } ]
+}
75 changes: 70 additions & 5 deletions sql/core/src/test/resources/sql-tests/results/extract.sql.out
@@ -317,7 +317,20 @@ select extract(not_supported from c) from t
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1102",
+  "messageParameters" : {
+    "field" : "not_supported",
+    "srcDataType" : "string"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 36,
+    "fragment" : "extract(not_supported from c)"
+  } ]
+}


-- !query
@@ -326,7 +339,20 @@ select extract(not_supported from i) from t
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Literals of type 'not_supported' are currently not supported for the interval year to month type.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1102",
+  "messageParameters" : {
+    "field" : "not_supported",
+    "srcDataType" : "interval year to month"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 36,
+    "fragment" : "extract(not_supported from i)"
+  } ]
+}


-- !query
@@ -335,7 +361,20 @@ select extract(not_supported from j) from t
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Literals of type 'not_supported' are currently not supported for the interval day to second type.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1102",
+  "messageParameters" : {
+    "field" : "not_supported",
+    "srcDataType" : "interval day to second"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 36,
+    "fragment" : "extract(not_supported from j)"
+  } ]
+}


-- !query
@@ -924,7 +963,20 @@ select extract(DAY from interval '2-1' YEAR TO MONTH)
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Literals of type 'DAY' are currently not supported for the interval year to month type.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1102",
+  "messageParameters" : {
+    "field" : "DAY",
+    "srcDataType" : "interval year to month"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 53,
+    "fragment" : "extract(DAY from interval '2-1' YEAR TO MONTH)"
+  } ]
+}


-- !query
@@ -1081,7 +1133,20 @@ select extract(MONTH from interval '123 12:34:56.789123123' DAY TO SECOND)
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-Literals of type 'MONTH' are currently not supported for the interval day to second type.; line 1 pos 7
+{
+  "errorClass" : "_LEGACY_ERROR_TEMP_1102",
+  "messageParameters" : {
+    "field" : "MONTH",
+    "srcDataType" : "interval day to second"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 74,
+    "fragment" : "extract(MONTH from interval '123 12:34:56.789123123' DAY TO SECOND)"
+  } ]
+}

