1 change: 1 addition & 0 deletions .github/workflows/master.yml
@@ -46,6 +46,7 @@ jobs:
- '3.0'
- '3.1'
- '3.2'
- '3.3'
spark-archive: [""]
exclude-tags: [""]
include:
2 changes: 1 addition & 1 deletion docs/deployment/settings.md
@@ -332,7 +332,7 @@ Key | Default | Meaning | Type | Since
<code>kyuubi.operation.interrupt.on.cancel</code>|<div style='width: 65pt;word-wrap: break-word;white-space: normal'>true</div>|<div style='width: 170pt;word-wrap: break-word;white-space: normal'>When true, all running tasks will be interrupted if one cancels a query. When false, all running tasks will remain until finished.</div>|<div style='width: 30pt'>boolean</div>|<div style='width: 20pt'>1.2.0</div>
<code>kyuubi.operation.language</code>|<div style='width: 65pt;word-wrap: break-word;white-space: normal'>SQL</div>|<div style='width: 170pt;word-wrap: break-word;white-space: normal'>Choose a programming language for the following inputs <ul><li>SQL: (Default) Run all following statements as SQL queries.</li> <li>SCALA: Run all following inputs as Scala code.</li></ul></div>|<div style='width: 30pt'>string</div>|<div style='width: 20pt'>1.5.0</div>
<code>kyuubi.operation.log.dir.root</code>|<div style='width: 65pt;word-wrap: break-word;white-space: normal'>server_operation_logs</div>|<div style='width: 170pt;word-wrap: break-word;white-space: normal'>Root directory for query operation log at server-side.</div>|<div style='width: 30pt'>string</div>|<div style='width: 20pt'>1.4.0</div>
<code>kyuubi.operation.plan.only.excludes</code>|<div style='width: 65pt;word-wrap: break-word;white-space: normal'>ResetCommand,SetCommand,SetNamespaceCommand,UseStatement</div>|<div style='width: 170pt;word-wrap: break-word;white-space: normal'>Comma-separated list of query plan names, in the form of simple class names, i.e., for `set abc=xyz`, the value will be `SetCommand`. For those auxiliary plans, such as `switch databases`, `set properties`, or `create temporary view`, etc., which are used to set up the evaluation environment for analyzing actual queries, we can use this config to exclude them from plan-only mode and let them take effect. See also kyuubi.operation.plan.only.mode.</div>|<div style='width: 30pt'>seq</div>|<div style='width: 20pt'>1.5.0</div>
<code>kyuubi.operation.plan.only.excludes</code>|<div style='width: 65pt;word-wrap: break-word;white-space: normal'>ResetCommand,SetCommand,SetNamespaceCommand,UseStatement,SetCatalogAndNamespace</div>|<div style='width: 170pt;word-wrap: break-word;white-space: normal'>Comma-separated list of query plan names, in the form of simple class names, i.e., for `set abc=xyz`, the value will be `SetCommand`. For those auxiliary plans, such as `switch databases`, `set properties`, or `create temporary view`, etc., which are used to set up the evaluation environment for analyzing actual queries, we can use this config to exclude them from plan-only mode and let them take effect. See also kyuubi.operation.plan.only.mode.</div>|<div style='width: 30pt'>seq</div>|<div style='width: 20pt'>1.5.0</div>
<code>kyuubi.operation.plan.only.mode</code>|<div style='width: 65pt;word-wrap: break-word;white-space: normal'>NONE</div>|<div style='width: 170pt;word-wrap: break-word;white-space: normal'>Whether to perform the statement in PARSE, ANALYZE, OPTIMIZE, PHYSICAL, or EXECUTION plan-only mode, without executing the query. When it is NONE, the statement will be fully executed.</div>|<div style='width: 30pt'>string</div>|<div style='width: 20pt'>1.4.0</div>
<code>kyuubi.operation.progress.enabled</code>|<div style='width: 65pt;word-wrap: break-word;white-space: normal'>false</div>|<div style='width: 170pt;word-wrap: break-word;white-space: normal'>Whether to enable the operation progress. When true, the operation progress will be returned in `GetOperationStatus`.</div>|<div style='width: 30pt'>boolean</div>|<div style='width: 20pt'>1.6.0</div>
<code>kyuubi.operation.query.timeout</code>|<div style='width: 65pt;word-wrap: break-word;white-space: normal'>&lt;undefined&gt;</div>|<div style='width: 170pt;word-wrap: break-word;white-space: normal'>Timeout for query executions at server-side; it takes effect together with the client-side timeout (`java.sql.Statement.setQueryTimeout`), and a running query will be cancelled automatically when it times out. It's off by default, which means only the client side fully controls whether the query should time out. If set, the client-side timeout is capped at this value. To cancel queries right away without waiting for tasks to finish, consider enabling kyuubi.operation.interrupt.on.cancel together.</div>|<div style='width: 30pt'>duration</div>|<div style='width: 20pt'>1.2.0</div>
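
For illustration, the settings above can be exercised from any Hive-compatible JDBC client. A minimal sketch, assuming a Kyuubi server at localhost:10009 and the Hive JDBC driver on the classpath (the endpoint, user, and single plan column are assumptions for this example, not taken from this PR):

```scala
import java.sql.DriverManager

object PlanOnlyExample {
  def main(args: Array[String]): Unit = {
    // Assumed endpoint; adjust to your deployment.
    val conn = DriverManager.getConnection(
      "jdbc:hive2://localhost:10009/default", "user", "")
    val stmt = conn.createStatement()
    // Client-side timeout; if kyuubi.operation.query.timeout is set,
    // the effective timeout is capped by the server-side value.
    stmt.setQueryTimeout(60)
    // SetCommand is in kyuubi.operation.plan.only.excludes by default,
    // so this SET takes effect instead of returning its own plan.
    stmt.execute("SET kyuubi.operation.plan.only.mode=OPTIMIZE")
    // In plan-only mode the optimized plan is returned, not query results.
    val rs = stmt.executeQuery("SELECT count(*) FROM range(10)")
    while (rs.next()) println(rs.getString(1))
    conn.close()
  }
}
```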
20 changes: 0 additions & 20 deletions extensions/spark/kyuubi-spark-connector-kudu/README.md

This file was deleted.

@@ -1337,7 +1337,12 @@ object KyuubiConf {
.version("1.5.0")
.stringConf
.toSequence()
.createWithDefault(Seq("ResetCommand", "SetCommand", "SetNamespaceCommand", "UseStatement"))
.createWithDefault(Seq(
"ResetCommand",
"SetCommand",
"SetNamespaceCommand",
"UseStatement",
"SetCatalogAndNamespace"))

object OperationLanguages extends Enumeration {
type OperationLanguage = Value
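
For orientation: the excludes above are matched against the simple class name of a statement's parsed plan. A minimal sketch of that check (a hypothetical helper for illustration, not the actual Kyuubi implementation):

```scala
// Hedged sketch: a plan bypasses plan-only mode when its simple class
// name appears in kyuubi.operation.plan.only.excludes.
def bypassesPlanOnly(plan: AnyRef, excludes: Seq[String]): Boolean =
  excludes.contains(plan.getClass.getSimpleName)

// e.g. the parsed plan of `set abc=xyz` has simple name "SetCommand",
// which is excluded by default, so the SET takes effect as usual.
```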
@@ -73,15 +73,13 @@ class SparkProcessBuilderSuite extends KerberizedTestHelper with MockitoSugar {
}

test("engine log truncation") {
val msg = "org.apache.spark.sql.hive."
val pb =
new SparkProcessBuilder("kentyao", conf.set("spark.hive.metastore.uris", "thrift://dummy"))
pb.start
eventually(timeout(90.seconds), interval(500.milliseconds)) {
val error1 = pb.getError
assert(!error1.getMessage.contains("Failed to detect the root cause"))
assert(error1.getMessage.contains("See more: "))

Contributor Author:
The call stack changed after Spark 3.3.x.

Tail of the spark-3.3 engine log file:
22/05/19 03:57:27 WARN HiveClientImpl: Deadline exceeded
22/05/19 03:57:27 ERROR SparkSQLEngine: Failed to instantiate SparkSession: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
org.apache.spark.sql.AnalysisException: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:110) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:223) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:150) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:140) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:54) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$1(HiveSessionStateBuilder.scala:69) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog$lzycompute(SessionCatalog.scala:121) ~[spark-catalyst_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog(SessionCatalog.scala:121) ~[spark-catalyst_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.catalyst.catalog.SessionCatalog.listDatabases(SessionCatalog.scala:294) ~[spark-catalyst_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.datasources.v2.V2SessionCatalog.listNamespaces(V2SessionCatalog.scala:230) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.datasources.v2.ShowNamespacesExec.run(ShowNamespacesExec.scala:42) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:97) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:109) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:169) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:97) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:93) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:582) ~[spark-catalyst_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:174) ~[spark-catalyst_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:582) ~[spark-catalyst_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30) ~[spark-catalyst_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267) ~[spark-catalyst_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263) ~[spark-catalyst_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30) ~[spark-catalyst_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30) ~[spark-catalyst_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:558) ~[spark-catalyst_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:93) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:80) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:78) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.Dataset.<init>(Dataset.scala:220) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:622) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:617) ~[spark-sql_2.12-3.3.0.jar:3.3.0]
	at org.apache.kyuubi.engine.spark.KyuubiSparkUtil$.$anonfun$initializeSparkSession$1(KyuubiSparkUtil.scala:47) ~[kyuubi-spark-sql-engine_2.12-1.6.0-SNAPSHOT.jar:1.6.0-SNAPSHOT]
	at org.apache.kyuubi.engine.spark.KyuubiSparkUtil$.$anonfun$initializeSparkSession$1$adapted(KyuubiSparkUtil.scala:40) ~[kyuubi-spark-sql-engine_2.12-1.6.0-SNAPSHOT.jar:1.6.0-SNAPSHOT]
	at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62) ~[scala-library-2.12.15.jar:?]
	at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55) ~[scala-library-2.12.15.jar:?]
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49) ~[scala-library-2.12.15.jar:?]
	at org.apache.kyuubi.engine.spark.KyuubiSparkUtil$.initializeSparkSession(KyuubiSparkUtil.scala:40) ~[kyuubi-spark-sql-engine_2.12-1.6.0-SNAPSHOT.jar:1.6.0-SNAPSHOT]
	at org.apache.kyuubi.engine.spark.SparkSQLEngine$.createSpark(SparkSQLEngine.scala:185) ~[kyuubi-spark-sql-engine_2.12-1.6.0-SNAPSHOT.jar:1.6.0-SNAPSHOT]
	at org.apache.kyuubi.engine.spark.SparkSQLEngine$.main(SparkSQLEngine.scala:268) ~[kyuubi-spark-sql-engine_2.12-1.6.0-SNAPSHOT.jar:1.6.0-SNAPSHOT]
	at org.apache.kyuubi.engine.spark.SparkSQLEngine.main(SparkSQLEngine.scala) ~[kyuubi-spark-sql-engine_2.12-1.6.0-SNAPSHOT.jar:1.6.0-SNAPSHOT]
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_332]
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_332]
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_332]
	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_332]
	at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) ~[spark-core_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:958) ~[spark-core_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.deploy.SparkSubmit$$anon$1.run(SparkSubmit.scala:165) ~[spark-core_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.deploy.SparkSubmit$$anon$1.run(SparkSubmit.scala:163) ~[spark-core_2.12-3.3.0.jar:3.3.0]
	at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_332]
	at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_332]
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878) ~[hadoop-client-api-3.3.2.jar:?]
	at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:163) ~[spark-core_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203) ~[spark-core_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90) ~[spark-core_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1046) ~[spark-core_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1055) ~[spark-core_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) ~[spark-core_2.12-3.3.0.jar:3.3.0]
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
	at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1567) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1552) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.spark.sql.hive.client.Shim_v0_12.databaseExists(HiveShim.scala:609) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$databaseExists$1(HiveClientImpl.scala:394) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23) ~[scala-library-2.12.15.jar:?]
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:294) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:225) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:224) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:274) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:394) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:223) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23) ~[scala-library-2.12.15.jar:?]
	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:101) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	... 66 more
Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1742) ~[hive-metastore-2.3.9.jar:2.3.9]
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83) ~[hive-metastore-2.3.9.jar:2.3.9]
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) ~[hive-metastore-2.3.9.jar:2.3.9]
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) ~[hive-metastore-2.3.9.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3607) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3659) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3639) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1563) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1552) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.spark.sql.hive.client.Shim_v0_12.databaseExists(HiveShim.scala:609) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$databaseExists$1(HiveClientImpl.scala:394) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23) ~[scala-library-2.12.15.jar:?]
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:294) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:225) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:224) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:274) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:394) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:223) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23) ~[scala-library-2.12.15.jar:?]
	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:101) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	... 66 more
Caused by: java.lang.reflect.InvocationTargetException
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:1.8.0_332]
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[?:1.8.0_332]
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:1.8.0_332]
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423) ~[?:1.8.0_332]
	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1740) ~[hive-metastore-2.3.9.jar:2.3.9]
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83) ~[hive-metastore-2.3.9.jar:2.3.9]
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) ~[hive-metastore-2.3.9.jar:2.3.9]
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) ~[hive-metastore-2.3.9.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3607) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3659) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3639) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1563) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1552) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.spark.sql.hive.client.Shim_v0_12.databaseExists(HiveShim.scala:609) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$databaseExists$1(HiveClientImpl.scala:394) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23) ~[scala-library-2.12.15.jar:?]
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:294) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:225) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:224) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:274) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:394) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:223) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23) ~[scala-library-2.12.15.jar:?]
	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:101) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	... 66 more
Caused by: org.apache.hadoop.hive.metastore.api.MetaException: Could not connect to meta store using any of the URIs provided. Most recent failure: org.apache.thrift.transport.TTransportException: Invalid port -1
	at org.apache.thrift.transport.TSocket.open(TSocket.java:213)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:478)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:245)
	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:70)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1740)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3607)
	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3659)
	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3639)
	at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1563)
	at org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1552)
	at org.apache.spark.sql.hive.client.Shim_v0_12.databaseExists(HiveShim.scala:609)
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$databaseExists$1(HiveClientImpl.scala:394)
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:294)
	at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:225)
	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:224)
	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:274)
	at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:394)
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:223)
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:101)
	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:223)
	at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:150)
	at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:140)
	at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:54)
	at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$1(HiveSessionStateBuilder.scala:69)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog$lzycompute(SessionCatalog.scala:121)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog(SessionCatalog.scala:121)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalog.listDatabases(SessionCatalog.scala:294)
	at org.apache.spark.sql.execution.datasources.v2.V2SessionCatalog.listNamespaces(V2SessionCatalog.scala:230)
	at org.apache.spark.sql.execution.datasources.v2.ShowNamespacesExec.run(ShowNamespacesExec.scala:42)
	at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)
	at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)
	at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)
	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:97)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:109)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:169)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:97)
	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:93)
	at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:582)
	at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:174)
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:582)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:558)
	at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:93)
	at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:80)
	at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:78)
	at org.apache.spark.sql.Dataset.<init>(Dataset.scala:220)
	at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
	at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:622)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:617)
	at org.apache.kyuubi.engine.spark.KyuubiSparkUtil$.$anonfun$initializeSparkSession$1(KyuubiSparkUtil.scala:47)
	at org.apache.kyuubi.engine.spark.KyuubiSparkUtil$.$anonfun$initializeSparkSession$1$adapted(KyuubiSparkUtil.scala:40)
	at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
	at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
	at org.apache.kyuubi.engine.spark.KyuubiSparkUtil$.initializeSparkSession(KyuubiSparkUtil.scala:40)
	at org.apache.kyuubi.engine.spark.SparkSQLEngine$.createSpark(SparkSQLEngine.scala:185)
	at org.apache.kyuubi.engine.spark.SparkSQLEngine$.main(SparkSQLEngine.scala:268)
	at org.apache.kyuubi.engine.spark.SparkSQLEngine.main(SparkSQLEngine.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
	at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:958)
	at org.apache.spark.deploy.SparkSubmit$$anon$1.run(SparkSubmit.scala:165)
	at org.apache.spark.deploy.SparkSubmit$$anon$1.run(SparkSubmit.scala:163)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
	at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:163)
	at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
	at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
	at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1046)
	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1055)
	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:527) ~[hive-metastore-2.3.9.jar:2.3.9]
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:245) ~[hive-metastore-2.3.9.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:70) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:1.8.0_332]
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[?:1.8.0_332]
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:1.8.0_332]
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423) ~[?:1.8.0_332]
	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1740) ~[hive-metastore-2.3.9.jar:2.3.9]
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83) ~[hive-metastore-2.3.9.jar:2.3.9]
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) ~[hive-metastore-2.3.9.jar:2.3.9]
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) ~[hive-metastore-2.3.9.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3607) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3659) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3639) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1563) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1552) ~[hive-exec-2.3.9-core.jar:2.3.9]
	at org.apache.spark.sql.hive.client.Shim_v0_12.databaseExists(HiveShim.scala:609) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$databaseExists$1(HiveClientImpl.scala:394) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23) ~[scala-library-2.12.15.jar:?]
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:294) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:225) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:224) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:274) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:394) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:223) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23) ~[scala-library-2.12.15.jar:?]
	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:101) ~[spark-hive_2.12-3.3.0.jar:3.3.0]
	... 66 more
Tail of the spark-3.2 engine log file:
Caused by: java.lang.reflect.InvocationTargetException
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1740)
	... 84 more
Caused by: MetaException(message:Could not connect to meta store using any of the URIs provided. Most recent failure: org.apache.thrift.transport.TTransportException: Invalid port -1
	at org.apache.thrift.transport.TSocket.open(TSocket.java:213)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:478)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:245)
	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:70)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1740)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3607)
	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3659)
	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3639)
	at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1563)
	at org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1552)
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$databaseExists$1(HiveClientImpl.scala:396)
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
	at org.apache.spark.sql.hive.client.HiveClientImpl.$anonfun$withHiveState$1(HiveClientImpl.scala:305)
	at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:236)
	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:235)
	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:285)
	at org.apache.spark.sql.hive.client.HiveClientImpl.databaseExists(HiveClientImpl.scala:396)
	at org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:224)
	at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
	at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:102)
	at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:224)
	at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:150)
	at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:140)
	at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:45)
	at org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$1(HiveSessionStateBuilder.scala:60)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog$lzycompute(SessionCatalog.scala:118)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog(SessionCatalog.scala:118)
	at org.apache.spark.sql.catalyst.catalog.SessionCatalog.listDatabases(SessionCatalog.scala:298)
	at org.apache.spark.sql.execution.datasources.v2.V2SessionCatalog.listNamespaces(V2SessionCatalog.scala:205)
	at org.apache.spark.sql.execution.datasources.v2.ShowNamespacesExec.run(ShowNamespacesExec.scala:42)
	at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)
	at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)
	at org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)
	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:110)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:110)
	at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:106)
	at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:481)
	at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:82)
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:481)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
	at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
	at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:457)
	at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:106)
	at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:93)
	at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:91)
	at org.apache.spark.sql.Dataset.<init>(Dataset.scala:219)
	at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
	at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:618)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:613)
	at org.apache.kyuubi.engine.spark.KyuubiSparkUtil$.$anonfun$initializeSparkSession$1(KyuubiSparkUtil.scala:47)
	at org.apache.kyuubi.engine.spark.KyuubiSparkUtil$.$anonfun$initializeSparkSession$1$adapted(KyuubiSparkUtil.scala:40)
	at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
	at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
	at org.apache.kyuubi.engine.spark.KyuubiSparkUtil$.initializeSparkSession(KyuubiSparkUtil.scala:40)
	at org.apache.kyuubi.engine.spark.SparkSQLEngine$.createSpark(SparkSQLEngine.scala:185)
	at org.apache.kyuubi.engine.spark.SparkSQLEngine$.main(SparkSQLEngine.scala:268)
	at org.apache.kyuubi.engine.spark.SparkSQLEngine.main(SparkSQLEngine.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
	at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:955)
	at org.apache.spark.deploy.SparkSubmit$$anon$1.run(SparkSubmit.scala:165)
	at org.apache.spark.deploy.SparkSubmit$$anon$1.run(SparkSubmit.scala:163)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
	at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:163)
	at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
	at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
	at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1043)
	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1052)
	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:527)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:245)
	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:70)
	... 89 more
2022-05-19 12:25:16.588 INFO spark.SparkContext: Invoking stop() from shutdown hook
2022-05-19 12:25:16.601 INFO server.AbstractConnector: Stopped Spark@514bd750{HTTP/1.1, (http/1.1)}{0.0.0.0:0}
2022-05-19 12:25:16.602 INFO ui.SparkUI: Stopped Spark web UI at http://192.168.202.190:43707
2022-05-19 12:25:16.619 INFO spark.MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
2022-05-19 12:25:16.632 INFO memory.MemoryStore: MemoryStore cleared
2022-05-19 12:25:16.632 INFO storage.BlockManager: BlockManager stopped
2022-05-19 12:25:16.641 INFO storage.BlockManagerMaster: BlockManagerMaster stopped
2022-05-19 12:25:16.647 INFO scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
2022-05-19 12:25:16.656 INFO spark.SparkContext: Successfully stopped SparkContext
2022-05-19 12:25:16.656 INFO util.ShutdownHookManager: Shutdown hook called
2022-05-19 12:25:16.657 INFO util.ShutdownHookManager: Deleting directory /tmp/spark-809163c6-5508-422c-97c9-41402e064225
2022-05-19 12:25:16.660 INFO util.ShutdownHookManager: Deleting directory /tmp/spark-cbb63692-d47a-4444-9dc9-9254f2bb7a4d


Contributor Author:
Note that the spark-3.3 engine log contains the message `org.apache.spark.sql.hive.` at the tail of the file.

assert(error1.getMessage.contains(msg))
}

val pb2 = new SparkProcessBuilder(
@@ -93,7 +91,6 @@ class SparkProcessBuilderSuite extends KerberizedTestHelper with MockitoSugar {
val error1 = pb2.getError
assert(!error1.getMessage.contains("Failed to detect the root cause"))
assert(error1.getMessage.contains("See more: "))
assert(!error1.getMessage.contains(msg), "stack trace shall be truncated")
}

val pb3 =
26 changes: 25 additions & 1 deletion pom.xml
@@ -68,7 +68,6 @@
<module>dev/kyuubi-codecov</module>
<module>extensions/server/kyuubi-server-plugin</module>
<module>extensions/spark/kyuubi-spark-authz</module>
<module>extensions/spark/kyuubi-spark-connector-kudu</module>
<module>extensions/spark/kyuubi-spark-connector-tpcds</module>
<module>externals/kyuubi-download</module>
<module>externals/kyuubi-flink-sql-engine</module>
@@ -1965,6 +1964,31 @@
</modules>
</profile>

<profile>
<id>spark-3.3</id>
<repositories>
<repository>
<id>Apache Staging Repository</id>
<url>https://repository.apache.org/content/groups/staging/</url>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
</repositories>
<properties>
<spark.version>3.3.0</spark.version>
<spark.archive.name>spark-${spark.version}-bin-hadoop3.tgz</spark.archive.name>
<spark.archive.mirror>https://dist.apache.org/repos/dist/dev/spark/v3.3.0-rc2-bin/</spark.archive.mirror>
<maven.plugin.scalatest.exclude.tags>org.apache.kyuubi.tags.ExtendedSQLTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
</properties>
<modules>
<module>extensions/spark/kyuubi-spark-connector-kudu</module>
</modules>
</profile>

<profile>
<id>spark-master</id>
<repositories>
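
A brief usage note, assuming standard Maven profile activation: building with `mvn -Pspark-3.3 ...` selects the new profile, which resolves the Spark 3.3.0 release-candidate artifacts from the Apache staging repository, points the test archive at the v3.3.0-rc2 dist mirror, and builds the `kyuubi-spark-connector-kudu` module that was dropped from the default module list above.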