diff --git a/docs/interpreter/flink.md b/docs/interpreter/flink.md
index 23973a80cb3..c956d82959a 100644
--- a/docs/interpreter/flink.md
+++ b/docs/interpreter/flink.md
@@ -201,6 +201,11 @@ You can also set other flink properties which are not listed in the table. For a
     <td>2.3.4</td>
     <td>Hive version that you would like to connect</td>
   </tr>
+  <tr>
+    <td>zeppelin.flink.module.enableHive</td>
+    <td>false</td>
+    <td>Whether to enable the Hive module. Hive UDFs take precedence over Flink UDFs when the Hive module is enabled.</td>
+  </tr>
   <tr>
     <td>zeppelin.flink.maxResult</td>
     <td>1000</td>
diff --git a/flink/interpreter/src/main/resources/interpreter-setting.json b/flink/interpreter/src/main/resources/interpreter-setting.json
index cba12c391e0..4ddead04999 100644
--- a/flink/interpreter/src/main/resources/interpreter-setting.json
+++ b/flink/interpreter/src/main/resources/interpreter-setting.json
@@ -145,6 +145,13 @@
         "description": "Hive version that you would like to connect",
         "type": "string"
       },
+      "zeppelin.flink.module.enableHive": {
+        "envName": null,
+        "propertyName": null,
+        "defaultValue": false,
+        "description": "Whether to enable the Hive module. Hive UDFs take precedence over Flink UDFs when the Hive module is enabled.",
+        "type": "checkbox"
+      },
       "zeppelin.flink.printREPLOutput": {
         "envName": null,
         "propertyName": "zeppelin.flink.printREPLOutput",
diff --git a/flink/interpreter/src/main/scala/org/apache/zeppelin/flink/FlinkScalaInterpreter.scala b/flink/interpreter/src/main/scala/org/apache/zeppelin/flink/FlinkScalaInterpreter.scala
index aaedc826dfc..d3cb074cafd 100644
--- a/flink/interpreter/src/main/scala/org/apache/zeppelin/flink/FlinkScalaInterpreter.scala
+++ b/flink/interpreter/src/main/scala/org/apache/zeppelin/flink/FlinkScalaInterpreter.scala
@@ -455,7 +455,9 @@ class FlinkScalaInterpreter(val properties: Properties) {
     this.btenv.registerCatalog("hive", hiveCatalog)
     this.btenv.useCatalog("hive")
     this.btenv.useDatabase(database)
-    this.btenv.loadModule("hive", new HiveModule(hiveVersion))
+    if (properties.getProperty("zeppelin.flink.module.enableHive", "false").toBoolean) {
+      this.btenv.loadModule("hive", new HiveModule(hiveVersion))
+    }
   }
 
   private def loadUDFJar(jar: String): Unit = {
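
For reviewers, a minimal sketch of the gated module loading this patch introduces, shown outside Zeppelin. It is an illustration only: the local `TableEnvironment` and the `EnableHiveModuleSketch` object are stand-ins for the interpreter's `btenv` (assuming Flink 1.14+ style `EnvironmentSettings`, with `flink-connector-hive` and `hive-exec` on the classpath), while the property name, default value, and `2.3.4` Hive version mirror the diff.

```scala
import java.util.Properties

import org.apache.flink.table.api.{EnvironmentSettings, TableEnvironment}
import org.apache.flink.table.module.hive.HiveModule

object EnableHiveModuleSketch {
  def main(args: Array[String]): Unit = {
    // Stand-in for the interpreter properties; in Zeppelin these come from the
    // interpreter setting. The default is "false", so the Hive module stays off
    // unless the user opts in.
    val properties = new Properties()
    properties.setProperty("zeppelin.flink.module.enableHive", "true")

    // Local TableEnvironment as a stand-in for Zeppelin's btenv.
    val settings = EnvironmentSettings.newInstance().inBatchMode().build()
    val btenv = TableEnvironment.create(settings)

    // Same guard as the patched FlinkScalaInterpreter: load the Hive module
    // only when the flag is explicitly set to true.
    if (properties.getProperty("zeppelin.flink.module.enableHive", "false").toBoolean) {
      btenv.loadModule("hive", new HiveModule("2.3.4"))
    }

    // With the module loaded, Hive built-in functions become available and,
    // per this change's description, take precedence over Flink's for
    // overlapping names; without it, only the default "core" module is listed.
    println(btenv.listModules().mkString(", "))
  }
}
```

Gating the `loadModule` call keeps the previous default behavior (Flink UDF resolution only) for existing notebooks, and makes the Hive-first resolution an explicit opt-in via `zeppelin.flink.module.enableHive`.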