From fd120de138330e686b967121197bd13e1cd325b8 Mon Sep 17 00:00:00 2001
From: zhangbutao
Date: Fri, 19 Sep 2025 18:00:10 +0800
Subject: [PATCH 01/15] HIVE-29177: Implement default Catalog selection

---
 .../org/apache/hadoop/hive/ql/ErrorMsg.java   |  2 +-
 .../hadoop/hive/ql/parse/FromClauseParser.g   | 11 ++++
 .../apache/hadoop/hive/ql/parse/HiveParser.g  | 13 ++++-
 .../catalog/use/SwitchCatalogAnalyzer.java    | 53 +++++++++++++++++++
 .../ql/ddl/catalog/use/SwitchCatalogDesc.java | 43 +++++++++++++++
 .../catalog/use/SwitchCatalogOperation.java   | 50 +++++++++++++++++
 .../create/CreateDatabaseOperation.java       |  4 +-
 .../database/drop/DropDatabaseOperation.java  |  4 +-
 .../database/use/SwitchDatabaseAnalyzer.java  |  8 +--
 .../apache/hadoop/hive/ql/metadata/Hive.java  | 20 ++++---
 .../apache/hadoop/hive/ql/metadata/Table.java |  4 ++
 .../hive/ql/parse/BaseSemanticAnalyzer.java   | 40 ++++++++++++++
 .../hadoop/hive/ql/plan/HiveOperation.java    |  1 +
 .../hive/ql/processors/HiveCommand.java       |  2 +
 .../queries/clientpositive/catalog_database.q | 28 ++++++++++
 .../results/clientpositive/llap/catalog.q.out | 15 ++++--
 .../llap/catalog_database.q.out               | 50 +++++++++++++++++
 17 files changed, 324 insertions(+), 24 deletions(-)
 create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogAnalyzer.java
 create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogDesc.java
 create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogOperation.java
 create mode 100644 ql/src/test/queries/clientpositive/catalog_database.q
 create mode 100644 ql/src/test/results/clientpositive/llap/catalog_database.q.out

diff --git a/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index 148932b25299..141e91a16835 100644
--- a/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -468,7 +468,7 @@ public enum ErrorMsg {
   RESOURCE_PLAN_ALREADY_EXISTS(10417, "Resource plan {0} already exists", true),
   RESOURCE_PLAN_NOT_EXISTS(10418, "Resource plan {0} does not exist", true),
   INCOMPATIBLE_STRUCT(10419, "Incompatible structs.", true),
-  OBJECTNAME_CONTAINS_DOT(10420, "Table or database name may not contain dot(.) character", true),
+  OBJECTNAME_CONTAINS_DOT(10420, "Catalog or table or database name may not contain dot(.) character", true),
   WITHIN_GROUP_NOT_ALLOWED(10421,
       "Not an ordered-set aggregate function: {0}. WITHIN GROUP clause is not allowed.", true),
   WITHIN_GROUP_PARAMETER_MISMATCH(10422,
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
index a5c37faec8ae..d3461064e574 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
@@ -231,6 +231,17 @@ uniqueJoinTableSource
     -> ^(TOK_TABREF $tabname $ts? $alias?)
     ;
 
+databaseName
+@init { gParent.pushMsg("database name", state); }
+@after { gParent.popMsg(state); }
+    :
+    catalog=identifier DOT db=identifier
+    -> ^(TOK_DBNAME $catalog $db)
+    |
+    db=identifier
+    -> ^(TOK_DBNAME $db)
+    ;
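+// Resolves either a qualified "catalog.db" name (two identifiers) or a bare "db"
+// (one identifier) into a TOK_DBNAME node, e.g. "USE testcat.testdb_new" vs. "USE testdb".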
 
 tableName
 @init { gParent.pushMsg("table name", state); }
 @after { gParent.popMsg(state); }
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index ddce6aa85af6..31048c3cbb69 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -378,6 +378,7 @@ TOK_DESCCATALOG;
 TOK_CATALOGLOCATION;
 TOK_CATALOGCOMMENT;
 TOK_ALTERCATALOG_LOCATION;
+TOK_SWITCHCATALOG;
 TOK_DESCDATABASE;
 TOK_DATABASEPROPERTIES;
 TOK_DATABASELOCATION;
@@ -1011,6 +1012,7 @@ ddlStatement
 @after { popMsg(state); }
     : createCatalogStatement
     | dropCatalogStatement
+    | switchCatalogStatement
    | createDatabaseStatement
     | switchDatabaseStatement
     | dropDatabaseStatement
@@ -1151,6 +1153,13 @@ dropCatalogStatement
     -> ^(TOK_DROPCATALOG identifier ifExists?)
     ;
 
+switchCatalogStatement
+@init { pushMsg("switch catalog statement", state); }
+@after { popMsg(state); }
+    : KW_SET KW_CATALOG identifier
+    -> ^(TOK_SWITCHCATALOG identifier)
+    ;
+
 createDatabaseStatement
 @init { pushMsg("create database statement", state); }
 @after { popMsg(state); }
@@ -1210,8 +1219,8 @@ dbConnectorName
 switchDatabaseStatement
 @init { pushMsg("switch database statement", state); }
 @after { popMsg(state); }
-    : KW_USE identifier
-    -> ^(TOK_SWITCHDATABASE identifier)
+    : KW_USE databaseName
+    -> ^(TOK_SWITCHDATABASE databaseName)
     ;
 
 dropDatabaseStatement
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogAnalyzer.java
new file mode 100644
index 000000000000..367e94d78b84
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogAnalyzer.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.use;
+
+import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for catalog switching commands.
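+ * Handles statements of the form {@code SET CATALOG catalog_name},
+ * e.g. {@code SET CATALOG testcat} as exercised in catalog_database.q.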
+ */
+@DDLSemanticAnalyzerFactory.DDLType(types = HiveParser.TOK_SWITCHCATALOG)
+public class SwitchCatalogAnalyzer extends BaseSemanticAnalyzer {
+  public SwitchCatalogAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String catalogName = unescapeIdentifier(root.getChild(0).getText());
+
+    Catalog catalog = getCatalog(catalogName);
+    ReadEntity readEntity = new ReadEntity(catalog);
+    readEntity.noLockNeeded();
+    inputs.add(readEntity);
+
+    SwitchCatalogDesc desc = new SwitchCatalogDesc(catalogName);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogDesc.java
new file mode 100644
index 000000000000..68ce7222a7f9
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogDesc.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.use;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+
+import java.io.Serializable;
+
+/**
+ * DDL task description for SET CATALOG commands.
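+ * The descriptor only carries the target catalog name; the catalog's existence is
+ * re-validated in SwitchCatalogOperation at execution time.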
+ */
+@Explain(displayName = "Switch Catalog", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
+public class SwitchCatalogDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String catalogName;
+
+  public SwitchCatalogDesc(String catalogName) {
+    this.catalogName = catalogName;
+  }
+
+  @Explain(displayName = "name", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
+  public String getCatalogName() {
+    return catalogName;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogOperation.java
new file mode 100644
index 000000000000..95468461b42b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogOperation.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.use;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+/**
+ * Operation process of switching to another catalog.
+ */
+public class SwitchCatalogOperation extends DDLOperation<SwitchCatalogDesc> {
+  public SwitchCatalogOperation(DDLOperationContext context, SwitchCatalogDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    String catalogName = desc.getCatalogName();
+    if (context.getDb().getCatalog(catalogName) == null) {
+      throw new HiveException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName);
+    }
+
+    SessionState.get().setCurrentCatalog(catalogName);
+    return 0;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
index 0bccba094fed..caff77fe7008 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
@@ -55,12 +55,12 @@ public int execute() throws HiveException {
       if (desc.getManagedLocationUri() != null) {
         database.setManagedLocationUri(desc.getManagedLocationUri());
       }
-      makeLocationQualified(database);
+      makeLocationQualified(database); // TODO add catalog prefix for db location
       if (database.getLocationUri().equalsIgnoreCase(database.getManagedLocationUri())) {
         throw new HiveException("Managed and external locations for database cannot be the same");
       }
     } else if (desc.getDatabaseType() == DatabaseType.REMOTE) {
-      makeLocationQualified(database);
+      makeLocationQualified(database); // TODO add catalog prefix for db location
       database.setConnector_name(desc.getConnectorName());
       database.setRemote_dbname(desc.getRemoteDbName());
     } else { // should never be here
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java
index e83d082703ef..ce3b5e514d8f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java
@@ -52,12 +52,12 @@ public int execute() throws HiveException {
       if (LlapHiveUtils.isLlapMode(context.getConf())) {
         ProactiveEviction.Request.Builder llapEvictRequestBuilder = ProactiveEviction.Request.Builder.create();
-        llapEvictRequestBuilder.addDb(dbName);
+        llapEvictRequestBuilder.addDb(dbName); //TODO add catalog for the cache
         ProactiveEviction.evict(context.getConf(), llapEvictRequestBuilder.build());
       }
       // Unregister the functions as well
       if (desc.isCasdade()) {
-        FunctionRegistry.unregisterPermanentFunctions(dbName);
+        FunctionRegistry.unregisterPermanentFunctions(dbName); //TODO add catalog for the cache
       }
     } catch (NoSuchObjectException ex) {
       throw new HiveException(ex, ErrorMsg.DATABASE_NOT_EXISTS, desc.getDatabaseName());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java
index 97d86617d672..8b27cec6a21e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.ddl.database.use;
 
+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
@@ -40,14 +41,13 @@ public SwitchDatabaseAnalyzer(QueryState queryState) throws SemanticException {
 
   @Override
   public void analyzeInternal(ASTNode root) throws SemanticException {
-    String databaseName = unescapeIdentifier(root.getChild(0).getText());
-
-    Database database = getDatabase(databaseName, true);
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
+    Database database = getDatabase(catDbNamePair.getLeft(), catDbNamePair.getRight(), true);
     ReadEntity readEntity = new ReadEntity(database);
     readEntity.noLockNeeded();
     inputs.add(readEntity);
 
-    SwitchDatabaseDesc desc = new SwitchDatabaseDesc(databaseName);
+    SwitchDatabaseDesc desc = new SwitchDatabaseDesc(catDbNamePair.getRight());
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 6a332a10f19a..e5bcf4c15754 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -222,6 +222,7 @@
 import java.util.List;
 import java.util.Map.Entry;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.Callable;
@@ -667,6 +668,7 @@ public void dropCatalog(String catName, boolean ignoreUnknownCat)
   public void createDatabase(Database db, boolean ifNotExist)
       throws AlreadyExistsException, HiveException {
     try {
+      db.setCatalogName(SessionState.get().getCurrentCatalog());
       getMSC().createDatabase(db);
     } catch (AlreadyExistsException e) {
       if (!ifNotExist) {
@@ -735,7 +737,7 @@ public void dropDatabase(DropDatabaseDesc desc)
         .map(HiveTxnManager::getCurrentTxnId).orElse(0L);
 
     DropDatabaseRequest req = new DropDatabaseRequest();
-    req.setCatalogName(getDefaultCatalog(conf));
+    req.setCatalogName(SessionState.get().getCurrentCatalog());
     req.setName(desc.getDatabaseName());
     req.setIgnoreUnknownDb(desc.getIfExists());
     req.setDeleteData(desc.isDeleteData());
@@ -1428,7 +1430,8 @@ public void createTable(Table tbl, boolean ifNotExists,
   }
 
   public void createTable(Table tbl, boolean ifNotExists) throws HiveException {
-    createTable(tbl, ifNotExists, null, null, null, null,
+    tbl.setCatalogName(Objects.requireNonNullElse(tbl.getCatName(), SessionState.get().getCurrentCatalog()));
+    createTable(tbl, ifNotExists, null, null, null, null,
         null, null);
   }
 
@@ -1463,6 +1466,7 @@ public void dropTable(Table table, boolean ifPurge) throws HiveException {
     long txnId = Optional.ofNullable(SessionState.get())
         .map(ss -> ss.getTxnMgr().getCurrentTxnId()).orElse(0L);
     table.getTTable().setTxnId(txnId);
+    table.setCatalogName(Objects.requireNonNullElse(table.getCatName(), SessionState.get().getCurrentCatalog()));
     dropTable(table.getTTable(), !tableWithSuffix, true, ifPurge);
   }
 
@@ -1978,15 +1982,15 @@ public List<String> getTablesByType(String dbName, String pattern, TableType typ
     List<String> result;
     if (type != null) {
       if (pattern != null) {
-        result = getMSC().getTables(dbName, pattern, type);
+        result = getMSC().getTables(SessionState.get().getCurrentCatalog(), dbName, pattern, type);
       } else {
-        result = getMSC().getTables(dbName, ".*", type);
+        result = getMSC().getTables(SessionState.get().getCurrentCatalog(), dbName, ".*", type);
       }
     } else {
       if (pattern != null) {
-        result = getMSC().getTables(dbName, pattern);
+        result = getMSC().getTables(SessionState.get().getCurrentCatalog(), dbName, pattern);
       } else {
-        result = getMSC().getTables(dbName, ".*");
+        result = getMSC().getTables(SessionState.get().getCurrentCatalog(), dbName, ".*");
       }
     }
     return result;
@@ -2445,7 +2449,7 @@ public List<Table> getMaterializedViewsByAST(
    */
   public List<String> getAllDatabases() throws HiveException {
     try {
-      return getMSC().getAllDatabases();
+      return getMSC().getAllDatabases(SessionState.get().getCurrentCatalog());
     } catch (Exception e) {
       throw new HiveException(e);
     }
@@ -2551,7 +2555,7 @@ public Database getDatabase(String dbName) throws HiveException {
     PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_DATABASE);
     try {
-      return getMSC().getDatabase(dbName);
+      return getMSC().getDatabase(SessionState.get().getCurrentCatalog(), dbName);
     } catch (NoSuchObjectException e) {
       return null;
     } catch (Exception e) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
index 8a37073509ef..e0aed673dc25 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
@@ -1198,6 +1198,10 @@ public String getCatalogName() {
     return this.tTable.getCatName();
   }
 
+  public void setCatalogName(String catalogName) {
+    this.tTable.setCatName(catalogName);
+  }
+
   public void setOutdatedForRewriting(Boolean validForRewritingMaterializedView) {
     this.outdatedForRewritingMaterializedView = validForRewritingMaterializedView;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index f0850a27be6c..5eb3934e478a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -31,6 +31,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Objects;
 import java.util.Properties;
 import java.util.Set;
 import java.util.stream.Stream;
@@ -59,6 +60,7 @@
 import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.apache.hadoop.hive.metastore.api.SourceTable;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.hadoop.hive.metastore.txn.TxnStore;
 import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.Context;
@@ -422,6 +424,24 @@ public static String charSetString(String charSetName, String charSetString)
     }
   }
 
+  public static Pair<String, String> getCatDbNamePair(ASTNode dbNameNode) throws SemanticException {
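+    // The TOK_DBNAME node carries two children for a qualified "catalog.db" name
+    // and a single child for a bare "db" name.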
+    if (dbNameNode.getChildCount() == 2) {
+      final String catName = unescapeIdentifier(dbNameNode.getChild(0).getText());
+      final String dbName = unescapeIdentifier(dbNameNode.getChild(1).getText());
+      if (catName.contains(".") || dbName.contains(".")) {
+        throw new SemanticException(ASTErrorUtils.getMsg(
+            ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), dbNameNode));
+      }
+      return Pair.of(catName, dbName);
+    }
+    final String dbName = unescapeIdentifier(dbNameNode.getChild(0).getText());
+    if (dbName.contains(".")) {
+      throw new SemanticException(ASTErrorUtils.getMsg(
+          ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), dbNameNode));
+    }
+    return Pair.of(null, dbName);
+  }
+
   /**
    * Get dequoted name from a table/column node.
    * @param tableOrColumnNode the table or column node
@@ -1915,6 +1935,12 @@ protected Database getDatabase(String dbName) throws SemanticException {
     return getDatabase(dbName, true);
   }
 
+  /**
+   * TODO: Once we confirm that no compatibility has been broken, we can remove these non-catalog APIs.
+   * @deprecated Replaced by
+   *     {@link BaseSemanticAnalyzer#getDatabase(String catalogName, String dbName, boolean throwException)}
+   * @return the database if it exists.
+   */
   protected Database getDatabase(String dbName, boolean throwException) throws SemanticException {
     Database database;
     try {
@@ -1928,6 +1954,20 @@ protected Database getDatabase(String dbName, boolean throwException) throws Sem
     return database;
   }
 
+  protected Database getDatabase(String catalogName, String dbName, boolean throwException) throws SemanticException {
+    Database database;
+    try {
+      catalogName = Objects.requireNonNullElse(catalogName, SessionState.get().getCurrentCatalog());
+      database = db.getDatabase(catalogName, dbName);
+    } catch (Exception e) {
+      throw new SemanticException(e.getMessage(), e);
+    }
+    if (database == null && throwException) {
+      throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(dbName));
+    }
+    return database;
+  }
+
   protected DataConnector getDataConnector(String dbName) throws SemanticException {
     return getDataConnector(dbName, true);
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
index 1d908bb1bdb5..ed6e331bd928 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
@@ -34,6 +34,7 @@ public enum HiveOperation {
   REPLSTATUS("REPLSTATUS", HiveParser.TOK_REPL_STATUS, new Privilege[]{Privilege.SELECT}, null),
   CREATECATALOG("CREATECATALOG", HiveParser.TOK_CREATECATALOG, null, new Privilege[]{Privilege.CREATE}),
   DROPCATALOG("DROPCATALOG", HiveParser.TOK_DROPCATALOG, null, new Privilege[]{Privilege.DROP}),
+  SWITCHCATALOG("SWITCHCATALOG", HiveParser.TOK_SWITCHCATALOG, null, null, true, false),
   CREATEDATABASE("CREATEDATABASE", HiveParser.TOK_CREATEDATABASE, null, new Privilege[]{Privilege.CREATE}),
   CREATEDATACONNECTOR("CREATEDATACONNECTOR", HiveParser.TOK_CREATEDATACONNECTOR, null, new Privilege[]{Privilege.CREATE}),
   DROPDATABASE("DROPDATABASE", HiveParser.TOK_DROPDATABASE, null, new Privilege[]{Privilege.DROP}),
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java b/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
index afc82639771a..a9e9590247ea 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
@@ -86,6 +86,8 @@ public static HiveCommand find(String[] command, boolean findOnlyForTesting) {
     } else if (command.length > 1 && "show".equalsIgnoreCase(command[0])
         && "processlist".equalsIgnoreCase(command[1])) {
       return PROCESSLIST;
+    } else if (command.length > 1 && "set".equalsIgnoreCase(command[0]) && "catalog".equalsIgnoreCase(command[1])) {
+      return null; // "SET CATALOG catalog_name" should be a SQLOperation instead of a HiveCommandOperation
     } else if (COMMANDS.contains(cmd)) {
       HiveCommand hiveCommand = HiveCommand.valueOf(cmd);
       if (findOnlyForTesting == hiveCommand.isOnlyForTesting()) {
diff --git a/ql/src/test/queries/clientpositive/catalog_database.q b/ql/src/test/queries/clientpositive/catalog_database.q
new file mode 100644
index 000000000000..2acb2ad9b6ae
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/catalog_database.q
@@ -0,0 +1,28 @@
+set hive.mapred.mode=nonstrict;
+set hive.support.concurrency = true;
+
+-- CREATE DATABASE in default catalog 'hive'
+CREATE DATABASE testdb;
+
+-- Check databases in default catalog 'hive',
+-- The list of databases in the catalog 'hive' should only contain the default and the testdb.
+SHOW DATABASES;
+
+-- CREATE a new catalog with comment
+CREATE CATALOG testcat LOCATION '/tmp/testcat' COMMENT 'Hive test catalog';
+
+-- Check catalogs list
+SHOW CATALOGS;
+
+-- Switch the catalog from hive to 'testcat'
+SET CATALOG testcat;
+
+-- CREATE DATABASE in the new catalog 'testcat'
+CREATE DATABASE testdb_new;
+
+-- Check databases in catalog 'testcat',
+-- The list of databases in the catalog 'testcat' should only contain the default and the testdb_new.
+SHOW DATABASES;
+
+-- Switch database by catalog.db pattern
+USE testcat.testdb_new;
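+
+-- At this point the session catalog is 'testcat' and the current database is testdb_new.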
diff --git a/ql/src/test/results/clientpositive/llap/catalog.q.out b/ql/src/test/results/clientpositive/llap/catalog.q.out
index 6f9ef138dcd0..97b04736b3a1 100644
--- a/ql/src/test/results/clientpositive/llap/catalog.q.out
+++ b/ql/src/test/results/clientpositive/llap/catalog.q.out
@@ -6,10 +6,11 @@ hive
 #### A masked pattern was here ####
 PREHOOK: type: CREATECATALOG
 PREHOOK: Output: catalog:test_cat
+PREHOOK: Output: hdfs://### HDFS PATH ###
 #### A masked pattern was here ####
 POSTHOOK: type: CREATECATALOG
 POSTHOOK: Output: catalog:test_cat
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 PREHOOK: query: DESC CATALOG test_cat
 PREHOOK: type: DESCCATALOG
 PREHOOK: Input: catalog:test_cat
@@ -21,10 +22,11 @@ Comment             Hive test catalog
 #### A masked pattern was here ####
 PREHOOK: type: CREATECATALOG
 PREHOOK: Output: catalog:test_cat
+PREHOOK: Output: hdfs://### HDFS PATH ###
 #### A masked pattern was here ####
 POSTHOOK: type: CREATECATALOG
 POSTHOOK: Output: catalog:test_cat
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 PREHOOK: query: SHOW CATALOGS
 PREHOOK: type: SHOWCATALOGS
 POSTHOOK: query: SHOW CATALOGS
 POSTHOOK: type: SHOWCATALOGS
@@ -47,10 +49,11 @@ hive
 #### A masked pattern was here ####
 PREHOOK: type: CREATECATALOG
 PREHOOK: Output: catalog:test_cat
+PREHOOK: Output: hdfs://### HDFS PATH ###
 #### A masked pattern was here ####
 POSTHOOK: type: CREATECATALOG
 POSTHOOK: Output: catalog:test_cat
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 PREHOOK: query: SHOW CATALOGS
 PREHOOK: type: SHOWCATALOGS
 POSTHOOK: query: SHOW CATALOGS
 POSTHOOK: type: SHOWCATALOGS
@@ -77,10 +80,11 @@ POSTHOOK: type: DROPCATALOG
 #### A masked pattern was here ####
 PREHOOK: type: CREATECATALOG
 PREHOOK: Output: catalog:test_cat
+PREHOOK: Output: hdfs://### HDFS PATH ###
 #### A masked pattern was here ####
 POSTHOOK: type: CREATECATALOG
 POSTHOOK: Output: catalog:test_cat
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 PREHOOK: query: SHOW CATALOGS
 PREHOOK: type: SHOWCATALOGS
 POSTHOOK: query: SHOW CATALOGS
 POSTHOOK: type: SHOWCATALOGS
@@ -103,10 +107,11 @@
 #### A masked pattern was here ####
 PREHOOK: type: ALTERCATALOG_LOCATION
 PREHOOK: Output: catalog:test_cat
+PREHOOK: Output: hdfs://### HDFS PATH ###
 #### A masked pattern was here ####
 POSTHOOK: type: ALTERCATALOG_LOCATION
 POSTHOOK: Output: catalog:test_cat
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 PREHOOK: query: DESC CATALOG EXTENDED test_cat
 PREHOOK: type: DESCCATALOG
 PREHOOK: Input: catalog:test_cat
diff --git a/ql/src/test/results/clientpositive/llap/catalog_database.q.out b/ql/src/test/results/clientpositive/llap/catalog_database.q.out
new file mode 100644
index 000000000000..5c93b6c18b6f
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/catalog_database.q.out
@@ -0,0 +1,50 @@
+PREHOOK: query: CREATE DATABASE testdb
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:testdb
+POSTHOOK: query: CREATE DATABASE testdb
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:testdb
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default
+testdb
+#### A masked pattern was here ####
+PREHOOK: type: CREATECATALOG
+PREHOOK: Output: catalog:testcat
+PREHOOK: Output: hdfs://### HDFS PATH ###
+#### A masked pattern was here ####
+POSTHOOK: type: CREATECATALOG
+POSTHOOK: Output: catalog:testcat
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+PREHOOK: query: SHOW CATALOGS
+PREHOOK: type: SHOWCATALOGS
+POSTHOOK: query: SHOW CATALOGS
+POSTHOOK: type: SHOWCATALOGS
+hive
+testcat
+PREHOOK: query: SET CATALOG testcat
+PREHOOK: type: SWITCHCATALOG
+PREHOOK: Input: catalog:testcat
+POSTHOOK: query: SET CATALOG testcat
+POSTHOOK: type: SWITCHCATALOG
+POSTHOOK: Input: catalog:testcat
+PREHOOK: query: CREATE DATABASE testdb_new
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:testdb_new
+POSTHOOK: query: CREATE DATABASE testdb_new
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:testdb_new
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default
+testdb_new
+PREHOOK: query: USE testcat.testdb_new
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:testdb_new
+POSTHOOK: query: USE testcat.testdb_new
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:testdb_new

From bf6c90b3c643e08bbfaabd6e5d4976e27ef8b00c Mon Sep 17 00:00:00 2001
From: zhangbutao
Date: Wed, 24 Sep 2025 18:09:08 +0800
Subject: [PATCH 02/15] Implement drop catalog.database syntax

---
 .../apache/hadoop/hive/ql/parse/HiveParser.g  |  4 +-
 .../database/drop/DropDatabaseAnalyzer.java   | 17 ++--
 .../ddl/database/drop/DropDatabaseDesc.java   | 15 +++-
 .../database/drop/DropDatabaseOperation.java  |  3 +-
 .../apache/hadoop/hive/ql/metadata/Hive.java  | 83 ++++++++++++++++++-
 .../queries/clientpositive/catalog_database.q | 10 ++-
 .../llap/catalog_database.q.out               | 13 +++
 7 files changed, 128 insertions(+), 17 deletions(-)
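This makes a database droppable through an explicitly qualified name, as
exercised by catalog_database.q below:

    DROP DATABASE testcat.testdb_new;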
pushMsg("drop database statement", state); } @after { popMsg(state); } - : KW_DROP (KW_DATABASE|KW_SCHEMA) ifExists? identifier restrictOrCascade? - -> ^(TOK_DROPDATABASE identifier ifExists? restrictOrCascade?) + : KW_DROP (KW_DATABASE|KW_SCHEMA) ifExists? databaseName restrictOrCascade? + -> ^(TOK_DROPDATABASE databaseName ifExists? restrictOrCascade?) ; databaseComment diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java index 6d01c2e88b30..fca15467e9a4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java @@ -18,9 +18,11 @@ package org.apache.hadoop.hive.ql.ddl.database.drop; +import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Function; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; @@ -51,12 +53,17 @@ public DropDatabaseAnalyzer(QueryState queryState) throws SemanticException { @Override public void analyzeInternal(ASTNode root) throws SemanticException { - String databaseName = unescapeIdentifier(root.getChild(0).getText()); + Pair catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0)); boolean ifExists = root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null; boolean cascade = root.getFirstChildWithType(HiveParser.TOK_CASCADE) != null; boolean isSoftDelete = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_ACID_LOCKLESS_READS_ENABLED); - Database database = getDatabase(databaseName, !ifExists); + String catalogName = catDbNamePair.getLeft(); + if (getCatalog(catalogName) == null) { + throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName); + } + String databaseName = catDbNamePair.getRight(); + Database database = getDatabase(catDbNamePair.getLeft(), catDbNamePair.getRight(), ifExists); if (database == null) { return; } @@ -72,7 +79,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException { HiveConf hiveConf = new HiveConf(conf); hiveConf.set("hive.metastore.client.filter.enabled", "false"); newDb = Hive.get(hiveConf); - List tables = newDb.getAllTableObjects(databaseName); + List
tables = newDb.getAllTableObjects(catalogName, databaseName); isDbLevelLock = !isSoftDelete || tables.stream().allMatch( table -> AcidUtils.isTableSoftDeleteEnabled(table, conf)); for (Table table : tables) { @@ -85,7 +92,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException { outputs.add(new WriteEntity(table, lockType)); } // fetch all the functions in the database - List functions = db.getFunctionsInDb(databaseName, ".*"); + List functions = db.getFunctionsInDb(catalogName, databaseName, ".*"); for (Function func: functions) { outputs.add(new WriteEntity(func, WriteEntity.WriteType.DDL_NO_LOCK)); } @@ -111,7 +118,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException { WriteEntity.WriteType.DDL_EXCL_WRITE : WriteEntity.WriteType.DDL_EXCLUSIVE; outputs.add(new WriteEntity(database, lockType)); } - DropDatabaseDesc desc = new DropDatabaseDesc(databaseName, ifExists, cascade, new ReplicationSpec()); + DropDatabaseDesc desc = new DropDatabaseDesc(catalogName, databaseName, ifExists, cascade, new ReplicationSpec()); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseDesc.java index c86922a23729..e436a43fd1aa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseDesc.java @@ -32,6 +32,7 @@ public class DropDatabaseDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; + private final String catalogName; private final String databaseName; private final boolean ifExists; private final boolean cascade; @@ -40,21 +41,27 @@ public class DropDatabaseDesc implements DDLDesc, Serializable { private boolean deleteData = true; public DropDatabaseDesc(String databaseName, boolean ifExists, ReplicationSpec replicationSpec) { - this(databaseName, ifExists, false, replicationSpec); + this(null, databaseName, ifExists, false, replicationSpec); //TODO check the actual catalog } - public DropDatabaseDesc(String databaseName, boolean ifExists, boolean cascade, ReplicationSpec replicationSpec) { + public DropDatabaseDesc(String catalogName, String databaseName, boolean ifExists, boolean cascade, ReplicationSpec replicationSpec) { + this.catalogName = catalogName; this.databaseName = databaseName; this.ifExists = ifExists; this.cascade = cascade; this.replicationSpec = replicationSpec; } - public DropDatabaseDesc(String databaseName, boolean ifExists, boolean cascade, boolean deleteData) { - this(databaseName, ifExists, cascade, null); + public DropDatabaseDesc(String catalogName, String databaseName, boolean ifExists, boolean cascade, boolean deleteData) { + this(catalogName, databaseName, ifExists, cascade, null); this.deleteData = deleteData; } + @Explain(displayName = "catalog", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getCatalogName() { + return catalogName; + } + @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getDatabaseName() { return databaseName; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java index ce3b5e514d8f..ed08a59babd5 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseOperation.java @@ -40,10 +40,11 @@ public DropDatabaseOperation(DDLOperationContext context, DropDatabaseDesc desc) @Override public int execute() throws HiveException { try { + String catName = desc.getCatalogName(); String dbName = desc.getDatabaseName(); ReplicationSpec replicationSpec = desc.getReplicationSpec(); if (replicationSpec.isInReplicationScope()) { - Database database = context.getDb().getDatabase(dbName); + Database database = context.getDb().getDatabase(catName, dbName); if (database == null || !replicationSpec.allowEventReplacementInto(database.getParameters())) { return 0; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index e5bcf4c15754..a7cbffaa9833 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -725,7 +725,7 @@ public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownD */ public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb, boolean cascade) throws HiveException, NoSuchObjectException { - dropDatabase(new DropDatabaseDesc(name, ignoreUnknownDb, cascade, deleteData)); + dropDatabase(new DropDatabaseDesc(getDefaultCatalog(conf) ,name, ignoreUnknownDb, cascade, deleteData)); //TODO check the actual catalog } public void dropDatabase(DropDatabaseDesc desc) @@ -737,7 +737,7 @@ public void dropDatabase(DropDatabaseDesc desc) .map(HiveTxnManager::getCurrentTxnId).orElse(0L); DropDatabaseRequest req = new DropDatabaseRequest(); - req.setCatalogName(SessionState.get().getCurrentCatalog()); + req.setCatalogName(Objects.requireNonNullElse(desc.getCatalogName(), SessionState.get().getCurrentCatalog())); req.setName(desc.getDatabaseName()); req.setIgnoreUnknownDb(desc.getIfExists()); req.setDeleteData(desc.isDeleteData()); @@ -1872,6 +1872,17 @@ public List
getAllTableObjects(String dbName) throws HiveException { return getTableObjects(dbName, ".*", null); } + /** + * Get all tables for the specified database. + * @param catName + * @param dbName + * @return List of all tables + * @throws HiveException + */ + public List
getAllTableObjects(String catName, String dbName) throws HiveException { + return getTableObjects(catName, dbName, ".*", null); + } + /** * Get all materialized view names for the specified database. * @param dbName @@ -1918,6 +1929,16 @@ public Table apply(org.apache.hadoop.hive.metastore.api.Table table) { } } + public List
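+  /**
+   * Get table objects matching the given pattern and type in the given catalog and database.
+   */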
+  public List<Table> getTableObjects(String catName, String dbName, String pattern, TableType tableType) throws HiveException {
+    try {
+      return Lists.transform(getMSC().getTables(catName, dbName, getTablesByType(catName, dbName, pattern, tableType), null),
+          Table::new
+      );
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+
   /**
    * Returns all existing tables from default database which match the given
    * pattern. The matching occurs as per Java regular expressions
@@ -2001,6 +2022,52 @@ public List<String> getTablesByType(String dbName, String pattern, TableType typ
     }
   }
 
+  /**
+   * Returns all existing tables of a type (VIRTUAL_VIEW|EXTERNAL_TABLE|MANAGED_TABLE) from the specified
+   * database which match the given pattern. The matching occurs as per Java regular expressions.
+   * @param catName catalog name to find the tables in. if null, uses the current catalog in this session.
+   * @param dbName Database name to find the tables in. if null, uses the current database in this session.
+   * @param pattern A pattern to match for the table names. If null, returns all names from this DB.
+   * @param type The type of tables to return. VIRTUAL_VIEWS for views. If null, returns all tables and views.
+   * @return list of table names that match the pattern.
+   * @throws HiveException
+   */
+  public List<String> getTablesByType(String catName, String dbName, String pattern, TableType type)
+      throws HiveException {
+    PerfLogger perfLogger = SessionState.getPerfLogger();
+    perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_TABLE);
+
+    if (catName == null) {
+      catName = SessionState.get().getCurrentCatalog();
+    }
+
+    if (dbName == null) {
+      dbName = SessionState.get().getCurrentDatabase();
+    }
+
+    try {
+      List<String> result;
+      if (type != null) {
+        if (pattern != null) {
+          result = getMSC().getTables(catName, dbName, pattern, type);
+        } else {
+          result = getMSC().getTables(catName, dbName, ".*", type);
+        }
+      } else {
+        if (pattern != null) {
+          result = getMSC().getTables(catName, dbName, pattern);
+        } else {
+          result = getMSC().getTables(catName, dbName, ".*");
+        }
+      }
+      return result;
+    } catch (Exception e) {
+      throw new HiveException(e);
+    } finally {
+      perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.HIVE_GET_TABLE, "HS2-cache");
+    }
+  }
+
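+  // For example (hypothetical names): getTablesByType("testcat", "testdb", ".*", TableType.MANAGED_TABLE)
+  // lists all managed tables of 'testdb' in catalog 'testcat'.
+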
   /**
    * Get the materialized views that have been enabled for rewriting from the
    * cache (registry). It will preprocess them to discard those that are
@@ -6467,6 +6534,18 @@ public List<Function> getFunctionsInDb(String dbName, String pattern) throws Hiv
     }
   }
 
+  public List<Function> getFunctionsInDb(String catName, String dbName, String pattern) throws HiveException {
+    try {
+      GetFunctionsRequest request = new GetFunctionsRequest(dbName);
+      request.setPattern(pattern);
+      request.setCatalogName(Objects.requireNonNullElse(catName, SessionState.get().getCurrentCatalog()));
+      request.setReturnNames(false);
+      return getMSC().getFunctionsRequest(request).getFunctions();
+    } catch (TException te) {
+      throw new HiveException(te);
+    }
+  }
+
   public void setMetaConf(String propName, String propValue) throws HiveException {
     try {
       getMSC().setMetaConf(propName, propValue);
diff --git a/ql/src/test/queries/clientpositive/catalog_database.q b/ql/src/test/queries/clientpositive/catalog_database.q
index 2acb2ad9b6ae..3a8e8bfbe16c 100644
--- a/ql/src/test/queries/clientpositive/catalog_database.q
+++ b/ql/src/test/queries/clientpositive/catalog_database.q
@@ -1,6 +1,3 @@
-set hive.mapred.mode=nonstrict;
-set hive.support.concurrency = true;
-
 -- CREATE DATABASE in default catalog 'hive'
 CREATE DATABASE testdb;
 
@@ -26,3 +23,10 @@ SHOW DATABASES;
 
 -- Switch database by catalog.db pattern
 USE testcat.testdb_new;
+
+-- Drop database by catalog.db pattern
+DROP DATABASE testcat.testdb_new;
+
+-- Check databases in catalog 'testcat',
+-- The list of databases in the catalog 'testcat' should only contain the default.
+SHOW DATABASES;
diff --git a/ql/src/test/results/clientpositive/llap/catalog_database.q.out b/ql/src/test/results/clientpositive/llap/catalog_database.q.out
index 5c93b6c18b6f..223b91d6595d 100644
--- a/ql/src/test/results/clientpositive/llap/catalog_database.q.out
+++ b/ql/src/test/results/clientpositive/llap/catalog_database.q.out
@@ -48,3 +48,16 @@ PREHOOK: Input: database:testdb_new
 POSTHOOK: query: USE testcat.testdb_new
 POSTHOOK: type: SWITCHDATABASE
 POSTHOOK: Input: database:testdb_new
+PREHOOK: query: DROP DATABASE testcat.testdb_new
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:testdb_new
+PREHOOK: Output: database:testdb_new
+POSTHOOK: query: DROP DATABASE testcat.testdb_new
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:testdb_new
+POSTHOOK: Output: database:testdb_new
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default

From 2bd7555c66b705bbd71030177fc350733a3ba13a Mon Sep 17 00:00:00 2001
From: zhangbutao
Date: Wed, 24 Sep 2025 20:29:20 +0800
Subject: [PATCH 03/15] Implement create catalog.database syntax

---
 .../apache/hadoop/hive/ql/parse/HiveParser.g  |  4 +-
 .../create/CreateDatabaseAnalyzer.java        | 16 ++++--
 .../database/create/CreateDatabaseDesc.java   | 13 +++--
 .../create/CreateDatabaseOperation.java       |  5 ++-
 .../database/drop/DropDatabaseAnalyzer.java   |  2 +-
 .../repl/bootstrap/load/LoadDatabase.java     |  2 +-
 .../apache/hadoop/hive/ql/metadata/Hive.java  |  4 +-
 .../load/message/CreateDatabaseHandler.java   |  3 +-
 .../queries/clientpositive/catalog_database.q | 15 +++++--
 .../llap/catalog_database.q.out               | 38 +++++++++------
 10 files changed, 65 insertions(+), 37 deletions(-)
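With this change a database can be created directly in another catalog, as
exercised by catalog_database.q below:

    CREATE DATABASE testcat.testdb_1;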
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 0603cb9e1b41..f0548e606ef6 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -1165,7 +1165,7 @@ createDatabaseStatement
 @after { popMsg(state); }
     : KW_CREATE (KW_DATABASE|KW_SCHEMA)
         ifNotExists?
-        name=identifier
+        name=databaseName
         databaseComment?
         dbLocation?
         dbManagedLocation?
         (KW_WITH KW_DBPROPERTIES dbprops=dbProperties)?
     | KW_CREATE KW_REMOTE (KW_DATABASE|KW_SCHEMA)
         ifNotExists?
-        name=identifier
+        name=databaseName
         databaseComment?
         dbConnectorName
         (KW_WITH KW_DBPROPERTIES dbprops=dbProperties)?
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java
index c7b04e9e1c13..2d7f5b9d4a52 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseAnalyzer.java
@@ -20,10 +20,12 @@
 
 import java.util.Map;
 
+import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.metastore.api.DataConnector;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.DatabaseType;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
@@ -47,7 +49,12 @@ public CreateDatabaseAnalyzer(QueryState queryState) throws SemanticException {
 
   @Override
   public void analyzeInternal(ASTNode root) throws SemanticException {
-    String databaseName = unescapeIdentifier(root.getChild(0).getText());
+    Pair<String, String> catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0));
+    String catalogName = catDbNamePair.getLeft();
+    if (catalogName != null && getCatalog(catalogName) == null) {
+      throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName);
+    }
+    String databaseName = catDbNamePair.getRight();
 
     boolean ifNotExists = false;
     String comment = null;
@@ -92,14 +99,15 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
       }
     }
 
-    if (ifNotExists && getDatabase(databaseName, false) != null) {
+    if (ifNotExists && getDatabase(catalogName, databaseName, false) != null) {
       return;
     }
 
     CreateDatabaseDesc desc = null;
     Database database = new Database(databaseName, comment, locationUri, props);
+    database.setCatalogName(catalogName);
     if (type.equalsIgnoreCase(DatabaseType.NATIVE.name())) {
-      desc = new CreateDatabaseDesc(databaseName, comment, locationUri, managedLocationUri, ifNotExists, props);
+      desc = new CreateDatabaseDesc(catalogName, databaseName, comment, locationUri, managedLocationUri, ifNotExists, props);
       database.setType(DatabaseType.NATIVE);
       // database = new Database(databaseName, comment, locationUri, props);
       if (managedLocationUri != null) {
@@ -109,7 +117,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
       String remoteDbName = databaseName;
       if (props != null && props.get("connector.remoteDbName") != null) // TODO finalize the property name
         remoteDbName = props.get("connector.remoteDbName");
-      desc = new CreateDatabaseDesc(databaseName, comment, locationUri, null, ifNotExists, props, type,
+      desc = new CreateDatabaseDesc(catalogName, databaseName, comment, locationUri, null, ifNotExists, props, type,
           connectorName, remoteDbName);
       database.setConnector_name(connectorName);
       database.setType(DatabaseType.REMOTE);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java
index f458cdc3356e..2d096d6c4eb7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseDesc.java
@@ -35,6 +35,7 @@
 public class CreateDatabaseDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
+  private final String catalogName;
   private final String databaseName;
   private final String comment;
   private final String locationUri;
@@ -45,13 +46,14 @@ public class CreateDatabaseDesc implements DDLDesc, Serializable {
   private final String remoteDbName;
   private final Map<String, String> dbProperties;
 
-  public CreateDatabaseDesc(String databaseName, String comment, String locationUri, String managedLocationUri,
+  public CreateDatabaseDesc(String catalogName, String databaseName, String comment, String locationUri, String managedLocationUri,
       boolean ifNotExists, Map<String, String> dbProperties) {
-    this(databaseName, comment, locationUri, managedLocationUri, ifNotExists, dbProperties, "NATIVE", null, null);
+    this(catalogName, databaseName, comment, locationUri, managedLocationUri, ifNotExists, dbProperties, "NATIVE", null, null);
   }
 
-  public CreateDatabaseDesc(String databaseName, String comment, String locationUri, String managedLocationUri,
+  public CreateDatabaseDesc(String catalogName, String databaseName, String comment, String locationUri, String managedLocationUri,
       boolean ifNotExists, Map<String, String> dbProperties, String dbtype, String connectorName, String remoteDbName) {
+    this.catalogName = catalogName;
     this.databaseName = databaseName;
     this.comment = comment;
     if (dbtype != null && dbtype.equalsIgnoreCase("REMOTE")) {
@@ -80,6 +82,11 @@ public Map<String, String> getDatabaseProperties() {
     return dbProperties;
   }
 
+  @Explain(displayName="catalogName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getCatalogName() {
+    return catalogName;
+  }
+
   @Explain(displayName="name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getName() {
     return databaseName;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
index caff77fe7008..e8beba96430f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/create/CreateDatabaseOperation.java
@@ -47,6 +47,7 @@ public CreateDatabaseOperation(DDLOperationContext context, CreateDatabaseDesc d
   public int execute() throws HiveException {
     Database database = new Database(desc.getName(), desc.getComment(),
         desc.getLocationUri(), desc.getDatabaseProperties());
+    database.setCatalogName(desc.getCatalogName());
     database.setOwnerName(SessionState.getUserFromAuthenticator());
     database.setOwnerType(PrincipalType.USER);
     database.setType(desc.getDatabaseType());
@@ -55,12 +56,12 @@ public int execute() throws HiveException {
       if (desc.getManagedLocationUri() != null) {
         database.setManagedLocationUri(desc.getManagedLocationUri());
       }
-      makeLocationQualified(database); // TODO add catalog prefix for db location
+      makeLocationQualified(database); // TODO catalog. Add catalog prefix for location
       if (database.getLocationUri().equalsIgnoreCase(database.getManagedLocationUri())) {
         throw new HiveException("Managed and external locations for database cannot be the same");
       }
     } else if (desc.getDatabaseType() == DatabaseType.REMOTE) {
-      makeLocationQualified(database); // TODO add catalog prefix for db location
+      makeLocationQualified(database); // TODO catalog. Add catalog prefix for location
       database.setConnector_name(desc.getConnectorName());
       database.setRemote_dbname(desc.getRemoteDbName());
     } else { // should never be here
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java
index fca15467e9a4..1b1359656910 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java
@@ -59,7 +59,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
 
     String catalogName = catDbNamePair.getLeft();
-    if (getCatalog(catalogName) == null) {
+    if (catalogName != null && getCatalog(catalogName) == null) {
       throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName);
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
index 76ff8db26900..e2d215b1f20d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
@@ -150,7 +150,7 @@ private boolean isDbEmpty(String dbName) throws HiveException {
   private Task<?> createDbTask(Database dbObj) throws MetaException {
     // note that we do not set location - for repl load, we want that auto-created.
-    CreateDatabaseDesc createDbDesc = new CreateDatabaseDesc(dbObj.getName(), dbObj.getDescription(),
+    CreateDatabaseDesc createDbDesc = new CreateDatabaseDesc(dbObj.getCatalogName(), dbObj.getName(), dbObj.getDescription(),
         getDbLocation(dbObj), getDbManagedLocation(dbObj), false, updateDbProps(dbObj, context.dumpDirectory));
     // If it exists, we want this to be an error condition. Repl Load is not intended to replace a
     // db.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index a7cbffaa9833..e73924eae395 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -668,7 +668,7 @@ public void dropCatalog(String catName, boolean ignoreUnknownCat)
   public void createDatabase(Database db, boolean ifNotExist)
       throws AlreadyExistsException, HiveException {
     try {
-      db.setCatalogName(SessionState.get().getCurrentCatalog());
+      db.setCatalogName(Objects.requireNonNullElse(db.getCatalogName(), SessionState.get().getCurrentCatalog()));
       getMSC().createDatabase(db);
     } catch (AlreadyExistsException e) {
       if (!ifNotExist) {
@@ -725,7 +725,7 @@ public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownD
    */
   public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb, boolean cascade)
       throws HiveException, NoSuchObjectException {
-    dropDatabase(new DropDatabaseDesc(getDefaultCatalog(conf), name, ignoreUnknownDb, cascade, deleteData)); //TODO check the actual catalog
+    dropDatabase(new DropDatabaseDesc(getDefaultCatalog(conf), name, ignoreUnknownDb, cascade, deleteData)); // TODO catalog. check the actual catalog
   }
 
   public void dropDatabase(DropDatabaseDesc desc)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java
index cf7879875a71..7c222ef7c86b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java
@@ -56,9 +56,10 @@ public List<Task<?>> handle(Context context)
     Database db = metaData.getDatabase();
     String destinationDBName =
         context.dbName == null ? db.getName() : context.dbName;
+    String destinationCatalogName = db.getCatalogName(); // TODO catalog. Need to double check the catalog here.
     CreateDatabaseDesc createDatabaseDesc =
-        new CreateDatabaseDesc(destinationDBName, db.getDescription(), null, null, true, db.getParameters());
+        new CreateDatabaseDesc(destinationCatalogName, destinationDBName, db.getDescription(), null, null, true, db.getParameters());
     Task<?> createDBTask = TaskFactory.get(
         new DDLWork(new HashSet<>(), new HashSet<>(), createDatabaseDesc, true, context.getDumpDirectory(),
             context.getMetricCollector()), context.hiveConf);
diff --git a/ql/src/test/queries/clientpositive/catalog_database.q b/ql/src/test/queries/clientpositive/catalog_database.q
index 3a8e8bfbe16c..4ce9cfa6acb8 100644
--- a/ql/src/test/queries/clientpositive/catalog_database.q
+++ b/ql/src/test/queries/clientpositive/catalog_database.q
@@ -11,22 +11,25 @@ CREATE CATALOG testcat LOCATION '/tmp/testcat' COMMENT 'Hive test catalog';
 -- Check catalogs list
 SHOW CATALOGS;
 
+-- CREATE DATABASE in new catalog testcat by catalog.db pattern
+CREATE DATABASE testcat.testdb_1;
+
 -- Switch the catalog from hive to 'testcat'
 SET CATALOG testcat;
 
--- CREATE DATABASE in the new catalog 'testcat'
-CREATE DATABASE testdb_new;
+-- CREATE DATABASE in new catalog testcat
+CREATE DATABASE testdb_2;
 
 -- Check databases in catalog 'testcat',
--- The list of databases in the catalog 'testcat' should only contain the default and the testdb_new.
+-- The list of databases in the catalog 'testcat' should contain default, testdb_1 and testdb_2.
 SHOW DATABASES;
 
 -- Switch database by catalog.db pattern
-USE testcat.testdb_new;
+USE testcat.testdb_1;
 
 -- Drop database by catalog.db pattern
-DROP DATABASE testcat.testdb_new;
+DROP DATABASE testcat.testdb_1;
 
 -- Check databases in catalog 'testcat',
--- The list of databases in the catalog 'testcat' should only contain the default.
+-- The list of databases in the catalog 'testcat' should contain default and testdb_2.
 SHOW DATABASES;
diff --git a/ql/src/test/results/clientpositive/llap/catalog_database.q.out b/ql/src/test/results/clientpositive/llap/catalog_database.q.out
index 223b91d6595d..ddf6a33f59f3 100644
--- a/ql/src/test/results/clientpositive/llap/catalog_database.q.out
+++ b/ql/src/test/results/clientpositive/llap/catalog_database.q.out
@@ -24,40 +24,48 @@ POSTHOOK: query: SHOW CATALOGS
 POSTHOOK: type: SHOWCATALOGS
 hive
 testcat
+PREHOOK: query: CREATE DATABASE testcat.testdb_1
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:testdb_1
+POSTHOOK: query: CREATE DATABASE testcat.testdb_1
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:testdb_1
 PREHOOK: query: SET CATALOG testcat
 PREHOOK: type: SWITCHCATALOG
 PREHOOK: Input: catalog:testcat
 POSTHOOK: query: SET CATALOG testcat
 POSTHOOK: type: SWITCHCATALOG
 POSTHOOK: Input: catalog:testcat
-PREHOOK: query: CREATE DATABASE testdb_new
+PREHOOK: query: CREATE DATABASE testdb_2
 PREHOOK: type: CREATEDATABASE
-PREHOOK: Output: database:testdb_new
-POSTHOOK: query: CREATE DATABASE testdb_new
+PREHOOK: Output: database:testdb_2
+POSTHOOK: query: CREATE DATABASE testdb_2
 POSTHOOK: type: CREATEDATABASE
-POSTHOOK: Output: database:testdb_new
+POSTHOOK: Output: database:testdb_2
 PREHOOK: query: SHOW DATABASES
 PREHOOK: type: SHOWDATABASES
 POSTHOOK: query: SHOW DATABASES
 POSTHOOK: type: SHOWDATABASES
 default
-testdb_new
-PREHOOK: query: USE testcat.testdb_new
+testdb_1
+testdb_2
+PREHOOK: query: USE testcat.testdb_1
 PREHOOK: type: SWITCHDATABASE
-PREHOOK: Input: database:testdb_new
-POSTHOOK: query: USE testcat.testdb_new
+PREHOOK: Input: database:testdb_1
+POSTHOOK: query: USE testcat.testdb_1
 POSTHOOK: type: SWITCHDATABASE
-POSTHOOK: Input: database:testdb_new
-PREHOOK: query: DROP DATABASE testcat.testdb_new
+POSTHOOK: Input: database:testdb_1
+PREHOOK: query: DROP DATABASE testcat.testdb_1
 PREHOOK: type: DROPDATABASE
-PREHOOK: Input: database:testdb_new
-PREHOOK: Output: database:testdb_new
-POSTHOOK: query: DROP DATABASE testcat.testdb_new
+PREHOOK: Input: database:testdb_1
+PREHOOK: Output: database:testdb_1
+POSTHOOK: query: DROP DATABASE testcat.testdb_1
 POSTHOOK: type: DROPDATABASE
-POSTHOOK: Input: database:testdb_new
-POSTHOOK: Output: database:testdb_new
+POSTHOOK: Input: database:testdb_1
+POSTHOOK: Output: database:testdb_1
 PREHOOK: query: SHOW DATABASES
 PREHOOK: type: SHOWDATABASES
 POSTHOOK: query: SHOW DATABASES
 POSTHOOK: type: SHOWDATABASES
 default
+testdb_2

From 06335d9a30ef02f2de8cf136aabd4d5b0f2b3fc2 Mon Sep 17 00:00:00 2001
From: zhangbutao
Date: Thu, 25 Sep 2025 13:55:23 +0800
Subject: [PATCH 04/15] Optimize use catalog.db pattern

---
 .../database/use/SwitchDatabaseAnalyzer.java  |  2 +-
 .../ddl/database/use/SwitchDatabaseDesc.java  |  9 ++++-
 .../database/use/SwitchDatabaseOperation.java | 10 +++++--
 .../apache/hadoop/hive/ql/metadata/Hive.java  | 17 ++++++++++-
 .../queries/clientpositive/catalog_database.q | 15 ++++++--
 .../llap/catalog_database.q.out               | 30 +++++++++++++++++
 6 files changed, 76 insertions(+), 7 deletions(-)
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java index 8b27cec6a21e..b8b5474fcd13 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseAnalyzer.java @@ -47,7 +47,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException { readEntity.noLockNeeded(); inputs.add(readEntity); - SwitchDatabaseDesc desc = new SwitchDatabaseDesc(catDbNamePair.getRight()); + SwitchDatabaseDesc desc = new SwitchDatabaseDesc(catDbNamePair.getLeft(), catDbNamePair.getRight()); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseDesc.java index 57923d8e53ba..447e67f4c4e0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseDesc.java @@ -31,9 +31,11 @@ public class SwitchDatabaseDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; + private final String catalogName; private final String databaseName; - public SwitchDatabaseDesc(String databaseName) { + public SwitchDatabaseDesc(String catalogName, String databaseName) { + this.catalogName = catalogName; this.databaseName = databaseName; } @@ -41,4 +43,9 @@ public SwitchDatabaseDesc(String databaseName) { public String getDatabaseName() { return databaseName; } + + @Explain(displayName = "catalogName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getCatalogName() { + return catalogName; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseOperation.java index 8a3c863825d5..0c947d459f6e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/use/SwitchDatabaseOperation.java @@ -38,15 +38,21 @@ public SwitchDatabaseOperation(DDLOperationContext context, SwitchDatabaseDesc d @Override public int execute() throws HiveException { + String catName = desc.getCatalogName(); + if (catName != null && context.getDb().getCatalog(catName) != null) { + SessionState.get().setCurrentCatalog(catName); + } else if (catName != null) { + throw new HiveException(ErrorMsg.CATALOG_NOT_EXISTS, catName); + } String dbName = desc.getDatabaseName(); - if (!context.getDb().databaseExists(dbName)) { + if (!context.getDb().databaseExists(catName, dbName)) { throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName); } SessionState.get().setCurrentDatabase(dbName); // set database specific parameters - Database database = context.getDb().getDatabase(dbName); + Database database = context.getDb().getDatabase(catName, dbName); assert(database != null); Map dbParams = database.getParameters(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index e73924eae395..7b161c8d1562 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -2601,6 +2601,7 @@ public void alterCatalog(String catName, Catalog catalog) throws 
HiveException { } /** + * @deprecated please use {@link #databaseExists(String, String)} * Query metadata to see if a database with the given name already exists. * * @param dbName @@ -2612,6 +2613,19 @@ public boolean databaseExists(String dbName) throws HiveException { return getDatabase(dbName) != null; } + /** + * Query metadata to see if a database with the given name already exists. + * + * @param catName + * @param dbName + * @return true if a database with the given name already exists, false if it + * does not exist. + * @throws HiveException + */ + public boolean databaseExists(String catName, String dbName) throws HiveException { + return getDatabase(catName, dbName) != null; + } + /** * Get the database by name. * @param dbName the name of the database. @@ -2622,7 +2636,7 @@ public Database getDatabase(String dbName) throws HiveException { PerfLogger perfLogger = SessionState.getPerfLogger(); perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_DATABASE); try { - return getMSC().getDatabase(SessionState.get().getCurrentCatalog(), dbName); + return getMSC().getDatabase(dbName); } catch (NoSuchObjectException e) { return null; } catch (Exception e) { @@ -2643,6 +2657,7 @@ public Database getDatabase(String catName, String dbName) throws HiveException PerfLogger perfLogger = SessionState.getPerfLogger(); perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_DATABASE_2); try { + catName = Objects.requireNonNullElse(catName, SessionState.get().getCurrentCatalog()); return getMSC().getDatabase(catName, dbName); } catch (NoSuchObjectException e) { return null; diff --git a/ql/src/test/queries/clientpositive/catalog_database.q b/ql/src/test/queries/clientpositive/catalog_database.q index 4ce9cfa6acb8..399e056b52c4 100644 --- a/ql/src/test/queries/clientpositive/catalog_database.q +++ b/ql/src/test/queries/clientpositive/catalog_database.q @@ -17,11 +17,21 @@ CREATE DATABASE testcat.testdb_1; -- Switch the catalog from hive to 'testcat' SET CATALOG testcat; +-- Check the current catalog, should be testcat. +select current_catalog(); + +-- Switch database by catalog.db pattern, and the catalog is also changed. +USE hive.default; + +-- Check the current catalog, should be hive. +select current_catalog(); + -- CREATE DATABASE in new catalog testcat +SET CATALOG testcat; CREATE DATABASE testdb_2; -- Check databases in catalog 'testcat', --- The list of databases in the catalog 'hive' should contain default and testdb_1 and testdb_2. +-- The list of databases in the catalog 'testcat' should contain default and testdb_1 and testdb_2. SHOW DATABASES; -- Switch database by catalog.db pattern @@ -31,5 +41,6 @@ USE testcat.testdb_1; DROP DATABASE testcat.testdb_1; -- Check databases in catalog 'testcat', --- The list of databases in the catalog 'hive' should contain default and testdb_2.
SHOW DATABASES; + diff --git a/ql/src/test/results/clientpositive/llap/catalog_database.q.out b/ql/src/test/results/clientpositive/llap/catalog_database.q.out index ddf6a33f59f3..acf98417a7e9 100644 --- a/ql/src/test/results/clientpositive/llap/catalog_database.q.out +++ b/ql/src/test/results/clientpositive/llap/catalog_database.q.out @@ -36,6 +36,36 @@ PREHOOK: Input: catalog:testcat POSTHOOK: query: SET CATALOG testcat POSTHOOK: type: SWITCHCATALOG POSTHOOK: Input: catalog:testcat +PREHOOK: query: select current_catalog() +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: hdfs://### HDFS PATH ### +POSTHOOK: query: select current_catalog() +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: hdfs://### HDFS PATH ### +testcat +PREHOOK: query: USE hive.default +PREHOOK: type: SWITCHDATABASE +PREHOOK: Input: database:default +POSTHOOK: query: USE hive.default +POSTHOOK: type: SWITCHDATABASE +POSTHOOK: Input: database:default +PREHOOK: query: select current_catalog() +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: hdfs://### HDFS PATH ### +POSTHOOK: query: select current_catalog() +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: hdfs://### HDFS PATH ### +hive +PREHOOK: query: SET CATALOG testcat +PREHOOK: type: SWITCHCATALOG +PREHOOK: Input: catalog:testcat +POSTHOOK: query: SET CATALOG testcat +POSTHOOK: type: SWITCHCATALOG +POSTHOOK: Input: catalog:testcat PREHOOK: query: CREATE DATABASE testdb_2 PREHOOK: type: CREATEDATABASE PREHOOK: Output: database:testdb_2 From 4d48164a131b89ce833839c14972dbeebcdf7f51 Mon Sep 17 00:00:00 2001 From: zhangbutao Date: Fri, 26 Sep 2025 13:58:54 +0800 Subject: [PATCH 05/15] Optimize drop catalog.database --- .../hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java | 2 +- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java index 1b1359656910..5c39700718df 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/drop/DropDatabaseAnalyzer.java @@ -63,7 +63,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException { throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName); } String databaseName = catDbNamePair.getRight(); - Database database = getDatabase(catDbNamePair.getLeft(), catDbNamePair.getRight(), ifExists); + Database database = getDatabase(catDbNamePair.getLeft(), catDbNamePair.getRight(), !ifExists); if (database == null) { return; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 7b161c8d1562..732a0283554f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -2038,7 +2038,7 @@ public List getTablesByType(String catName, String dbName, String patter perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_TABLE); if (catName == null) { - dbName = SessionState.get().getCurrentCatalog(); + catName = SessionState.get().getCurrentCatalog(); } if (dbName == null) { From 31d19f17198a74cde3808373d8e2875bf2c4dced Mon Sep 17 00:00:00 2001 From: zhangbutao Date: Sat, 27 Sep 2025 10:21:50 
+0800 Subject: [PATCH 06/15] Fix possible null when invoking SessionState.get() --- .../apache/hadoop/hive/ql/metadata/Hive.java | 33 +++++++++++-------- .../hadoop/hive/ql/metadata/HiveUtils.java | 9 +++++ 2 files changed, 29 insertions(+), 13 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 732a0283554f..3d457f01d7b2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -222,7 +222,6 @@ import java.util.List; import java.util.Map.Entry; import java.util.Map; -import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.Callable; @@ -668,7 +667,9 @@ public void dropCatalog(String catName, boolean ignoreUnknownCat) public void createDatabase(Database db, boolean ifNotExist) throws AlreadyExistsException, HiveException { try { - db.setCatalogName(Objects.requireNonNullElse(db.getCatalogName(), SessionState.get().getCurrentCatalog())); + if (db.getCatalogName() == null) { + db.setCatalogName(HiveUtils.getCurrentCatalogOrDefault(conf)); + } getMSC().createDatabase(db); } catch (AlreadyExistsException e) { if (!ifNotExist) { @@ -737,7 +738,7 @@ public void dropDatabase(DropDatabaseDesc desc) .map(HiveTxnManager::getCurrentTxnId).orElse(0L); DropDatabaseRequest req = new DropDatabaseRequest(); - req.setCatalogName(Objects.requireNonNullElse(desc.getCatalogName(), SessionState.get().getCurrentCatalog())); + req.setCatalogName(Optional.ofNullable(desc.getCatalogName()).orElse(HiveUtils.getCurrentCatalogOrDefault(conf))); req.setName(desc.getDatabaseName()); req.setIgnoreUnknownDb(desc.getIfExists()); req.setDeleteData(desc.isDeleteData()); @@ -1430,7 +1431,9 @@ public void createTable(Table tbl, boolean ifNotExists, } public void createTable(Table tbl, boolean ifNotExists) throws HiveException { - tbl.setCatalogName(Objects.requireNonNullElse(tbl.getCatName(), SessionState.get().getCurrentCatalog())); + if (tbl.getCatalogName() == null) { + tbl.setCatalogName(HiveUtils.getCurrentCatalogOrDefault(conf)); + } createTable(tbl, ifNotExists, null, null, null, null, null, null); } @@ -1466,7 +1469,9 @@ public void dropTable(Table table, boolean ifPurge) throws HiveException { long txnId = Optional.ofNullable(SessionState.get()) .map(ss -> ss.getTxnMgr().getCurrentTxnId()).orElse(0L); table.getTTable().setTxnId(txnId); - table.setCatalogName(Objects.requireNonNullElse(table.getCatName(), SessionState.get().getCurrentCatalog())); + if (table.getCatName() == null) { + table.setCatalogName(HiveUtils.getCurrentCatalogOrDefault(conf)); + } dropTable(table.getTTable(), !tableWithSuffix, true, ifPurge); } @@ -2003,15 +2008,15 @@ public List getTablesByType(String dbName, String pattern, TableType typ List result; if (type != null) { if (pattern != null) { - result = getMSC().getTables(SessionState.get().getCurrentCatalog(), dbName, pattern, type); + result = getMSC().getTables(dbName, pattern, type); } else { - result = getMSC().getTables(SessionState.get().getCurrentCatalog(), dbName, ".*", type); + result = getMSC().getTables(dbName, ".*", type); } } else { if (pattern != null) { - result = getMSC().getTables(SessionState.get().getCurrentCatalog(), dbName, pattern); + result = getMSC().getTables(dbName, pattern); } else { - result = getMSC().getTables(SessionState.get().getCurrentCatalog(), dbName, ".*"); + result = getMSC().getTables(dbName, ".*"); } } return result; @@ -2038,7 +2043,7 @@ 
public List getTablesByType(String catName, String dbName, String patter perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_TABLE); if (catName == null) { - catName = SessionState.get().getCurrentCatalog(); + catName = HiveUtils.getCurrentCatalogOrDefault(conf); } if (dbName == null) { @@ -2516,7 +2521,7 @@ public List getMaterializedViewsByAST( */ public List getAllDatabases() throws HiveException { try { - return getMSC().getAllDatabases(SessionState.get().getCurrentCatalog()); + return getMSC().getAllDatabases(HiveUtils.getCurrentCatalogOrDefault(conf)); } catch (Exception e) { throw new HiveException(e); } @@ -2657,7 +2662,9 @@ public Database getDatabase(String catName, String dbName) throws HiveException PerfLogger perfLogger = SessionState.getPerfLogger(); perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_DATABASE_2); try { - catName = Objects.requireNonNullElse(catName, SessionState.get().getCurrentCatalog()); + if (catName == null) { + catName = HiveUtils.getCurrentCatalogOrDefault(conf); + } return getMSC().getDatabase(catName, dbName); } catch (NoSuchObjectException e) { return null; @@ -6553,7 +6560,7 @@ public List getFunctionsInDb(String catName, String dbName, String pat try { GetFunctionsRequest request = new GetFunctionsRequest(dbName); request.setPattern(pattern); - request.setCatalogName(Objects.requireNonNullElse(catName, SessionState.get().getCurrentCatalog())); + request.setCatalogName(Optional.ofNullable(catName).orElse(HiveUtils.getCurrentCatalogOrDefault(conf))); request.setReturnNames(false); return getMSC().getFunctionsRequest(request).getFunctions(); } catch (TException te) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java index ccc97614d73f..bf42a5c3fa43 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hive.ql.parse.Quotation; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.parse.UnparseTranslator; +import org.apache.hadoop.hive.ql.session.SessionState; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; @@ -60,6 +61,8 @@ import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; +import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog; + /** * General collection of helper functions. * @@ -543,4 +546,10 @@ public static String getLowerCaseTableName(String refName) { } return refName.toLowerCase(); } + + public static String getCurrentCatalogOrDefault(Configuration conf) { + return SessionState.get() != null ? 
+ SessionState.get().getCurrentCatalog() : + getDefaultCatalog(conf); + } } From f1ddb46bb87299d69c3322ddf2616b2128106b6a Mon Sep 17 00:00:00 2001 From: zhangbutao Date: Sat, 27 Sep 2025 17:41:52 +0800 Subject: [PATCH 07/15] Fix failed tests --- .../authorization/plugin/HiveOperationType.java | 1 + .../plugin/sqlstd/Operation2Privilege.java | 1 + .../apache/hadoop/hive/ql/metadata/TestHive.java | 2 +- .../queries/clientpositive/catalog_database.q | 2 ++ .../incorrectly_quoted_insert.q.out | 2 +- .../clientnegative/table_create_with_dot.q.out | 2 +- .../results/clientpositive/llap/catalog.q.out | 15 +++++---------- .../clientpositive/llap/catalog_database.q.out | 11 +++++------ 8 files changed, 17 insertions(+), 19 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java index 6c84e31fc9ef..8217be9d844b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java @@ -37,6 +37,7 @@ public enum HiveOperationType { CREATEDATABASE, CREATEDATACONNECTOR, DROPCATALOG, + SWITCHCATALOG, DROPDATABASE, DROPDATACONNECTOR, SWITCHDATABASE, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java index d5869c2f4f5a..d7f1420330d0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java @@ -481,6 +481,7 @@ public HivePrivilegeObjectType getObjectType() { op2Priv.put(HiveOperationType.ALTERCATALOG_LOCATION, PrivRequirement.newIOPrivRequirement(null, ADMIN_PRIV_AR)); op2Priv.put(HiveOperationType.DESCCATALOG, PrivRequirement.newIOPrivRequirement(null, null)); op2Priv.put(HiveOperationType.SHOWCATALOGS, PrivRequirement.newIOPrivRequirement(null, null)); + op2Priv.put(HiveOperationType.SWITCHCATALOG, PrivRequirement.newIOPrivRequirement(null, null)); } /** diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java index 0fb4b06611e5..b94657f716b3 100755 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java @@ -324,7 +324,7 @@ public void testMetaStoreApiTiming() throws Throwable { hm.getAllDatabases(); hm.dumpAndClearMetaCallTiming("test"); String logStr = appender.getOutput(); - String expectedString = "getAllDatabases_()="; + String expectedString = "getAllDatabases_(String)="; Assert.assertTrue(logStr + " should contain <" + expectedString, logStr.contains(expectedString)); diff --git a/ql/src/test/queries/clientpositive/catalog_database.q b/ql/src/test/queries/clientpositive/catalog_database.q index 399e056b52c4..05830a0ffce8 100644 --- a/ql/src/test/queries/clientpositive/catalog_database.q +++ b/ql/src/test/queries/clientpositive/catalog_database.q @@ -1,3 +1,5 @@ +-- SORT_QUERY_RESULTS + -- CREATE DATABASE in default catalog 'hive' CREATE DATABASE testdb; diff --git a/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out b/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out index 7b476d5130a0..d81c7a14a5c6 100644 --- 
a/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out +++ b/ql/src/test/results/clientnegative/incorrectly_quoted_insert.q.out @@ -26,4 +26,4 @@ POSTHOOK: query: create table t2(id int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:tdb POSTHOOK: Output: tdb@t2 -FAILED: SemanticException Line 2:20 Table or database name may not contain dot(.) character 'tdb.t1' +FAILED: SemanticException Line 2:20 Catalog or table or database name may not contain dot(.) character 'tdb.t1' diff --git a/ql/src/test/results/clientnegative/table_create_with_dot.q.out b/ql/src/test/results/clientnegative/table_create_with_dot.q.out index 99cdf0cf844a..06c5cc53d7c6 100644 --- a/ql/src/test/results/clientnegative/table_create_with_dot.q.out +++ b/ql/src/test/results/clientnegative/table_create_with_dot.q.out @@ -4,4 +4,4 @@ PREHOOK: Output: database:asd POSTHOOK: query: create database asd POSTHOOK: type: CREATEDATABASE POSTHOOK: Output: database:asd -FAILED: SemanticException Line 2:13 Table or database name may not contain dot(.) character 'asd.tbl' +FAILED: SemanticException Line 2:13 Catalog or table or database name may not contain dot(.) character 'asd.tbl' diff --git a/ql/src/test/results/clientpositive/llap/catalog.q.out b/ql/src/test/results/clientpositive/llap/catalog.q.out index 97b04736b3a1..6f9ef138dcd0 100644 --- a/ql/src/test/results/clientpositive/llap/catalog.q.out +++ b/ql/src/test/results/clientpositive/llap/catalog.q.out @@ -6,11 +6,10 @@ hive #### A masked pattern was here #### PREHOOK: type: CREATECATALOG PREHOOK: Output: catalog:test_cat -PREHOOK: Output: hdfs://### HDFS PATH ### #### A masked pattern was here #### POSTHOOK: type: CREATECATALOG POSTHOOK: Output: catalog:test_cat -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### PREHOOK: query: DESC CATALOG test_cat PREHOOK: type: DESCCATALOG PREHOOK: Input: catalog:test_cat @@ -22,11 +21,10 @@ Comment Hive test catalog #### A masked pattern was here #### PREHOOK: type: CREATECATALOG PREHOOK: Output: catalog:test_cat -PREHOOK: Output: hdfs://### HDFS PATH ### #### A masked pattern was here #### POSTHOOK: type: CREATECATALOG POSTHOOK: Output: catalog:test_cat -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### PREHOOK: query: SHOW CATALOGS PREHOOK: type: SHOWCATALOGS POSTHOOK: query: SHOW CATALOGS @@ -49,11 +47,10 @@ hive #### A masked pattern was here #### PREHOOK: type: CREATECATALOG PREHOOK: Output: catalog:test_cat -PREHOOK: Output: hdfs://### HDFS PATH ### #### A masked pattern was here #### POSTHOOK: type: CREATECATALOG POSTHOOK: Output: catalog:test_cat -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### PREHOOK: query: SHOW CATALOGS PREHOOK: type: SHOWCATALOGS POSTHOOK: query: SHOW CATALOGS @@ -80,11 +77,10 @@ POSTHOOK: type: DROPCATALOG #### A masked pattern was here #### PREHOOK: type: CREATECATALOG PREHOOK: Output: catalog:test_cat -PREHOOK: Output: hdfs://### HDFS PATH ### #### A masked pattern was here #### POSTHOOK: type: CREATECATALOG POSTHOOK: Output: catalog:test_cat -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### PREHOOK: query: SHOW CATALOGS PREHOOK: type: SHOWCATALOGS POSTHOOK: query: SHOW CATALOGS @@ -107,11 +103,10 @@ POSTHOOK: type: SHOWCATALOGS #### A masked pattern was here #### PREHOOK: type: ALTERCATALOG_LOCATION PREHOOK: Output: catalog:test_cat -PREHOOK: Output: hdfs://### HDFS PATH ### #### A masked pattern was here #### POSTHOOK: type: ALTERCATALOG_LOCATION POSTHOOK: 
Output: catalog:test_cat -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### PREHOOK: query: DESC CATALOG EXTENDED test_cat PREHOOK: type: DESCCATALOG PREHOOK: Input: catalog:test_cat diff --git a/ql/src/test/results/clientpositive/llap/catalog_database.q.out b/ql/src/test/results/clientpositive/llap/catalog_database.q.out index acf98417a7e9..f5b5d07d4aed 100644 --- a/ql/src/test/results/clientpositive/llap/catalog_database.q.out +++ b/ql/src/test/results/clientpositive/llap/catalog_database.q.out @@ -13,11 +13,10 @@ testdb #### A masked pattern was here #### PREHOOK: type: CREATECATALOG PREHOOK: Output: catalog:testcat -PREHOOK: Output: hdfs://### HDFS PATH ### #### A masked pattern was here #### POSTHOOK: type: CREATECATALOG POSTHOOK: Output: catalog:testcat -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### PREHOOK: query: SHOW CATALOGS PREHOOK: type: SHOWCATALOGS POSTHOOK: query: SHOW CATALOGS @@ -39,11 +38,11 @@ POSTHOOK: Input: catalog:testcat PREHOOK: query: select current_catalog() PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select current_catalog() POSTHOOK: type: QUERY POSTHOOK: Input: _dummy_database@_dummy_table -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### testcat PREHOOK: query: USE hive.default PREHOOK: type: SWITCHDATABASE @@ -54,11 +53,11 @@ POSTHOOK: Input: database:default PREHOOK: query: select current_catalog() PREHOOK: type: QUERY PREHOOK: Input: _dummy_database@_dummy_table -PREHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### POSTHOOK: query: select current_catalog() POSTHOOK: type: QUERY POSTHOOK: Input: _dummy_database@_dummy_table -POSTHOOK: Output: hdfs://### HDFS PATH ### +#### A masked pattern was here #### hive PREHOOK: query: SET CATALOG testcat PREHOOK: type: SWITCHCATALOG From 8870e65effd04ec48609ddada068bb531f0e267b Mon Sep 17 00:00:00 2001 From: zhangbutao Date: Sun, 28 Sep 2025 10:17:55 +0800 Subject: [PATCH 08/15] Fix failed catalog tests --- ql/src/test/queries/clientpositive/catalog.q | 3 ++ .../queries/clientpositive/catalog_database.q | 9 ++++++ .../results/clientpositive/llap/catalog.q.out | 8 +++++ .../llap/catalog_database.q.out | 30 +++++++++++++++++++ 4 files changed, 50 insertions(+) diff --git a/ql/src/test/queries/clientpositive/catalog.q b/ql/src/test/queries/clientpositive/catalog.q index 173e9a065da0..f2247761b15e 100644 --- a/ql/src/test/queries/clientpositive/catalog.q +++ b/ql/src/test/queries/clientpositive/catalog.q @@ -45,3 +45,6 @@ SHOW CATALOGS LIKE 'test__'; -- ALTER LOCATION ALTER CATALOG test_cat SET LOCATION '/tmp/test_cat_new'; DESC CATALOG EXTENDED test_cat; + +-- DROP catalog at the end +DROP CATALOG test_cat; diff --git a/ql/src/test/queries/clientpositive/catalog_database.q b/ql/src/test/queries/clientpositive/catalog_database.q index 05830a0ffce8..8e6d5bfcd083 100644 --- a/ql/src/test/queries/clientpositive/catalog_database.q +++ b/ql/src/test/queries/clientpositive/catalog_database.q @@ -46,3 +46,12 @@ DROP DATABASE testcat.testdb_1; -- The list of databases in the catalog 'testcat' should contain default and testdb_2. SHOW DATABASES; + +-- DROP CATALOG at the end. Need to drop all non-default databases first. +DROP DATABASE testcat.testdb_2; +DROP CATALOG testcat; + +-- Switch back to the clean default hive catalog at the end. 
+DROP DATABASE hive.testdb; +SET CATALOG hive; + diff --git a/ql/src/test/results/clientpositive/llap/catalog.q.out b/ql/src/test/results/clientpositive/llap/catalog.q.out index 6f9ef138dcd0..aad833b332b6 100644 --- a/ql/src/test/results/clientpositive/llap/catalog.q.out +++ b/ql/src/test/results/clientpositive/llap/catalog.q.out @@ -116,3 +116,11 @@ POSTHOOK: Input: catalog:test_cat Catalog Name test_cat Comment Hive test catalog #### A masked pattern was here #### +PREHOOK: query: DROP CATALOG test_cat +PREHOOK: type: DROPCATALOG +PREHOOK: Input: catalog:test_cat +PREHOOK: Output: catalog:test_cat +POSTHOOK: query: DROP CATALOG test_cat +POSTHOOK: type: DROPCATALOG +POSTHOOK: Input: catalog:test_cat +POSTHOOK: Output: catalog:test_cat diff --git a/ql/src/test/results/clientpositive/llap/catalog_database.q.out b/ql/src/test/results/clientpositive/llap/catalog_database.q.out index f5b5d07d4aed..615ebe3c9003 100644 --- a/ql/src/test/results/clientpositive/llap/catalog_database.q.out +++ b/ql/src/test/results/clientpositive/llap/catalog_database.q.out @@ -98,3 +98,33 @@ POSTHOOK: query: SHOW DATABASES POSTHOOK: type: SHOWDATABASES default testdb_2 +PREHOOK: query: DROP DATABASE testcat.testdb_2 +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:testdb_2 +PREHOOK: Output: database:testdb_2 +POSTHOOK: query: DROP DATABASE testcat.testdb_2 +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:testdb_2 +POSTHOOK: Output: database:testdb_2 +PREHOOK: query: DROP CATALOG testcat +PREHOOK: type: DROPCATALOG +PREHOOK: Input: catalog:testcat +PREHOOK: Output: catalog:testcat +POSTHOOK: query: DROP CATALOG testcat +POSTHOOK: type: DROPCATALOG +POSTHOOK: Input: catalog:testcat +POSTHOOK: Output: catalog:testcat +PREHOOK: query: DROP DATABASE hive.testdb +PREHOOK: type: DROPDATABASE +PREHOOK: Input: database:testdb +PREHOOK: Output: database:testdb +POSTHOOK: query: DROP DATABASE hive.testdb +POSTHOOK: type: DROPDATABASE +POSTHOOK: Input: database:testdb +POSTHOOK: Output: database:testdb +PREHOOK: query: SET CATALOG hive +PREHOOK: type: SWITCHCATALOG +PREHOOK: Input: catalog:hive +POSTHOOK: query: SET CATALOG hive +POSTHOOK: type: SWITCHCATALOG +POSTHOOK: Input: catalog:hive From 842a3d001c0a11dc2f0a90b21d3a03814c49e161 Mon Sep 17 00:00:00 2001 From: zhangbutao Date: Mon, 29 Sep 2025 15:15:00 +0800 Subject: [PATCH 09/15] Implement desc catalog.database syntax --- .../org/apache/hadoop/hive/ql/parse/HiveParser.g | 2 +- .../ddl/database/desc/DescDatabaseAnalyzer.java | 13 ++++++++----- .../ql/ddl/database/desc/DescDatabaseDesc.java | 16 ++++++++++++---- .../ddl/database/desc/DescDatabaseFormatter.java | 9 ++++++--- .../ddl/database/desc/DescDatabaseOperation.java | 6 +++--- .../queries/clientpositive/catalog_database.q | 3 +++ .../clientpositive/llap/catalog_database.q.out | 14 ++++++++++++++ 7 files changed, 47 insertions(+), 16 deletions(-) diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g index f0548e606ef6..59f2cf03cf37 100644 --- a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g +++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g @@ -1293,7 +1293,7 @@ descStatement ( (KW_CATALOG) => (KW_CATALOG) KW_EXTENDED? (catName=identifier) -> ^(TOK_DESCCATALOG $catName KW_EXTENDED?) | - (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) KW_EXTENDED? (dbName=identifier) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?) + (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) KW_EXTENDED? 
(dbName=databaseName) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?) | (KW_DATACONNECTOR) => (KW_DATACONNECTOR) KW_EXTENDED? (dcName=identifier) -> ^(TOK_DESCDATACONNECTOR $dcName KW_EXTENDED?) | diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java index 6b4860b9e1d2..38651f805173 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.ddl.database.desc; +import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; @@ -40,18 +41,20 @@ public DescDatabaseAnalyzer(QueryState queryState) throws SemanticException { @Override public void analyzeInternal(ASTNode root) throws SemanticException { - if (root.getChildCount() == 0 || root.getChildCount() > 2) { + if (root.getChildCount() == 0 || root.getChildCount() > 3) { throw new SemanticException("Unexpected Tokens at DESCRIBE DATABASE"); } ctx.setResFile(ctx.getLocalTmpPath()); - String databaseName = stripQuotes(root.getChild(0).getText()); - boolean isExtended = root.getChildCount() == 2; + Pair catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0)); + String catName = catDbNamePair.getLeft(); + String dbName = catDbNamePair.getRight(); + boolean isExtended = root.getChildCount() == 3; - inputs.add(new ReadEntity(getDatabase(databaseName))); + inputs.add(new ReadEntity(getDatabase(catName, dbName, true))); - DescDatabaseDesc desc = new DescDatabaseDesc(ctx.getResFile(), databaseName, isExtended); + DescDatabaseDesc desc = new DescDatabaseDesc(ctx.getResFile(), catName, dbName, isExtended); Task task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)); rootTasks.add(task); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java index aafa67fe0edd..f1ec808360c3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java @@ -33,18 +33,21 @@ public class DescDatabaseDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; public static final String DESC_DATABASE_SCHEMA = - "db_name,comment,location,managedLocation,owner_name,owner_type,connector_name,remote_dbname#string:string:string:string:string:string,string,string"; + "cat_name,db_name,comment,location,managedLocation,owner_name,owner_type,connector_name,remote_dbname#" + + "string:string:string:string:string:string:string,string,string"; public static final String DESC_DATABASE_SCHEMA_EXTENDED = - "db_name,comment,location,managedLocation,owner_name,owner_type,connector_name,remote_dbname,parameters#" + - "string:string:string:string:string:string:string,string,string"; + "cat_name,db_name,comment,location,managedLocation,owner_name,owner_type,connector_name,remote_dbname,parameters#" + + "string:string:string:string:string:string:string:string,string,string"; private final String resFile; + private final String catName; private final String dbName; private final boolean isExtended; - public DescDatabaseDesc(Path resFile, String dbName, boolean isExtended) { + public DescDatabaseDesc(Path resFile, 
String catName, String dbName, boolean isExtended) { this.resFile = resFile.toString(); + this.catName = catName; this.dbName = dbName; this.isExtended = isExtended; } @@ -55,6 +58,11 @@ public boolean isExtended() { return isExtended; } + @Explain(displayName = "catalog", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getCatalogName() { + return catName; + } + @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getDatabaseName() { return dbName; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseFormatter.java index 327aa5c3dd90..269b7099eb12 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseFormatter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseFormatter.java @@ -45,7 +45,7 @@ static DescDatabaseFormatter getFormatter(HiveConf conf) { } } - abstract void showDatabaseDescription(DataOutputStream out, String database, String comment, String location, + abstract void showDatabaseDescription(DataOutputStream out, String catalog, String database, String comment, String location, String managedLocation, String ownerName, PrincipalType ownerType, Map params, String connectorName, String remoteDbName) throws HiveException; @@ -54,11 +54,12 @@ abstract void showDatabaseDescription(DataOutputStream out, String database, Str static class JsonDescDatabaseFormatter extends DescDatabaseFormatter { @Override - void showDatabaseDescription(DataOutputStream out, String database, String comment, String location, + void showDatabaseDescription(DataOutputStream out, String catalog, String database, String comment, String location, String managedLocation, String ownerName, PrincipalType ownerType, Map params, String connectorName, String remoteDbName) throws HiveException { MapBuilder builder = MapBuilder.create() + .put("catalog", catalog) .put("database", database) .put("comment", comment) .put("location", location); @@ -86,11 +87,13 @@ void showDatabaseDescription(DataOutputStream out, String database, String comme static class TextDescDatabaseFormatter extends DescDatabaseFormatter { @Override - void showDatabaseDescription(DataOutputStream out, String database, String comment, String location, + void showDatabaseDescription(DataOutputStream out, String catalog, String database, String comment, String location, String managedLocation, String ownerName, PrincipalType ownerType, Map params, String connectorName, String remoteDbName) throws HiveException { try { + out.write(catalog.getBytes(StandardCharsets.UTF_8)); + out.write(Utilities.tabCode); out.write(database.getBytes(StandardCharsets.UTF_8)); out.write(Utilities.tabCode); if (comment != null) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java index 332e36eb7ae4..496c5ba20aa1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java @@ -45,7 +45,7 @@ public DescDatabaseOperation(DDLOperationContext context, DescDatabaseDesc desc) @Override public int execute() throws HiveException { try (DataOutputStream outStream = ShowUtils.getOutputStream(new Path(desc.getResFile()), context)) { - Database database = 
context.getDb().getDatabase(desc.getDatabaseName()); + Database database = context.getDb().getDatabase(desc.getCatalogName(), desc.getDatabaseName()); if (database == null) { throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, desc.getDatabaseName()); } @@ -63,11 +63,11 @@ public int execute() throws HiveException { if (HiveConf.getBoolVar(context.getConf(), HiveConf.ConfVars.HIVE_IN_TEST)) { location = "location/in/test"; } - formatter.showDatabaseDescription(outStream, database.getName(), database.getDescription(), location, + formatter.showDatabaseDescription(outStream, database.getCatalogName(), database.getName(), database.getDescription(), location, database.getManagedLocationUri(), database.getOwnerName(), database.getOwnerType(), params, "", ""); break; case REMOTE: - formatter.showDatabaseDescription(outStream, database.getName(), database.getDescription(), "", "", + formatter.showDatabaseDescription(outStream, database.getCatalogName(), database.getName(), database.getDescription(), "", "", database.getOwnerName(), database.getOwnerType(), params, database.getConnector_name(), database.getRemote_dbname()); break; default: diff --git a/ql/src/test/queries/clientpositive/catalog_database.q b/ql/src/test/queries/clientpositive/catalog_database.q index 8e6d5bfcd083..eec5721a40ab 100644 --- a/ql/src/test/queries/clientpositive/catalog_database.q +++ b/ql/src/test/queries/clientpositive/catalog_database.q @@ -46,6 +46,9 @@ DROP DATABASE testcat.testdb_1; -- The list of databases in the catalog 'testcat' should contain default and testdb_2. SHOW DATABASES; +-- DESC DATABASE by catalog.db pattern +DESCRIBE DATABASE testcat.testdb_2; +DESCRIBE DATABASE EXTENDED testcat.testdb_2; -- DROP CATALOG at the end. Need to drop all non-default databases first. DROP DATABASE testcat.testdb_2; diff --git a/ql/src/test/results/clientpositive/llap/catalog_database.q.out b/ql/src/test/results/clientpositive/llap/catalog_database.q.out index 615ebe3c9003..50e379c9e1a5 100644 --- a/ql/src/test/results/clientpositive/llap/catalog_database.q.out +++ b/ql/src/test/results/clientpositive/llap/catalog_database.q.out @@ -98,6 +98,20 @@ POSTHOOK: query: SHOW DATABASES POSTHOOK: type: SHOWDATABASES default testdb_2 +PREHOOK: query: DESCRIBE DATABASE testcat.testdb_2 +PREHOOK: type: DESCDATABASE +PREHOOK: Input: database:testdb_2 +POSTHOOK: query: DESCRIBE DATABASE testcat.testdb_2 +POSTHOOK: type: DESCDATABASE +POSTHOOK: Input: database:testdb_2 +testcat testdb_2 location/in/test hive_test_user USER +PREHOOK: query: DESCRIBE DATABASE EXTENDED testcat.testdb_2 +PREHOOK: type: DESCDATABASE +PREHOOK: Input: database:testdb_2 +POSTHOOK: query: DESCRIBE DATABASE EXTENDED testcat.testdb_2 +POSTHOOK: type: DESCDATABASE +POSTHOOK: Input: database:testdb_2 +testcat testdb_2 location/in/test hive_test_user USER PREHOOK: query: DROP DATABASE testcat.testdb_2 PREHOOK: type: DROPDATABASE PREHOOK: Input: database:testdb_2 From f1c88578a471bc1b4711ece3e76e5417f03acdd8 Mon Sep 17 00:00:00 2001 From: zhangbutao Date: Tue, 30 Sep 2025 16:20:48 +0800 Subject: [PATCH 10/15] Do not show the catalog in the desc database cat.db results For this PR, we do not plan to display the catalog information in the desc db results, to keep them consistent with the previous ones. However, it can be added later if someone considers it necessary. 
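Concretely, the intent is that DESCRIBE DATABASE keeps its pre-catalog column layout even when the database is addressed by a catalog.db qualifier. A sketch of the expected shape, inferred from the formatter revert below rather than taken from a committed q.out:

DESCRIBE DATABASE testcat.testdb_2;
-- columns stay: db_name, comment, location, managedLocation, owner_name, owner_type, connector_name, remote_dbname
-- e.g. testdb_2    location/in/test    hive_test_user    USER    (no leading cat_name column)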
--- .../hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java | 4 ++-- .../hive/ql/ddl/database/desc/DescDatabaseDesc.java | 8 ++++---- .../hive/ql/ddl/database/desc/DescDatabaseFormatter.java | 9 +++------ .../hive/ql/ddl/database/desc/DescDatabaseOperation.java | 4 ++-- 4 files changed, 11 insertions(+), 14 deletions(-) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java index 38651f805173..3e6adc9904d1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseAnalyzer.java @@ -41,7 +41,7 @@ public DescDatabaseAnalyzer(QueryState queryState) throws SemanticException { @Override public void analyzeInternal(ASTNode root) throws SemanticException { - if (root.getChildCount() == 0 || root.getChildCount() > 3) { + if (root.getChildCount() == 0 || root.getChildCount() > 2) { throw new SemanticException("Unexpected Tokens at DESCRIBE DATABASE"); } @@ -50,7 +50,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException { Pair catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0)); String catName = catDbNamePair.getLeft(); String dbName = catDbNamePair.getRight(); - boolean isExtended = root.getChildCount() == 3; + boolean isExtended = root.getChildCount() == 2; inputs.add(new ReadEntity(getDatabase(catName, dbName, true))); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java index f1ec808360c3..274c18f60164 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseDesc.java @@ -33,12 +33,12 @@ public class DescDatabaseDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; public static final String DESC_DATABASE_SCHEMA = - "cat_name,db_name,comment,location,managedLocation,owner_name,owner_type,connector_name,remote_dbname#" + - "string:string:string:string:string:string:string,string,string"; + "db_name,comment,location,managedLocation,owner_name,owner_type,connector_name,remote_dbname#" + + "string:string:string:string:string:string,string,string"; public static final String DESC_DATABASE_SCHEMA_EXTENDED = - "cat_name,db_name,comment,location,managedLocation,owner_name,owner_type,connector_name,remote_dbname,parameters#" + - "string:string:string:string:string:string:string:string,string,string"; + "db_name,comment,location,managedLocation,owner_name,owner_type,connector_name,remote_dbname,parameters#" + + "string:string:string:string:string:string:string,string,string"; private final String resFile; private final String catName; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseFormatter.java index 269b7099eb12..327aa5c3dd90 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseFormatter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseFormatter.java @@ -45,7 +45,7 @@ static DescDatabaseFormatter getFormatter(HiveConf conf) { } } - abstract void showDatabaseDescription(DataOutputStream out, String catalog, String database, String comment, String location, + abstract void showDatabaseDescription(DataOutputStream out, 
String database, String comment, String location, String managedLocation, String ownerName, PrincipalType ownerType, Map params, String connectorName, String remoteDbName) throws HiveException; @@ -54,12 +54,11 @@ abstract void showDatabaseDescription(DataOutputStream out, String catalog, Stri static class JsonDescDatabaseFormatter extends DescDatabaseFormatter { @Override - void showDatabaseDescription(DataOutputStream out, String catalog, String database, String comment, String location, + void showDatabaseDescription(DataOutputStream out, String database, String comment, String location, String managedLocation, String ownerName, PrincipalType ownerType, Map params, String connectorName, String remoteDbName) throws HiveException { MapBuilder builder = MapBuilder.create() - .put("catalog", catalog) .put("database", database) .put("comment", comment) .put("location", location); @@ -87,13 +86,11 @@ void showDatabaseDescription(DataOutputStream out, String catalog, String databa static class TextDescDatabaseFormatter extends DescDatabaseFormatter { @Override - void showDatabaseDescription(DataOutputStream out, String catalog, String database, String comment, String location, + void showDatabaseDescription(DataOutputStream out, String database, String comment, String location, String managedLocation, String ownerName, PrincipalType ownerType, Map params, String connectorName, String remoteDbName) throws HiveException { try { - out.write(catalog.getBytes(StandardCharsets.UTF_8)); - out.write(Utilities.tabCode); out.write(database.getBytes(StandardCharsets.UTF_8)); out.write(Utilities.tabCode); if (comment != null) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java index 496c5ba20aa1..c114f9874194 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java @@ -63,11 +63,11 @@ public int execute() throws HiveException { if (HiveConf.getBoolVar(context.getConf(), HiveConf.ConfVars.HIVE_IN_TEST)) { location = "location/in/test"; } - formatter.showDatabaseDescription(outStream, database.getCatalogName(), database.getName(), database.getDescription(), location, + formatter.showDatabaseDescription(outStream, database.getName(), database.getDescription(), location, database.getManagedLocationUri(), database.getOwnerName(), database.getOwnerType(), params, "", ""); break; case REMOTE: - formatter.showDatabaseDescription(outStream, database.getCatalogName(), database.getName(), database.getDescription(), "", "", + formatter.showDatabaseDescription(outStream, database.getName(), database.getDescription(), "", "", database.getOwnerName(), database.getOwnerType(), params, database.getConnector_name(), database.getRemote_dbname()); break; default: From 927b5c1c85e130cd4107193be4672495821d13de Mon Sep 17 00:00:00 2001 From: zhangbutao Date: Tue, 30 Sep 2025 16:26:51 +0800 Subject: [PATCH 11/15] Implement alter database cat.db syntax --- .../hadoop/hive/ql/parse/AlterClauseParser.g | 22 +++++------ .../apache/hadoop/hive/ql/parse/HiveParser.g | 1 + .../alter/AbstractAlterDatabaseAnalyzer.java | 2 +- .../alter/AbstractAlterDatabaseDesc.java | 9 ++++- .../alter/AbstractAlterDatabaseOperation.java | 4 +- .../alter/AlterDatabaseAnalyzerCategory.java | 38 +++++++++++++++++++ .../AlterDatabaseSetLocationAnalyzer.java | 9 +++-- 
.../AlterDatabaseSetLocationDesc.java | 4 +- ...terDatabaseSetManagedLocationAnalyzer.java | 9 +++-- .../AlterDatabaseSetManagedLocationDesc.java | 4 +- .../owner/AlterDatabaseSetOwnerAnalyzer.java | 9 +++-- .../owner/AlterDatabaseSetOwnerDesc.java | 4 +- .../AlterDatabaseSetPropertiesAnalyzer.java | 9 +++-- .../AlterDatabaseSetPropertiesDesc.java | 4 +- .../hive/ql/exec/repl/ReplLoadTask.java | 13 +++++-- .../repl/bootstrap/load/LoadDatabase.java | 10 ++--- .../IncrementalLoadTasksBuilder.java | 6 ++- .../apache/hadoop/hive/ql/metadata/Hive.java | 7 +++- .../hive/ql/parse/BaseSemanticAnalyzer.java | 35 +++++++++-------- .../load/message/AlterDatabaseHandler.java | 10 +++-- .../load/message/CreateDatabaseHandler.java | 4 +- .../queries/clientpositive/catalog_database.q | 6 +++ .../llap/catalog_database.q.out | 30 ++++++++++++++- .../metastore/client/BaseMetaStoreClient.java | 3 +- 24 files changed, 183 insertions(+), 69 deletions(-) create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AlterDatabaseAnalyzerCategory.java diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g index 3cfaaf4669fb..c46a67d5b8f3 100644 --- a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g +++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g @@ -50,7 +50,7 @@ alterStatement : KW_ALTER KW_TABLE tableName alterTableStatementSuffix -> ^(TOK_ALTERTABLE tableName alterTableStatementSuffix) | KW_ALTER KW_VIEW tableName KW_AS? alterViewStatementSuffix -> ^(TOK_ALTERVIEW tableName alterViewStatementSuffix) | KW_ALTER KW_MATERIALIZED KW_VIEW tableNameTree=tableName alterMaterializedViewStatementSuffix[$tableNameTree.tree] -> alterMaterializedViewStatementSuffix - | KW_ALTER (KW_DATABASE|KW_SCHEMA) alterDatabaseStatementSuffix -> alterDatabaseStatementSuffix + | KW_ALTER (KW_DATABASE|KW_SCHEMA) databaseName alterDatabaseStatementSuffix -> ^(TOK_ALTERDATABASE databaseName alterDatabaseStatementSuffix) | KW_ALTER KW_DATACONNECTOR alterDataConnectorStatementSuffix -> alterDataConnectorStatementSuffix | KW_OPTIMIZE KW_TABLE tableName optimizeTableStatementSuffix -> ^(TOK_ALTERTABLE tableName optimizeTableStatementSuffix) | KW_ALTER KW_CATALOG alterCatalogStatementSuffix -> alterCatalogStatementSuffix @@ -181,31 +181,31 @@ alterDatabaseStatementSuffix alterDatabaseSuffixProperties @init { gParent.pushMsg("alter database properties statement", state); } @after { gParent.popMsg(state); } - : name=identifier KW_SET KW_DBPROPERTIES dbProperties - -> ^(TOK_ALTERDATABASE_PROPERTIES $name dbProperties) + : KW_SET KW_DBPROPERTIES dbProperties + -> ^(TOK_ALTERDATABASE_PROPERTIES dbProperties) ; alterDatabaseSuffixSetOwner @init { gParent.pushMsg("alter database set owner", state); } @after { gParent.popMsg(state); } - : dbName=identifier KW_SET KW_OWNER principalName - -> ^(TOK_ALTERDATABASE_OWNER $dbName principalName) + : KW_SET KW_OWNER principalName + -> ^(TOK_ALTERDATABASE_OWNER principalName) ; alterDatabaseSuffixSetLocation @init { gParent.pushMsg("alter database set location", state); } @after { gParent.popMsg(state); } - : dbName=identifier KW_SET KW_LOCATION newLocation=StringLiteral - -> ^(TOK_ALTERDATABASE_LOCATION $dbName $newLocation) - | dbName=identifier KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral - -> ^(TOK_ALTERDATABASE_MANAGEDLOCATION $dbName $newLocation) + : KW_SET KW_LOCATION newLocation=StringLiteral + -> ^(TOK_ALTERDATABASE_LOCATION $newLocation) + 
| KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral + -> ^(TOK_ALTERDATABASE_MANAGEDLOCATION $newLocation) ; alterDatabaseSuffixSetManagedLocation @init { gParent.pushMsg("alter database set managed location", state); } @after { gParent.popMsg(state); } - : dbName=identifier KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral - -> ^(TOK_ALTERDATABASE_MANAGEDLOCATION $dbName $newLocation) + : KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral + -> ^(TOK_ALTERDATABASE_MANAGEDLOCATION $newLocation) ; alterStatementSuffixRename[boolean table] diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g index 59f2cf03cf37..6491e949d32b 100644 --- a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g +++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g @@ -384,6 +384,7 @@ TOK_DATABASEPROPERTIES; TOK_DATABASELOCATION; TOK_DATABASE_MANAGEDLOCATION; TOK_DBPROPLIST; +TOK_ALTERDATABASE; TOK_ALTERDATABASE_PROPERTIES; TOK_ALTERDATABASE_OWNER; TOK_ALTERDATABASE_LOCATION; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseAnalyzer.java index 92cbee55b6d5..9016a8e2c27c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseAnalyzer.java @@ -35,7 +35,7 @@ public AbstractAlterDatabaseAnalyzer(QueryState queryState) throws SemanticExcep } protected void addAlterDatabaseDesc(AbstractAlterDatabaseDesc alterDesc) throws SemanticException { - Database database = getDatabase(alterDesc.getDatabaseName()); + Database database = getDatabase(alterDesc.getCatalogName(), alterDesc.getDatabaseName(), true); outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK)); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc))); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseDesc.java index 854cc9116f21..0f74359276c7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseDesc.java @@ -31,14 +31,21 @@ public abstract class AbstractAlterDatabaseDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; + private final String catalogName; private final String databaseName; private final ReplicationSpec replicationSpec; - public AbstractAlterDatabaseDesc(String databaseName, ReplicationSpec replicationSpec) { + public AbstractAlterDatabaseDesc(String catalogName, String databaseName, ReplicationSpec replicationSpec) { + this.catalogName = catalogName; this.databaseName = databaseName; this.replicationSpec = replicationSpec; } + @Explain(displayName="catalogName", explainLevels = {Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getCatalogName() { + return catalogName; + } + @Explain(displayName="name", explainLevels = {Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getDatabaseName() { return databaseName; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseOperation.java index 
8deb44f0c708..0a7ab14b6c5f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AbstractAlterDatabaseOperation.java
@@ -25,6 +25,7 @@
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 /**
  * Operation process of altering a database.
@@ -36,8 +37,9 @@ public AbstractAlterDatabaseOperation(DDLOperationContext context, T desc) {
 
   @Override
   public int execute() throws HiveException {
+    String catName = desc.getCatalogName();
     String dbName = desc.getDatabaseName();
-    Database database = context.getDb().getDatabase(dbName);
+    Database database = context.getDb().getDatabase(catName, dbName);
     if (database == null) {
       throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName);
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AlterDatabaseAnalyzerCategory.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AlterDatabaseAnalyzerCategory.java
new file mode 100644
index 000000000000..c0a78646d670
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/AlterDatabaseAnalyzerCategory.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.database.alter;
+
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLSemanticAnalyzerCategory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+
+/**
+ * Alter Database category helper. It derives the actual type of the command from the root element by selecting the
+ * type of the second child. With the cat.db syntax, the database name is parsed in the common ALTER DATABASE grammar
+ * prefix instead of in each suffix rule, so the suffix token that identifies the concrete command is now the second
+ * child. This follows the approach of {@link org.apache.hadoop.hive.ql.ddl.table.AlterTableAnalyzerCategory}.
+ */ +@DDLSemanticAnalyzerFactory.DDLType(types = HiveParser.TOK_ALTERDATABASE) +public class AlterDatabaseAnalyzerCategory implements DDLSemanticAnalyzerCategory { + @Override + public int getType(ASTNode root) { + return root.getChild(1).getType(); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java index 6ea68f0c6889..a1031588a9ff 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationAnalyzer.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.ddl.database.alter.location; +import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseAnalyzer; @@ -36,12 +37,14 @@ public AlterDatabaseSetLocationAnalyzer(QueryState queryState) throws SemanticEx @Override public void analyzeInternal(ASTNode root) throws SemanticException { - String databaseName = getUnescapedName((ASTNode) root.getChild(0)); - String newLocation = unescapeSQLString(root.getChild(1).getText()); + Pair catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0)); + String catalogName = catDbNamePair.getLeft(); + String databaseName = catDbNamePair.getRight(); + String newLocation = unescapeSQLString(root.getChild(1).getChild(0).getText()); outputs.add(toWriteEntity(newLocation)); - AlterDatabaseSetLocationDesc desc = new AlterDatabaseSetLocationDesc(databaseName, newLocation); + AlterDatabaseSetLocationDesc desc = new AlterDatabaseSetLocationDesc(catalogName, databaseName, newLocation); addAlterDatabaseDesc(desc); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationDesc.java index ddb320692aac..766bd676bfe9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetLocationDesc.java @@ -31,8 +31,8 @@ public class AlterDatabaseSetLocationDesc extends AbstractAlterDatabaseDesc { private final String location; - public AlterDatabaseSetLocationDesc(String databaseName, String location) { - super(databaseName, null); + public AlterDatabaseSetLocationDesc(String catalogName, String databaseName, String location) { + super(catalogName, databaseName, null); this.location = location; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationAnalyzer.java index db7a3ba1b2cd..60b633490153 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationAnalyzer.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.ddl.database.alter.location; +import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; import 
org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseAnalyzer; @@ -36,12 +37,14 @@ public AlterDatabaseSetManagedLocationAnalyzer(QueryState queryState) throws Sem @Override public void analyzeInternal(ASTNode root) throws SemanticException { - String databaseName = getUnescapedName((ASTNode) root.getChild(0)); - String newLocation = unescapeSQLString(root.getChild(1).getText()); + Pair catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0)); + String catalogName = catDbNamePair.getLeft(); + String databaseName = catDbNamePair.getRight(); + String newLocation = unescapeSQLString(root.getChild(1).getChild(0).getText()); outputs.add(toWriteEntity(newLocation)); - AlterDatabaseSetManagedLocationDesc desc = new AlterDatabaseSetManagedLocationDesc(databaseName, newLocation); + AlterDatabaseSetManagedLocationDesc desc = new AlterDatabaseSetManagedLocationDesc(catalogName, databaseName, newLocation); addAlterDatabaseDesc(desc); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationDesc.java index fc43583882e5..1e785d4f822d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/location/AlterDatabaseSetManagedLocationDesc.java @@ -31,8 +31,8 @@ public class AlterDatabaseSetManagedLocationDesc extends AbstractAlterDatabaseDe private final String managedLocation; - public AlterDatabaseSetManagedLocationDesc(String databaseName, String managedLocation) { - super(databaseName, null); + public AlterDatabaseSetManagedLocationDesc(String catalogName, String databaseName, String managedLocation) { + super(catalogName, databaseName, null); this.managedLocation = managedLocation; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java index 61028761afab..e34eef8dc690 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerAnalyzer.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.ddl.database.alter.owner; +import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseAnalyzer; @@ -38,8 +39,10 @@ public AlterDatabaseSetOwnerAnalyzer(QueryState queryState) throws SemanticExcep @Override public void analyzeInternal(ASTNode root) throws SemanticException { - String databaseName = getUnescapedName((ASTNode) root.getChild(0)); - PrincipalDesc principalDesc = AuthorizationParseUtils.getPrincipalDesc((ASTNode) root.getChild(1)); + Pair catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0)); + String catalogName = catDbNamePair.getLeft(); + String databaseName = catDbNamePair.getRight(); + PrincipalDesc principalDesc = AuthorizationParseUtils.getPrincipalDesc((ASTNode) root.getChild(1).getChild(0)); if (principalDesc.getName() == null) { throw new SemanticException("Owner name can't be null in alter database set owner command"); @@ -48,7 +51,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException { throw new 
SemanticException("Owner type can't be null in alter database set owner command"); } - AlterDatabaseSetOwnerDesc desc = new AlterDatabaseSetOwnerDesc(databaseName, principalDesc, null); + AlterDatabaseSetOwnerDesc desc = new AlterDatabaseSetOwnerDesc(catalogName, databaseName, principalDesc, null); addAlterDatabaseDesc(desc); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerDesc.java index 424694043351..e5e47a963281 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/owner/AlterDatabaseSetOwnerDesc.java @@ -33,8 +33,8 @@ public class AlterDatabaseSetOwnerDesc extends AbstractAlterDatabaseDesc { private final PrincipalDesc ownerPrincipal; - public AlterDatabaseSetOwnerDesc(String databaseName, PrincipalDesc ownerPrincipal, ReplicationSpec replicationSpec) { - super(databaseName, replicationSpec); + public AlterDatabaseSetOwnerDesc(String catalogName, String databaseName, PrincipalDesc ownerPrincipal, ReplicationSpec replicationSpec) { + super(catalogName, databaseName, replicationSpec); this.ownerPrincipal = ownerPrincipal; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java index 79e072989b01..9f6ff2228001 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesAnalyzer.java @@ -20,6 +20,7 @@ import java.util.Map; +import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; import org.apache.hadoop.hive.ql.ddl.database.alter.AbstractAlterDatabaseAnalyzer; @@ -38,11 +39,13 @@ public AlterDatabaseSetPropertiesAnalyzer(QueryState queryState) throws Semantic @Override public void analyzeInternal(ASTNode root) throws SemanticException { - String databaseName = unescapeIdentifier(root.getChild(0).getText()); + Pair catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0)); + String catalogName = catDbNamePair.getLeft(); + String databaseName = catDbNamePair.getRight(); Map dbProps = null; for (int i = 1; i < root.getChildCount(); i++) { - ASTNode childNode = (ASTNode) root.getChild(i); + ASTNode childNode = (ASTNode) root.getChild(i).getChild(0); switch (childNode.getToken().getType()) { case HiveParser.TOK_DATABASEPROPERTIES: dbProps = getProps((ASTNode) childNode.getChild(0)); @@ -52,7 +55,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException { } } - AlterDatabaseSetPropertiesDesc desc = new AlterDatabaseSetPropertiesDesc(databaseName, dbProps, null); + AlterDatabaseSetPropertiesDesc desc = new AlterDatabaseSetPropertiesDesc(catalogName, databaseName, dbProps, null); addAlterDatabaseDesc(desc); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesDesc.java index 98496417f8c7..abcc1d7b1d0d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesDesc.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/alter/poperties/AlterDatabaseSetPropertiesDesc.java @@ -34,9 +34,9 @@ public class AlterDatabaseSetPropertiesDesc extends AbstractAlterDatabaseDesc { private final Map dbProperties; - public AlterDatabaseSetPropertiesDesc(String databaseName, Map dbProperties, + public AlterDatabaseSetPropertiesDesc(String catalogName, String databaseName, Map dbProperties, ReplicationSpec replicationSpec) { - super(databaseName, replicationSpec); + super(catalogName, databaseName, replicationSpec); this.dbProperties = dbProperties; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java index f6ac672eb67f..74fc85ac304c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplLoadTask.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.repl.util.SnapshotUtils; import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager; +import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.parse.repl.load.log.IncrementalLoadLogger; import org.apache.hadoop.hive.ql.parse.repl.metric.event.Status; import org.apache.thrift.TException; @@ -448,8 +449,9 @@ private void setDbReadOnly() { Map props = new HashMap<>(); props.put(READONLY, Boolean.TRUE.toString()); + // TODO catalog. Need to double check the actual catalog here. AlterDatabaseSetPropertiesDesc setTargetReadOnly = - new AlterDatabaseSetPropertiesDesc(work.dbNameToLoadIn, props, null); + new AlterDatabaseSetPropertiesDesc(HiveUtils.getCurrentCatalogOrDefault(conf), work.dbNameToLoadIn, props, null); DDLWork alterDbPropWork = new DDLWork(new HashSet<>(), new HashSet<>(), setTargetReadOnly, true, work.dumpDirectory, work.getMetricCollector()); @@ -865,7 +867,7 @@ private int executeIncrementalLoad(long loadStartTime) throws Exception { props.put(currProp.getKey(), (actualVal == null) ? "" : actualVal); } } - AlterDatabaseSetOwnerDesc alterDbDesc = new AlterDatabaseSetOwnerDesc(sourceDb.getName(), + AlterDatabaseSetOwnerDesc alterDbDesc = new AlterDatabaseSetOwnerDesc(sourceDb.getCatalogName(), sourceDb.getName(), new PrincipalDesc(sourceDb.getOwnerName(), sourceDb.getOwnerType()), null); DDLWork ddlWork = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true, (new Path(work.dumpDirectory)).getParent().toString(), work.getMetricCollector()); @@ -881,7 +883,9 @@ private int executeIncrementalLoad(long loadStartTime) throws Exception { props.put(ReplConst.REPL_FAILOVER_ENDPOINT, ""); } if (!props.isEmpty()) { - AlterDatabaseSetPropertiesDesc setTargetDesc = new AlterDatabaseSetPropertiesDesc(work.dbNameToLoadIn, props, null); + // TODO catalog. Need to double check the actual catalog here. + AlterDatabaseSetPropertiesDesc setTargetDesc = new AlterDatabaseSetPropertiesDesc(targetDb.getCatalogName(), + work.dbNameToLoadIn, props, null); Task addReplTargetPropTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), setTargetDesc, true, work.dumpDirectory, work.getMetricCollector()), conf); @@ -927,8 +931,9 @@ private int executeIncrementalLoad(long loadStartTime) throws Exception { String lastEventid = builder.eventTo().toString(); Map mapProp = new HashMap<>(); mapProp.put(ReplicationSpec.KEY.CURR_STATE_ID_SOURCE.toString(), lastEventid); + // TODO catalog. Need to double check the actual catalog here. 
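All of these repl call sites guess the catalog the same way: prefer the catalog selected for the session, otherwise fall back to the configured default. A rough sketch of that helper, assuming SessionState.getCurrentCatalog() and MetaStoreUtils.getDefaultCatalog(conf) as the underlying primitives (the real HiveUtils implementation may differ):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
import org.apache.hadoop.hive.ql.session.SessionState;

// Sketch of HiveUtils.getCurrentCatalogOrDefault(conf):
public static String getCurrentCatalogOrDefault(Configuration conf) {
  SessionState ss = SessionState.get();
  if (ss != null && ss.getCurrentCatalog() != null) {
    return ss.getCurrentCatalog();               // catalog chosen via SET CATALOG
  }
  return MetaStoreUtils.getDefaultCatalog(conf); // configured default, usually "hive"
}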
AlterDatabaseSetPropertiesDesc alterDbDesc = - new AlterDatabaseSetPropertiesDesc(dbName, mapProp, + new AlterDatabaseSetPropertiesDesc(targetDb.getCatalogName(), dbName, mapProp, new ReplicationSpec(lastEventid, lastEventid)); Task updateReplIdTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java index e2d215b1f20d..5c2061dbeb5b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java @@ -161,12 +161,12 @@ private Task createDbTask(Database dbObj) throws MetaException { } private Task alterDbTask(Database dbObj) { - return alterDbTask(dbObj.getName(), updateDbProps(dbObj, context.dumpDirectory), + return alterDbTask(dbObj.getCatalogName(), dbObj.getName(), updateDbProps(dbObj, context.dumpDirectory), context.hiveConf, context.dumpDirectory, this.metricCollector); } private Task setOwnerInfoTask(Database dbObj) { - AlterDatabaseSetOwnerDesc alterDbDesc = new AlterDatabaseSetOwnerDesc(dbObj.getName(), + AlterDatabaseSetOwnerDesc alterDbDesc = new AlterDatabaseSetOwnerDesc(dbObj.getCatalogName(), dbObj.getName(), new PrincipalDesc(dbObj.getOwnerName(), dbObj.getOwnerType()), null); DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true, (new Path(context.dumpDirectory)).getParent().toString(), this.metricCollector); @@ -204,10 +204,10 @@ private static Map updateDbProps(Database dbObj, String dumpDire return parameters; } - private static Task alterDbTask(String dbName, Map props, + private static Task alterDbTask(String catName, String dbName, Map props, HiveConf hiveConf, String dumpDirectory, ReplicationMetricCollector metricCollector) { - AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(dbName, props, null); + AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(catName, dbName, props, null); DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true, (new Path(dumpDirectory)).getParent().toString(), metricCollector); return TaskFactory.get(work, hiveConf); @@ -228,7 +228,7 @@ public AlterDatabase(Context context, DatabaseEvent event, String dbNameToLoadIn @Override public TaskTracker tasks() throws SemanticException { Database dbObj = readDbMetadata(); - tracker.addTask(alterDbTask(dbObj.getName(), dbObj.getParameters(), context.hiveConf, + tracker.addTask(alterDbTask(dbObj.getCatalogName(), dbObj.getName(), dbObj.getParameters(), context.hiveConf, context.dumpDirectory, this.metricCollector )); return tracker; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java index eda4a3f070e8..9d7847af5d05 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/incremental/IncrementalLoadTasksBuilder.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import 
org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.parse.repl.DumpType;
@@ -264,8 +265,9 @@ private Task dbUpdateReplStateTask(String dbName, String replState, Task p
     HashMap<String, String> mapProp = new HashMap<>();
     mapProp.put(ReplicationSpec.KEY.CURR_STATE_ID_SOURCE.toString(), replState);
 
-    AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(dbName, mapProp,
-        new ReplicationSpec(replState, replState));
+    // TODO catalog. Need to double check the actual catalog here.
+    AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(HiveUtils.getCurrentCatalogOrDefault(conf),
+        dbName, mapProp, new ReplicationSpec(replState, replState));
     Task updateReplIdTask = TaskFactory.get(new DDLWork(inputs, outputs, alterDbDesc, true,
         dumpDirectory, metricCollector), conf);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 3d457f01d7b2..49fe02300fa6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1300,7 +1300,12 @@ public void renamePartition(Table tbl, Map oldPartSpec, Partitio
     }
   }
 
-  // TODO: this whole path won't work with catalogs
+  /**
+   * Callers must ensure that the catalog has already been set on the db object before invoking this method.
+   * @param dbName The database name.
+   * @param db The database object, carrying the target catalog.
+   * @throws HiveException
+   */
   public void alterDatabase(String dbName, Database db)
       throws HiveException {
     try {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 5eb3934e478a..b9dbf992d9a6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -31,7 +31,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Objects;
 import java.util.Properties;
 import java.util.Set;
 import java.util.stream.Stream;
@@ -60,7 +59,6 @@
 import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.apache.hadoop.hive.metastore.api.SourceTable;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
-import org.apache.hadoop.hive.metastore.txn.TxnStore;
 import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.Context;
@@ -97,7 +95,6 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -424,22 +421,31 @@ public static String charSetString(String charSetName, String charSetString)
     }
   }
 
+  /**
+   * Splits an optionally catalog-qualified database name node into its two parts.
+   * @param dbNameNode the AST node holding the database name, optionally qualified by a catalog
+   * @return a Pair of catalog name and database name; the catalog is {@code null} when not specified
+   * @throws SemanticException if either part contains a dot
+   */
  public static Pair<String, String> getCatDbNamePair(ASTNode dbNameNode) throws SemanticException {
+    String catName = null;
+    String dbName;
+
     if (dbNameNode.getChildCount() == 2) {
-      final String catName = unescapeIdentifier(dbNameNode.getChild(0).getText());
-      final String dbName = unescapeIdentifier(dbNameNode.getChild(1).getText());
-      if (catName.contains(".") ||
dbName.contains(".")) { - throw new SemanticException(ASTErrorUtils.getMsg( - ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), dbNameNode)); - } - return Pair.of(catName, dbName); + catName = unescapeIdentifier(dbNameNode.getChild(0).getText()); + dbName = unescapeIdentifier(dbNameNode.getChild(1).getText()); + } else if (dbNameNode.getChildCount() == 1) { + dbName = unescapeIdentifier(dbNameNode.getChild(0).getText()); + } else { + dbName = unescapeIdentifier(dbNameNode.getText()); } - final String dbName = unescapeIdentifier(dbNameNode.getChild(0).getText()); - if (dbName.contains(".")) { + + if ((catName != null && catName.contains(".")) || dbName.contains(".")) { throw new SemanticException(ASTErrorUtils.getMsg( ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), dbNameNode)); } - return Pair.of(null, dbName); + + return Pair.of(catName, dbName); } /** @@ -1936,7 +1942,7 @@ protected Database getDatabase(String dbName) throws SemanticException { } /** - * TODO. Once we confirm that no compatibility has been broken, we can remove these non-catalog APIs + * TODO catalog. Once we confirm that no compatibility has been broken, we can remove these non-catalog APIs * @deprecated Replaced by * {@link BaseSemanticAnalyzer#getDatabase(String catalogName, String dbName, boolean throwException)} * @return the database if existed. @@ -1957,7 +1963,6 @@ protected Database getDatabase(String dbName, boolean throwException) throws Sem protected Database getDatabase(String catalogName, String dbName, boolean throwException) throws SemanticException { Database database; try { - catalogName = Objects.requireNonNullElse(catalogName, SessionState.get().getCurrentCatalog()); database = db.getDatabase(catalogName, dbName); } catch (Exception e) { throw new SemanticException(e.getMessage(), e); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java index e32b3275dd74..196d76a729c7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.parse.SemanticException; import java.util.Collections; @@ -58,10 +59,13 @@ public List> handle(Context context) for (Map.Entry entry : dbProps.entrySet()) { newDbProps.put(entry.getKey(), entry.getValue()); } - alterDbDesc = new AlterDatabaseSetPropertiesDesc(actualDbName, newDbProps, context.eventOnlyReplicationSpec()); + //TODO catalog. Need to double check the acutual catalog here. + alterDbDesc = new AlterDatabaseSetPropertiesDesc(HiveUtils.getCurrentCatalogOrDefault(context.hiveConf), + actualDbName, newDbProps, context.eventOnlyReplicationSpec()); } else { - alterDbDesc = new AlterDatabaseSetOwnerDesc(actualDbName, new PrincipalDesc(newDb.getOwnerName(), - newDb.getOwnerType()), context.eventOnlyReplicationSpec()); + // TODO catalog. Need to double the actual catalog here. 
+ alterDbDesc = new AlterDatabaseSetOwnerDesc(HiveUtils.getCurrentCatalogOrDefault(context.hiveConf), actualDbName, + new PrincipalDesc(newDb.getOwnerName(), newDb.getOwnerType()), context.eventOnlyReplicationSpec()); } Task alterDbTask = TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java index 7c222ef7c86b..ebcda945e372 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateDatabaseHandler.java @@ -64,7 +64,7 @@ public List> handle(Context context) new DDLWork(new HashSet<>(), new HashSet<>(), createDatabaseDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf); if (!db.getParameters().isEmpty()) { - AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(destinationDBName, + AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(destinationCatalogName, destinationDBName, db.getParameters(), context.eventOnlyReplicationSpec()); Task alterDbProperties = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true, context.getDumpDirectory(), @@ -72,7 +72,7 @@ public List> handle(Context context) createDBTask.addDependentTask(alterDbProperties); } if (StringUtils.isNotEmpty(db.getOwnerName())) { - AlterDatabaseSetOwnerDesc alterDbOwner = new AlterDatabaseSetOwnerDesc(destinationDBName, + AlterDatabaseSetOwnerDesc alterDbOwner = new AlterDatabaseSetOwnerDesc(destinationCatalogName, destinationDBName, new PrincipalDesc(db.getOwnerName(), db.getOwnerType()), context.eventOnlyReplicationSpec()); Task alterDbTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), diff --git a/ql/src/test/queries/clientpositive/catalog_database.q b/ql/src/test/queries/clientpositive/catalog_database.q index eec5721a40ab..b47fbb797d7a 100644 --- a/ql/src/test/queries/clientpositive/catalog_database.q +++ b/ql/src/test/queries/clientpositive/catalog_database.q @@ -50,6 +50,12 @@ SHOW DATABASES; DESCRIBE DATABASE testcat.testdb_2; DESCRIBE DATABASE EXTENDED testcat.testdb_2; +-- ALTER DATABASE by catalog.db pattern +ALTER DATABASE testcat.testdb_2 SET dbproperties('test'='yesthisis'); +ALTER DATABASE testcat.testdb_2 SET owner user user1; +ALTER DATABASE testcat.testdb_2 SET LOCATION '/tmp/testcat/path/testcat.testdb_2'; +DESCRIBE DATABASE testcat.testdb_2; + -- DROP CATALOG at the end. Need to drop all non-default databases first. 
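The ALTER statements above exercise the reshaped tree that AlterDatabaseAnalyzerCategory dispatches on. Roughly, for the first statement (a sketch of the expected AST, given the AlterClauseParser.g changes earlier in this patch):

// ALTER DATABASE testcat.testdb_2 SET DBPROPERTIES ('test'='yesthisis')
//   TOK_ALTERDATABASE
//     child 0: TOK_DBNAME(testcat, testdb_2)       <- parsed by the shared databaseName prefix
//     child 1: TOK_ALTERDATABASE_PROPERTIES(...)   <- the suffix no longer carries the name
int commandType = root.getChild(1).getType();       // category dispatch picks the suffix token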
DROP DATABASE testcat.testdb_2; DROP CATALOG testcat; diff --git a/ql/src/test/results/clientpositive/llap/catalog_database.q.out b/ql/src/test/results/clientpositive/llap/catalog_database.q.out index 50e379c9e1a5..1ab7a07c2f44 100644 --- a/ql/src/test/results/clientpositive/llap/catalog_database.q.out +++ b/ql/src/test/results/clientpositive/llap/catalog_database.q.out @@ -104,14 +104,40 @@ PREHOOK: Input: database:testdb_2 POSTHOOK: query: DESCRIBE DATABASE testcat.testdb_2 POSTHOOK: type: DESCDATABASE POSTHOOK: Input: database:testdb_2 -testcat testdb_2 location/in/test hive_test_user USER +testdb_2 location/in/test hive_test_user USER PREHOOK: query: DESCRIBE DATABASE EXTENDED testcat.testdb_2 PREHOOK: type: DESCDATABASE PREHOOK: Input: database:testdb_2 POSTHOOK: query: DESCRIBE DATABASE EXTENDED testcat.testdb_2 POSTHOOK: type: DESCDATABASE POSTHOOK: Input: database:testdb_2 -testcat testdb_2 location/in/test hive_test_user USER +testdb_2 location/in/test hive_test_user USER +PREHOOK: query: ALTER DATABASE testcat.testdb_2 SET dbproperties('test'='yesthisis') +PREHOOK: type: ALTERDATABASE +PREHOOK: Output: database:testdb_2 +POSTHOOK: query: ALTER DATABASE testcat.testdb_2 SET dbproperties('test'='yesthisis') +POSTHOOK: type: ALTERDATABASE +POSTHOOK: Output: database:testdb_2 +#### A masked pattern was here #### +PREHOOK: type: ALTERDATABASE_OWNER +PREHOOK: Output: database:testdb_2 +#### A masked pattern was here #### +POSTHOOK: type: ALTERDATABASE_OWNER +POSTHOOK: Output: database:testdb_2 +#### A masked pattern was here #### +PREHOOK: type: ALTERDATABASE_LOCATION +PREHOOK: Output: database:testdb_2 +#### A masked pattern was here #### +POSTHOOK: type: ALTERDATABASE_LOCATION +POSTHOOK: Output: database:testdb_2 +#### A masked pattern was here #### +PREHOOK: query: DESCRIBE DATABASE testcat.testdb_2 +PREHOOK: type: DESCDATABASE +PREHOOK: Input: database:testdb_2 +POSTHOOK: query: DESCRIBE DATABASE testcat.testdb_2 +POSTHOOK: type: DESCDATABASE +POSTHOOK: Input: database:testdb_2 +testdb_2 location/in/test user1 USER PREHOOK: query: DROP DATABASE testcat.testdb_2 PREHOOK: type: DROPDATABASE PREHOOK: Input: database:testdb_2 diff --git a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/BaseMetaStoreClient.java b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/BaseMetaStoreClient.java index 8a624735a14d..78c20bddace2 100644 --- a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/BaseMetaStoreClient.java +++ b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/BaseMetaStoreClient.java @@ -513,7 +513,8 @@ public final void dropDatabase(String name, boolean deleteData, boolean ignoreUn @Override public final void alterDatabase(String name, Database db) throws NoSuchObjectException, MetaException, TException { - alterDatabase(getDefaultCatalog(conf), name, db); + String catName = db.getCatalogName() == null ? 
getDefaultCatalog(conf) : db.getCatalogName(); + alterDatabase(catName, name, db); } @Override From 3d469fc462feee7672757b6c344fc1294017b2bf Mon Sep 17 00:00:00 2001 From: Butao Zhang Date: Wed, 1 Oct 2025 17:58:10 +0800 Subject: [PATCH 12/15] Implement LOCK DATABASE cat.db syntax --- .../org/apache/hadoop/hive/ql/parse/LockParser.g | 4 ++-- .../ddl/database/lock/LockDatabaseAnalyzer.java | 15 +++++++++++---- .../ql/ddl/database/lock/LockDatabaseDesc.java | 9 ++++++++- .../database/unlock/UnlockDatabaseAnalyzer.java | 15 +++++++++++---- .../ddl/database/unlock/UnlockDatabaseDesc.java | 9 ++++++++- .../hive/ql/lockmgr/HiveTxnManagerImpl.java | 16 ++++++++++++---- .../queries/clientpositive/catalog_database.q | 6 ++++++ .../clientpositive/llap/catalog_database.q.out | 12 ++++++++++++ 8 files changed, 70 insertions(+), 16 deletions(-) diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/LockParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/LockParser.g index ac2084bf4eed..07ba0098b15e 100644 --- a/parser/src/java/org/apache/hadoop/hive/ql/parse/LockParser.g +++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/LockParser.g @@ -61,7 +61,7 @@ lockStatement lockDatabase @init { gParent.pushMsg("lock database statement", state); } @after { gParent.popMsg(state); } - : KW_LOCK (KW_DATABASE|KW_SCHEMA) (dbName=identifier) lockMode -> ^(TOK_LOCKDB $dbName lockMode) + : KW_LOCK (KW_DATABASE|KW_SCHEMA) (dbName=databaseName) lockMode -> ^(TOK_LOCKDB $dbName lockMode) ; lockMode @@ -79,5 +79,5 @@ unlockStatement unlockDatabase @init { gParent.pushMsg("unlock database statement", state); } @after { gParent.popMsg(state); } - : KW_UNLOCK (KW_DATABASE|KW_SCHEMA) (dbName=identifier) -> ^(TOK_UNLOCKDB $dbName) + : KW_UNLOCK (KW_DATABASE|KW_SCHEMA) (dbName=databaseName) -> ^(TOK_UNLOCKDB $dbName) ; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java index cd0392dd066c..4d83f53b81c2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseAnalyzer.java @@ -18,8 +18,10 @@ package org.apache.hadoop.hive.ql.ddl.database.lock; +import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.hooks.ReadEntity; @@ -43,16 +45,21 @@ public LockDatabaseAnalyzer(QueryState queryState) throws SemanticException { @Override public void analyzeInternal(ASTNode root) throws SemanticException { - String databaseName = unescapeIdentifier(root.getChild(0).getText()); + Pair catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0)); + String catalogName = catDbNamePair.getLeft(); + if (catalogName != null && getCatalog(catalogName) == null) { + throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName); + } + String databaseName = catDbNamePair.getRight(); String mode = unescapeIdentifier(root.getChild(1).getText().toUpperCase()); - inputs.add(new ReadEntity(getDatabase(databaseName))); + inputs.add(new ReadEntity(getDatabase(catalogName, databaseName, true))); // Lock database operation is to acquire the lock explicitly, the operation itself doesn't need to be locked. 
// Set the WriteEntity as WriteType: DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction. - outputs.add(new WriteEntity(getDatabase(databaseName), WriteType.DDL_NO_LOCK)); + outputs.add(new WriteEntity(getDatabase(catalogName, databaseName, true), WriteType.DDL_NO_LOCK)); LockDatabaseDesc desc = - new LockDatabaseDesc(databaseName, mode, HiveConf.getVar(conf, ConfVars.HIVE_QUERY_ID), ctx.getCmd()); + new LockDatabaseDesc(catalogName, databaseName, mode, HiveConf.getVar(conf, ConfVars.HIVE_QUERY_ID), ctx.getCmd()); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); ctx.setNeedLockMgr(true); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseDesc.java index 0affeced35e8..3f298bdb8fab 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/lock/LockDatabaseDesc.java @@ -31,18 +31,25 @@ public class LockDatabaseDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; + private final String catalogName; private final String databaseName; private final String mode; private final String queryId; private final String queryStr; - public LockDatabaseDesc(String databaseName, String mode, String queryId, String queryStr) { + public LockDatabaseDesc(String catalogName, String databaseName, String mode, String queryId, String queryStr) { + this.catalogName = catalogName; this.databaseName = databaseName; this.mode = mode; this.queryId = queryId; this.queryStr = queryStr; } + @Explain(displayName = "catalog", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getCatalogName() { + return catalogName; + } + @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getDatabaseName() { return databaseName; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java index 64c80bf83132..ee4409d1f3ab 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseAnalyzer.java @@ -18,6 +18,8 @@ package org.apache.hadoop.hive.ql.ddl.database.unlock; +import org.apache.commons.lang3.tuple.Pair; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.hooks.ReadEntity; @@ -41,14 +43,19 @@ public UnlockDatabaseAnalyzer(QueryState queryState) throws SemanticException { @Override public void analyzeInternal(ASTNode root) throws SemanticException { - String databaseName = unescapeIdentifier(root.getChild(0).getText()); + Pair catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0)); + String catalogName = catDbNamePair.getLeft(); + if (catalogName != null && getCatalog(catalogName) == null) { + throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName); + } + String databaseName = catDbNamePair.getRight(); - inputs.add(new ReadEntity(getDatabase(databaseName))); + inputs.add(new ReadEntity(getDatabase(catalogName, databaseName, true))); // Unlock database operation is to release the lock explicitly, the operation itself don't need to be locked. 
// Set the WriteEntity as WriteType: DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction. - outputs.add(new WriteEntity(getDatabase(databaseName), WriteType.DDL_NO_LOCK)); + outputs.add(new WriteEntity(getDatabase(catalogName, databaseName, true), WriteType.DDL_NO_LOCK)); - UnlockDatabaseDesc desc = new UnlockDatabaseDesc(databaseName); + UnlockDatabaseDesc desc = new UnlockDatabaseDesc(catalogName, databaseName); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); ctx.setNeedLockMgr(true); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseDesc.java index 3605a6d47e2d..341b5f5388f5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/unlock/UnlockDatabaseDesc.java @@ -31,12 +31,19 @@ public class UnlockDatabaseDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; + private final String catalogName; private final String databaseName; - public UnlockDatabaseDesc(String databaseName) { + public UnlockDatabaseDesc(String catalogName, String databaseName) { + this.catalogName = catalogName; this.databaseName = databaseName; } + @Explain(displayName = "catalog", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getCatalogName() { + return catalogName; + } + @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getDatabaseName() { return databaseName; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java index 9897795db9c7..ab2fcae26834 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveTxnManagerImpl.java @@ -20,6 +20,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Objects; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; @@ -38,8 +39,10 @@ import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; +import org.stringtemplate.v4.ST; /** * An implementation HiveTxnManager that includes internal methods that all @@ -150,9 +153,11 @@ public int lockDatabase(Hive hiveDB, LockDatabaseDesc lockDb) throws HiveExcepti HiveLockManager lockMgr = getAndCheckLockManager(); HiveLockMode mode = HiveLockMode.valueOf(lockDb.getMode()); + String catName = Objects.requireNonNullElse(lockDb.getCatalogName(), + HiveUtils.getCurrentCatalogOrDefault(conf)); String dbName = lockDb.getDatabaseName(); - Database dbObj = hiveDB.getDatabase(dbName); + Database dbObj = hiveDB.getDatabase(catName, dbName); if (dbObj == null) { throw new HiveException("Database " + dbName + " does not exist "); } @@ -162,7 +167,8 @@ public int lockDatabase(Hive hiveDB, LockDatabaseDesc lockDb) throws HiveExcepti String.valueOf(System.currentTimeMillis()), "EXPLICIT", lockDb.getQueryStr(), conf); - HiveLock lck = lockMgr.lock(new HiveLockObject(dbObj.getName(), lockData), mode, true); + // Using the 
catalogName@databaseName format to uniquely identify a database.
+    HiveLock lck = lockMgr.lock(new HiveLockObject(catName + "@" + dbObj.getName(), lockData), mode, true);
     if (lck == null) {
       return 1;
     }
@@ -173,13 +179,15 @@
   public int unlockDatabase(Hive hiveDB, UnlockDatabaseDesc unlockDb) throws HiveException {
     HiveLockManager lockMgr = getAndCheckLockManager();
 
+    String catName = Objects.requireNonNullElse(unlockDb.getCatalogName(),
+        HiveUtils.getCurrentCatalogOrDefault(conf));
     String dbName = unlockDb.getDatabaseName();
-    Database dbObj = hiveDB.getDatabase(dbName);
+    Database dbObj = hiveDB.getDatabase(catName, dbName);
     if (dbObj == null) {
       throw new HiveException("Database " + dbName + " does not exist ");
     }
 
-    HiveLockObject obj = new HiveLockObject(dbObj.getName(), null);
+    HiveLockObject obj = new HiveLockObject(catName + "@" + dbObj.getName(), null);
     List<HiveLock> locks = lockMgr.getLocks(obj, false, false);
 
     if ((locks == null) || (locks.isEmpty())) {
diff --git a/ql/src/test/queries/clientpositive/catalog_database.q b/ql/src/test/queries/clientpositive/catalog_database.q
index b47fbb797d7a..62b7e0979312 100644
--- a/ql/src/test/queries/clientpositive/catalog_database.q
+++ b/ql/src/test/queries/clientpositive/catalog_database.q
@@ -1,3 +1,5 @@
+set hive.support.concurrency=true;
+
 -- SORT_QUERY_RESULTS
 
 -- CREATE DATABASE in default catalog 'hive'
@@ -56,6 +58,10 @@
 ALTER DATABASE testcat.testdb_2 SET owner user user1;
 ALTER DATABASE testcat.testdb_2 SET LOCATION '/tmp/testcat/path/testcat.testdb_2';
 DESCRIBE DATABASE testcat.testdb_2;
 
+-- LOCK & UNLOCK DATABASE by catalog.db pattern
+LOCK DATABASE testcat.testdb_2 SHARED;
+SHOW LOCKS;
+
 -- DROP CATALOG at the end. Need to drop all non-default databases first.
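With the qualified naming above, the explicit lock taken by this test is registered under "testcat@testdb_2" rather than the bare database name; a sketch of the name construction on the HiveTxnManagerImpl path shown earlier:

// LOCK DATABASE testcat.testdb_2 SHARED
String catName = Objects.requireNonNullElse(lockDb.getCatalogName(), // "testcat" when qualified
    HiveUtils.getCurrentCatalogOrDefault(conf));                     // session/default catalog otherwise
String lockName = catName + "@" + dbObj.getName();                   // "testcat@testdb_2"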
DROP DATABASE testcat.testdb_2; DROP CATALOG testcat; diff --git a/ql/src/test/results/clientpositive/llap/catalog_database.q.out b/ql/src/test/results/clientpositive/llap/catalog_database.q.out index 1ab7a07c2f44..b804cda089f0 100644 --- a/ql/src/test/results/clientpositive/llap/catalog_database.q.out +++ b/ql/src/test/results/clientpositive/llap/catalog_database.q.out @@ -138,6 +138,18 @@ POSTHOOK: query: DESCRIBE DATABASE testcat.testdb_2 POSTHOOK: type: DESCDATABASE POSTHOOK: Input: database:testdb_2 testdb_2 location/in/test user1 USER +PREHOOK: query: LOCK DATABASE testcat.testdb_2 SHARED +PREHOOK: type: LOCKDATABASE +PREHOOK: Input: database:testdb_2 +PREHOOK: Output: database:testdb_2 +POSTHOOK: query: LOCK DATABASE testcat.testdb_2 SHARED +POSTHOOK: type: LOCKDATABASE +POSTHOOK: Input: database:testdb_2 +POSTHOOK: Output: database:testdb_2 +PREHOOK: query: SHOW LOCKS +PREHOOK: type: SHOWLOCKS +POSTHOOK: query: SHOW LOCKS +POSTHOOK: type: SHOWLOCKS PREHOOK: query: DROP DATABASE testcat.testdb_2 PREHOOK: type: DROPDATABASE PREHOOK: Input: database:testdb_2 From 8dfef8606ca7b3521af3f869fa2e1fee3a1932c6 Mon Sep 17 00:00:00 2001 From: Butao Zhang Date: Wed, 1 Oct 2025 18:05:26 +0800 Subject: [PATCH 13/15] Implement show create database catalog.db syntax --- .../org/apache/hadoop/hive/ql/parse/HiveParser.g | 2 +- .../showcreate/ShowCreateDatabaseAnalyzer.java | 13 ++++++++++--- .../database/showcreate/ShowCreateDatabaseDesc.java | 9 ++++++++- .../showcreate/ShowCreateDatabaseOperation.java | 2 +- .../test/queries/clientpositive/catalog_database.q | 3 +++ .../clientpositive/llap/catalog_database.q.out | 11 +++++++++++ 6 files changed, 34 insertions(+), 6 deletions(-) diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g index 6491e949d32b..90aeedde5870 100644 --- a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g +++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g @@ -1333,7 +1333,7 @@ showStatement | KW_SHOW KW_FUNCTIONS (KW_LIKE showFunctionIdentifier)? -> ^(TOK_SHOWFUNCTIONS KW_LIKE? showFunctionIdentifier?) | KW_SHOW KW_PARTITIONS tabName=tableName partitionSpec? whereClause? orderByClause? limitClause? -> ^(TOK_SHOWPARTITIONS $tabName partitionSpec? whereClause? orderByClause? limitClause?) 
| KW_SHOW KW_CREATE ( - (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) db_name=identifier -> ^(TOK_SHOW_CREATEDATABASE $db_name) + (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) db_name=databaseName -> ^(TOK_SHOW_CREATEDATABASE $db_name) | KW_TABLE tabName=tableName -> ^(TOK_SHOW_CREATETABLE $tabName) ) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java index 4345503f4b93..106c8f1a1590 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseAnalyzer.java @@ -18,7 +18,9 @@ package org.apache.hadoop.hive.ql.ddl.database.showcreate; +import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; @@ -43,12 +45,17 @@ public ShowCreateDatabaseAnalyzer(QueryState queryState) throws SemanticExceptio public void analyzeInternal(ASTNode root) throws SemanticException { ctx.setResFile(ctx.getLocalTmpPath()); - String databaseName = getUnescapedName((ASTNode)root.getChild(0)); + Pair catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0)); + String catalogName = catDbNamePair.getLeft(); + if (catalogName != null && getCatalog(catalogName) == null) { + throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName); + } + String databaseName = catDbNamePair.getRight(); - Database database = getDatabase(databaseName); + Database database = getDatabase(catalogName, databaseName, true); inputs.add(new ReadEntity(database)); - ShowCreateDatabaseDesc desc = new ShowCreateDatabaseDesc(databaseName, ctx.getResFile()); + ShowCreateDatabaseDesc desc = new ShowCreateDatabaseDesc(catalogName, databaseName, ctx.getResFile()); Task task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)); rootTasks.add(task); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseDesc.java index cb60d7ed5ea9..31fc4d9787e4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseDesc.java @@ -35,9 +35,11 @@ public class ShowCreateDatabaseDesc implements DDLDesc, Serializable { public static final String SCHEMA = "createdb_stmt#string"; private final Path resFile; + private final String catName; private final String dbName; - public ShowCreateDatabaseDesc(String dbName, Path resFile) { + public ShowCreateDatabaseDesc(String catName, String dbName, Path resFile) { + this.catName = catName; this.dbName = dbName; this.resFile = resFile; } @@ -51,4 +53,9 @@ public Path getResFile() { public String getDatabaseName() { return dbName; } + + @Explain(displayName = "catalog name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getCatalogName() { + return catName; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseOperation.java index 1dba71f64059..2e13b42f7030 
100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/database/showcreate/ShowCreateDatabaseOperation.java
@@ -52,7 +52,7 @@ public int execute() throws HiveException {
   }
 
   private int showCreateDatabase(DataOutputStream outStream) throws Exception {
-    Database database = context.getDb().getDatabase(desc.getDatabaseName());
+    Database database = context.getDb().getDatabase(desc.getCatalogName(), desc.getDatabaseName());
 
     StringBuilder createDbCommand = new StringBuilder();
     createDbCommand.append("CREATE DATABASE `").append(database.getName()).append("`\n");
diff --git a/ql/src/test/queries/clientpositive/catalog_database.q b/ql/src/test/queries/clientpositive/catalog_database.q
index 62b7e0979312..d148a5242c90 100644
--- a/ql/src/test/queries/clientpositive/catalog_database.q
+++ b/ql/src/test/queries/clientpositive/catalog_database.q
@@ -62,6 +62,9 @@
 DESCRIBE DATABASE testcat.testdb_2;
 
 LOCK DATABASE testcat.testdb_2 SHARED;
 SHOW LOCKS;
 
+-- SHOW CREATE DATABASE by catalog.db pattern
+SHOW CREATE DATABASE testcat.testdb_2;
+
 -- DROP CATALOG at the end. Need to drop all non-default databases first.
 DROP DATABASE testcat.testdb_2;
 DROP CATALOG testcat;
diff --git a/ql/src/test/results/clientpositive/llap/catalog_database.q.out b/ql/src/test/results/clientpositive/llap/catalog_database.q.out
index b804cda089f0..a3c96b4cd530 100644
--- a/ql/src/test/results/clientpositive/llap/catalog_database.q.out
+++ b/ql/src/test/results/clientpositive/llap/catalog_database.q.out
@@ -150,6 +150,17 @@ PREHOOK: query: SHOW LOCKS
 PREHOOK: type: SHOWLOCKS
 POSTHOOK: query: SHOW LOCKS
 POSTHOOK: type: SHOWLOCKS
+PREHOOK: query: SHOW CREATE DATABASE testcat.testdb_2
+PREHOOK: type: SHOW_CREATEDATABASE
+PREHOOK: Input: database:testdb_2
+POSTHOOK: query: SHOW CREATE DATABASE testcat.testdb_2
+POSTHOOK: type: SHOW_CREATEDATABASE
+POSTHOOK: Input: database:testdb_2
+CREATE DATABASE `testdb_2`
+LOCATION
+#### A masked pattern was here ####
+WITH DBPROPERTIES (
+  'test'='yesthisis')
 PREHOOK: query: DROP DATABASE testcat.testdb_2
 PREHOOK: type: DROPDATABASE
 PREHOOK: Input: database:testdb_2

From 628949da3716aadb952e830659e223be9316446e Mon Sep 17 00:00:00 2001
From: Butao Zhang
Date: Thu, 2 Oct 2025 00:02:26 +0800
Subject: [PATCH 14/15] Fix typo in SwitchCatalogAnalyzer

---
 .../hive/ql/ddl/catalog/use/SwitchCatalogAnalyzer.java | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogAnalyzer.java
index 367e94d78b84..f2aa9fe2aeb5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/use/SwitchCatalogAnalyzer.java
@@ -40,14 +40,14 @@ public SwitchCatalogAnalyzer(QueryState queryState) throws SemanticException {
 
   @Override
   public void analyzeInternal(ASTNode root) throws SemanticException {
-    String catlogName = unescapeIdentifier(root.getChild(0).getText());
+    String catalogName = unescapeIdentifier(root.getChild(0).getText());
 
-    Catalog catalog = getCatalog(catlogName);
+    Catalog catalog = getCatalog(catalogName);
     ReadEntity readEntity = new ReadEntity(catalog);
     readEntity.noLockNeeded();
     inputs.add(readEntity);
 
-    SwitchCatalogDesc desc = new SwitchCatalogDesc(catlogName);
+    SwitchCatalogDesc desc = new SwitchCatalogDesc(catalogName);
     rootTasks.add(TaskFactory.get(new
DDLWork(getInputs(), getOutputs(), desc))); } } From 30fa6edc3e3296e7cc760c22a4e50d44f5efc3ce Mon Sep 17 00:00:00 2001 From: Butao Zhang Date: Thu, 2 Oct 2025 00:02:51 +0800 Subject: [PATCH 15/15] Fix lock database cat.db --- .../table/lock/show/ShowDbLocksAnalyzer.java | 11 +++++++-- .../ql/ddl/table/lock/show/ShowLocksDesc.java | 10 +++++++- .../table/lock/show/ShowLocksOperation.java | 2 ++ .../hive/ql/lockmgr/DummyTxnManager.java | 5 +++- .../lockneg_try_lock_cat_db_in_use.q | 9 ++++++++ .../queries/clientpositive/catalog_database.q | 4 ---- .../lockneg_try_db_lock_conflict.q.out | 2 +- .../lockneg_try_drop_locked_db.q.out | 2 +- .../lockneg_try_lock_cat_db_in_use.q.out | 23 +++++++++++++++++++ .../lockneg_try_lock_db_in_use.q.out | 2 +- .../llap/catalog_database.q.out | 12 ---------- 11 files changed, 59 insertions(+), 23 deletions(-) create mode 100644 ql/src/test/queries/clientnegative/lockneg_try_lock_cat_db_in_use.q create mode 100644 ql/src/test/results/clientnegative/lockneg_try_lock_cat_db_in_use.q.out diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowDbLocksAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowDbLocksAnalyzer.java index d4be520dd476..a4ad0092acf5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowDbLocksAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowDbLocksAnalyzer.java @@ -18,6 +18,8 @@ package org.apache.hadoop.hive.ql.ddl.table.lock.show; +import org.apache.commons.lang3.tuple.Pair; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; import org.apache.hadoop.hive.ql.ddl.DDLWork; @@ -41,11 +43,16 @@ public ShowDbLocksAnalyzer(QueryState queryState) throws SemanticException { public void analyzeInternal(ASTNode root) throws SemanticException { ctx.setResFile(ctx.getLocalTmpPath()); - String dbName = stripQuotes(root.getChild(0).getText()); + Pair catDbNamePair = getCatDbNamePair((ASTNode) root.getChild(0)); + String catName = catDbNamePair.getLeft(); + if (catName != null && getCatalog(catName) == null) { + throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS, catName); + } + String dbName = catDbNamePair.getRight(); boolean isExtended = (root.getChildCount() > 1); assert txnManager != null : "Transaction manager should be set before calling analyze"; - ShowLocksDesc desc = new ShowLocksDesc(ctx.getResFile(), dbName, isExtended, txnManager.useNewShowLocksFormat()); + ShowLocksDesc desc = new ShowLocksDesc(ctx.getResFile(), catName, dbName, isExtended, txnManager.useNewShowLocksFormat()); Task task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)); rootTasks.add(task); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksDesc.java index 898d3b8be618..e826e090b997 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksDesc.java @@ -39,14 +39,16 @@ public class ShowLocksDesc implements DDLDesc, Serializable { "string:string:string:string:string:string:string:string:string:string:string:string:string"; private final String resFile; + private final String catName; private final String dbName; private final String tableName; private final Map partSpec; private final boolean isExt; private final boolean isNewFormat; - public 
+  public ShowLocksDesc(Path resFile, String catName, String dbName, boolean isExt, boolean isNewFormat) {
     this.resFile = resFile.toString();
+    this.catName = catName;
     this.dbName = dbName;
     this.tableName = null;
     this.partSpec = null;
@@ -57,6 +59,7 @@ public ShowLocksDesc(Path resFile, String dbName, boolean isExt, boolean isNewFo
   public ShowLocksDesc(Path resFile, String tableName, Map<String, String> partSpec, boolean isExt,
       boolean isNewFormat) {
     this.resFile = resFile.toString();
+    this.catName = null;
     this.dbName = null;
     this.tableName = tableName;
     this.partSpec = partSpec;
@@ -69,6 +72,11 @@ public String getResFile() {
     return resFile;
   }
 
+  @Explain(displayName = "catName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getCatName() {
+    return catName;
+  }
+
   @Explain(displayName = "dbName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDbName() {
     return dbName;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksOperation.java
index d7e71858a0ad..5a3ac38dfe2d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksOperation.java
@@ -160,6 +160,8 @@ private ShowLocksResponse getLocksForNewFormat(HiveLockManager lockMgr) throws H
       throw new HiveException("New lock format only supported with db lock manager.");
     }
 
+    // TODO: propagate the catalog name once ShowLocksRequest supports it. The struct currently has no
+    // catalog field, so hive_metastore.thrift would need to be extended to carry one for SHOW LOCKS.
     ShowLocksRequest request = new ShowLocksRequest();
     if (desc.getDbName() == null && desc.getTableName() != null) {
       request.setDbname(SessionState.get().getCurrentDatabase());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java
index 8449ef83a087..e6faaaadd599 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DummyTxnManager.java
@@ -23,6 +23,7 @@
 import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse;
 import org.apache.hadoop.hive.metastore.api.TxnToWriteId;
 import org.apache.hadoop.hive.metastore.api.TxnType;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.ValidTxnList;
@@ -394,7 +395,9 @@ private List<HiveLockObj> getLockObjects(QueryPlan plan, Database db,
         conf);
 
     if (db != null) {
-      locks.add(new HiveLockObj(new HiveLockObject(db.getName(), lockData),
+      String catName = Objects.requireNonNullElse(db.getCatalogName(),
+          HiveUtils.getCurrentCatalogOrDefault(conf));
+      locks.add(new HiveLockObj(new HiveLockObject(catName + "@" + db.getName(), lockData),
           mode));
       return locks;
     }
diff --git a/ql/src/test/queries/clientnegative/lockneg_try_lock_cat_db_in_use.q b/ql/src/test/queries/clientnegative/lockneg_try_lock_cat_db_in_use.q
new file mode 100644
index 000000000000..9f6bed6aa559
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/lockneg_try_lock_cat_db_in_use.q
@@ -0,0 +1,9 @@
+set hive.lock.numretries=0;
+set hive.support.concurrency=true;
+
+CREATE CATALOG testcat LOCATION '/tmp/testcat' COMMENT 'Hive test catalog';
+create database testcat.lockneg9;
+
+lock database testcat.lockneg9 shared;
+
+drop database testcat.lockneg9;
diff --git a/ql/src/test/queries/clientpositive/catalog_database.q b/ql/src/test/queries/clientpositive/catalog_database.q
index d148a5242c90..9feb1c30bec8 100644
--- a/ql/src/test/queries/clientpositive/catalog_database.q
+++ b/ql/src/test/queries/clientpositive/catalog_database.q
@@ -58,10 +58,6 @@ ALTER DATABASE testcat.testdb_2 SET owner user user1;
 ALTER DATABASE testcat.testdb_2 SET LOCATION '/tmp/testcat/path/testcat.testdb_2';
 DESCRIBE DATABASE testcat.testdb_2;
 
--- LOCK & UNLOCK DATABASE by catalog.db pattern
-LOCK DATABASE testcat.testdb_2 SHARED;
-SHOW LOCKS;
-
 -- SHOW CREATE DATABASE by catalog.db pattern
 SHOW CREATE DATABASE testcat.testdb_2;
 
diff --git a/ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out b/ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out
index ad264fadc52a..8b065d01a006 100644
--- a/ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out
+++ b/ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out
@@ -16,5 +16,5 @@ PREHOOK: query: lock database lockneg4 shared
 PREHOOK: type: LOCKDATABASE
 PREHOOK: Input: database:lockneg4
 PREHOOK: Output: database:lockneg4
-Unable to acquire EXPLICIT, SHARED lock lockneg4 after 1 attempts.
+Unable to acquire EXPLICIT, SHARED lock hive@lockneg4 after 1 attempts.
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask
diff --git a/ql/src/test/results/clientnegative/lockneg_try_drop_locked_db.q.out b/ql/src/test/results/clientnegative/lockneg_try_drop_locked_db.q.out
index 964b77600496..cdb4620f9d6e 100644
--- a/ql/src/test/results/clientnegative/lockneg_try_drop_locked_db.q.out
+++ b/ql/src/test/results/clientnegative/lockneg_try_drop_locked_db.q.out
@@ -16,5 +16,5 @@ PREHOOK: query: show locks database lockneg9
 PREHOOK: type: SHOWLOCKS
 POSTHOOK: query: show locks database lockneg9
 POSTHOOK: type: SHOWLOCKS
-Unable to acquire IMPLICIT, EXCLUSIVE lock lockneg9 after 1 attempts.
+Unable to acquire IMPLICIT, EXCLUSIVE lock hive@lockneg9 after 1 attempts.
 FAILED: Error in acquiring locks: Locks on the underlying objects cannot be acquired, retry after some time.
diff --git a/ql/src/test/results/clientnegative/lockneg_try_lock_cat_db_in_use.q.out b/ql/src/test/results/clientnegative/lockneg_try_lock_cat_db_in_use.q.out
new file mode 100644
index 000000000000..97cf1983b97d
--- /dev/null
+++ b/ql/src/test/results/clientnegative/lockneg_try_lock_cat_db_in_use.q.out
@@ -0,0 +1,23 @@
+#### A masked pattern was here ####
+PREHOOK: type: CREATECATALOG
+PREHOOK: Output: catalog:testcat
+#### A masked pattern was here ####
+POSTHOOK: type: CREATECATALOG
+POSTHOOK: Output: catalog:testcat
+#### A masked pattern was here ####
+PREHOOK: query: create database testcat.lockneg9
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:lockneg9
+POSTHOOK: query: create database testcat.lockneg9
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:lockneg9
+PREHOOK: query: lock database testcat.lockneg9 shared
+PREHOOK: type: LOCKDATABASE
+PREHOOK: Input: database:lockneg9
+PREHOOK: Output: database:lockneg9
+POSTHOOK: query: lock database testcat.lockneg9 shared
+POSTHOOK: type: LOCKDATABASE
+POSTHOOK: Input: database:lockneg9
+POSTHOOK: Output: database:lockneg9
+Unable to acquire IMPLICIT, EXCLUSIVE lock testcat@lockneg9 after 1 attempts.
+FAILED: Error in acquiring locks: Locks on the underlying objects cannot be acquired, retry after some time.
diff --git a/ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out b/ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out
index 00d231a48b25..5aa7500aeaf4 100644
--- a/ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out
+++ b/ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out
@@ -48,5 +48,5 @@ PREHOOK: query: lock database lockneg2 exclusive
 PREHOOK: type: LOCKDATABASE
 PREHOOK: Input: database:lockneg2
 PREHOOK: Output: database:lockneg2
-Unable to acquire EXPLICIT, EXCLUSIVE lock lockneg2 after 1 attempts.
+Unable to acquire EXPLICIT, EXCLUSIVE lock hive@lockneg2 after 1 attempts.
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask
diff --git a/ql/src/test/results/clientpositive/llap/catalog_database.q.out b/ql/src/test/results/clientpositive/llap/catalog_database.q.out
index a3c96b4cd530..0a445ab50ec9 100644
--- a/ql/src/test/results/clientpositive/llap/catalog_database.q.out
+++ b/ql/src/test/results/clientpositive/llap/catalog_database.q.out
@@ -138,18 +138,6 @@ POSTHOOK: query: DESCRIBE DATABASE testcat.testdb_2
 POSTHOOK: type: DESCDATABASE
 POSTHOOK: Input: database:testdb_2
 testdb_2	location/in/test	user1	USER
-PREHOOK: query: LOCK DATABASE testcat.testdb_2 SHARED
-PREHOOK: type: LOCKDATABASE
-PREHOOK: Input: database:testdb_2
-PREHOOK: Output: database:testdb_2
-POSTHOOK: query: LOCK DATABASE testcat.testdb_2 SHARED
-POSTHOOK: type: LOCKDATABASE
-POSTHOOK: Input: database:testdb_2
-POSTHOOK: Output: database:testdb_2
-PREHOOK: query: SHOW LOCKS
-PREHOOK: type: SHOWLOCKS
-POSTHOOK: query: SHOW LOCKS
-POSTHOOK: type: SHOWLOCKS
 PREHOOK: query: SHOW CREATE DATABASE testcat.testdb_2
 PREHOOK: type: SHOW_CREATEDATABASE
 PREHOOK: Input: database:testdb_2
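
Reviewer note on the naming rule this patch establishes: a database lock object is now identified as <catalog>@<database>, where the catalog falls back to the session's current (or default) catalog when the metastore Database object carries none. The following is a minimal, self-contained Java sketch of that rule only; the Database record, lockName helper, and LockNameSketch class are illustrative stand-ins rather than Hive classes, while the "@" separator and the null fallback mirror the DummyTxnManager change above.

import java.util.Objects;

// Illustrative stand-in for the relevant fields of the metastore Database object.
record Database(String catalogName, String name) {}

public class LockNameSketch {
  // Mirrors the fallback in the patched getLockObjects: prefer the catalog recorded on the
  // Database object; otherwise use the session's current (or default) catalog.
  static String lockName(Database db, String currentCatalogOrDefault) {
    String catName = Objects.requireNonNullElse(db.catalogName(), currentCatalogOrDefault);
    return catName + "@" + db.name();
  }

  public static void main(String[] args) {
    System.out.println(lockName(new Database(null, "lockneg4"), "hive"));      // hive@lockneg4
    System.out.println(lockName(new Database("testcat", "lockneg9"), "hive")); // testcat@lockneg9
  }
}

This matches the updated golden outputs (hive@lockneg4, hive@lockneg2, testcat@lockneg9) and means same-named databases in different catalogs now lock independently.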