diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g index ad34eddbc1ae..841723c38226 100644 --- a/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g +++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g @@ -243,12 +243,27 @@ tableName @init { gParent.pushMsg("table name", state); } @after { gParent.popMsg(state); } : - db=identifier DOT tab=identifier (DOT meta=identifier)? - {tables.add(new ImmutablePair<>($db.text, $tab.text));} - -> ^(TOK_TABNAME $db $tab $meta?) - | - tab=identifier - {tables.add(new ImmutablePair<>(null, $tab.text));} + // case 1: catalog.db.table(.meta)? + (cat=identifier DOT db=identifier DOT tab=identifier (DOT meta=identifier)?) + => + cat=identifier DOT db=identifier DOT tab=identifier (DOT meta=identifier)? + { + tables.add(new ImmutablePair<>($cat.text + "." + $db.text, $tab.text)); + } + -> ^(TOK_TABNAME $cat $db $tab $meta?) + + // case 2: db.table + | db=identifier DOT tab=identifier + { + tables.add(new ImmutablePair<>($db.text, $tab.text)); + } + -> ^(TOK_TABNAME $db $tab) + + // case 3: table + | tab=identifier + { + tables.add(new ImmutablePair<>(null, $tab.text)); + } -> ^(TOK_TABNAME $tab) ; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/CreateTableAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/CreateTableAnalyzer.java index 74273f780cf4..7f52b008c7d5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/CreateTableAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/CreateTableAnalyzer.java @@ -233,12 +233,13 @@ private static boolean isIcebergTable(Map<String, String> tblProps) { .equalsIgnoreCase(tblProps.get(META_TABLE_STORAGE)); } - private String getDefaultLocation(String dbName, String tableName, boolean isExt) - throws SemanticException { + private String getDefaultLocation(TableName qualifiedTabName, boolean isExt) + throws SemanticException { String tblLocation; try { Warehouse wh = new Warehouse(conf); - tblLocation = wh.getDefaultTablePath(db.getDatabase(dbName), tableName, isExt).toUri().getPath(); + tblLocation = wh.getDefaultTablePath(db.getDatabase(qualifiedTabName.getCat(), + qualifiedTabName.getDb()), qualifiedTabName.getTable(), isExt).toUri().getPath(); } catch (MetaException | HiveException e) { throw new SemanticException(e); } @@ -255,7 +256,7 @@ private String getDefaultLocation(String dbName, String tableName, boolean isExt */ private Map<String, String> validateAndAddDefaultProperties(Map<String, String> tblProp, boolean isExt, StorageFormat storageFormat, String qualifiedTableName, List sortCols, boolean isMaterialization, - boolean isTemporaryTable, boolean isTransactional, boolean isManaged, String[] qualifiedTabName, + boolean isTemporaryTable, boolean isTransactional, boolean isManaged, TableName qualifiedTabName, boolean isTableTypeChanged) throws SemanticException { Map<String, String> retValue = Optional.ofNullable(tblProp).orElseGet(HashMap::new); @@ -316,7 +317,7 @@ private Map<String, String> validateAndAddDefaultProperties(Map<String, String> if (isIcebergTable(retValue)) { SessionStateUtil.addResourceOrThrow(conf, SessionStateUtil.DEFAULT_TABLE_LOCATION, - getDefaultLocation(qualifiedTabName[0], qualifiedTabName[1], true)); + getDefaultLocation(qualifiedTabName, true)); } return retValue; } @@ -348,7 +349,8 @@ private void updateDefaultTblProps(Map<String, String> source, Map<String, String> defaultSerdeProps, Map<String, String> tblProps, boolean ifNotExists, String likeTableName, boolean isUserStorageFormat) { this.tableName = tableName; @@ -72,7
+73,7 @@ public boolean getIfNotExists() { } @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getTableName() { + public TableName getTableName() { return tableName; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/like/CreateTableLikeOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/like/CreateTableLikeOperation.java index 770724b90abf..516067508139 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/like/CreateTableLikeOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/like/CreateTableLikeOperation.java @@ -85,7 +85,7 @@ public int execute() throws HiveException { } private Table createViewLikeTable(Table oldTable) throws HiveException { - Table table = context.getDb().newTable(desc.getTableName()); + Table table = context.getDb().newTable(desc.getTableName().getNotEmptyDbTable()); if (desc.getTblProps() != null) { table.getTTable().getParameters().putAll(desc.getTblProps()); @@ -115,9 +115,8 @@ private Table createViewLikeTable(Table oldTable) private Table createTableLikeTable(Table table, Map<String, String> originalProperties) throws SemanticException, HiveException { - String[] names = Utilities.getDbTableName(desc.getTableName()); - table.setDbName(names[0]); - table.setTableName(names[1]); + table.setDbName(desc.getTableName().getDb()); + table.setTableName(desc.getTableName().getTable()); table.setOwner(SessionState.getUserFromAuthenticator()); setUserSpecifiedLocation(table); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableAnalyzer.java index 02007578027b..a42a899f3ed1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableAnalyzer.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hive.ql.ddl.table.create.show; -import java.util.Map.Entry; - import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Triple; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType; @@ -46,17 +46,21 @@ public ShowCreateTableAnalyzer(QueryState queryState) throws SemanticException { public void analyzeInternal(ASTNode root) throws SemanticException { ctx.setResFile(ctx.getLocalTmpPath()); - Entry<String, String> tableIdentifier = getDbTableNamePair((ASTNode) root.getChild(0)); - if (tableIdentifier.getValue().contains(".")) { + Triple<String, String, String> tableIdentifier = getCatDbTableNameTriple((ASTNode) root.getChild(0)); + if (tableIdentifier.getRight().contains(".")) { throw new SemanticException("The SHOW CREATE TABLE command is not supported for metadata tables."); } - Table table = getTable(tableIdentifier.getKey(), tableIdentifier.getValue(), true); + String catName = tableIdentifier.getLeft(); + String dbName = tableIdentifier.getMiddle(); + String tblName = tableIdentifier.getRight(); + TableName tableName = new TableName(catName, dbName, tblName); + Table table = getTable(tableName, true); inputs.add(new ReadEntity(table)); // If no DB was specified in statement, do not include it in the final output - ShowCreateTableDesc desc = new ShowCreateTableDesc(table.getDbName(), table.getTableName(), - ctx.getResFile().toString(),
StringUtils.isBlank(tableIdentifier.getKey())); + ShowCreateTableDesc desc = new ShowCreateTableDesc(table.getCatName(), table.getDbName(), table.getTableName(), + ctx.getResFile().toString(), StringUtils.isBlank(tableIdentifier.getMiddle())); Task<?> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)); rootTasks.add(task); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableDesc.java index cd580b7f70fd..878fdae6cecc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableDesc.java @@ -33,12 +33,14 @@ public class ShowCreateTableDesc implements DDLDesc, Serializable { public static final String SCHEMA = "createtab_stmt#string"; + private final String catalogName; private final String databaseName; private final String tableName; private final String resFile; private final boolean isRelative; - public ShowCreateTableDesc(String databaseName, String tableName, String resFile, boolean isRelative) { + public ShowCreateTableDesc(String catalogName, String databaseName, String tableName, String resFile, boolean isRelative) { + this.catalogName = catalogName; this.databaseName = databaseName; this.tableName = tableName; this.resFile = resFile; @@ -60,6 +62,11 @@ public String getDatabaseName() { return databaseName; } + @Explain(displayName = "catalog name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getCatalogName() { + return catalogName; + } + @Explain(displayName = "relative table location", explainLevels = { Level.EXTENDED }) public boolean isRelative() { return isRelative; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java index 7b8cc1f2ba49..12229b2f4052 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java @@ -23,13 +23,10 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; import java.util.List; -import java.util.Set; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.ql.ddl.DDLOperation; import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; import org.apache.hadoop.hive.ql.ddl.ShowUtils; @@ -50,7 +47,8 @@ public ShowCreateTableOperation(DDLOperationContext context, ShowCreateTableDesc public int execute() throws HiveException { // get the create table statement for the table and populate the output try (DataOutputStream outStream = ShowUtils.getOutputStream(new Path(desc.getResFile()), context)) { - Table table = context.getDb().getTable(desc.getDatabaseName(), desc.getTableName()); + TableName tn = new TableName(desc.getCatalogName(), desc.getDatabaseName(), desc.getTableName()); + Table table = context.getDb().getTable(tn, true); DDLPlanUtils ddlObj = new DDLPlanUtils(); String command; if (table.isView()) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/DropTableAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/DropTableAnalyzer.java index 2c7533d40941..ac9ee0f13091 100644 ---
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/DropTableAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/DropTableAnalyzer.java @@ -49,17 +49,16 @@ public DropTableAnalyzer(QueryState queryState) throws SemanticException { @Override public void analyzeInternal(ASTNode root) throws SemanticException { TableName qualTabName = getQualifiedTableName((ASTNode) root.getChild(0)); - String tableName = qualTabName.getNotEmptyDbTable(); boolean ifExists = (root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null); boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT); //Authorize database for drop table command // Skip db object if database doesn't exist - Database database = getDatabase(qualTabName.getDb(),false); + Database database = getDatabase(qualTabName.getCat(), qualTabName.getDb(), false); if (database != null) { outputs.add(new WriteEntity(database, WriteType.DDL_SHARED)); } - Table table = getTable(tableName, throwException); + Table table = getTable(qualTabName, throwException); if (table != null) { inputs.add(new ReadEntity(table)); @@ -70,7 +70,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException { boolean purge = (root.getFirstChildWithType(HiveParser.KW_PURGE) != null); ReplicationSpec replicationSpec = new ReplicationSpec(root); - DropTableDesc desc = new DropTableDesc(tableName, ifExists, purge, replicationSpec); + DropTableDesc desc = new DropTableDesc(qualTabName, ifExists, purge, replicationSpec); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc))); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/DropTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/DropTableDesc.java index 2ecdf9e5af2c..9a59d0957828 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/DropTableDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/DropTableDesc.java @@ -20,6 +20,7 @@ import java.io.Serializable; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.ql.ddl.DDLDesc; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.plan.Explain; @@ -32,17 +33,17 @@ public class DropTableDesc implements DDLDesc, Serializable { private static final long serialVersionUID = 1L; - private final String tableName; + private final TableName tableName; private final boolean ifExists; private final boolean purge; private final ReplicationSpec replicationSpec; private final boolean validationRequired; - public DropTableDesc(String tableName, boolean ifExists, boolean ifPurge, ReplicationSpec replicationSpec) { + public DropTableDesc(TableName tableName, boolean ifExists, boolean ifPurge, ReplicationSpec replicationSpec) { this(tableName, ifExists, ifPurge, replicationSpec, true); } - public DropTableDesc(String tableName, boolean ifExists, boolean purge, ReplicationSpec replicationSpec, + public DropTableDesc(TableName tableName, boolean ifExists, boolean purge, ReplicationSpec replicationSpec, boolean validationRequired) { this.tableName = tableName; this.ifExists = ifExists; @@ -52,7 +53,7 @@ public DropTableDesc(String tableName, boolean ifExists, boolean purge, Replicat } @Explain(displayName = "table", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) - public String getTableName() { + public TableName getTableName() { return tableName; } diff --git
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/DropTableOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/DropTableOperation.java @@ -21,7 +21,6 @@ import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.llap.LlapHiveUtils; import org.apache.hadoop.hive.llap.ProactiveEviction; -import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.conf.MetastoreConf; import org.apache.hadoop.hive.ql.ddl.DDLOperationContext; import org.apache.hadoop.hive.ql.ddl.DDLUtils; @@ -35,8 +34,6 @@ import org.apache.hadoop.hive.ql.parse.HiveTableName; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; -import com.google.common.collect.Iterables; - import java.util.Map; /** @@ -103,7 +100,7 @@ public int execute() throws HiveException { } } } - LOG.debug("DDLTask: Drop Table is skipped as table {} is newer than update", desc.getTableName()); + LOG.debug("DDLTask: Drop Table is skipped as table {} is newer than update", desc.getTableName().toString()); return 0; // table is newer, leave it be. } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesAnalyzer.java index 826cb299e918..649c9ddf3b85 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesAnalyzer.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.HiveParser; @@ -49,6 +50,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException { ctx.setResFile(ctx.getLocalTmpPath()); + String catName = HiveUtils.getCurrentCatalogOrDefault(conf); String dbName = SessionState.get().getCurrentDatabase(); String tableNames = null; TableType tableTypeFilter = null; @@ -57,7 +59,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException { ASTNode child = (ASTNode) root.getChild(i); if (child.getType() == HiveParser.TOK_FROM) { // Specifies a DB dbName = unescapeIdentifier(root.getChild(++i).getText()); - db.validateDatabaseExists(dbName); + db.validateDatabaseExists(catName, dbName); } else if (child.getType() == HiveParser.TOK_TABLE_TYPE) { // Filter on table type String tableType = unescapeIdentifier(child.getChild(0).getText()); if (!"table_type".equalsIgnoreCase(tableType)) { @@ -73,7 +75,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException { inputs.add(new ReadEntity(getDatabase(dbName))); - ShowTablesDesc desc = new ShowTablesDesc(ctx.getResFile(), dbName, tableNames, tableTypeFilter, isExtended); + ShowTablesDesc desc = new ShowTablesDesc(ctx.getResFile(), catName, dbName, tableNames, tableTypeFilter, isExtended); Task<?> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)); rootTasks.add(task); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesDesc.java
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesDesc.java index 99c1118f9f98..c1be3301540c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesDesc.java @@ -37,13 +37,15 @@ public class ShowTablesDesc implements DDLDesc, Serializable { private static final String EXTENDED_TABLES_SCHEMA = "tab_name,table_type#string,string"; private final String resFile; + private final String catName; private final String dbName; private final String pattern; private final TableType typeFilter; private final boolean isExtended; - public ShowTablesDesc(Path resFile, String dbName, String pattern, TableType typeFilter, boolean isExtended) { + public ShowTablesDesc(Path resFile, String catName, String dbName, String pattern, TableType typeFilter, boolean isExtended) { this.resFile = resFile.toString(); + this.catName = catName; this.dbName = dbName; this.pattern = pattern; this.typeFilter = typeFilter; @@ -60,6 +62,11 @@ public String getResFile() { return resFile; } + @Explain(displayName = "catalog name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) + public String getCatName() { + return catName; + } + @Explain(displayName = "database name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED }) public String getDbName() { return dbName; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesOperation.java index 40f7b767e88d..551d4e6b477b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesOperation.java @@ -45,7 +45,7 @@ public ShowTablesOperation(DDLOperationContext context, ShowTablesDesc desc) { @Override public int execute() throws HiveException { - if (!context.getDb().databaseExists(desc.getDbName())) { + if (!context.getDb().databaseExists(desc.getCatName(), desc.getDbName())) { throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, desc.getDbName()); } @@ -61,7 +61,7 @@ public int execute() throws HiveException { private void showTables() throws HiveException { String pattern = UDFLike.likePatternToRegExp(desc.getPattern(), false, true); List<String> tableNames = new ArrayList<>( - context.getDb().getTablesByType(desc.getDbName(), pattern, desc.getTypeFilter())); + context.getDb().getTablesByType(desc.getCatName(), desc.getDbName(), pattern, desc.getTypeFilter())); Collections.sort(tableNames); LOG.debug("Found {} table(s) matching the SHOW TABLES statement.", tableNames.size()); @@ -79,7 +79,7 @@ private void showTablesExtended() throws HiveException { TableType typeFilter = desc.getTypeFilter(); TableType[] tableTypes = typeFilter == null ?
TableType.values() : new TableType[]{typeFilter}; for (TableType tableType : tableTypes) { - List<String> tables = context.getDb().getTablesByType(desc.getDbName(), pattern, tableType); + List<String> tables = context.getDb().getTablesByType(desc.getCatName(), desc.getDbName(), pattern, tableType); tables.forEach(name -> tableNameToType.put(name, tableType.toString())); } LOG.debug("Found {} table(s) matching the SHOW EXTENDED TABLES statement.", tableNameToType.size()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java index 54ca6e02fc1a..b3a987eed38a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.table; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.Warehouse; @@ -318,7 +319,8 @@ private Task<?> loadTableTask(Table table, ReplicationSpec replicationSpec, Path private Task<?> dropTableTask(Table table) { assert(table != null); - DropTableDesc dropTblDesc = new DropTableDesc(table.getFullyQualifiedName(), true, false, event.replicationSpec()); + TableName tableName = TableName.fromString(table.getTableName(), table.getCatalogName(), table.getDbName()); + DropTableDesc dropTblDesc = new DropTableDesc(tableName, true, false, event.replicationSpec()); return TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), dropTblDesc, true, (new Path(context.dumpDirectory)).getParent().toString(), this.metricCollector), context.hiveConf); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index b3977b8c9578..e1d476fc8ecc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -1668,8 +1668,20 @@ public Table getTable(final String dbName, final String tableName) throws HiveEx * if there's an internal error or if the table doesn't exist */ public Table getTable(TableName tableName) throws HiveException { - return this.getTable(ObjectUtils.firstNonNull(tableName.getDb(), SessionState.get().getCurrentDatabase()), - tableName.getTable(), tableName.getTableMetaRef(), true); + return getTable(tableName, true); + } + + /** + * Returns metadata of the table. We should prioritize this method and phase out other getTable calls. + * + * @param tableName the tableName object + * @param throwException + * controls whether an exception is thrown or null is returned + * @exception HiveException + * if there's an internal error or if the table doesn't exist + */ + public Table getTable(TableName tableName, boolean throwException) throws HiveException { + return this.getTable(tableName, throwException, false, false); } /** @@ -1767,10 +1779,23 @@ public Table getTable(final String dbName, final String tableName, String tableM * get column statistics if available * @return the table or if throwException is false a null value.
* @throws HiveException + * + * @deprecated use {@link #getTable(TableName, boolean, boolean, boolean)} */ public Table getTable(final String dbName, final String tableName, String tableMetaRef, boolean throwException, boolean checkTransactional, boolean getColumnStats) throws HiveException { + TableName table = new TableName(getDefaultCatalog(conf), dbName, tableName, tableMetaRef); + return getTable(table, throwException, checkTransactional, getColumnStats); + } + + public Table getTable(final TableName table, boolean throwException, + boolean checkTransactional, boolean getColumnStats) throws HiveException { + + String catName = table.getCat() != null ? table.getCat() : HiveUtils.getCurrentCatalogOrDefault(conf); + String dbName = table.getDb() != null ? table.getDb() : SessionState.get().getCurrentDatabase(); + String tableName = table.getTable(); + String tableMetaRef = table.getTableMetaRef(); if (tableName == null || tableName.equals("")) { throw new HiveException("empty table creation??"); } @@ -1780,7 +1805,7 @@ public Table getTable(final String dbName, final String tableName, String tableM try { // Note: this is currently called w/true from StatsOptimizer only. GetTableRequest request = new GetTableRequest(dbName, tableName); - request.setCatName(getDefaultCatalog(conf)); + request.setCatName(catName); request.setGetColumnStats(getColumnStats); request.setEngine(Constants.HIVE_ENGINE); if (checkTransactional) { @@ -2554,6 +2579,9 @@ public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) } } + /** + * @deprecated use {@link #validateDatabaseExists(String, String)} + */ public void validateDatabaseExists(String databaseName) throws SemanticException { boolean exists; try { @@ -2567,6 +2595,19 @@ public void validateDatabaseExists(String databaseName) throws SemanticException } } + public void validateDatabaseExists(String catalogName, String databaseName) throws SemanticException { + boolean exists; + try { + exists = databaseExists(catalogName, databaseName); + } catch (HiveException e) { + throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(databaseName), e); + } + + if (!exists) { + throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(databaseName)); + } + } + public Catalog getCatalog(String catName) throws HiveException { PerfLogger perfLogger = SessionState.getPerfLogger(); perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_CATALOG); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/AcidExportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/AcidExportSemanticAnalyzer.java index 06912a1b3226..6d3dadd13336 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/AcidExportSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/AcidExportSemanticAnalyzer.java @@ -108,10 +108,11 @@ public static boolean isAcidExport(ASTNode tree) throws SemanticException { return tableHandle != null && AcidUtils.isFullAcidTable(tableHandle); } - private static String getTmptTableNameForExport(Table exportTable) { + private static TableName getTmptTableNameForExport(Table exportTable) { + String tmpTableCat = exportTable.getCatName(); String tmpTableDb = exportTable.getDbName(); String tmpTableName = exportTable.getTableName() + "_" + UUID.randomUUID().toString().replace('-', '_'); - return Warehouse.getQualifiedName(tmpTableDb, tmpTableName); + return TableName.fromString(tmpTableName, tmpTableCat, tmpTableDb); } /** @@ -134,8 +135,7 @@ private void analyzeAcidExport(ASTNode ast, Table exportTable, 
ASTNode tokRefOrN //need to create the table "manually" rather than creating a task since it has to exist to // compile the insert into T... - final String newTableName = getTmptTableNameForExport(exportTable); //this is db.table - final TableName newTableNameRef = HiveTableName.of(newTableName); + final TableName newTableName = getTmptTableNameForExport(exportTable); //this is cat.db.table Map<String, String> tblProps = new HashMap<>(); tblProps.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, Boolean.FALSE.toString()); String location; @@ -190,7 +190,7 @@ private void analyzeAcidExport(ASTNode ast, Table exportTable, ASTNode tokRefOrN the partition spec in the Export command. These of course don't exist yet since we've not ran the insert stmt yet!!!!!!! */ - Task<?> exportTask = ExportSemanticAnalyzer.analyzeExport(ast, newTableName, db, conf, inputs, outputs); + Task<?> exportTask = ExportSemanticAnalyzer.analyzeExport(ast, newTableName.toString(), db, conf, inputs, outputs); // Add an alter table task to set transactional props // do it after populating temp table so that it's written as non-transactional table but @@ -198,7 +198,7 @@ private void analyzeAcidExport(ASTNode ast, Table exportTable, ASTNode tokRefOrN // IMPORT is done for this archive and target table doesn't exist, it will be created as Acid. Map<String, String> mapProps = new HashMap<>(); mapProps.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, Boolean.TRUE.toString()); - AlterTableSetPropertiesDesc alterTblDesc = new AlterTableSetPropertiesDesc(newTableNameRef, null, null, false, + AlterTableSetPropertiesDesc alterTblDesc = new AlterTableSetPropertiesDesc(newTableName, null, null, false, mapProps, false, false, null); addExportTask(rootTasks, exportTask, TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc))); @@ -226,8 +226,8 @@ private void analyzeAcidExport(ASTNode ast, Table exportTable, ASTNode tokRefOrN * for EXPORT command.
*/ private StringBuilder generateExportQuery(List<FieldSchema> partCols, ASTNode tokRefOrNameExportTable, - ASTNode tableTree, String newTableName) throws SemanticException { - StringBuilder rewrittenQueryStr = new StringBuilder("insert into ").append(newTableName); + ASTNode tableTree, TableName newTableName) throws SemanticException { + StringBuilder rewrittenQueryStr = new StringBuilder("insert into ").append(newTableName.toString()); addPartitionColsToInsert(partCols, rewrittenQueryStr); rewrittenQueryStr.append(" select * from ").append(getFullTableNameForSQL(tokRefOrNameExportTable)); //builds partition spec so we can build suitable WHERE clause diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index 393006bd8f06..c67404a84f8d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -40,6 +40,7 @@ import org.antlr.runtime.tree.Tree; import org.apache.calcite.sql.SqlKind; import org.apache.commons.lang3.tuple.Pair; +import org.apache.commons.lang3.tuple.Triple; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; @@ -124,6 +125,7 @@ import com.google.common.collect.ImmutableList; import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_LOAD_DATA_USE_NATIVE_API; +import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog; import static org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.NullOrder.NULLS_FIRST; import static org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.NullOrder.NULLS_LAST; import static org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order.ASC; @@ -431,6 +433,9 @@ public static String getUnescapedName(ASTNode tableOrColumnNode) throws Semantic return getUnescapedName(tableOrColumnNode, null); } + /** + * @deprecated use {@link #getCatDbTableNameTriple(ASTNode)} instead. + */ public static Map.Entry<String, String> getDbTableNamePair(ASTNode tableNameNode) throws SemanticException { if (tableNameNode.getType() != HiveParser.TOK_TABNAME || @@ -459,6 +464,29 @@ public static Map.Entry<String, String> getDbTableNamePair(ASTNode tableNameNode } } + public static Triple<String, String, String> getCatDbTableNameTriple(ASTNode tableNameNode) throws SemanticException { + if (tableNameNode.getType() != HiveParser.TOK_TABNAME || tableNameNode.getChildCount() < 1 || tableNameNode.getChildCount() > 4) { + throw new SemanticException(ASTErrorUtils.getMsg(ErrorMsg.INVALID_TABLE_NAME.getMsg(), tableNameNode)); + } + + List<String> parts = new ArrayList<>(); + for (int i = 0; i < tableNameNode.getChildCount(); i++) { + String part = unescapeIdentifier(tableNameNode.getChild(i).getText()); + if (part != null && part.contains(".")) { + throw new SemanticException(ASTErrorUtils.getMsg(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), tableNameNode)); + } + parts.add(part); + } + + return switch (parts.size()) { + case 1 -> Triple.of(null, null, parts.get(0)); + case 2 -> Triple.of(null, parts.get(0), parts.get(1)); + case 3 -> Triple.of(parts.get(0), parts.get(1), parts.get(2)); + case 4 -> Triple.of(parts.get(0), parts.get(1), parts.get(2) + "."
+ parts.get(3)); default -> throw new SemanticException(ASTErrorUtils.getMsg(ErrorMsg.INVALID_TABLE_NAME.getMsg(), tableNameNode)); + }; + } + public static String getUnescapedName(ASTNode tableOrColumnNode, String currentDatabase) throws SemanticException { int tokenType = tableOrColumnNode.getToken().getType(); if (tokenType == HiveParser.TOK_TABNAME) { @@ -508,13 +536,21 @@ public static TableName getQualifiedTableName(ASTNode tabNameNode, String catalo ErrorMsg.INVALID_TABLE_NAME.getMsg(), tabNameNode)); } - if (tabNameNode.getChildCount() == 3) { - final String dbName = unescapeIdentifier(tabNameNode.getChild(0).getText()); - final String tableName = unescapeIdentifier(tabNameNode.getChild(1).getText()); - final String tableMetaRef = unescapeIdentifier(tabNameNode.getChild(2).getText()); + if (tabNameNode.getChildCount() == 4) { + catalogName = unescapeIdentifier(tabNameNode.getChild(0).getText()); + final String dbName = unescapeIdentifier(tabNameNode.getChild(1).getText()); + final String tableName = unescapeIdentifier(tabNameNode.getChild(2).getText()); + final String tableMetaRef = unescapeIdentifier(tabNameNode.getChild(3).getText()); return HiveTableName.fromString(tableName, catalogName, dbName, tableMetaRef); } + if (tabNameNode.getChildCount() == 3) { + catalogName = unescapeIdentifier(tabNameNode.getChild(0).getText()); + final String dbName = unescapeIdentifier(tabNameNode.getChild(1).getText()); + final String tableName = unescapeIdentifier(tabNameNode.getChild(2).getText()); + return HiveTableName.fromString(tableName, catalogName, dbName); + } + if (tabNameNode.getChildCount() == 2) { final String dbName = unescapeIdentifier(tabNameNode.getChild(0).getText()); final String tableName = unescapeIdentifier(tabNameNode.getChild(1).getText()); @@ -522,14 +558,14 @@ public static TableName getQualifiedTableName(ASTNode tabNameNode, String catalo throw new SemanticException(ASTErrorUtils.getMsg( ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), tabNameNode)); } - return HiveTableName.ofNullable(tableName, dbName); + return HiveTableName.fromString(tableName, catalogName, dbName); } final String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText()); if (tableName.contains(".")) { throw new SemanticException(ASTErrorUtils.getMsg( ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), tabNameNode)); } - return HiveTableName.ofNullable(tableName); + return HiveTableName.fromString(tableName, catalogName, SessionState.get().getCurrentDatabase()); } /** @@ -1959,7 +1995,27 @@ protected Table getTable(TableName tn) throws SemanticException { } protected Table getTable(TableName tn, boolean throwException) throws SemanticException { - return getTable(tn.getDb(), tn.getTable(), tn.getTableMetaRef(), throwException); + String catName = tn.getCat(); + String dbName = tn.getDb(); + String tblName = tn.getTable(); + + Table tab; + try { + tab = db.getTable(tn, false); + } + catch (InvalidTableException e) { + throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, + catName, dbName).getNotEmptyDbTable()), e); + } + catch (Exception e) { + throw new SemanticException(e.getMessage(), e); + } + if (tab == null && throwException) { + // getTable needs a refactor with all ~50 occurrences + throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, + catName, dbName).getNotEmptyDbTable())); + } + return tab; } protected Table getTable(String tblName) throws SemanticException { @@ -1974,25 +2030,14 @@ protected Table getTable(String database,
String tblName, boolean throwException return getTable(database, tblName, null, throwException); } + /** + * @deprecated use {@link #getTable(TableName, boolean)} instead + * Since this is a protected method, it can likely be removed outright once the remaining callers migrate. + */ protected Table getTable(String database, String tblName, String tableMetaRef, boolean throwException) throws SemanticException { - Table tab; - try { - String tableName = tableMetaRef == null ? tblName : tblName + "." + tableMetaRef; - tab = database == null ? db.getTable(tableName, false) - : db.getTable(database, tblName, tableMetaRef, false); - } - catch (InvalidTableException e) { - throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, null, database).getNotEmptyDbTable()), e); - } - catch (Exception e) { - throw new SemanticException(e.getMessage(), e); - } - if (tab == null && throwException) { - // getTable needs a refactor with all ~50 occurences - throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, null, database).getNotEmptyDbTable())); - } - return tab; + TableName table = new TableName(getDefaultCatalog(conf), database, tblName, tableMetaRef); + return getTable(table, throwException); } public List<Task<?>> getAllRootTasks() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java index 4d4956fbec13..6fb9d5f3a867 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java @@ -521,14 +521,16 @@ private static Task<?> createTableTask(ImportTableDesc tableDesc, EximUtil.Seman private static Task<?> dropTableTask(Table table, EximUtil.SemanticAnalyzerWrapperContext x, ReplicationSpec replicationSpec) { - DropTableDesc dropTblDesc = new DropTableDesc(table.getTableName(), true, false, replicationSpec); + TableName tableName = TableName.fromString(table.getTableName(), table.getCatalogName(), table.getDbName()); + DropTableDesc dropTblDesc = new DropTableDesc(tableName, true, false, replicationSpec); return TaskFactory.get(new DDLWork(x.getInputs(), x.getOutputs(), dropTblDesc), x.getConf()); } private static Task<?> dropTableTask(Table table, EximUtil.SemanticAnalyzerWrapperContext x, ReplicationSpec replicationSpec, String dumpRoot, ReplicationMetricCollector metricCollector) { - DropTableDesc dropTblDesc = new DropTableDesc(table.getTableName(), true, false, replicationSpec); + TableName tableName = TableName.fromString(table.getTableName(), table.getCatalogName(), table.getDbName()); + DropTableDesc dropTblDesc = new DropTableDesc(tableName, true, false, replicationSpec); return TaskFactory.get(new DDLWork(x.getInputs(), x.getOutputs(), dropTblDesc, true, dumpRoot, metricCollector), x.getConf()); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index f08808b01415..e0c7572d7461 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -13888,12 +13888,13 @@ protected void validateStorageFormat( } /** Adds entities for create table/create view.
*/ - protected void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type, + protected void addDbAndTabToOutputs(TableName qualifiedTabName, TableType type, boolean isTemporary, Map<String, String> tblProps, StorageFormat storageFormat) throws SemanticException { - Database database = getDatabase(qualifiedTabName[0]); + Database database = getDatabase(qualifiedTabName.getCat(), qualifiedTabName.getDb(), true); outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED)); - Table t = new Table(qualifiedTabName[0], qualifiedTabName[1]); + Table t = new Table(qualifiedTabName.getDb(), qualifiedTabName.getTable()); + t.setCatalogName(qualifiedTabName.getCat()); t.setParameters(tblProps); t.setTableType(type); t.setTemporary(isTemporary); @@ -14071,7 +14072,7 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCt storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getStorageHandler(), storageFormat.getSerdeProps()); - addDbAndTabToOutputs(new String[] {qualTabName.getDb(), qualTabName.getTable()}, TableType.MATERIALIZED_VIEW, + addDbAndTabToOutputs(qualTabName, TableType.MATERIALIZED_VIEW, false, tblProps, storageFormat); queryState.setCommandType(HiveOperation.CREATE_MATERIALIZED_VIEW); qb.setViewDesc(createVwDesc); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java index 31649b3b2739..0661196b6d8d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropTableHandler.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hive.ql.parse.repl.load.message; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.metastore.messaging.AlterTableMessage; import org.apache.hadoop.hive.metastore.messaging.DropTableMessage; import org.apache.hadoop.hive.ql.ddl.DDLWork; @@ -46,14 +47,16 @@ public List<Task<?>> handle(Context context) actualTblName = msg.getTable(); } - DropTableDesc dropTableDesc = new DropTableDesc(actualDbName + "." + actualTblName, true, true, + // TODO: catalog support. Consider retrieving the catalog name from the cat.db.tbl format.
+ TableName tableName = TableName.fromString(actualTblName, null, actualDbName); + DropTableDesc dropTableDesc = new DropTableDesc(tableName, true, true, context.eventOnlyReplicationSpec(), false); Task<?> dropTableTask = TaskFactory.get( new DDLWork(readEntitySet, writeEntitySet, dropTableDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf ); context.log.debug( - "Added drop tbl task : {}:{}", dropTableTask.getId(), dropTableDesc.getTableName() + "Added drop tbl task : {}:{}", dropTableTask.getId(), dropTableDesc.getTableName().toString() ); updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null); return Collections.singletonList(dropTableTask); diff --git a/ql/src/test/queries/clientpositive/catalog_database_table.q b/ql/src/test/queries/clientpositive/catalog_database_table.q new file mode 100644 index 000000000000..b63a551aa5e7 --- /dev/null +++ b/ql/src/test/queries/clientpositive/catalog_database_table.q @@ -0,0 +1,28 @@ +-- SORT_QUERY_RESULTS + +create catalog testcat location '/tmp/testcat' comment 'Hive test catalog'; + +-- create database in the new catalog testcat using the catalog.db pattern +create database testcat.testdb1; + +-- switch current db to testcat.testdb1 +use testcat.testdb1; + +-- create tbl in the current db testcat.testdb1 +create table test1(id int); + +-- create tbl in db testcat.testdb1 using the cat.db.tbl syntax +create table testcat.testdb1.test2(id int); + +-- show tables in current db testcat.testdb1 +show tables; + +-- show create table in current db testcat.testdb1 +show create table test1; + +-- show create table using the cat.db.tbl syntax +show create table testcat.testdb1.test2; + + +-- drop table using the cat.db.tbl syntax +drop table testcat.testdb1.test2; diff --git a/ql/src/test/results/clientpositive/llap/catalog_database_table.q.out b/ql/src/test/results/clientpositive/llap/catalog_database_table.q.out new file mode 100644 index 000000000000..ec86609be0d2 --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/catalog_database_table.q.out @@ -0,0 +1,91 @@ +#### A masked pattern was here #### +PREHOOK: type: CREATECATALOG +PREHOOK: Output: catalog:testcat +#### A masked pattern was here #### +POSTHOOK: type: CREATECATALOG +POSTHOOK: Output: catalog:testcat +#### A masked pattern was here #### +PREHOOK: query: create database testcat.testdb1 +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:testdb1 +POSTHOOK: query: create database testcat.testdb1 +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:testdb1 +PREHOOK: query: use testcat.testdb1 +PREHOOK: type: SWITCHDATABASE +PREHOOK: Input: database:testdb1 +POSTHOOK: query: use testcat.testdb1 +POSTHOOK: type: SWITCHDATABASE +POSTHOOK: Input: database:testdb1 +PREHOOK: query: create table test1(id int) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:testdb1 +PREHOOK: Output: testdb1@test1 +POSTHOOK: query: create table test1(id int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:testdb1 +POSTHOOK: Output: testdb1@test1 +PREHOOK: query: create table testcat.testdb1.test2(id int) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:testdb1 +PREHOOK: Output: testdb1@test2 +POSTHOOK: query: create table testcat.testdb1.test2(id int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:testdb1 +POSTHOOK: Output: testdb1@test2 +PREHOOK: query: show tables +PREHOOK: type: SHOWTABLES +PREHOOK: Input: database:testdb1 +POSTHOOK: query: show tables +POSTHOOK: type: SHOWTABLES +POSTHOOK: Input: database:testdb1 +test1 +test2
+PREHOOK: query: show create table test1 +PREHOOK: type: SHOW_CREATETABLE +PREHOOK: Input: testdb1@test1 +POSTHOOK: query: show create table test1 +POSTHOOK: type: SHOW_CREATETABLE +POSTHOOK: Input: testdb1@test1 +CREATE TABLE `test1`( + `id` int) +ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION +#### A masked pattern was here #### +TBLPROPERTIES ( + 'bucketing_version'='2', +#### A masked pattern was here #### +PREHOOK: query: show create table testcat.testdb1.test2 +PREHOOK: type: SHOW_CREATETABLE +PREHOOK: Input: testdb1@test2 +POSTHOOK: query: show create table testcat.testdb1.test2 +POSTHOOK: type: SHOW_CREATETABLE +POSTHOOK: Input: testdb1@test2 +CREATE TABLE `testdb1`.`test2`( + `id` int) +ROW FORMAT SERDE + 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' +STORED AS INPUTFORMAT + 'org.apache.hadoop.mapred.TextInputFormat' +OUTPUTFORMAT + 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat' +LOCATION +#### A masked pattern was here #### +TBLPROPERTIES ( + 'bucketing_version'='2', +#### A masked pattern was here #### +PREHOOK: query: drop table testcat.testdb1.test2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: testdb1@test2 +PREHOOK: Output: database:testdb1 +PREHOOK: Output: testdb1@test2 +POSTHOOK: query: drop table testcat.testdb1.test2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: testdb1@test2 +POSTHOOK: Output: database:testdb1 +POSTHOOK: Output: testdb1@test2