diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g index a5c37faec8ae..882ed4202152 100644 --- a/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g +++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g @@ -233,6 +233,35 @@ uniqueJoinTableSource tableName @init { gParent.pushMsg("table name", state); } +@after { gParent.popMsg(state); } + : + // case 1:catalog.db.table(.meta)? + (cat=identifier DOT db=identifier DOT tab=identifier (DOT meta=identifier)?) + => + cat=identifier DOT db=identifier DOT tab=identifier (DOT meta=identifier)? + { + tables.add(new ImmutablePair<>($cat.text + "." + $db.text, $tab.text)); + } + -> ^(TOK_TABNAME $cat $db $tab $meta?) + + // case 2:db.table + | db=identifier DOT tab=identifier + { + tables.add(new ImmutablePair<>($db.text, $tab.text)); + } + -> ^(TOK_TABNAME $db $tab) + + // case 3:table + | tab=identifier + { + tables.add(new ImmutablePair<>(null, $tab.text)); + } + -> ^(TOK_TABNAME $tab) + ; + + +tableName_bak +@init { gParent.pushMsg("table name", state); } @after { gParent.popMsg(state); } : db=identifier DOT tab=identifier (DOT meta=identifier)? 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLUtils.java index c9b74eca9d5e..f2e1bde13572 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLUtils.java @@ -195,6 +195,7 @@ public static void addDbAndTableToOutputs(Database database, TableName tableName outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED)); Table table = new Table(tableName.getDb(), tableName.getTable()); + table.setCatName(tableName.getCat()); table.setParameters(properties); table.setTableType(type); table.setTemporary(isTemporary); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/DescTableOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/DescTableOperation.java index 3fa15fdc651e..1557bc59875c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/DescTableOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/desc/DescTableOperation.java @@ -117,7 +117,7 @@ public int execute() throws Exception { } private Table getTable() throws HiveException { - Table table = context.getDb().getTable(desc.getTableName().getDb(), desc.getTableName().getTable(), + Table table = context.getDb().getTable(null, desc.getTableName().getDb(), desc.getTableName().getTable(), desc.getTableName().getTableMetaRef(), false, false, false); if (table == null) { throw new HiveException(ErrorMsg.INVALID_TABLE, desc.getDbTableName()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index e417e13f712d..5ff5667939e3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -2326,7 +2326,7 @@ public static List getColumnTypes(Properties props) { */ @Deprecated public static String[] getDbTableName(String dbtable) throws 
SemanticException { - return getDbTableName(SessionState.get().getCurrentDatabase(), dbtable); + return getDbTableName(SessionState.get().getCurrentCatalog(), SessionState.get().getCurrentDatabase(), dbtable); } /** @@ -2338,17 +2338,19 @@ public static String[] getDbTableName(String dbtable) throws SemanticException { * @deprecated use {@link TableName} or {@link org.apache.hadoop.hive.ql.parse.HiveTableName} instead */ @Deprecated - public static String[] getDbTableName(String defaultDb, String dbtable) throws SemanticException { + public static String[] getDbTableName(String defaultCatalog, String defaultDb, String dbtable) throws SemanticException { if (dbtable == null) { return new String[2]; } String[] names = dbtable.split("\\."); switch (names.length) { + case 4: case 3: - case 2: return names; + case 2: + return new String [] {defaultCatalog, names[0], names[1]}; case 1: - return new String [] {defaultDb, dbtable}; + return new String [] {defaultCatalog, defaultDb, dbtable}; default: throw new SemanticException(ErrorMsg.INVALID_TABLE_NAME, dbtable); } @@ -2381,7 +2383,7 @@ public static void validateColumnNames(List colNames, List check * @param dbTableName * @return a {@link TableName} * @throws SemanticException - * @deprecated handle null values and use {@link TableName#fromString(String, String, String)} + * @deprecated handle null values and use {@link TableName#fromString(String, String, String)} instead */ @Deprecated public static TableName getNullableTableName(String dbTableName) throws SemanticException { @@ -2396,7 +2398,7 @@ public static TableName getNullableTableName(String dbTableName) throws Semantic * @param defaultDb * @return a {@link TableName} * @throws SemanticException - * @deprecated handle null values and use {@link TableName#fromString(String, String, String)} + * @deprecated handle null values and use {@link TableName#fromString(String, String, String)} instead */ @Deprecated public static TableName 
getNullableTableName(String dbTableName, String defaultDb) throws SemanticException { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 07b98266d85c..73bb3f1fade2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -1609,12 +1609,12 @@ public Table getTable(final String tableName) throws HiveException { */ public Table getTable(final String tableName, boolean throwException) throws HiveException { String[] nameParts = tableName.split("\\."); - if (nameParts.length == 3) { - Table table = this.getTable(nameParts[0], nameParts[1], nameParts[2], throwException); + if (nameParts.length == 4) { + Table table = this.getTable(nameParts[0], nameParts[1], nameParts[2], nameParts[3], throwException); return table; } else { String[] names = Utilities.getDbTableName(tableName); - Table table = this.getTable(names[0], names[1], null, throwException); + Table table = this.getTable(names[0], names[1], names[2], null, throwException); return table; } } @@ -1634,9 +1634,9 @@ public Table getTable(final String dbName, final String tableName) throws HiveEx // TODO: catalog... 
etc everywhere if (tableName.contains(".")) { String[] names = Utilities.getDbTableName(tableName); - return this.getTable(names[0], names[1], null, true); + return this.getTable(names[0], names[1], names[2], null, true); } else { - return this.getTable(dbName, tableName, null, true); + return this.getTable(SessionState.get().getCurrentCatalog(), dbName, tableName, null, true); } } @@ -1650,7 +1650,7 @@ public Table getTable(final String dbName, final String tableName) throws HiveEx * if there's an internal error or if the table doesn't exist */ public Table getTable(TableName tableName) throws HiveException { - return this.getTable(ObjectUtils.firstNonNull(tableName.getDb(), SessionState.get().getCurrentDatabase()), + return this.getTable(SessionState.get().getCurrentCatalog(), ObjectUtils.firstNonNull(tableName.getDb(), SessionState.get().getCurrentDatabase()), tableName.getTable(), tableName.getTableMetaRef(), true); } @@ -1668,9 +1668,9 @@ public Table getTable(TableName tableName) throws HiveException { * @return the table or if throwException is false a null value. * @throws HiveException */ - public Table getTable(final String dbName, final String tableName, + public Table getTable(final String catName, final String dbName, final String tableName, final String tableMetaRef, boolean throwException) throws HiveException { - return this.getTable(dbName, tableName, tableMetaRef, throwException, false); + return this.getTable(catName, dbName, tableName, tableMetaRef, throwException, false); } /** @@ -1686,8 +1686,8 @@ public Table getTable(final String dbName, final String tableName, * @throws HiveException */ public Table getTable(final String dbName, final String tableName, boolean throwException) throws HiveException { - return this.getTable(dbName, tableName, null, throwException); - } + return this.getTable(SessionState.get().getCurrentCatalog(), dbName, tableName, null, throwException); + } //TODO get the correct catalog /** * Returns metadata of the table. 
@@ -1706,7 +1706,7 @@ public Table getTable(final String dbName, final String tableName, boolean throw */ public Table getTable(final String dbName, final String tableName, boolean throwException, boolean checkTransactional) throws HiveException { - return getTable(dbName, tableName, null, throwException, checkTransactional, false); + return getTable(null, dbName, tableName, null, throwException, checkTransactional, false); } /** @@ -1726,9 +1726,9 @@ public Table getTable(final String dbName, final String tableName, boolean throw * @return the table or if throwException is false a null value. * @throws HiveException */ - public Table getTable(final String dbName, final String tableName, String tableMetaRef, boolean throwException, + public Table getTable(final String catName, final String dbName, final String tableName, String tableMetaRef, boolean throwException, boolean checkTransactional) throws HiveException { - return getTable(dbName, tableName, tableMetaRef, throwException, checkTransactional, false); + return getTable(catName, dbName, tableName, tableMetaRef, throwException, checkTransactional, false); } /** @@ -1750,8 +1750,8 @@ public Table getTable(final String dbName, final String tableName, String tableM * @return the table or if throwException is false a null value. 
* @throws HiveException */ - public Table getTable(final String dbName, final String tableName, String tableMetaRef, boolean throwException, - boolean checkTransactional, boolean getColumnStats) throws HiveException { + public Table getTable(final String catalogName, final String dbName, final String tableName, String tableMetaRef, + boolean throwException, boolean checkTransactional, boolean getColumnStats) throws HiveException { if (tableName == null || tableName.equals("")) { throw new HiveException("empty table creation??"); @@ -1762,7 +1762,7 @@ public Table getTable(final String dbName, final String tableName, String tableM try { // Note: this is currently called w/true from StatsOptimizer only. GetTableRequest request = new GetTableRequest(dbName, tableName); - request.setCatName(getDefaultCatalog(conf)); + request.setCatName(catalogName != null ? catalogName : SessionState.get().getCurrentCatalog()); request.setGetColumnStats(getColumnStats); request.setEngine(Constants.HIVE_ENGINE); if (checkTransactional) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java index 8a37073509ef..cd44f205b967 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java @@ -170,7 +170,7 @@ protected void initialize(org.apache.hadoop.hive.metastore.api.Table table) { // performance. Since those fields are null/cache-check by their accessors // anyway, that's not a concern. 
} - + // TODO add catalog after calling this constructor public Table(String databaseName, String tableName) { this(getEmptyTable(databaseName, tableName)); } @@ -928,6 +928,10 @@ public void setDbName(String databaseName) { tTable.setDbName(databaseName); } + public void setCatName(String catName) { + tTable.setCatName(catName); + } + public List getPartitionKeys() { return tTable.getPartitionKeys(); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java index 12a0bcaffe08..c2a2bcc11414 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java @@ -81,6 +81,7 @@ public static ASTNode table(final RelNode scan) { return cte(hTbl.getHiveTableMD().getTableName(), hts.getTableAlias()); } ASTBuilder tableNameBuilder = ASTBuilder.construct(HiveParser.TOK_TABNAME, "TOK_TABNAME") + .add(HiveParser.Identifier, hTbl.getHiveTableMD().getCatName()) .add(HiveParser.Identifier, hTbl.getHiveTableMD().getDbName()) .add(HiveParser.Identifier, hTbl.getHiveTableMD().getTableName()); if (hTbl.getHiveTableMD().getMetaTable() != null) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java index 9fc0416edbc0..59e13ed4df55 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/AnalyzeCommandUtils.java @@ -20,6 +20,7 @@ import java.util.HashMap; import java.util.Map; +import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.Table; @@ -47,9 +48,8 @@ public static boolean isPartitionLevelStats(ASTNode tree) { public static Table 
getTable(ASTNode tree, BaseSemanticAnalyzer sa) throws SemanticException { String tableName = ColumnStatsSemanticAnalyzer.getUnescapedName((ASTNode) tree.getChild(0).getChild(0)); - String currentDb = SessionState.get().getCurrentDatabase(); - String [] names = Utilities.getDbTableName(currentDb, tableName); - return sa.getTable(names[0], names[1], true); + TableName tName = HiveTableName.of(tableName); + return sa.getTable(tName); } public static Map getPartKeyValuePairsFromAST(Table tbl, ASTNode tree, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java index f3415219d26f..16515263a52d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java @@ -40,6 +40,7 @@ import org.antlr.runtime.tree.Tree; import org.apache.calcite.sql.SqlKind; import org.apache.commons.lang3.tuple.Pair; +import org.apache.commons.lang3.tuple.Triple; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; @@ -431,6 +432,43 @@ public static String getUnescapedName(ASTNode tableOrColumnNode) throws Semantic return getUnescapedName(tableOrColumnNode, null); } + + public static Triple getDbTableNameTriple(ASTNode tableNameNode) throws SemanticException { + + if (tableNameNode.getType() != HiveParser.TOK_TABNAME || + (tableNameNode.getChildCount() < 1 || tableNameNode.getChildCount() > 4)) { + throw new SemanticException(ASTErrorUtils.getMsg(ErrorMsg.INVALID_TABLE_NAME.getMsg(), tableNameNode)); + } + + List parts = new ArrayList<>(); + for (int i = 0; i < tableNameNode.getChildCount(); i++) { + String part = unescapeIdentifier(tableNameNode.getChild(i).getText()); + if (part != null && part.contains(".")) { + throw new SemanticException(ASTErrorUtils.getMsg(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), tableNameNode)); + } + 
parts.add(part); + } + + String catalog = null, db = null, table = null; + + if (parts.size() == 1) { + table = parts.get(0); + } else if (parts.size() == 2) { + db = parts.get(0); + table = parts.get(1); + } else if (parts.size() == 3) { + catalog = parts.get(0); + db = parts.get(1); + table = parts.get(2); + } else if (parts.size() == 4) { + catalog = parts.get(0); + db = parts.get(1); + table = parts.get(2) + "." + parts.get(3); // meta table + } + + return Triple.of(catalog, db, table); + } + // todo remove this function public static Map.Entry getDbTableNamePair(ASTNode tableNameNode) throws SemanticException { if (tableNameNode.getType() != HiveParser.TOK_TABNAME || @@ -463,8 +501,8 @@ public static String getUnescapedName(ASTNode tableOrColumnNode, String currentD int tokenType = tableOrColumnNode.getToken().getType(); if (tokenType == HiveParser.TOK_TABNAME) { // table node - Map.Entry dbTablePair = getDbTableNamePair(tableOrColumnNode); - String tableName = dbTablePair.getValue(); + Triple dbTablePair = getDbTableNameTriple(tableOrColumnNode); + String tableName = dbTablePair.getRight(); String tableMetaRef = null; if (tableName.contains(".")) { String[] tmpNames = tableName.split("\\."); @@ -472,8 +510,8 @@ public static String getUnescapedName(ASTNode tableOrColumnNode, String currentD tableMetaRef = tmpNames[1]; } return TableName.fromString(tableName, - null, - dbTablePair.getKey() == null ? currentDatabase : dbTablePair.getKey(), + dbTablePair.getLeft() == null ? SessionState.get().getCurrentCatalog() : dbTablePair.getLeft(), + dbTablePair.getMiddle() == null ? 
currentDatabase : dbTablePair.getMiddle(), tableMetaRef) .getNotEmptyDbTable(); } else if (tokenType == HiveParser.StringLiteral) { @@ -548,13 +586,13 @@ public static String getUnescapedUnqualifiedTableName(ASTNode node) throws Seman assert node.getChildCount() <= 3; assert node.getType() == HiveParser.TOK_TABNAME; - if (node.getChildCount() == 2 || node.getChildCount() == 3) { + if (node.getChildCount() == 2 || node.getChildCount() == 3 || node.getChildCount() == 4) { node = (ASTNode) node.getChild(1); } String tableName = getUnescapedName(node); - if (node.getChildCount() == 3) { - tableName = tableName + "." + node.getChild(2); + if (node.getChildCount() == 4) { + tableName = tableName + "." + node.getChild(3); } return tableName; } @@ -1969,7 +2007,7 @@ protected Table getTable(String database, String tblName, String tableMetaRef, b try { String tableName = tableMetaRef == null ? tblName : tblName + "." + tableMetaRef; tab = database == null ? db.getTable(tableName, false) - : db.getTable(database, tblName, tableMetaRef, false); + : db.getTable(SessionState.get().getCurrentCatalog(), database, tblName, tableMetaRef, false); } catch (InvalidTableException e) { throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, null, database).getNotEmptyDbTable()), e); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java index 5150de01baac..90e48c483d67 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java @@ -5120,18 +5120,19 @@ private QBParseInfo getQBParseInfo(QB qb) throws CalciteSemanticException { @Override protected Table getTableObjectByName(String tabName, boolean throwException) throws HiveException { String[] names = Utilities.getDbTableName(tabName); - final String tableName = names[1]; - final String dbName = names[0]; + final String catalogName = 
names[0]; + final String tableName = names[2]; + final String dbName = names[1]; String tableMetaRef = null; - if (names.length == 3) { - tableMetaRef = names[2]; + if (names.length == 4) { + tableMetaRef = names[3]; } - String fullyQualName = dbName + "." + tableName; + String fullyQualName = catalogName + "." + dbName + "." + tableName; if (tableMetaRef != null) { fullyQualName += "." + tableMetaRef; } if (!tabNameToTabObject.containsKey(fullyQualName)) { - Table table = db.getTable(dbName, tableName, tableMetaRef, throwException, false, false); + Table table = db.getTable(catalogName, dbName, tableName, tableMetaRef, throwException, false, false); if (table != null) { tabNameToTabObject.put(fullyQualName, table); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java index aac61b7fbad0..e70fe2d0e257 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java @@ -211,8 +211,8 @@ public boolean isDestToOpTypeInsertOverwrite(String clause) { /** * See also {@link #getInsertOverwriteTables()} */ - public boolean isInsertIntoTable(String dbName, String table, String branchName) { - String fullName = dbName + "." + table; + public boolean isInsertIntoTable(String catName, String dbName, String table, String branchName) { + String fullName = catName + "." + dbName + "." + table; if (branchName != null) { fullName += "." 
+ branchName; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java index 101f6b1fc3d8..1b108b64f2a9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/RewriteSemanticAnalyzer.java @@ -198,7 +198,7 @@ protected static Table getTable(ASTNode tabRef, Hive db, boolean throwException) Table mTable; try { - mTable = db.getTable(tableName.getDb(), tableName.getTable(), tableName.getTableMetaRef(), throwException); + mTable = db.getTable(tableName.getCat(), tableName.getDb(), tableName.getTable(), tableName.getTableMetaRef(), throwException); } catch (InvalidTableException e) { LOG.error("Failed to find table " + tableName.getNotEmptyDbTable() + " got exception " + e.getMessage()); throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName.getNotEmptyDbTable()), e); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 1a996268b69c..ee80ef971005 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -330,6 +330,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer { + public static final String DUMMY_CATALOG = "_dummy_catalog"; public static final String DUMMY_DATABASE = "_dummy_database"; public static final String DUMMY_TABLE = "_dummy_table"; public static final String SUBQUERY_TAG_1 = "-subquery1"; @@ -2406,7 +2407,7 @@ private void getMetaData(QB qb, ReadEntity parentInput) } if (tab == null) { - if(tabName.equals(DUMMY_DATABASE + "." + DUMMY_TABLE)) { + if(tabName.equals(DUMMY_CATALOG + "." + DUMMY_DATABASE + "." 
+ DUMMY_TABLE)) { continue; } ASTNode src = qb.getParseInfo().getSrcForAlias(alias); @@ -2543,7 +2544,7 @@ private void getMetaData(QB qb, ReadEntity parentInput) ast, "The class is " + outputFormatClass.toString())); } - boolean isTableWrittenTo = qb.getParseInfo().isInsertIntoTable(ts.tableHandle.getDbName(), + boolean isTableWrittenTo = qb.getParseInfo().isInsertIntoTable(ts.tableHandle.getCatName(), ts.tableHandle.getDbName(), ts.tableHandle.getTableName(), ts.tableHandle.getSnapshotRef()); isTableWrittenTo |= (qb.getParseInfo().getInsertOverwriteTables(). get(getUnescapedName((ASTNode) ast.getChild(0), ts.tableHandle.getDbName()).toLowerCase()) != null); @@ -7634,7 +7635,7 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input) writeId = allocateTableWriteId(destinationTable.getFullTableName(), isMmTable || acidOp != Operation.NOT_ACID); boolean isReplace = !qb.getParseInfo().isInsertIntoTable( - destinationTable.getDbName(), destinationTable.getTableName(), destinationTable.getSnapshotRef()); + destinationTable.getCatName(), destinationTable.getDbName(), destinationTable.getTableName(), destinationTable.getSnapshotRef()); ltd = new LoadTableDesc(queryTmpdir, tableDescriptor, dpCtx, acidOp, isReplace, writeId); if (writeId != null) { ltd.setStmtId(txnMgr.getCurrentStmtId()); @@ -7643,7 +7644,7 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input) // For Acid table, Insert Overwrite shouldn't replace the table content. 
We keep the old // deltas and base and leave them up to the cleaner to clean up boolean isInsertInto = qb.getParseInfo().isInsertIntoTable( - destinationTable.getDbName(), destinationTable.getTableName(), destinationTable.getSnapshotRef()); + destinationTable.getCatName(), destinationTable.getDbName(), destinationTable.getTableName(), destinationTable.getSnapshotRef()); LoadFileType loadType; if (isDirectInsert) { loadType = LoadFileType.IGNORE; @@ -7662,8 +7663,8 @@ protected Operator genFileSinkPlan(String dest, QB qb, Operator input) // We need to set stats as inaccurate. setStatsForNonNativeTable(destinationTable.getDbName(), destinationTable.getTableName()); // true if it is insert overwrite. - boolean overwrite = !qb.getParseInfo().isInsertIntoTable(destinationTable.getDbName(), destinationTable.getTableName(), - destinationTable.getSnapshotRef()); + boolean overwrite = !qb.getParseInfo().isInsertIntoTable(destinationTable.getCatName(), destinationTable.getDbName(), + destinationTable.getTableName(), destinationTable.getSnapshotRef()); createPreInsertDesc(destinationTable, overwrite); ltd = new LoadTableDesc(queryTmpdir, tableDescriptor, partSpec == null ? 
ImmutableMap.of() : partSpec); @@ -8259,11 +8260,11 @@ && enableColumnStatsCollecting() && ColumnStatsAutoGatherContext.canRunAutogatherStats(fso)) { if (destType == QBMetaData.DEST_TABLE) { genAutoColumnStatsGatheringPipeline(destinationTable, partSpec, input, - qb.getParseInfo().isInsertIntoTable(destinationTable.getDbName(), destinationTable.getTableName(), + qb.getParseInfo().isInsertIntoTable(destinationTable.getCatName(), destinationTable.getDbName(), destinationTable.getTableName(), destinationTable.getSnapshotRef()), false); } else if (destType == QBMetaData.DEST_PARTITION) { genAutoColumnStatsGatheringPipeline(destinationTable, destinationPartition.getSpec(), input, - qb.getParseInfo().isInsertIntoTable(destinationTable.getDbName(), destinationTable.getTableName(), + qb.getParseInfo().isInsertIntoTable(destinationTable.getCatName(), destinationTable.getDbName(), destinationTable.getTableName(), destinationTable.getSnapshotRef()), false); } else if (destType == QBMetaData.DEST_LOCAL_FILE || destType == QBMetaData.DEST_DFS_FILE) { // CTAS or CMV statement @@ -8760,7 +8761,7 @@ private void checkImmutableTable(QB qb, Table dest_tab, Path dest_path, boolean // If the query here is an INSERT_INTO and the target is an immutable table, // verify that our destination is empty before proceeding if (!dest_tab.isImmutable() || !qb.getParseInfo().isInsertIntoTable( - dest_tab.getDbName(), dest_tab.getTableName(), dest_tab.getSnapshotRef())) { + dest_tab.getCatName(), dest_tab.getDbName(), dest_tab.getTableName(), dest_tab.getSnapshotRef())) { return; } try { @@ -12525,6 +12526,7 @@ public Table getDummyTable() throws SemanticException { } Table desc = new Table(DUMMY_DATABASE, DUMMY_TABLE); + desc.setCatName(DUMMY_CATALOG); desc.getTTable().getSd().setLocation(dummyPath.toString()); desc.getTTable().getSd().getSerdeInfo().setSerializationLib(NullStructSerDe.class.getName()); desc.setInputFormatClass(NullRowsInputFormat.class); @@ -13898,6 +13900,7 @@ protected void 
addDbAndTabToOutputs(String[] qualifiedTabName, TableType type, t.setParameters(tblProps); t.setTableType(type); t.setTemporary(isTemporary); +// t.setCatName("hive"); // just for test HiveStorageHandler storageHandler = null; if (storageFormat.getStorageHandler() != null) { try { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java index e78bb0177fce..9d385f76ee0a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java @@ -153,14 +153,18 @@ public void addTableNameTranslation(ASTNode tableName, String currentDatabaseNam return; } assert (tableName.getToken().getType() == HiveParser.TOK_TABNAME); - assert (tableName.getChildCount() <= 3); + assert (tableName.getChildCount() <= 4); - if (tableName.getChildCount() == 2 || tableName.getChildCount() == 3) { + if (tableName.getChildCount() == 2 || tableName.getChildCount() == 3 || tableName.getChildCount() == 4) { addIdentifierTranslation((ASTNode)tableName.getChild(0)); addIdentifierTranslation((ASTNode)tableName.getChild(1)); if (tableName.getChildCount() == 3) { addIdentifierTranslation((ASTNode)tableName.getChild(2)); } + if (tableName.getChildCount() == 4) { + addIdentifierTranslation((ASTNode)tableName.getChild(2)); + addIdentifierTranslation((ASTNode)tableName.getChild(3)); + } } else { // transform the table reference to an absolute reference (i.e., "db.table") diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/HiveWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/HiveWrapper.java index 97a28933e618..9313307689fd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/HiveWrapper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/HiveWrapper.java @@ -59,7 +59,7 @@ public Tuple table(final String tableName, HiveConf conf) throws HiveExce // Column statistics won't 
be accurate if we are dumping only metadata boolean getColStats = !Utils.shouldDumpMetaDataOnlyForExternalTables(db.getTable(dbName, tableName), conf) && !Utils.shouldDumpMetaDataOnly(conf); - return new Tuple<>(functionForSpec, () -> db.getTable(dbName, tableName, null, true, false, + return new Tuple<>(functionForSpec, () -> db.getTable(null, dbName, tableName, null, true, false, getColStats)); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/queryhistory/repository/AbstractRepository.java b/ql/src/java/org/apache/hadoop/hive/ql/queryhistory/repository/AbstractRepository.java index cd4a22ea3e91..129456dbac1b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/queryhistory/repository/AbstractRepository.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/queryhistory/repository/AbstractRepository.java @@ -99,8 +99,8 @@ private String getDatabaseLocation(String databaseName) throws Exception { protected Table initTable(Hive hive, Database db) { Table table; - try { - table = hive.getTable(QUERY_HISTORY_DB_NAME, QUERY_HISTORY_TABLE_NAME, null, false); + try {// TODO get the correct catalog + table = hive.getTable(SessionState.get().getCurrentCatalog(), QUERY_HISTORY_DB_NAME, QUERY_HISTORY_TABLE_NAME, null, false); if (table == null) { LOG.info("Query history table ({}) isn't created yet", QUERY_HISTORY_TABLE_NAME); table = createTable(hive, db); diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/TableName.java b/storage-api/src/java/org/apache/hadoop/hive/common/TableName.java index 1c9cee6d1601..fb2830fabdc5 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/common/TableName.java +++ b/storage-api/src/java/org/apache/hadoop/hive/common/TableName.java @@ -146,7 +146,8 @@ public String getEscapedNotEmptyDbTable() { */ public String getNotEmptyDbTable() { String metaRefName = tableMetaRef == null ? "" : "." + tableMetaRef; - return db == null || db.trim().isEmpty() ? 
table : db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table + metaRefName; + return db == null || db.trim().isEmpty() ? table : (cat == null || cat.trim().isEmpty() ? "" : cat + DatabaseName.CAT_DB_TABLE_SEPARATOR) + + db + DatabaseName.CAT_DB_TABLE_SEPARATOR + table + metaRefName; } /**