27 changes: 21 additions & 6 deletions parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
@@ -243,12 +243,27 @@ tableName
@init { gParent.pushMsg("table name", state); }
@after { gParent.popMsg(state); }
:
db=identifier DOT tab=identifier (DOT meta=identifier)?
{tables.add(new ImmutablePair<>($db.text, $tab.text));}
-> ^(TOK_TABNAME $db $tab $meta?)
|
tab=identifier
{tables.add(new ImmutablePair<>(null, $tab.text));}
// case 1: catalog.db.table(.meta)?
(cat=identifier DOT db=identifier DOT tab=identifier (DOT meta=identifier)?)
=>
cat=identifier DOT db=identifier DOT tab=identifier (DOT meta=identifier)?
{
tables.add(new ImmutablePair<>($cat.text + "." + $db.text, $tab.text));
}
-> ^(TOK_TABNAME $cat $db $tab $meta?)

// case 2: db.table
Aggarwal-Raghav (Contributor) commented on Dec 29, 2025:
@zhangbutao, for Iceberg meta tables like snapshots, files, etc., select * from db.tbl.meta should work, which is the existing behaviour. Meaning, case 2 should still have meta? IMO. Won't case 1 misinterpret a select like this?

zhangbutao (Contributor, Author) replied:
Yes, there will be a breaking change here. We cannot distinguish between the semantic expressions cat.db.tbl and db.tbl.meta, so for Iceberg's meta tables I will switch to the format cat.db.tbl.meta, or follow Trino's approach (https://trino.io/docs/current/connector/iceberg.html#metadata-tables), for example: SELECT * FROM "test_table$properties". Either way, this will cause backward-compatibility issues.

Regarding this breaking change, I will start a discussion on the dev community mailing list to see if anyone has better suggestions.
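(A concrete sketch of the ambiguity discussed in this thread follows the grammar rule below.)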

| db=identifier DOT tab=identifier
{
tables.add(new ImmutablePair<>($db.text, $tab.text));
}
-> ^(TOK_TABNAME $db $tab)

// case 3: table
| tab=identifier
{
tables.add(new ImmutablePair<>(null, $tab.text));
}
-> ^(TOK_TABNAME $tab)
;
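A note for readers less familiar with ANTLR 3: the `( ... ) =>` prefix on case 1 is a syntactic predicate, so the parser commits to the catalog-qualified alternative only when the lookahead really matches three or four dot-separated identifiers. The sketch below is a plain-Java illustration, not code from this PR, of the resolution order the rule now encodes and of the ambiguity raised in the review thread above; all names are hypothetical.

```java
import java.util.Arrays;
import java.util.List;

final class TableNameResolution {
    // Longest dotted form wins, mirroring cases 1-3 of the grammar rule above.
    static String resolve(List<String> idents) {
        switch (idents.size()) {
            case 4: // case 1 with meta: catalog.db.table.meta
                return "cat=" + idents.get(0) + " db=" + idents.get(1)
                        + " table=" + idents.get(2) + " meta=" + idents.get(3);
            case 3: // case 1: catalog.db.table (the old db.table.meta reading is gone)
                return "cat=" + idents.get(0) + " db=" + idents.get(1)
                        + " table=" + idents.get(2);
            case 2: // case 2: db.table
                return "db=" + idents.get(0) + " table=" + idents.get(1);
            case 1: // case 3: table
                return "table=" + idents.get(0);
            default:
                throw new IllegalArgumentException("expected 1 to 4 identifiers");
        }
    }

    public static void main(String[] args) {
        // Under the old grammar this meant db=db1, table=tbl1, meta=snapshots
        // (an Iceberg meta table); the new grammar reads it as a catalog name.
        System.out.println(resolve(Arrays.asList("db1", "tbl1", "snapshots")));
        // prints: cat=db1 db=tbl1 table=snapshots
    }
}
```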

@@ -233,12 +233,13 @@ private static boolean isIcebergTable(Map<String, String> tblProps) {
.equalsIgnoreCase(tblProps.get(META_TABLE_STORAGE));
}

private String getDefaultLocation(String dbName, String tableName, boolean isExt)
throws SemanticException {
private String getDefaultLocation(TableName qualifiedTabName, boolean isExt)
throws SemanticException {
String tblLocation;
try {
Warehouse wh = new Warehouse(conf);
tblLocation = wh.getDefaultTablePath(db.getDatabase(dbName), tableName, isExt).toUri().getPath();
tblLocation = wh.getDefaultTablePath(db.getDatabase(qualifiedTabName.getCat(),
qualifiedTabName.getDb()), qualifiedTabName.getTable(), isExt).toUri().getPath();
} catch (MetaException | HiveException e) {
throw new SemanticException(e);
}
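The signature change above swaps the loose (dbName, tableName) pair for Hive's TableName value object, so the catalog participates in the database lookup. A minimal sketch of the accessor shape, using only the constructor and getters that appear elsewhere in this diff; the example values are hypothetical:

```java
import org.apache.hadoop.hive.common.TableName;

// Illustration only; "hive", "sales" and "orders" are made-up values.
final class TableNameShape {
    public static void main(String[] args) {
        TableName qualified = new TableName("hive", "sales", "orders");
        System.out.println(qualified.getCat());   // "hive"
        System.out.println(qualified.getDb());    // "sales"
        System.out.println(qualified.getTable()); // "orders"
    }
}
```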
@@ -255,7 +256,7 @@ private String getDefaultLocation(String dbName, String tableName, boolean isExt
*/
private Map<String, String> validateAndAddDefaultProperties(Map<String, String> tblProp, boolean isExt,
StorageFormat storageFormat, String qualifiedTableName, List<Order> sortCols, boolean isMaterialization,
boolean isTemporaryTable, boolean isTransactional, boolean isManaged, String[] qualifiedTabName,
boolean isTemporaryTable, boolean isTransactional, boolean isManaged, TableName qualifiedTabName,
boolean isTableTypeChanged)
throws SemanticException {
Map<String, String> retValue = Optional.ofNullable(tblProp).orElseGet(HashMap::new);
@@ -316,7 +317,7 @@ private Map<String, String> validateAndAddDefaultProperties(Map<String, String>

if (isIcebergTable(retValue)) {
SessionStateUtil.addResourceOrThrow(conf, SessionStateUtil.DEFAULT_TABLE_LOCATION,
getDefaultLocation(qualifiedTabName[0], qualifiedTabName[1], true));
getDefaultLocation(qualifiedTabName, true));
}
return retValue;
}
@@ -348,7 +349,8 @@ private void updateDefaultTblProps(Map<String, String> source, Map<String, Strin
*/
ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)
throws SemanticException {
TableName qualifiedTabName = getQualifiedTableName((ASTNode) ast.getChild(0));
String currentCatalog = HiveUtils.getCurrentCatalogOrDefault(conf);
TableName qualifiedTabName = getQualifiedTableName((ASTNode) ast.getChild(0), currentCatalog);
final String dbDotTab = qualifiedTabName.getNotEmptyDbTable();

String likeTableName = null;
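CREATE TABLE analysis now resolves the session's current catalog first and hands it to getQualifiedTableName, so names that omit the catalog are bound to it. A self-contained sketch of the defaulting rule this implies; the helper below and the "hive" default are assumptions, not Hive code:

```java
final class CatalogDefaulting {
    // Hypothetical mirror of the defaulting applied by getQualifiedTableName.
    static String qualify(String name, String currentCatalog, String currentDb) {
        String[] parts = name.split("\\.");
        if (parts.length == 1) {
            return currentCatalog + "." + currentDb + "." + name; // table
        } else if (parts.length == 2) {
            return currentCatalog + "." + name;                   // db.table
        }
        return name;                                              // catalog.db.table
    }

    public static void main(String[] args) {
        System.out.println(qualify("t1", "hive", "default"));        // hive.default.t1
        System.out.println(qualify("db1.t1", "hive", "default"));    // hive.db1.t1
        System.out.println(qualify("c1.db1.t1", "hive", "default")); // c1.db1.t1
    }
}
```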
@@ -636,11 +638,9 @@ ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)
}
tblProps =
validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization,
isTemporary, isTransactional, isManaged,
new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()}, isDefaultTableTypeChanged);
isTemporary, isTransactional, isManaged, qualifiedTabName, isDefaultTableTypeChanged);
isExt = isExternalTableChanged(tblProps, isTransactional, isExt, isDefaultTableTypeChanged);
addDbAndTabToOutputs(new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()},
TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
if (!Strings.isNullOrEmpty(sortOrder)) {
tblProps.put("default-sort-order", sortOrder);
}
@@ -665,7 +665,7 @@ ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)
if (location != null) {
tblLocation = location;
} else {
tblLocation = getDefaultLocation(qualifiedTabName.getDb(), qualifiedTabName.getTable(), isExt);
tblLocation = getDefaultLocation(qualifiedTabName, isExt);
}
boolean isNativeColumnDefaultSupported = false;
try {
@@ -699,11 +699,9 @@ ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)
}
tblProps =
validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization,
isTemporary, isTransactional, isManaged,
new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()}, isDefaultTableTypeChanged);
isTemporary, isTransactional, isManaged, qualifiedTabName, isDefaultTableTypeChanged);
isExt = isExternalTableChanged(tblProps, isTransactional, isExt, isDefaultTableTypeChanged);
addDbAndTabToOutputs(new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()},
TableType.MANAGED_TABLE, false, tblProps, storageFormat);
addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, false, tblProps, storageFormat);

CreateTableDesc crtTranTblDesc =
new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols, partCols, bucketCols, sortCols, numBuckets,
@@ -726,14 +724,10 @@ ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)

tblProps =
validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization,
isTemporary,

isTransactional, isManaged, new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()},
isDefaultTableTypeChanged);
isTemporary, isTransactional, isManaged, qualifiedTabName, isDefaultTableTypeChanged);
tblProps.put(hive_metastoreConstants.TABLE_IS_CTLT, "true");
isExt = isExternalTableChanged(tblProps, isTransactional, isExt, isDefaultTableTypeChanged);
addDbAndTabToOutputs(new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()},
TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);

Table likeTable = getTable(likeTableName, false);
if (likeTable != null) {
@@ -750,10 +744,10 @@ ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)
isExt = true;
}
CreateTableLikeDesc crtTblLikeDesc =
new CreateTableLikeDesc(dbDotTab, isExt, isTemporary, storageFormat.getInputFormat(),
new CreateTableLikeDesc(qualifiedTabName, isExt, isTemporary, storageFormat.getInputFormat(),
storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getSerdeProps(),
tblProps, ifNotExists, likeTableName, isUserStorageFormat);
tblLocation = getDefaultLocation(qualifiedTabName.getDb(), qualifiedTabName.getTable(), isExt);
tblLocation = getDefaultLocation(qualifiedTabName, isExt);
SessionStateUtil.addResource(conf, META_TABLE_LOCATION, tblLocation);
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTblLikeDesc)));
break;
@@ -815,7 +809,7 @@ ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)
}
tblLocation = location;
} else {
tblLocation = getDefaultLocation(qualifiedTabName.getDb(), qualifiedTabName.getTable(), isExt);
tblLocation = getDefaultLocation(qualifiedTabName, isExt);
}
SessionStateUtil.addResource(conf, META_TABLE_LOCATION, tblLocation);
if (!CollectionUtils.isEmpty(partCols)) {
@@ -825,11 +819,10 @@ ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)
tblProps =
validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization,
isTemporary, isTransactional, isManaged,
new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()}, isDefaultTableTypeChanged);
qualifiedTabName, isDefaultTableTypeChanged);
isExt = isExternalTableChanged(tblProps, isTransactional, isExt, isDefaultTableTypeChanged);
tblProps.put(TABLE_IS_CTAS, "true");
addDbAndTabToOutputs(new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()},
TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
tableDesc = new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols, partColNames, bucketCols, sortCols,
numBuckets, rowFormatParams.getFieldDelim(), rowFormatParams.getFieldEscape(),
rowFormatParams.getCollItemDelim(), rowFormatParams.getMapKeyDelim(), rowFormatParams.getLineDelim(),
@@ -25,6 +25,7 @@
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -36,7 +37,7 @@
public class CreateTableLikeDesc implements DDLDesc, Serializable {
private static final long serialVersionUID = 1L;

private final String tableName;
private final TableName tableName;
private boolean isExternal;
private final boolean isTemporary;
private final String defaultInputFormat;
@@ -49,7 +50,7 @@ public class CreateTableLikeDesc implements DDLDesc, Serializable {
private final String likeTableName;
private final boolean isUserStorageFormat;

public CreateTableLikeDesc(String tableName, boolean isExternal, boolean isTemporary, String defaultInputFormat,
public CreateTableLikeDesc(TableName tableName, boolean isExternal, boolean isTemporary, String defaultInputFormat,
String defaultOutputFormat, String location, String defaultSerName, Map<String, String> defaultSerdeProps,
Map<String, String> tblProps, boolean ifNotExists, String likeTableName, boolean isUserStorageFormat) {
this.tableName = tableName;
@@ -72,7 +73,7 @@ public boolean getIfNotExists() {
}

@Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getTableName() {
public TableName getTableName() {
return tableName;
}
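Since the desc now stores a TableName, callers pass the qualified name object instead of a pre-joined string, and the EXPLAIN "name" attribute renders from it. A hedged sketch of the new construction, with hypothetical argument values and the constructor signature shown above:

```java
import org.apache.hadoop.hive.common.TableName;
// Assumed package for the class changed in this PR:
import org.apache.hadoop.hive.ql.ddl.table.create.like.CreateTableLikeDesc;

final class CreateTableLikeDescUsage {
    static CreateTableLikeDesc sketch() {
        TableName target = new TableName("hive", "sales", "orders_copy");
        return new CreateTableLikeDesc(target,
                false,          // isExternal
                false,          // isTemporary
                null, null,     // defaultInputFormat, defaultOutputFormat
                null,           // location
                null, null,     // defaultSerName, defaultSerdeProps
                null,           // tblProps
                true,           // ifNotExists
                "sales.orders", // likeTableName
                false);         // isUserStorageFormat
    }
}
```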

@@ -85,7 +85,7 @@ public int execute() throws HiveException {
}

private Table createViewLikeTable(Table oldTable) throws HiveException {
Table table = context.getDb().newTable(desc.getTableName());
Table table = context.getDb().getTable(desc.getTableName());

if (desc.getTblProps() != null) {
table.getTTable().getParameters().putAll(desc.getTblProps());
@@ -115,9 +115,8 @@ private Table createViewLikeTable(Table oldTable) {

private Table createTableLikeTable(Table table, Map<String, String> originalProperties)
throws SemanticException, HiveException {
String[] names = Utilities.getDbTableName(desc.getTableName());
table.setDbName(names[0]);
table.setTableName(names[1]);
table.setDbName(desc.getTableName().getDb());
table.setTableName(desc.getTableName().getTable());
table.setOwner(SessionState.getUserFromAuthenticator());

setUserSpecifiedLocation(table);
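The copy path no longer splits a "db.table" string through Utilities.getDbTableName; it reads the parts straight off the TableName. A small illustrative sketch of the before and after shape (the Table type is assumed to be Hive's org.apache.hadoop.hive.ql.metadata.Table):

```java
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.metadata.Table;

final class DirectAccessors {
    // Before: String[] names = Utilities.getDbTableName(desc.getTableName());
    //         table.setDbName(names[0]); table.setTableName(names[1]);
    static void apply(Table table, TableName tn) {
        table.setDbName(tn.getDb());       // no string parsing; the object is catalog-aware
        table.setTableName(tn.getTable());
    }
}
```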
@@ -18,9 +18,9 @@

package org.apache.hadoop.hive.ql.ddl.table.create.show;

import java.util.Map.Entry;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Triple;
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
@@ -46,17 +46,21 @@ public ShowCreateTableAnalyzer(QueryState queryState) throws SemanticException {
public void analyzeInternal(ASTNode root) throws SemanticException {
ctx.setResFile(ctx.getLocalTmpPath());

Entry<String, String> tableIdentifier = getDbTableNamePair((ASTNode) root.getChild(0));
if (tableIdentifier.getValue().contains(".")) {
Triple<String, String, String> tableIdentifier = getCatDbTableNameTriple((ASTNode) root.getChild(0));
if (tableIdentifier.toString().contains(".")) {
throw new SemanticException("The SHOW CREATE TABLE command is not supported for metadata tables.");
}
Table table = getTable(tableIdentifier.getKey(), tableIdentifier.getValue(), true);
String catName = tableIdentifier.getLeft();
String dbName = tableIdentifier.getMiddle();
String tblName = tableIdentifier.getRight();
TableName tableName = new TableName(catName, dbName, tblName);
Table table = getTable(tableName, true);

inputs.add(new ReadEntity(table));

// If no DB was specified in statement, do not include it in the final output
ShowCreateTableDesc desc = new ShowCreateTableDesc(table.getDbName(), table.getTableName(),
ctx.getResFile().toString(), StringUtils.isBlank(tableIdentifier.getKey()));
ShowCreateTableDesc desc = new ShowCreateTableDesc(table.getCatName(), table.getDbName(), table.getTableName(),
ctx.getResFile().toString(), StringUtils.isBlank(tableIdentifier.getMiddle()));
Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
rootTasks.add(task);
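SHOW CREATE TABLE analysis now extracts a (catalog, database, table) Triple from the AST and repackages it as a TableName before the metastore lookup. A compact, runnable sketch of that repackaging, with hypothetical values:

```java
import org.apache.commons.lang3.tuple.Triple;
import org.apache.hadoop.hive.common.TableName;

final class TripleToTableName {
    static TableName convert(Triple<String, String, String> id) {
        // left = catalog, middle = database, right = table, as in the diff above.
        return new TableName(id.getLeft(), id.getMiddle(), id.getRight());
    }

    public static void main(String[] args) {
        System.out.println(convert(Triple.of("hive", "sales", "orders")));
    }
}
```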

Expand Down
Original file line number Diff line number Diff line change
@@ -33,12 +33,14 @@ public class ShowCreateTableDesc implements DDLDesc, Serializable {

public static final String SCHEMA = "createtab_stmt#string";

private final String catalogName;
private final String databaseName;
private final String tableName;
private final String resFile;
private final boolean isRelative;

public ShowCreateTableDesc(String databaseName, String tableName, String resFile, boolean isRelative) {
public ShowCreateTableDesc(String catalogName, String databaseName, String tableName, String resFile, boolean isRelative) {
this.catalogName = catalogName;
this.databaseName = databaseName;
this.tableName = tableName;
this.resFile = resFile;
Expand All @@ -60,6 +62,11 @@ public String getDatabaseName() {
return databaseName;
}

@Explain(displayName = "catalog name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getCatalogName() {
return catalogName;
}

@Explain(displayName = "relative table location", explainLevels = { Level.EXTENDED })
public boolean isRelative() {
return isRelative;
@@ -23,13 +23,10 @@
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.ddl.ShowUtils;
@@ -50,7 +47,8 @@ public ShowCreateTableOperation(DDLOperationContext context, ShowCreateTableDesc
public int execute() throws HiveException {
// get the create table statement for the table and populate the output
try (DataOutputStream outStream = ShowUtils.getOutputStream(new Path(desc.getResFile()), context)) {
Table table = context.getDb().getTable(desc.getDatabaseName(), desc.getTableName());
TableName tn = new TableName(desc.getCatalogName(), desc.getDatabaseName(), desc.getTableName());
Table table = context.getDb().getTable(tn, true);
DDLPlanUtils ddlObj = new DDLPlanUtils();
String command;
if (table.isView()) {
@@ -49,17 +49,17 @@ public DropTableAnalyzer(QueryState queryState) throws SemanticException {
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
TableName qualTabName = getQualifiedTableName((ASTNode) root.getChild(0));
String tableName = qualTabName.getNotEmptyDbTable();
// String tableName = qualTabName.getNotEmptyDbTable();
boolean ifExists = (root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
//Authorize database for drop table command
// Skip db object if database doesn't exist
Database database = getDatabase(qualTabName.getDb(),false);
Database database = getDatabase(qualTabName.getCat(), qualTabName.getDb(),false);
if (database != null) {
outputs.add(new WriteEntity(database, WriteType.DDL_SHARED));
}

Table table = getTable(tableName, throwException);
Table table = getTable(qualTabName, throwException);
if (table != null) {
inputs.add(new ReadEntity(table));

@@ -70,7 +70,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {

boolean purge = (root.getFirstChildWithType(HiveParser.KW_PURGE) != null);
ReplicationSpec replicationSpec = new ReplicationSpec(root);
DropTableDesc desc = new DropTableDesc(tableName, ifExists, purge, replicationSpec);
DropTableDesc desc = new DropTableDesc(qualTabName, ifExists, purge, replicationSpec);
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
}
@@ -20,6 +20,7 @@

import java.io.Serializable;

import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
import org.apache.hadoop.hive.ql.plan.Explain;
@@ -32,17 +33,17 @@
public class DropTableDesc implements DDLDesc, Serializable {
private static final long serialVersionUID = 1L;

private final String tableName;
private final TableName tableName;
private final boolean ifExists;
private final boolean purge;
private final ReplicationSpec replicationSpec;
private final boolean validationRequired;

public DropTableDesc(String tableName, boolean ifExists, boolean ifPurge, ReplicationSpec replicationSpec) {
public DropTableDesc(TableName tableName, boolean ifExists, boolean ifPurge, ReplicationSpec replicationSpec) {
this(tableName, ifExists, ifPurge, replicationSpec, true);
}

public DropTableDesc(String tableName, boolean ifExists, boolean purge, ReplicationSpec replicationSpec,
public DropTableDesc(TableName tableName, boolean ifExists, boolean purge, ReplicationSpec replicationSpec,
boolean validationRequired) {
this.tableName = tableName;
this.ifExists = ifExists;
@@ -52,7 +53,7 @@ public DropTableDesc(String tableName, boolean ifExists, boolean purge, Replicat
}

@Explain(displayName = "table", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getTableName() {
public TableName getTableName() {
return tableName;
}
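DROP TABLE now carries the fully qualified TableName from the analyzer into the desc. A sketch of the updated construction; the flag values are hypothetical, and the DropTableDesc package and ReplicationSpec's no-argument constructor are assumptions:

```java
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.ddl.table.drop.DropTableDesc; // assumed package
import org.apache.hadoop.hive.ql.parse.ReplicationSpec;

final class DropTableDescUsage {
    static DropTableDesc sketch() {
        TableName qualTabName = new TableName("hive", "sales", "orders");
        return new DropTableDesc(qualTabName,
                true,                   // ifExists
                false,                  // purge
                new ReplicationSpec()); // no-arg constructor is an assumption
    }
}
```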
