Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -13,14 +13,17 @@
*/
package io.trino.execution;

import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ListenableFuture;
import io.trino.Session;
import io.trino.execution.warnings.WarningCollector;
import io.trino.metadata.Metadata;
import io.trino.metadata.QualifiedObjectName;
import io.trino.metadata.TableHandle;
import io.trino.security.AccessControl;
import io.trino.spi.TrinoException;
import io.trino.sql.tree.Expression;
import io.trino.sql.tree.QualifiedName;
import io.trino.sql.tree.RenameTable;

import javax.inject.Inject;
Expand All @@ -32,9 +35,11 @@
import static io.trino.metadata.MetadataUtil.createQualifiedObjectName;
import static io.trino.spi.StandardErrorCode.CATALOG_NOT_FOUND;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.spi.StandardErrorCode.SYNTAX_ERROR;
import static io.trino.spi.StandardErrorCode.TABLE_ALREADY_EXISTS;
import static io.trino.spi.StandardErrorCode.TABLE_NOT_FOUND;
import static io.trino.sql.analyzer.SemanticExceptions.semanticException;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;

public class RenameTableTask
Expand Down Expand Up @@ -94,7 +99,7 @@ public ListenableFuture<Void> execute(
return immediateVoidFuture();
}

QualifiedObjectName target = createQualifiedObjectName(session, statement, statement.getTarget());
QualifiedObjectName target = createTargetQualifiedObjectName(tableName, statement.getTarget());
if (metadata.getCatalogHandle(session, target.getCatalogName()).isEmpty()) {
throw semanticException(CATALOG_NOT_FOUND, statement, "Target catalog '%s' does not exist", target.getCatalogName());
}
Expand All @@ -110,4 +115,19 @@ public ListenableFuture<Void> execute(

return immediateVoidFuture();
}

/**
 * Builds the fully-qualified name of the rename target. Name parts the
 * target omits (schema and/or catalog) are inherited from the source
 * table, so an unqualified target keeps the table in its original schema.
 *
 * @throws TrinoException with {@code SYNTAX_ERROR} if the target has more than three parts
 */
private static QualifiedObjectName createTargetQualifiedObjectName(QualifiedObjectName source, QualifiedName target)
{
    requireNonNull(target, "target is null");
    List<String> parts = target.getParts();
    int partCount = parts.size();
    if (partCount > 3) {
        throw new TrinoException(SYNTAX_ERROR, format("Too many dots in table name: %s", target));
    }

    // Read the parts from the end: the last part is always the object name;
    // the parts before it (when present) are the schema and the catalog.
    String objectName = parts.get(partCount - 1);
    String schemaName = (partCount >= 2) ? parts.get(partCount - 2) : source.getSchemaName();
    String catalogName = (partCount == 3) ? parts.get(0) : source.getCatalogName();

    return new QualifiedObjectName(catalogName, schemaName, objectName);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -240,34 +240,8 @@ public void testCharVarcharComparison()
}

@Override
public void testCreateSchema()
protected String createSchemaSql(String schemaName)
{
// override needed to provide valid location for schema (it needs to be s3:// based). By default schema is assigned hdfs:// location
// and test fails when schema is dropped with java.lang.RuntimeException: Error checking external files in 'hdfs://.... (test code cannot access hadoop-master:9000)
// TODO: better fix would be to configure hadoop container we use in test to use `s3://` filesystem by default

String schemaName = "test_schema_create_" + randomTableSuffix();
if (!hasBehavior(SUPPORTS_CREATE_SCHEMA)) {
assertQueryFails("CREATE SCHEMA " + schemaName, "This connector does not support creating schemas");
return;
}
assertThat(computeActual("SHOW SCHEMAS").getOnlyColumnAsSet()).doesNotContain(schemaName);
assertUpdate("CREATE SCHEMA " + schemaName + " WITH (location = 's3://" + bucketName + "/" + schemaName + "')");

// verify listing of new schema
assertThat(computeActual("SHOW SCHEMAS").getOnlyColumnAsSet()).contains(schemaName);

// verify SHOW CREATE SCHEMA works
assertThat((String) computeScalar("SHOW CREATE SCHEMA " + schemaName))
.startsWith(format("CREATE SCHEMA %s.%s", getSession().getCatalog().orElseThrow(), schemaName));

// try to create duplicate schema
assertQueryFails("CREATE SCHEMA " + schemaName, format("line 1:1: Schema '.*\\.%s' already exists", schemaName));

// cleanup
assertUpdate("DROP SCHEMA " + schemaName);

// verify DROP SCHEMA for non-existing schema
assertQueryFails("DROP SCHEMA " + schemaName, format("line 1:1: Schema '.*\\.%s' does not exist", schemaName));
return "CREATE SCHEMA " + schemaName + " WITH (location = 's3://" + bucketName + "/" + schemaName + "')";
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -161,11 +161,11 @@ public void testCreateSchema()
{
String schemaName = "test_schema_create_" + randomTableSuffix();
if (!hasBehavior(SUPPORTS_CREATE_SCHEMA)) {
assertQueryFails("CREATE SCHEMA " + schemaName, "This connector does not support creating schemas");
assertQueryFails(createSchemaSql(schemaName), "This connector does not support creating schemas");
return;
}
assertThat(computeActual("SHOW SCHEMAS").getOnlyColumnAsSet()).doesNotContain(schemaName);
assertUpdate("CREATE SCHEMA " + schemaName);
assertUpdate(createSchemaSql(schemaName));

// verify listing of new schema
assertThat(computeActual("SHOW SCHEMAS").getOnlyColumnAsSet()).contains(schemaName);
Expand All @@ -175,7 +175,7 @@ public void testCreateSchema()
.startsWith(format("CREATE SCHEMA %s.%s", getSession().getCatalog().orElseThrow(), schemaName));

// try to create duplicate schema
assertQueryFails("CREATE SCHEMA " + schemaName, format("line 1:1: Schema '.*\\.%s' already exists", schemaName));
assertQueryFails(createSchemaSql(schemaName), format("line 1:1: Schema '.*\\.%s' already exists", schemaName));

// cleanup
assertUpdate("DROP SCHEMA " + schemaName);
Expand Down Expand Up @@ -1636,44 +1636,6 @@ public void testRenameSchema()
}
}

@Test
public void testRenameTableAcrossSchema()
{
if (!hasBehavior(SUPPORTS_RENAME_TABLE_ACROSS_SCHEMAS)) {
if (!hasBehavior(SUPPORTS_RENAME_TABLE)) {
throw new SkipException("Skipping since rename table is not supported at all");
}
assertQueryFails("ALTER TABLE nation RENAME TO other_schema.yyyy", "This connector does not support renaming tables across schemas");
return;
}

if (!hasBehavior(SUPPORTS_CREATE_SCHEMA)) {
throw new AssertionError("Cannot test ALTER TABLE RENAME across schemas without CREATE SCHEMA, the test needs to be implemented in a connector-specific way");
}

if (!hasBehavior(SUPPORTS_CREATE_TABLE)) {
throw new AssertionError("Cannot test ALTER TABLE RENAME across schemas without CREATE TABLE, the test needs to be implemented in a connector-specific way");
}

String tableName = "test_rename_old_" + randomTableSuffix();
assertUpdate("CREATE TABLE " + tableName + " AS SELECT 123 x", 1);

String schemaName = "test_schema_" + randomTableSuffix();
assertUpdate("CREATE SCHEMA " + schemaName);

String renamedTable = schemaName + ".test_rename_new_" + randomTableSuffix();
assertUpdate("ALTER TABLE " + tableName + " RENAME TO " + renamedTable);

assertFalse(getQueryRunner().tableExists(getSession(), tableName));
assertQuery("SELECT x FROM " + renamedTable, "VALUES 123");

assertUpdate("DROP TABLE " + renamedTable);
assertUpdate("DROP SCHEMA " + schemaName);

assertFalse(getQueryRunner().tableExists(getSession(), tableName));
assertFalse(getQueryRunner().tableExists(getSession(), renamedTable));
}

@Test
public void testAddColumn()
{
Expand Down Expand Up @@ -2001,6 +1963,63 @@ public void testRenameTable()
assertFalse(getQueryRunner().tableExists(getSession(), renamedTable));
}

@Test
public void testRenameTableAcrossSchema()
{
    if (!hasBehavior(SUPPORTS_RENAME_TABLE_ACROSS_SCHEMAS)) {
        if (!hasBehavior(SUPPORTS_RENAME_TABLE)) {
            throw new SkipException("Skipping since rename table is not supported at all");
        }
        assertQueryFails("ALTER TABLE nation RENAME TO other_schema.yyyy", "This connector does not support renaming tables across schemas");
        return;
    }

    if (!hasBehavior(SUPPORTS_CREATE_SCHEMA)) {
        throw new AssertionError("Cannot test ALTER TABLE RENAME across schemas without CREATE SCHEMA, the test needs to be implemented in a connector-specific way");
    }

    if (!hasBehavior(SUPPORTS_CREATE_TABLE)) {
        throw new AssertionError("Cannot test ALTER TABLE RENAME across schemas without CREATE TABLE, the test needs to be implemented in a connector-specific way");
    }

    // Create a single-row source table in the default schema.
    String oldTable = "test_rename_old_" + randomTableSuffix();
    assertUpdate("CREATE TABLE " + oldTable + " AS SELECT 123 x", 1);

    // Create a separate schema to move the table into.
    String targetSchema = "test_schema_" + randomTableSuffix();
    assertUpdate("CREATE SCHEMA " + targetSchema);

    // Rename into the other schema and verify the data followed the table.
    String newTable = targetSchema + ".test_rename_new_" + randomTableSuffix();
    assertUpdate("ALTER TABLE " + oldTable + " RENAME TO " + newTable);

    assertFalse(getQueryRunner().tableExists(getSession(), oldTable));
    assertQuery("SELECT x FROM " + newTable, "VALUES 123");

    // Clean up, then confirm neither name resolves anymore.
    assertUpdate("DROP TABLE " + newTable);
    assertUpdate("DROP SCHEMA " + targetSchema);

    assertFalse(getQueryRunner().tableExists(getSession(), oldTable));
    assertFalse(getQueryRunner().tableExists(getSession(), newTable));
}

@Test
public void testRenameTableToUnqualifiedPreservesSchema()
{
    skipTestUnless(hasBehavior(SUPPORTS_CREATE_SCHEMA) && hasBehavior(SUPPORTS_CREATE_TABLE) && hasBehavior(SUPPORTS_RENAME_TABLE));

    // Table lives in a dedicated, non-default schema.
    String schema = "test_source_schema_" + randomTableSuffix();
    assertUpdate(createSchemaSql(schema));

    String oldTable = "test_rename_unqualified_name_" + randomTableSuffix();
    assertUpdate("CREATE TABLE " + schema + "." + oldTable + " AS SELECT 123 x", 1);

    // Rename to an unqualified name: the table must stay in its schema
    // rather than moving to the session's default schema.
    String newTable = "test_rename_unqualified_name_new_" + randomTableSuffix();
    assertUpdate("ALTER TABLE " + schema + "." + oldTable + " RENAME TO " + newTable);
    assertQuery("SELECT x FROM " + schema + "." + newTable, "VALUES 123");

    assertUpdate("DROP TABLE " + schema + "." + newTable);
    assertUpdate("DROP SCHEMA " + schema);
}

@Test
public void testCommentTable()
{
Expand Down Expand Up @@ -3015,6 +3034,11 @@ protected Consumer<Plan> assertPartialLimitWithPreSortedInputsCount(Session sess
};
}

/**
 * SQL statement used by the generic schema tests to create {@code schemaName}.
 * Connectors that require extra schema properties (e.g. a storage location)
 * override this method.
 */
protected String createSchemaSql(String schemaName)
{
    return format("CREATE SCHEMA %s", schemaName);
}

protected static final class DataMappingTestSetup
{
private final String trinoTypeName;
Expand Down