8 changes: 4 additions & 4 deletions python/pyspark/sql/tests.py
@@ -1684,8 +1684,8 @@ def test_list_tables(self):
         self.assertEquals(spark.catalog.listTables(), [])
         self.assertEquals(spark.catalog.listTables("some_db"), [])
         spark.createDataFrame([(1, 1)]).createOrReplaceTempView("temp_tab")
-        spark.sql("CREATE TABLE tab1 (name STRING, age INT)")
-        spark.sql("CREATE TABLE some_db.tab2 (name STRING, age INT)")
+        spark.sql("CREATE TABLE tab1 (name STRING, age INT) USING parquet")
+        spark.sql("CREATE TABLE some_db.tab2 (name STRING, age INT) USING parquet")
         tables = sorted(spark.catalog.listTables(), key=lambda t: t.name)
         tablesDefault = sorted(spark.catalog.listTables("default"), key=lambda t: t.name)
         tablesSomeDb = sorted(spark.catalog.listTables("some_db"), key=lambda t: t.name)
@@ -1763,8 +1763,8 @@ def test_list_columns(self):
         spark = self.spark
         spark.catalog._reset()
         spark.sql("CREATE DATABASE some_db")
-        spark.sql("CREATE TABLE tab1 (name STRING, age INT)")
-        spark.sql("CREATE TABLE some_db.tab2 (nickname STRING, tolerance FLOAT)")
+        spark.sql("CREATE TABLE tab1 (name STRING, age INT) USING parquet")
+        spark.sql("CREATE TABLE some_db.tab2 (nickname STRING, tolerance FLOAT) USING parquet")
         columns = sorted(spark.catalog.listColumns("tab1"), key=lambda c: c.name)
         columnsDefault = sorted(spark.catalog.listColumns("tab1", "default"), key=lambda c: c.name)
         self.assertEquals(columns, columnsDefault)
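The added `USING parquet` clauses are what these tests now need: on a SparkSession built without Hive support, a bare CREATE TABLE parses as a Hive serde table and is rejected at analysis time by the HiveOnlyCheck rule extended later in this diff. A minimal sketch of the difference, with hypothetical table names, assuming a session without Hive support:

    # Succeeds: a parquet-backed data source table
    spark.sql("CREATE TABLE tab_ds (name STRING) USING parquet")

    # Raises AnalysisException("Hive support is required to CREATE Hive TABLE")
    spark.sql("CREATE TABLE tab_hive (name STRING)")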
@@ -370,22 +370,6 @@ trait CheckAnalysis extends PredicateHelper {
                 |Conflicting attributes: ${conflictingAttributes.mkString(",")}
               """.stripMargin)

-        case s: SimpleCatalogRelation =>
-          failAnalysis(
-            s"""
-               |Hive support is required to select over the following tables:
-               |${s.catalogTable.identifier}
-             """.stripMargin)
-
-        // TODO: We need to consolidate this kind of checks for InsertIntoTable
-        // with the rule of PreWriteCheck defined in extendedCheckRules.
-        case InsertIntoTable(s: SimpleCatalogRelation, _, _, _, _) =>
-          failAnalysis(
-            s"""
-               |Hive support is required to insert into the following tables:
-               |${s.catalogTable.identifier}
-             """.stripMargin)
-
         case InsertIntoTable(t, _, _, _, _)
           if !t.isInstanceOf[LeafNode] ||
             t.isInstanceOf[Range] ||
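With the two SimpleCatalogRelation checks deleted above, selecting from and inserting into tables backed by the in-memory catalog should no longer fail analysis solely for lack of Hive support. A sketch of what is expected to pass now, assuming a session without Hive support and a hypothetical table name:

    # Both of these previously hit "Hive support is required to select over/insert into ..."
    spark.sql("CREATE TABLE src (id INT) USING parquet")
    spark.sql("INSERT INTO src VALUES (1)")
    spark.sql("SELECT * FROM src").show()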
@@ -156,8 +156,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEach {

test("the table type of an external table should be EXTERNAL_TABLE") {
val catalog = newBasicCatalog()
val table =
newTable("external_table1", "db2").copy(tableType = CatalogTableType.EXTERNAL)
val table = newTable("external_table1", "db2").copy(tableType = CatalogTableType.EXTERNAL)
catalog.createTable(table, ignoreIfExists = false)
val actual = catalog.getTable("db2", "external_table1")
assert(actual.tableType === CatalogTableType.EXTERNAL)
@@ -278,7 +277,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEach {
       schema = new StructType()
         .add("HelLo", "int", nullable = false)
         .add("WoRLd", "int", nullable = true),
-      provider = Some("hive"),
+      provider = Some(defaultProvider),
       partitionColumnNames = Seq("WoRLd"),
       bucketSpec = Some(BucketSpec(4, Seq("HelLo"), Nil)))
     catalog.createTable(tbl, ignoreIfExists = false)
@@ -330,7 +329,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEach {
.add("col2", "string")
.add("partCol1", "int")
.add("partCol2", "string"),
provider = Some("hive"),
provider = Some(defaultProvider),
partitionColumnNames = Seq("partCol1", "partCol2"))
catalog.createTable(table, ignoreIfExists = false)

@@ -357,7 +356,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEach {
.add("col2", "string")
.add("partCol1", "int")
.add("partCol2", "string"),
provider = Some("hive"),
provider = Some(defaultProvider),
partitionColumnNames = Seq("partCol1", "partCol2"))
catalog.createTable(table, ignoreIfExists = false)

@@ -505,7 +504,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEach {
.add("col2", "string")
.add("partCol1", "int")
.add("partCol2", "string"),
provider = Some("hive"),
provider = Some(defaultProvider),
partitionColumnNames = Seq("partCol1", "partCol2"))
catalog.createTable(table, ignoreIfExists = false)

@@ -726,7 +725,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEach {
       tableType = CatalogTableType.MANAGED,
       storage = CatalogStorageFormat.empty,
       schema = new StructType().add("a", "int").add("b", "string"),
-      provider = Some("hive")
+      provider = Some(defaultProvider)
     )

     catalog.createTable(table, ignoreIfExists = false)
@@ -746,7 +745,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEach {
         Some(Utils.createTempDir().getAbsolutePath),
         None, None, None, false, Map.empty),
       schema = new StructType().add("a", "int").add("b", "string"),
-      provider = Some("hive")
+      provider = Some(defaultProvider)
     )
     catalog.createTable(externalTable, ignoreIfExists = false)
     assert(!exists(db.locationUri, "external_table"))
@@ -763,7 +762,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEach {
.add("col2", "string")
.add("partCol1", "int")
.add("partCol2", "string"),
provider = Some("hive"),
provider = Some(defaultProvider),
partitionColumnNames = Seq("partCol1", "partCol2"))
catalog.createTable(table, ignoreIfExists = false)

@@ -829,6 +828,7 @@ abstract class CatalogTestUtils {
   // Unimplemented methods
   val tableInputFormat: String
   val tableOutputFormat: String
+  val defaultProvider: String
   def newEmptyCatalog(): ExternalCatalog

   // These fields must be lazy because they rely on fields that are not implemented yet
@@ -899,7 +899,7 @@ abstract class CatalogTestUtils {
.add("col2", "string")
.add("a", "int")
.add("b", "string"),
provider = Some("hive"),
provider = Some(defaultProvider),
partitionColumnNames = Seq("a", "b"),
bucketSpec = Some(BucketSpec(4, Seq("col1"), Nil)))
}
@@ -24,6 +24,7 @@ class InMemoryCatalogSuite extends ExternalCatalogSuite {
   protected override val utils: CatalogTestUtils = new CatalogTestUtils {
     override val tableInputFormat: String = "org.apache.park.SequenceFileInputFormat"
     override val tableOutputFormat: String = "org.apache.park.SequenceFileOutputFormat"
+    override val defaultProvider: String = "parquet"
     override def newEmptyCatalog(): ExternalCatalog = new InMemoryCatalog
   }

@@ -37,6 +37,7 @@ class SessionCatalogSuite extends PlanTest {
   private val utils = new CatalogTestUtils {
     override val tableInputFormat: String = "com.fruit.eyephone.CameraInputFormat"
     override val tableOutputFormat: String = "com.fruit.eyephone.CameraOutputFormat"
+    override val defaultProvider: String = "parquet"
     override def newEmptyCatalog(): ExternalCatalog = new InMemoryCatalog
   }

@@ -60,7 +60,7 @@ case class CreateDataSourceTableCommand(table: CatalogTable, ignoreIfExists: Boolean)
       identifier = table.identifier.copy(
         database = Some(
           table.identifier.database.getOrElse(sessionState.catalog.getCurrentDatabase))),
-      tracksPartitionsInCatalog = sparkSession.sessionState.conf.manageFilesourcePartitions)
+      tracksPartitionsInCatalog = sessionState.conf.manageFilesourcePartitions)
     val dataSource: BaseRelation =
       DataSource(
         sparkSession = sparkSession,
@@ -98,7 +98,7 @@ case class CreateDataSourceTableCommand(table: CatalogTable, ignoreIfExists: Boolean)
       // partition provider hive, but no partitions in the metastore. The user has to call
       // `msck repair table` to populate the table partitions.
       tracksPartitionsInCatalog = partitionColumnNames.nonEmpty &&
-        sparkSession.sessionState.conf.manageFilesourcePartitions)
+        sessionState.conf.manageFilesourcePartitions)
     // We will return Nil or throw exception at the beginning if the table already exists, so when
     // we reach here, the table should not exist and we should set `ignoreIfExists` to false.
     sessionState.catalog.createTable(newTable, ignoreIfExists = false)
@@ -172,8 +172,7 @@ case class CreateDataSourceTableAsSelectCommand(
       case fs: HadoopFsRelation if table.partitionColumnNames.nonEmpty &&
           sparkSession.sqlContext.conf.manageFilesourcePartitions =>
         // Need to recover partitions into the metastore so our saved data is visible.
-        sparkSession.sessionState.executePlan(
-          AlterTableRecoverPartitionsCommand(table.identifier)).toRdd
+        sessionState.executePlan(AlterTableRecoverPartitionsCommand(table.identifier)).toRdd
       case _ =>
     }
   }
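Per the in-code comment above, a partitioned data source table is created with partition management enabled but an empty partition list, and the user recovers partitions with `msck repair table`. A sketch of that flow, with a hypothetical table name, assuming the default setting of spark.sql.hive.manageFilesourcePartitions:

    # Created with partition provider hive but no partitions in the metastore
    spark.sql("CREATE TABLE logs (msg STRING, day STRING) USING parquet PARTITIONED BY (day)")

    # Registers the partitions found under the table's location
    # (MSCK REPAIR TABLE runs AlterTableRecoverPartitionsCommand, as in the CTAS branch above)
    spark.sql("MSCK REPAIR TABLE logs")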
@@ -385,7 +385,8 @@ object HiveOnlyCheck extends (LogicalPlan => Unit) {
     plan.foreach {
       case CreateTable(tableDesc, _, Some(_)) if DDLUtils.isHiveTable(tableDesc) =>
         throw new AnalysisException("Hive support is required to use CREATE Hive TABLE AS SELECT")
-
+      case CreateTable(tableDesc, _, _) if DDLUtils.isHiveTable(tableDesc) =>
+        throw new AnalysisException("Hive support is required to CREATE Hive TABLE")
       case _ => // OK
     }
   }
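HiveOnlyCheck previously rejected only CREATE Hive TABLE AS SELECT; the new case extends the rule to any CREATE of a Hive serde table, which is what forces the `USING parquet` clauses added throughout the tests in this diff. A sketch of a statement the new case should reject, assuming a session without Hive support and a hypothetical table name:

    # STORED AS marks this as a Hive serde table even though the format is parquet
    spark.sql("CREATE TABLE h (a INT) STORED AS parquet")
    # -> AnalysisException: Hive support is required to CREATE Hive TABLE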
@@ -1,5 +1,5 @@
 -- Create the origin table
-CREATE TABLE test_change(a INT, b STRING, c INT);
+CREATE TABLE test_change(a INT, b STRING, c INT) using parquet;
 DESC test_change;

 -- Change column name (not supported yet)
@@ -47,7 +47,7 @@ CREATE GLOBAL TEMPORARY VIEW global_temp_view(a, b) AS SELECT 1, "one";
 ALTER TABLE global_temp.global_temp_view CHANGE a a INT COMMENT 'this is column a';

 -- Change column in partition spec (not supported yet)
-CREATE TABLE partition_table(a INT, b STRING) PARTITIONED BY (c INT, d STRING);
+CREATE TABLE partition_table(a INT, b STRING, c INT, d STRING) USING parquet PARTITIONED BY (c, d);
 ALTER TABLE partition_table PARTITION (c = 1) CHANGE COLUMN a new_a INT;

 -- DROP TEST TABLE
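Two partitioning syntaxes are in play in these .sql test inputs: the old Hive form declares partition column types inside PARTITIONED BY, while the data source form used in the rewritten statements lists every column in the schema and names the partition columns only. A side-by-side sketch with a hypothetical table name:

    # Data source syntax: partition column c is declared in the schema
    spark.sql("CREATE TABLE pt (a INT, c INT) USING parquet PARTITIONED BY (c)")

    # Hive syntax (requires Hive support): the partition column is typed in PARTITIONED BY
    # CREATE TABLE pt (a INT) PARTITIONED BY (c INT)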
2 changes: 1 addition & 1 deletion sql/core/src/test/resources/sql-tests/inputs/describe.sql
@@ -1,4 +1,4 @@
-CREATE TABLE t (a STRING, b INT) PARTITIONED BY (c STRING, d STRING);
+CREATE TABLE t (a STRING, b INT, c STRING, d STRING) USING parquet PARTITIONED BY (c, d);

 ALTER TABLE t ADD PARTITION (c='Us', d=1);

4 changes: 2 additions & 2 deletions sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
@@ -1,9 +1,9 @@
 -- Test data.
 CREATE DATABASE showdb;
 USE showdb;
-CREATE TABLE show_t1(a String, b Int) PARTITIONED BY (c String, d String);
+CREATE TABLE show_t1(a String, b Int, c String, d String) USING parquet PARTITIONED BY (c, d);
 ALTER TABLE show_t1 ADD PARTITION (c='Us', d=1);
-CREATE TABLE show_t2(b String, d Int);
+CREATE TABLE show_t2(b String, d Int) USING parquet;
 CREATE TEMPORARY VIEW show_t3(e int) USING parquet;
 CREATE GLOBAL TEMP VIEW show_t4 AS SELECT 1 as col1;

4 changes: 2 additions & 2 deletions sql/core/src/test/resources/sql-tests/inputs/show_columns.sql
@@ -2,8 +2,8 @@ CREATE DATABASE showdb;

 USE showdb;

-CREATE TABLE showcolumn1 (col1 int, `col 2` int);
-CREATE TABLE showcolumn2 (price int, qty int) partitioned by (year int, month int);
+CREATE TABLE showcolumn1 (col1 int, `col 2` int) USING parquet;
+CREATE TABLE showcolumn2 (price int, qty int, year int, month int) USING parquet partitioned by (year, month);
 CREATE TEMPORARY VIEW showColumn3 (col3 int, `col 4` int) USING parquet;
 CREATE GLOBAL TEMP VIEW showColumn4 AS SELECT 1 as col1, 'abc' as `col 5`;

@@ -3,7 +3,7 @@


 -- !query 0
-CREATE TABLE test_change(a INT, b STRING, c INT)
+CREATE TABLE test_change(a INT, b STRING, c INT) using parquet
 -- !query 0 schema
 struct<>
 -- !query 0 output
@@ -269,7 +269,7 @@ Database 'global_temp' not found;


 -- !query 28
-CREATE TABLE partition_table(a INT, b STRING) PARTITIONED BY (c INT, d STRING)
+CREATE TABLE partition_table(a INT, b STRING, c INT, d STRING) USING parquet PARTITIONED BY (c, d)
 -- !query 28 schema
 struct<>
 -- !query 28 output
@@ -3,7 +3,7 @@


 -- !query 0
-CREATE TABLE t (a STRING, b INT) PARTITIONED BY (c STRING, d STRING)
+CREATE TABLE t (a STRING, b INT, c STRING, d STRING) USING parquet PARTITIONED BY (c, d)
 -- !query 0 schema
 struct<>
 -- !query 0 output
@@ -19,7 +19,7 @@ struct<>


 -- !query 2
-CREATE TABLE show_t1(a String, b Int) PARTITIONED BY (c String, d String)
+CREATE TABLE show_t1(a String, b Int, c String, d String) USING parquet PARTITIONED BY (c, d)
 -- !query 2 schema
 struct<>
 -- !query 2 output
@@ -35,7 +35,7 @@ struct<>


 -- !query 4
-CREATE TABLE show_t2(b String, d Int)
+CREATE TABLE show_t2(b String, d Int) USING parquet
 -- !query 4 schema
 struct<>
 -- !query 4 output
@@ -19,15 +19,15 @@ struct<>


 -- !query 2
-CREATE TABLE showcolumn1 (col1 int, `col 2` int)
+CREATE TABLE showcolumn1 (col1 int, `col 2` int) USING parquet
 -- !query 2 schema
 struct<>
 -- !query 2 output



 -- !query 3
-CREATE TABLE showcolumn2 (price int, qty int) partitioned by (year int, month int)
+CREATE TABLE showcolumn2 (price int, qty int, year int, month int) USING parquet partitioned by (year, month)
 -- !query 3 schema
 struct<>
 -- !query 3 output