@@ -847,7 +847,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {

sql(
s"""
- |CREATE TEMPORARY TABLE jsonTableSQL
+ |CREATE TEMPORARY VIEW jsonTableSQL
|USING org.apache.spark.sql.json
|OPTIONS (
| path '$path'
@@ -57,14 +57,14 @@ class JDBCWriteSuite extends SharedSQLContext with BeforeAndAfter {

sql(
s"""
- |CREATE TEMPORARY TABLE PEOPLE
+ |CREATE OR REPLACE TEMPORARY VIEW PEOPLE
Member Author:

This test suite needs CREATE OR REPLACE.
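For context, a minimal standalone sketch of why the idempotent form matters when the view is registered before every test. This is my own illustration, not code from the PR; the object name `TempViewReplaceSketch`, the local-mode session, and the `people_src` view are made up for the example.

```scala
import org.apache.spark.sql.SparkSession

object TempViewReplaceSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("temp-view-replace-sketch")
      .getOrCreate()

    // Backing data for the illustration.
    spark.range(3).createOrReplaceTempView("people_src")

    // The plain form works the first time it runs...
    spark.sql("CREATE TEMPORARY VIEW PEOPLE AS SELECT * FROM people_src")

    // ...but re-running it would fail because the view name already exists.
    // The OR REPLACE form can be executed repeatedly, which is what a
    // per-test `before` block needs.
    spark.sql("CREATE OR REPLACE TEMPORARY VIEW PEOPLE AS SELECT * FROM people_src")
    spark.sql("CREATE OR REPLACE TEMPORARY VIEW PEOPLE AS SELECT * FROM people_src")

    spark.stop()
  }
}
```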

|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url1', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass')
""".stripMargin.replaceAll("\n", " "))

sql(
s"""
- |CREATE TEMPORARY TABLE PEOPLE1
+ |CREATE OR REPLACE TEMPORARY VIEW PEOPLE1
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url1', dbtable 'TEST.PEOPLE1', user 'testUser', password 'testPass')
""".stripMargin.replaceAll("\n", " "))
@@ -139,7 +139,7 @@ class FilteredScanSuite extends DataSourceTest with SharedSQLContext with Predic
super.beforeAll()
sql(
"""
- |CREATE TEMPORARY TABLE oneToTenFiltered
+ |CREATE TEMPORARY VIEW oneToTenFiltered
|USING org.apache.spark.sql.sources.FilteredScanSource
|OPTIONS (
| from '1',
@@ -62,7 +62,7 @@ class PrunedScanSuite extends DataSourceTest with SharedSQLContext {
super.beforeAll()
sql(
"""
- |CREATE TEMPORARY TABLE oneToTenPruned
+ |CREATE TEMPORARY VIEW oneToTenPruned
|USING org.apache.spark.sql.sources.PrunedScanSource
|OPTIONS (
| from '1',
@@ -137,7 +137,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
super.beforeAll()
sql(
"""
- |CREATE TEMPORARY TABLE oneToTen
+ |CREATE TEMPORARY VIEW oneToTen
|USING org.apache.spark.sql.sources.SimpleScanSource
|OPTIONS (
| From '1',
@@ -149,7 +149,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {

sql(
"""
- |CREATE TEMPORARY TABLE tableWithSchema (
+ |CREATE TEMPORARY VIEW tableWithSchema (
|`string$%Field` stRIng,
|binaryField binary,
|`booleanField` boolean,
@@ -332,7 +332,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
test("defaultSource") {
sql(
"""
- |CREATE TEMPORARY TABLE oneToTenDef
+ |CREATE TEMPORARY VIEW oneToTenDef
|USING org.apache.spark.sql.sources
|OPTIONS (
| from '1',
@@ -351,7 +351,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
val schemaNotAllowed = intercept[Exception] {
sql(
"""
- |CREATE TEMPORARY TABLE relationProvierWithSchema (i int)
+ |CREATE TEMPORARY VIEW relationProvierWithSchema (i int)
|USING org.apache.spark.sql.sources.SimpleScanSource
|OPTIONS (
| From '1',
@@ -364,7 +364,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
val schemaNeeded = intercept[Exception] {
sql(
"""
- |CREATE TEMPORARY TABLE schemaRelationProvierWithoutSchema
+ |CREATE TEMPORARY VIEW schemaRelationProvierWithoutSchema
|USING org.apache.spark.sql.sources.AllDataTypesScanSource
|OPTIONS (
| From '1',
@@ -378,7 +378,7 @@ class TableScanSuite extends DataSourceTest with SharedSQLContext {
test("SPARK-5196 schema field with comment") {
sql(
"""
- |CREATE TEMPORARY TABLE student(name string comment "SN", age int comment "SA", grade int)
+ |CREATE TEMPORARY VIEW student(name string comment "SN", age int comment "SA", grade int)
|USING org.apache.spark.sql.sources.AllDataTypesScanSource
|OPTIONS (
| from '1',
@@ -153,15 +153,15 @@ class OrcSourceSuite extends OrcSuite {
super.beforeAll()

spark.sql(
s"""CREATE TEMPORARY TABLE normal_orc_source
s"""CREATE TEMPORARY VIEW normal_orc_source
|USING org.apache.spark.sql.hive.orc
|OPTIONS (
| PATH '${new File(orcTableAsDir.getAbsolutePath).getCanonicalPath}'
|)
""".stripMargin)

spark.sql(
s"""CREATE TEMPORARY TABLE normal_orc_as_source
s"""CREATE TEMPORARY VIEW normal_orc_as_source
|USING org.apache.spark.sql.hive.orc
|OPTIONS (
| PATH '${new File(orcTableAsDir.getAbsolutePath).getCanonicalPath}'
@@ -582,39 +582,39 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
"normal_parquet")

sql( s"""
- create temporary table partitioned_parquet
+ CREATE TEMPORARY VIEW partitioned_parquet
USING org.apache.spark.sql.parquet
OPTIONS (
path '${partitionedTableDir.getCanonicalPath}'
)
""")

sql( s"""
- create temporary table partitioned_parquet_with_key
+ CREATE TEMPORARY VIEW partitioned_parquet_with_key
USING org.apache.spark.sql.parquet
OPTIONS (
path '${partitionedTableDirWithKey.getCanonicalPath}'
)
""")

sql( s"""
- create temporary table normal_parquet
+ CREATE TEMPORARY VIEW normal_parquet
USING org.apache.spark.sql.parquet
OPTIONS (
path '${new File(partitionedTableDir, "p=1").getCanonicalPath}'
)
""")

sql( s"""
- CREATE TEMPORARY TABLE partitioned_parquet_with_key_and_complextypes
+ CREATE TEMPORARY VIEW partitioned_parquet_with_key_and_complextypes
USING org.apache.spark.sql.parquet
OPTIONS (
path '${partitionedTableDirWithKeyAndComplexTypes.getCanonicalPath}'
)
""")

sql( s"""
- CREATE TEMPORARY TABLE partitioned_parquet_with_complextypes
+ CREATE TEMPORARY VIEW partitioned_parquet_with_complextypes
USING org.apache.spark.sql.parquet
OPTIONS (
path '${partitionedTableDirWithComplexTypes.getCanonicalPath}'