Skip to content

Commit b238e8d

Browse files
committed
move test cases to DDLSuite
1 parent 334e89f commit b238e8d

File tree

2 files changed

+123
-129
lines changed

2 files changed

+123
-129
lines changed

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala

Lines changed: 123 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1816,4 +1816,127 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
18161816
}
18171817
}
18181818
}
1819+
1820+
// Writes to a data source table must succeed even when the table location
// has been removed from disk: both INSERT INTO and INSERT OVERWRITE should
// silently re-create the directory, and the same must hold after the table
// is re-pointed at a never-created path via ALTER TABLE ... SET LOCATION.
test("insert data to a data source table which has a not existed location should succeed") {
  withTable("t") {
    withTempDir { dir =>
      spark.sql(
        s"""
           |CREATE TABLE t(a string, b int)
           |USING parquet
           |OPTIONS(path "file:${dir.getCanonicalPath}")
         """.stripMargin)
      val tableMeta = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
      val expectedLocation = s"file:${dir.getAbsolutePath.stripSuffix("/")}"
      assert(tableMeta.location.stripSuffix("/") == expectedLocation)

      // Drop the location directory, then verify a plain INSERT re-creates it.
      dir.delete()
      val locationFile = new File(tableMeta.location.stripPrefix("file:"))
      assert(!locationFile.exists())
      spark.sql("INSERT INTO TABLE t SELECT 'c', 1")
      assert(locationFile.exists())
      checkAnswer(spark.table("t"), Row("c", 1) :: Nil)

      // Same expectation for INSERT OVERWRITE after removing the whole tree.
      Utils.deleteRecursively(dir)
      assert(!locationFile.exists())
      spark.sql("INSERT OVERWRITE TABLE t SELECT 'c', 1")
      assert(locationFile.exists())
      checkAnswer(spark.table("t"), Row("c", 1) :: Nil)

      // Re-point the table at a directory that was never created.
      val newLocation = dir.getAbsolutePath.stripSuffix("/") + "/x"
      val newLocationFile = new File(newLocation)
      spark.sql(s"ALTER TABLE t SET LOCATION '$newLocation'")
      spark.sessionState.catalog.refreshTable(TableIdentifier("t"))

      val refreshedMeta = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
      assert(refreshedMeta.location == newLocation)
      assert(!newLocationFile.exists())

      // INSERT must create the new, not-yet-existing location as well.
      spark.sql("INSERT INTO TABLE t SELECT 'c', 1")
      assert(newLocationFile.exists())
      checkAnswer(spark.table("t"), Row("c", 1) :: Nil)
    }
  }
}
1861+
1862+
// Writing into a partition whose directory has been deleted must succeed
// and re-create the partition location on disk.
test("insert into a data source table with no existed partition location should succeed") {
  withTable("t") {
    withTempDir { dir =>
      spark.sql(
        s"""
           |CREATE TABLE t(a int, b int, c int, d int)
           |USING parquet
           |PARTITIONED BY(a, b)
           |LOCATION "file:${dir.getCanonicalPath}"
         """.stripMargin)
      val tableMeta = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
      val expectedLocation = s"file:${dir.getAbsolutePath.stripSuffix("/")}"
      assert(tableMeta.location.stripSuffix("/") == expectedLocation)

      spark.sql("INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 3, 4")
      checkAnswer(spark.table("t"), Row(3, 4, 1, 2) :: Nil)

      // Remove the partition directory, then INSERT OVERWRITE into that
      // partition: the directory must come back with the new data.
      val partitionDir = new File(s"${dir.getAbsolutePath}/a=1")
      Utils.deleteRecursively(partitionDir)
      assert(!partitionDir.exists())
      spark.sql("INSERT OVERWRITE TABLE t PARTITION(a=1, b=2) SELECT 7, 8")
      assert(partitionDir.exists())
      checkAnswer(spark.table("t"), Row(7, 8, 1, 2) :: Nil)

      // TODO: also cover inserting into a partition after its location is
      // changed via ALTER TABLE ... PARTITION ... SET LOCATION.
    }
  }
}
1891+
1892+
// Reading from a data source table must not fail when the table location is
// missing: both a deleted directory and a never-created directory (set via
// ALTER TABLE ... SET LOCATION) should simply yield an empty result.
test("read data from a data source table which has a not existed location should succeed") {
  withTable("t") {
    withTempDir { dir =>
      spark.sql(
        s"""
           |CREATE TABLE t(a string, b int)
           |USING parquet
           |OPTIONS(path "file:${dir.getAbsolutePath}")
         """.stripMargin)
      val tableMeta = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
      val expectedLocation = s"file:${dir.getAbsolutePath.stripSuffix("/")}"
      assert(tableMeta.location.stripSuffix("/") == expectedLocation)

      // Reading after the location directory is gone returns no rows.
      dir.delete()
      checkAnswer(spark.table("t"), Nil)

      // Re-point the table at a path that was never created.
      val newLocation = dir.getAbsolutePath.stripSuffix("/") + "/x"
      spark.sql(s"ALTER TABLE t SET LOCATION '$newLocation'")

      val refreshedMeta = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
      assert(refreshedMeta.location == newLocation)
      assert(!new File(newLocation).exists())
      checkAnswer(spark.table("t"), Nil)
    }
  }
}
1918+
1919+
// Reading a partition whose location has been deleted must yield an empty
// result rather than fail.
// Fix: the original fetched the table metadata into a local `table` that was
// never used; the dead local is removed.
test("read data from a data source table with no existed partition location should succeed") {
  withTable("t") {
    withTempDir { dir =>
      spark.sql(
        s"""
           |CREATE TABLE t(a int, b int, c int, d int)
           |USING parquet
           |PARTITIONED BY(a, b)
           |LOCATION "file:${dir.getCanonicalPath}"
         """.stripMargin)

      spark.sql("INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 3, 4")
      checkAnswer(spark.table("t"), Row(3, 4, 1, 2) :: Nil)

      // Select from a partition whose location has been deleted; REFRESH
      // first so cached file listings are invalidated.
      Utils.deleteRecursively(dir)
      assert(!dir.exists())
      spark.sql("REFRESH TABLE t")
      checkAnswer(spark.sql("select * from t where a=1 and b=2"), Nil)
    }
  }
}
18191942
}

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala

Lines changed: 0 additions & 129 deletions
Original file line number | Diff line number | Diff line change
@@ -1432,133 +1432,4 @@ class HiveDDLSuite
14321432
}
14331433
}
14341434
}
1435-
1436-
// Writes must succeed and re-create the table location when it is missing.
// Fixes over the original:
//  * `new File(table.location)` kept the "file:" scheme prefix, so the path
//    never existed and `assert(!exists)` passed vacuously — the prefix is
//    now stripped so the assertions check the real directory.
//  * `var table` / `var newDir` are replaced with vals (`table1` for the
//    post-ALTER metadata); neither needed mutation.
//  * Existence of the location is asserted after each insert, so the
//    "directory was re-created" claim is actually verified.
test("insert data to a data source table which has a not existed location should succeed") {
  withTable("t") {
    withTempDir { dir =>
      spark.sql(
        s"""CREATE TABLE t(a string, b int)
           |USING parquet
           |OPTIONS(path "file:${dir.getCanonicalPath}")
         """.stripMargin)
      val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
      val expectedPath = s"file:${dir.getAbsolutePath.stripSuffix("/")}"
      assert(table.location.stripSuffix("/") == expectedPath)

      // Strip the "file:" scheme so java.io.File sees a real filesystem path.
      val tableLocFile = new File(table.location.stripPrefix("file:"))

      dir.delete()
      assert(!tableLocFile.exists())
      spark.sql("INSERT INTO TABLE t SELECT 'c', 1")
      assert(tableLocFile.exists())
      checkAnswer(spark.table("t"), Row("c", 1) :: Nil)

      Utils.deleteRecursively(dir)
      assert(!tableLocFile.exists())
      spark.sql("INSERT OVERWRITE TABLE t SELECT 'c', 1")
      assert(tableLocFile.exists())
      checkAnswer(spark.table("t"), Row("c", 1) :: Nil)

      // Re-point the table at a directory that was never created.
      val newDir = dir.getAbsolutePath.stripSuffix("/") + "/x"
      val newDirFile = new File(newDir)
      spark.sql(s"ALTER TABLE t SET LOCATION '$newDir'")
      spark.sessionState.catalog.refreshTable(TableIdentifier("t"))

      val table1 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
      assert(table1.location == newDir)
      assert(!newDirFile.exists())

      spark.sql("INSERT INTO TABLE t SELECT 'c', 1")
      assert(newDirFile.exists())
      checkAnswer(spark.table("t"), Row("c", 1) :: Nil)
    }
  }
}
1471-
1472-
// Writes into a partition whose directory is missing (deleted, or re-pointed
// at a never-created path) must succeed.
// Fix: `var table` was never reassigned and is now a `val`.
test("insert into a data source table with no existed partition location should succeed") {
  withTable("t") {
    withTempDir { dir =>
      spark.sql(
        s"""CREATE TABLE t(a int, b int, c int, d int)
           |USING parquet
           |PARTITIONED BY(a, b)
           |LOCATION "file:${dir.getCanonicalPath}"
         """.stripMargin)
      val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
      val expectedPath = s"file:${dir.getAbsolutePath.stripSuffix("/")}"
      assert(table.location.stripSuffix("/") == expectedPath)

      spark.sql("INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 3, 4")
      checkAnswer(spark.table("t"), Row(3, 4, 1, 2) :: Nil)

      val partLoc = new File(s"${dir.getAbsolutePath}/a=1")
      Utils.deleteRecursively(partLoc)
      assert(!partLoc.exists())
      // Insert overwrite into a partition whose location has been deleted.
      spark.sql("INSERT OVERWRITE TABLE t PARTITION(a=1, b=2) SELECT 7, 8")
      checkAnswer(spark.table("t"), Row(7, 8, 1, 2) :: Nil)

      val newDir = dir.getAbsolutePath.stripSuffix("/") + "/x"
      spark.sql(s"ALTER TABLE t PARTITION(a=1, b=2) SET LOCATION '$newDir'")
      assert(!new File(newDir).exists())

      // Insert into a partition whose location does not exist yet.
      spark.sql("INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 9, 10")
      checkAnswer(spark.table("t"), Row(9, 10, 1, 2) :: Nil)
    }
  }
}
1505-
1506-
// Reads must return an empty result, not fail, when the table location does
// not exist on disk.
// Fix: the reassigned `var table` is split into two vals (`table` before and
// `table1` after the ALTER), and `var newDir` becomes a `val` — no mutation
// was needed.
test("read data from a data source table which has a not existed location should succeed") {
  withTable("t") {
    withTempDir { dir =>
      spark.sql(
        s"""CREATE TABLE t(a string, b int)
           |USING parquet
           |OPTIONS(path "file:${dir.getAbsolutePath}")
         """.stripMargin)
      val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
      val expectedPath = s"file:${dir.getAbsolutePath.stripSuffix("/")}"
      assert(table.location.stripSuffix("/") == expectedPath)

      // Reading after the location directory is removed yields no rows.
      dir.delete()
      checkAnswer(spark.table("t"), Nil)

      // Re-point the table at a path that was never created.
      val newDir = dir.getAbsolutePath.stripSuffix("/") + "/x"
      spark.sql(s"ALTER TABLE t SET LOCATION '$newDir'")

      val table1 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
      assert(table1.location == newDir)
      assert(!new File(newDir).exists())
      checkAnswer(spark.table("t"), Nil)
    }
  }
}
1531-
1532-
// Reading a partition whose location is missing or was re-pointed at a
// never-created path must return an empty result.
// Fix: the original fetched table metadata into `var table` and never used
// it; the dead local is removed.
test("read data from a data source table with no existed partition location should succeed") {
  withTable("t") {
    withTempDir { dir =>
      spark.sql(
        s"""CREATE TABLE t(a int, b int, c int, d int)
           |USING parquet
           |PARTITIONED BY(a, b)
           |LOCATION "file:${dir.getCanonicalPath}"
         """.stripMargin)

      spark.sql("INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 3, 4")
      checkAnswer(spark.table("t"), Row(3, 4, 1, 2) :: Nil)

      val newDir = dir.getAbsolutePath.stripSuffix("/") + "/x"
      val newDirFile = new File(newDir)
      spark.sql(s"ALTER TABLE t PARTITION(a=1, b=2) SET LOCATION '$newDir'")
      assert(!newDirFile.exists())
      // Select from a partition whose location was changed to a non-existent path.
      checkAnswer(spark.sql("select * from t where a=1 and b=2"), Nil)

      spark.sql("INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 5, 6")
      checkAnswer(spark.table("t"), Row(5, 6, 1, 2) :: Nil)

      // Select from a partition whose location has been deleted; REFRESH
      // first so cached file listings are invalidated.
      Utils.deleteRecursively(newDirFile)
      assert(!newDirFile.exists())
      spark.sql("REFRESH TABLE t")
      checkAnswer(spark.sql("select * from t where a=1 and b=2"), Nil)
    }
  }
}
1563-
15641435
}

0 commit comments

Comments
 (0)