Skip to content

Commit 96c7678

Browse files
committed
fix some code
1 parent 37eb1dc commit 96c7678

File tree

1 file changed

+16
-45
lines changed
  • sql/core/src/test/scala/org/apache/spark/sql/execution/command

1 file changed

+16
-45
lines changed

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala

Lines changed: 16 additions & 45 deletions
Original file line number | Diff line number | Diff line change
@@ -1761,34 +1761,6 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
17611761
assert(rows.length > 0)
17621762
}
17631763

1764-
test("SET LOCATION for managed table") {
1765-
withTable("tbl") {
1766-
withTempDir { dir =>
1767-
sql("CREATE TABLE tbl(i INT) USING parquet")
1768-
sql("INSERT INTO tbl SELECT 1")
1769-
checkAnswer(spark.table("tbl"), Row(1))
1770-
val defaultTablePath = spark.sessionState.catalog
1771-
.getTableMetadata(TableIdentifier("tbl")).storage.locationUri.get
1772-
1773-
sql(s"ALTER TABLE tbl SET LOCATION '${dir.getCanonicalPath}'")
1774-
spark.catalog.refreshTable("tbl")
1775-
// SET LOCATION won't move data from previous table path to new table path.
1776-
assert(spark.table("tbl").count() == 0)
1777-
// the previous table path should be still there.
1778-
assert(new File(new URI(defaultTablePath)).exists())
1779-
1780-
sql("INSERT INTO tbl SELECT 2")
1781-
checkAnswer(spark.table("tbl"), Row(2))
1782-
// newly inserted data will go to the new table path.
1783-
assert(dir.listFiles().nonEmpty)
1784-
1785-
sql("DROP TABLE tbl")
1786-
// the new table path will be removed after DROP TABLE.
1787-
assert(!dir.exists())
1788-
}
1789-
}
1790-
}
1791-
17921764
test("insert data to a data source table which has a not existed location should succeed") {
17931765
withTable("t") {
17941766
withTempDir { dir =>
@@ -1799,8 +1771,8 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
17991771
|OPTIONS(path "$dir")
18001772
""".stripMargin)
18011773
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
1802-
val expectedPath = dir.getAbsolutePath.stripSuffix("/")
1803-
assert(table.location.stripSuffix("/") == expectedPath)
1774+
val expectedPath = dir.getAbsolutePath
1775+
assert(table.location == expectedPath)
18041776

18051777
dir.delete
18061778
val tableLocFile = new File(table.location.stripPrefix("file:"))
@@ -1815,17 +1787,16 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
18151787
assert(tableLocFile.exists)
18161788
checkAnswer(spark.table("t"), Row("c", 1) :: Nil)
18171789

1818-
val newDir = dir.getAbsolutePath.stripSuffix("/") + "/x"
1819-
val newDirFile = new File(newDir)
1790+
val newDir = new File(dir, "x")
18201791
spark.sql(s"ALTER TABLE t SET LOCATION '$newDir'")
18211792
spark.sessionState.catalog.refreshTable(TableIdentifier("t"))
18221793

18231794
val table1 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
1824-
assert(table1.location == newDir)
1825-
assert(!newDirFile.exists)
1795+
assert(table1.location == newDir.getAbsolutePath)
1796+
assert(!newDir.exists)
18261797

18271798
spark.sql("INSERT INTO TABLE t SELECT 'c', 1")
1828-
assert(newDirFile.exists)
1799+
assert(newDir.exists)
18291800
checkAnswer(spark.table("t"), Row("c", 1) :: Nil)
18301801
}
18311802
}
@@ -1838,17 +1809,17 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
18381809
s"""
18391810
|CREATE TABLE t(a int, b int, c int, d int)
18401811
|USING parquet
1812+
|OPTIONS(path '$dir')
18411813
|PARTITIONED BY(a, b)
1842-
|LOCATION "$dir"
18431814
""".stripMargin)
18441815
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
1845-
val expectedPath = dir.getAbsolutePath.stripSuffix("/")
1846-
assert(table.location.stripSuffix("/") == expectedPath)
1816+
val expectedPath = dir.getAbsolutePath
1817+
assert(table.location == expectedPath)
18471818

18481819
spark.sql("INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 3, 4")
18491820
checkAnswer(spark.table("t"), Row(3, 4, 1, 2) :: Nil)
18501821

1851-
val partLoc = new File(s"${dir.getAbsolutePath}/a=1")
1822+
val partLoc = new File(dir, "a=1")
18521823
Utils.deleteRecursively(partLoc)
18531824
assert(!partLoc.exists())
18541825
// insert overwrite into a partition which location has been deleted.
@@ -1869,18 +1840,18 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
18691840
|OPTIONS(path "$dir")
18701841
""".stripMargin)
18711842
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
1872-
val expectedPath = dir.getAbsolutePath.stripSuffix("/")
1873-
assert(table.location.stripSuffix("/") == expectedPath)
1843+
val expectedPath = dir.getAbsolutePath
1844+
assert(table.location == expectedPath)
18741845

18751846
dir.delete()
18761847
checkAnswer(spark.table("t"), Nil)
18771848

1878-
val newDir = dir.getAbsolutePath.stripSuffix("/") + "/x"
1849+
val newDir = new File(dir, "x")
18791850
spark.sql(s"ALTER TABLE t SET LOCATION '$newDir'")
18801851

18811852
val table1 = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
1882-
assert(table1.location == newDir)
1883-
assert(!new File(newDir).exists())
1853+
assert(table1.location == newDir.getAbsolutePath)
1854+
assert(!newDir.exists())
18841855
checkAnswer(spark.table("t"), Nil)
18851856
}
18861857
}
@@ -1893,8 +1864,8 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
18931864
s"""
18941865
|CREATE TABLE t(a int, b int, c int, d int)
18951866
|USING parquet
1867+
|OPTIONS(path "$dir")
18961868
|PARTITIONED BY(a, b)
1897-
|LOCATION "$dir"
18981869
""".stripMargin)
18991870
spark.sql("INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 3, 4")
19001871
checkAnswer(spark.table("t"), Row(3, 4, 1, 2) :: Nil)

0 commit comments

Comments (0)