@@ -1432,133 +1432,4 @@ class HiveDDLSuite
       }
     }
   }
-
-  test("insert data to a data source table which has a not existed location should succeed") {
-    withTable("t") {
-      withTempDir { dir =>
-        spark.sql(
-          s"""CREATE TABLE t(a string, b int)
-             |USING parquet
-             |OPTIONS(path "file:${dir.getCanonicalPath}")
-           """.stripMargin)
-        var table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
-        val expectedPath = s"file:${dir.getAbsolutePath.stripSuffix("/")}"
-        assert(table.location.stripSuffix("/") == expectedPath)
-
-        dir.delete
-        assert(!new File(table.location).exists())
-        spark.sql("INSERT INTO TABLE t SELECT 'c', 1")
-        checkAnswer(spark.table("t"), Row("c", 1) :: Nil)
-
-        Utils.deleteRecursively(dir)
-        assert(!new File(table.location).exists())
-        spark.sql("INSERT OVERWRITE TABLE t SELECT 'c', 1")
-        checkAnswer(spark.table("t"), Row("c", 1) :: Nil)
-
-        var newDir = dir.getAbsolutePath.stripSuffix("/") + "/x"
-        spark.sql(s"ALTER TABLE t SET LOCATION '$newDir'")
-        spark.sessionState.catalog.refreshTable(TableIdentifier("t"))
-
-        table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
-        assert(table.location == newDir)
-        assert(!new File(newDir).exists())
-
-        spark.sql("INSERT INTO TABLE t SELECT 'c', 1")
-        checkAnswer(spark.table("t"), Row("c", 1) :: Nil)
-      }
-    }
-  }
-
-  test("insert into a data source table with no existed partition location should succeed") {
-    withTable("t") {
-      withTempDir { dir =>
-        spark.sql(
-          s"""CREATE TABLE t(a int, b int, c int, d int)
-             |USING parquet
-             |PARTITIONED BY(a, b)
-             |LOCATION "file:${dir.getCanonicalPath}"
-           """.stripMargin)
-        var table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
-        val expectedPath = s"file:${dir.getAbsolutePath.stripSuffix("/")}"
-        assert(table.location.stripSuffix("/") == expectedPath)
-
-        spark.sql("INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 3, 4")
-        checkAnswer(spark.table("t"), Row(3, 4, 1, 2) :: Nil)
-
-        val partLoc = new File(s"${dir.getAbsolutePath}/a=1")
-        Utils.deleteRecursively(partLoc)
-        assert(!partLoc.exists())
-        // insert overwrite into a partition whose location has been deleted.
-        spark.sql("INSERT OVERWRITE TABLE t PARTITION(a=1, b=2) SELECT 7, 8")
-        checkAnswer(spark.table("t"), Row(7, 8, 1, 2) :: Nil)
-
-        val newDir = dir.getAbsolutePath.stripSuffix("/") + "/x"
-        spark.sql(s"ALTER TABLE t PARTITION(a=1, b=2) SET LOCATION '$newDir'")
-        assert(!new File(newDir).exists())
-
-        // insert into a partition whose location does not exist.
-        spark.sql("INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 9, 10")
-        checkAnswer(spark.table("t"), Row(9, 10, 1, 2) :: Nil)
-      }
-    }
-  }
-
-  test("read data from a data source table which has a not existed location should succeed") {
-    withTable("t") {
-      withTempDir { dir =>
-        spark.sql(
-          s"""CREATE TABLE t(a string, b int)
-             |USING parquet
-             |OPTIONS(path "file:${dir.getAbsolutePath}")
-           """.stripMargin)
-        var table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
-        val expectedPath = s"file:${dir.getAbsolutePath.stripSuffix("/")}"
-        assert(table.location.stripSuffix("/") == expectedPath)
-
-        dir.delete()
-        checkAnswer(spark.table("t"), Nil)
-
-        var newDir = dir.getAbsolutePath.stripSuffix("/") + "/x"
-        spark.sql(s"ALTER TABLE t SET LOCATION '$newDir'")
-
-        table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
-        assert(table.location == newDir)
-        assert(!new File(newDir).exists())
-        checkAnswer(spark.table("t"), Nil)
-      }
-    }
-  }
-
-  test("read data from a data source table with no existed partition location should succeed") {
-    withTable("t") {
-      withTempDir { dir =>
-        spark.sql(
-          s"""CREATE TABLE t(a int, b int, c int, d int)
-             |USING parquet
-             |PARTITIONED BY(a, b)
-             |LOCATION "file:${dir.getCanonicalPath}"
-           """.stripMargin)
-        var table = spark.sessionState.catalog.getTableMetadata(TableIdentifier("t"))
-
-        spark.sql("INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 3, 4")
-        checkAnswer(spark.table("t"), Row(3, 4, 1, 2) :: Nil)
-
-        val newDir = dir.getAbsolutePath.stripSuffix("/") + "/x"
-        val newDirFile = new File(newDir)
-        spark.sql(s"ALTER TABLE t PARTITION(a=1, b=2) SET LOCATION '$newDir'")
-        assert(!newDirFile.exists())
-        // select from a partition whose location has been changed to a non-existent location.
-        checkAnswer(spark.sql("select * from t where a=1 and b=2"), Nil)
-
-        spark.sql("INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 5, 6")
-        checkAnswer(spark.table("t"), Row(5, 6, 1, 2) :: Nil)
-        // select from a partition whose location has been deleted.
-        Utils.deleteRecursively(newDirFile)
-        assert(!newDirFile.exists())
-        spark.sql("REFRESH TABLE t")
-        checkAnswer(spark.sql("select * from t where a=1 and b=2"), Nil)
-      }
-    }
-  }
-
 }