@@ -65,6 +65,12 @@ class HiveDDLSuite
6565 fs.exists(filesystemPath)
6666 }
6767
/**
 * Qualifies a raw path string against its owning filesystem, using the
 * active SparkContext's Hadoop configuration to resolve the scheme and
 * authority (e.g. turning a bare local path into a full `file:/...` URI).
 */
private def makeQualifiedPath(path: String): Path = {
  val p = new Path(path)
  val fs = p.getFileSystem(sparkContext.hadoopConfiguration)
  fs.makeQualified(p)
}
73+
6874 test(" drop tables" ) {
6975 withTable(" tab1" ) {
7076 val tabName = " tab1"
@@ -1600,9 +1606,7 @@ class HiveDDLSuite
16001606 |LOCATION ' $dir'
16011607 """ .stripMargin)
16021608 val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier (" t" ))
1603- val dirPath = new Path (dir.getAbsolutePath)
1604- val fs = dirPath.getFileSystem(spark.sessionState.newHadoopConf())
1605- assert(new Path (table.location) == fs.makeQualified(dirPath))
1609+ assert(new Path (table.location) == makeQualifiedPath(dir.getAbsolutePath))
16061610
16071611 val tableLocFile = new File (new URI (table.location))
16081612 tableLocFile.delete()
@@ -1644,14 +1648,12 @@ class HiveDDLSuite
16441648 |LOCATION " $dir"
16451649 """ .stripMargin)
16461650 val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier (" t" ))
1647- val dirPath = new Path (dir.getAbsolutePath)
1648- val fs = dirPath.getFileSystem(spark.sessionState.newHadoopConf())
1649- assert(new Path (table.location) == fs.makeQualified(dirPath))
1651+ assert(new Path (table.location) == makeQualifiedPath(dir.getAbsolutePath))
16501652
16511653 spark.sql(" INSERT INTO TABLE t PARTITION(a=1, b=2) SELECT 3, 4" )
16521654 checkAnswer(spark.table(" t" ), Row (3 , 4 , 1 , 2 ) :: Nil )
16531655
1654- val partLoc = new File (s " $dirPath / a=1" )
1656+ val partLoc = new File (dir, " a=1" )
16551657 Utils .deleteRecursively(partLoc)
16561658 assert(! partLoc.exists())
16571659 // insert overwrite into a partition which location has been deleted.
@@ -1682,9 +1684,7 @@ class HiveDDLSuite
16821684 |LOCATION " $dir"
16831685 """ .stripMargin)
16841686 val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier (" t" ))
1685- val dirPath = new Path (dir.getAbsolutePath)
1686- val fs = dirPath.getFileSystem(spark.sessionState.newHadoopConf())
1687- assert(new Path (table.location) == fs.makeQualified(dirPath))
1687+ assert(new Path (table.location) == makeQualifiedPath(dir.getAbsolutePath))
16881688
16891689 dir.delete()
16901690 checkAnswer(spark.table(" t" ), Nil )
@@ -1800,10 +1800,8 @@ class HiveDDLSuite
18001800 |LOCATION ' $dir'
18011801 |AS SELECT 3 as a, 4 as b, 1 as c, 2 as d
18021802 """ .stripMargin)
1803- val dirPath = new Path (dir.getAbsolutePath)
1804- val fs = dirPath.getFileSystem(spark.sessionState.newHadoopConf())
18051803 val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier (" t" ))
1806- assert(new Path (table.location) == fs.makeQualified(dirPath ))
1804+ assert(new Path (table.location) == makeQualifiedPath(dir.getAbsolutePath ))
18071805
18081806 checkAnswer(spark.table(" t" ), Row (3 , 4 , 1 , 2 ))
18091807 }
@@ -1821,10 +1819,8 @@ class HiveDDLSuite
18211819 |LOCATION ' $dir'
18221820 |AS SELECT 3 as a, 4 as b, 1 as c, 2 as d
18231821 """ .stripMargin)
1824- val dirPath = new Path (dir.getAbsolutePath)
1825- val fs = dirPath.getFileSystem(spark.sessionState.newHadoopConf())
18261822 val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier (" t1" ))
1827- assert(new Path (table.location) == fs.makeQualified(dirPath ))
1823+ assert(new Path (table.location) == makeQualifiedPath(dir.getAbsolutePath ))
18281824
18291825 val partDir = new File (dir, " a=3" )
18301826 assert(partDir.exists())
0 commit comments