File tree: 1 file changed, +8 −9 lines
sql/hive/src/main/scala/org/apache/spark/sql/hive: 1 file changed, +8 −9 lines
Original file line number  Diff line number  Diff line change
@@ -376,15 +376,6 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
376 376       log.asInstanceOf[org.apache.log4j.Logger].setLevel(org.apache.log4j.Level.WARN)
377 377     }
378 378
379     -   // It is important that we RESET first as broken hooks that might have been set could break
380     -   // other sql exec here.
381     -   runSqlHive("RESET")
382     -   // For some reason, RESET does not reset the following variables...
383     -   runSqlHive("set datanucleus.cache.collections=true")
384     -   runSqlHive("set datanucleus.cache.collections.lazy=true")
385     -   // Lots of tests fail if we do not change the partition whitelist from the default.
386     -   runSqlHive("set hive.metastore.partition.name.whitelist.pattern=.*")
387     -
388 379     loadedTables.clear()
389 380     catalog.client.getAllTables("default").foreach { t =>
390 381       logDebug(s"Deleting table $t")
@@ -410,6 +401,14 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
410 401       FunctionRegistry.unregisterTemporaryUDF(udfName)
411 402     }
412 403
    404 +   // It is important that we RESET first as broken hooks that might have been set could break
    405 +   // other sql exec here.
    406 +   runSqlHive("RESET")
    407 +   // For some reason, RESET does not reset the following variables...
    408 +   runSqlHive("set datanucleus.cache.collections=true")
    409 +   runSqlHive("set datanucleus.cache.collections.lazy=true")
    410 +   // Lots of tests fail if we do not change the partition whitelist from the default.
    411 +   runSqlHive("set hive.metastore.partition.name.whitelist.pattern=.*")
413 412     configure()
414 413
415 414     runSqlHive("USE default")
You can’t perform that action at this time.
0 commit comments