|
21 | 21 | individual modules. |
22 | 22 | """ |
23 | 23 | import os |
| 24 | +import glob |
24 | 25 | import sys |
25 | 26 | import subprocess |
26 | 27 | import pydoc |
|
84 | 85 | from pyspark.sql.utils import AnalysisException, ParseException, IllegalArgumentException |
85 | 86 |
|
86 | 87 |
|
def found_file(pattern):
    """Return True if at least one file under $SPARK_HOME matches *pattern*.

    :param pattern: glob expression, interpreted relative to the
        ``SPARK_HOME`` environment variable.
    :return: True when the glob matches one or more paths, else False.
    :raises KeyError: if ``SPARK_HOME`` is not set in the environment
        (intentional — these checks only make sense inside a Spark build).
    """
    spark_home = os.environ["SPARK_HOME"]
    # An empty match list is falsy, so bool() gives the existence test
    # directly without counting the matches.
    return bool(glob.glob(os.path.join(spark_home, pattern)))
| 92 | + |
| 93 | + |
def search_hive_assembly_jars():
    """Fail fast when the built Spark assembly lacks the Hive jars.

    Raises a plain ``Exception`` with build instructions if no
    ``spark-hive`` jar is found under the assembly target directory.
    """
    jar_pattern = "assembly/target/scala-*/jars/spark-hive_*-*.jar"
    if found_file(jar_pattern):
        return
    raise Exception(
        "Failed to find Hive assembly jar. "
        "You need to build Spark with "
        "'build/sbt -Phive package' or "
        "'build/mvn -DskipTests -Phive package' before running this test.")
| 102 | + |
| 103 | + |
def search_test_udf_classes():
    """Fail fast when the compiled Java test UDF classes are missing.

    Raises a plain ``Exception`` with build instructions if the
    ``JavaStringLength`` test class has not been compiled.
    """
    class_pattern = ("sql/core/target/scala-*/test-classes/"
                     "test/org/apache/spark/sql/JavaStringLength.class")
    if found_file(class_pattern):
        return
    raise Exception(
        "Failed to find test udf classes. "
        "You need to build Spark with 'build/sbt sql/test:compile'")
| 111 | + |
| 112 | + |
87 | 113 | class UTCOffsetTimezone(datetime.tzinfo): |
88 | 114 | """ |
89 | 115 | Specifies timezone in UTC offset |
@@ -5205,6 +5231,8 @@ def test_invalid_args(self): |
5205 | 5231 |
|
5206 | 5232 | if __name__ == "__main__": |
5207 | 5233 | from pyspark.sql.tests import * |
| 5234 | + search_hive_assembly_jars() |
| 5235 | + search_test_udf_classes() |
5208 | 5236 | if xmlrunner: |
5209 | 5237 | unittest.main(testRunner=xmlrunner.XMLTestRunner(output='target/test-reports')) |
5210 | 5238 | else: |
|
0 commit comments