Commit 742fcff
[SPARK-32839][WINDOWS] Make Spark scripts working with the spaces in paths on Windows
### What changes were proposed in this pull request?

If you install Spark under a path that contains whitespace, it does not work on Windows. For example:

```
>>> SparkSession.builder.getOrCreate()
Presence of build for multiple Scala versions detected (C:\...\assembly\target\scala-2.13 and C:\...\assembly\target\scala-2.12).
Remove one of them or, set SPARK_SCALA_VERSION=2.13 in spark-env.cmd.
Visit https://spark.apache.org/docs/latest/configuration.html#environment-variables for more details about setting environment variables in spark-env.cmd.
Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd.
```

This PR fixes the whitespace handling in the Windows scripts so that Spark works under any path.

### Why are the changes needed?

To support paths that contain whitespace when running Spark on Windows.

### Does this PR introduce _any_ user-facing change?

Yes, users can now install and run Spark under paths that contain whitespace.

### How was this patch tested?

Manually tested.

Closes apache#29706 from HyukjinKwon/window-space-path.

Authored-by: HyukjinKwon <[email protected]>
Signed-off-by: HyukjinKwon <[email protected]>
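The root cause is a classic cmd.exe pitfall: an unquoted `%VAR%` expansion containing a space is split into separate tokens. A minimal sketch of the failure class, using a hypothetical install path (not from the commit):

```
rem Hypothetical path; any directory containing a space triggers the same failure.
set "SPARK_HOME=C:\Program Files\Spark"

rem Broken: the unquoted expansion splits at the space, so cmd tests the
rem bogus path C:\Program and treats the rest of the line as a command.
if exist %SPARK_HOME%\jars echo found

rem Fixed: one layer of quotes keeps the expanded path a single token.
if exist "%SPARK_HOME%\jars" echo found
```

Each of the three scripts below had a variant of this bug: a missing quote, a brittle `[%VAR%] == []` comparison, or a doubled-up quote.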
1 parent: e558b8a · commit: 742fcff

3 files changed (+7 −7 lines)

bin/find-spark-home.cmd (+1 −1)

```
@@ -55,6 +55,6 @@ if "x%SPARK_HOME%"=="x" (
     set SPARK_HOME=%~dp0..
   ) else (
     rem We are pip installed, use the Python script to resolve a reasonable SPARK_HOME
-    for /f "delims=" %%i in ('%PYTHON_RUNNER% %FIND_SPARK_HOME_PYTHON_SCRIPT%') do set SPARK_HOME=%%i
+    for /f "delims=" %%i in ('%PYTHON_RUNNER% "%FIND_SPARK_HOME_PYTHON_SCRIPT%"') do set SPARK_HOME=%%i
   )
 )
```
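The added quotes matter because `for /f` runs its command in a child cmd.exe, which re-tokenizes the line. A minimal sketch reusing the script's variable names, with a hypothetical pip-install location:

```
set "PYTHON_RUNNER=python"
set "FIND_SPARK_HOME_PYTHON_SCRIPT=C:\Program Files\Spark\bin\find_spark_home.py"

rem Broken: the unquoted expansion hands python two arguments,
rem "C:\Program" and "Files\Spark\bin\find_spark_home.py".
rem for /f "delims=" %%i in ('%PYTHON_RUNNER% %FIND_SPARK_HOME_PYTHON_SCRIPT%') do set SPARK_HOME=%%i

rem Fixed: the quotes keep the script path a single argument.
for /f "delims=" %%i in ('%PYTHON_RUNNER% "%FIND_SPARK_HOME_PYTHON_SCRIPT%"') do set SPARK_HOME=%%i
```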

bin/load-spark-env.cmd (+3 −3)

```
@@ -24,7 +24,7 @@ rem conf\ subdirectory.
 if not defined SPARK_ENV_LOADED (
   set SPARK_ENV_LOADED=1

-  if [%SPARK_CONF_DIR%] == [] (
+  if not defined SPARK_CONF_DIR (
     set SPARK_CONF_DIR=%~dp0..\conf
   )

@@ -36,8 +36,8 @@ rem Setting SPARK_SCALA_VERSION if not already set.
 set SCALA_VERSION_1=2.13
 set SCALA_VERSION_2=2.12

-set ASSEMBLY_DIR1=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_1%
-set ASSEMBLY_DIR2=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_2%
+set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_1%"
+set ASSEMBLY_DIR2="%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_2%"
 set ENV_VARIABLE_DOC=https://spark.apache.org/docs/latest/configuration.html#environment-variables

 if not defined SPARK_SCALA_VERSION (
```
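The `if not defined` rewrite is the standard way around this class of bug: it tests whether the variable exists at all, so its value is never re-parsed by cmd. A minimal sketch with a hypothetical conf path:

```
set "SPARK_CONF_DIR=C:\Program Files\Spark\conf"

rem Broken: expands to  if [C:\Program Files\Spark\conf] == [] (...)
rem and the space inside the brackets makes cmd reject the line
rem ("Files\Spark\conf] was unexpected at this time").
rem if [%SPARK_CONF_DIR%] == [] (echo unset)

rem Fixed: the variable itself is tested; its value is never parsed.
if not defined SPARK_CONF_DIR (echo unset)
```

The `ASSEMBLY_DIR` change takes the opposite convention: the quotes are baked into the stored value, so the script's later unquoted uses of those variables still expand with exactly one layer of quotes.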

bin/spark-class2.cmd (+3 −3, file mode 100644 → 100755)

```
@@ -30,12 +30,12 @@ if "x%1"=="x" (

 rem Find Spark jars.
 if exist "%SPARK_HOME%\jars" (
-  set SPARK_JARS_DIR="%SPARK_HOME%\jars"
+  set SPARK_JARS_DIR=%SPARK_HOME%\jars
 ) else (
-  set SPARK_JARS_DIR="%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%\jars"
+  set SPARK_JARS_DIR=%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%\jars
 )

-if not exist "%SPARK_JARS_DIR%"\ (
+if not exist "%SPARK_JARS_DIR%" (
   echo Failed to find Spark jars directory.
   echo You need to build Spark before running this program.
   exit /b 1
```