Skip to content

Commit 9a3a4cc

Browse files
Jonathan Maurer
authored and committed
changed the way dp0 is used to accept spaces in path
1 parent 180607d commit 9a3a4cc

14 files changed

+48
-27
lines changed

bin/beeline.cmd

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,5 @@
11
@echo off
2-
2+
pushd %~dp0
33
rem
44
rem Licensed to the Apache Software Foundation (ASF) under one or more
55
rem contributor license agreements. See the NOTICE file distributed with
@@ -17,5 +17,5 @@ rem See the License for the specific language governing permissions and
1717
rem limitations under the License.
1818
rem
1919

20-
set SPARK_HOME=%~dp0..
21-
cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.hive.beeline.BeeLine %*
20+
cmd /V /E /C spark-class.cmd org.apache.hive.beeline.BeeLine %*
21+
popd

bin/load-spark-env.cmd

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,5 @@
11
@echo off
2+
pushd %~dp0
23

34
rem
45
rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -27,7 +28,7 @@ if [%SPARK_ENV_LOADED%] == [] (
2728
if not [%SPARK_CONF_DIR%] == [] (
2829
set user_conf_dir=%SPARK_CONF_DIR%
2930
) else (
30-
set user_conf_dir=%~dp0..\conf
31+
set user_conf_dir=\conf
3132
)
3233

3334
call :LoadSparkEnv
@@ -57,3 +58,4 @@ exit /b 0
5758
if exist "%user_conf_dir%\spark-env.cmd" (
5859
call "%user_conf_dir%\spark-env.cmd"
5960
)
61+
popd

bin/pyspark.cmd

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,5 @@
11
@echo off
2+
pushd %~dp0
23

34
rem
45
rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -20,4 +21,5 @@ rem
2021
rem This is the entry point for running PySpark. To avoid polluting the
2122
rem environment, it just launches a new cmd to do the real work.
2223

23-
cmd /V /E /C "%~dp0pyspark2.cmd" %*
24+
cmd /V /E /C pyspark2.cmd %*
25+
popd

bin/pyspark2.cmd

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
@echo off
2+
pushd %~dp0
23

34
rem
45
rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -18,7 +19,7 @@ rem limitations under the License.
1819
rem
1920

2021
rem Figure out where the Spark framework is installed
21-
set SPARK_HOME=%~dp0..
22+
set SPARK_HOME=..\
2223

2324
call %SPARK_HOME%\bin\load-spark-env.cmd
2425
set _SPARK_CMD_USAGE=Usage: bin\pyspark.cmd [options]
@@ -36,3 +37,4 @@ set OLD_PYTHONSTARTUP=%PYTHONSTARTUP%
3637
set PYTHONSTARTUP=%SPARK_HOME%\python\pyspark\shell.py
3738

3839
call %SPARK_HOME%\bin\spark-submit2.cmd pyspark-shell-main --name "PySparkShell" %*
40+
popd

bin/run-example.cmd

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,5 @@
11
@echo off
2+
pushd %~dp0
23

34
rem
45
rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -20,4 +21,5 @@ rem
2021
rem This is the entry point for running a Spark example. To avoid polluting
2122
rem the environment, it just launches a new cmd to do the real work.
2223

23-
cmd /V /E /C "%~dp0run-example2.cmd" %*
24+
cmd /V /E /C run-example2.cmd %*
25+
popd

bin/run-example2.cmd

Lines changed: 7 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
@echo off
2+
pushd %~dp0
23

34
rem
45
rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -20,10 +21,7 @@ rem
2021
set SCALA_VERSION=2.10
2122

2223
rem Figure out where the Spark framework is installed
23-
set FWDIR=%~dp0..\
24-
25-
rem Export this as SPARK_HOME
26-
set SPARK_HOME=%FWDIR%
24+
set SPARK_HOME=..\
2725

2826
call %SPARK_HOME%\bin\load-spark-env.cmd
2927

@@ -36,12 +34,12 @@ if not "x%1"=="x" goto arg_given
3634
goto exit
3735
:arg_given
3836

39-
set EXAMPLES_DIR=%FWDIR%examples
37+
set EXAMPLES_DIR=%SPARK_HOME%examples
4038

4139
rem Figure out the JAR file that our examples were packaged into.
4240
set SPARK_EXAMPLES_JAR=
43-
if exist "%FWDIR%RELEASE" (
44-
for %%d in ("%FWDIR%lib\spark-examples*.jar") do (
41+
if exist "%SPARK_HOME%RELEASE" (
42+
for %%d in ("%SPARK_HOME%lib\spark-examples*.jar") do (
4543
set SPARK_EXAMPLES_JAR=%%d
4644
)
4745
) else (
@@ -80,9 +78,10 @@ if "%~1" neq "" (
8078
)
8179
if defined ARGS set ARGS=%ARGS:~1%
8280

83-
call "%FWDIR%bin\spark-submit.cmd" ^
81+
call "%SPARK_HOME%bin\spark-submit.cmd" ^
8482
--master %EXAMPLE_MASTER% ^
8583
--class %EXAMPLE_CLASS% ^
8684
"%SPARK_EXAMPLES_JAR%" %ARGS%
8785

8886
:exit
87+
popd

bin/spark-class.cmd

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,5 @@
11
@echo off
2+
pushd %~dp0
23

34
rem
45
rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -20,4 +21,5 @@ rem
2021
rem This is the entry point for running a Spark class. To avoid polluting
2122
rem the environment, it just launches a new cmd to do the real work.
2223

23-
cmd /V /E /C "%~dp0spark-class2.cmd" %*
24+
cmd /V /E /C spark-class2.cmd %*
25+
popd

bin/spark-class2.cmd

Lines changed: 5 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,5 @@
11
@echo off
2+
pushd %~dp0
23

34
rem
45
rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -18,9 +19,9 @@ rem limitations under the License.
1819
rem
1920

2021
rem Figure out where the Spark framework is installed
21-
set SPARK_HOME=%~dp0..
22+
set SPARK_HOME=..\
2223

23-
call %SPARK_HOME%\bin\load-spark-env.cmd
24+
call %SPARK_HOME%bin\load-spark-env.cmd
2425

2526
rem Test that an argument was given
2627
if "x%1"=="x" (
@@ -34,7 +35,7 @@ set SPARK_ASSEMBLY_JAR=0
3435
if exist "%SPARK_HOME%\RELEASE" (
3536
set ASSEMBLY_DIR=%SPARK_HOME%\lib
3637
) else (
37-
set ASSEMBLY_DIR=%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%
38+
set ASSEMBLY_DIR=%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%
3839
)
3940

4041
for %%d in (%ASSEMBLY_DIR%\spark-assembly*hadoop*.jar) do (
@@ -68,3 +69,4 @@ for /f "tokens=*" %%i in (%LAUNCHER_OUTPUT%) do (
6869
)
6970
del %LAUNCHER_OUTPUT%
7071
%SPARK_CMD%
72+
popd

bin/spark-shell.cmd

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,5 @@
11
@echo off
2+
pushd %~dp0
23

34
rem
45
rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -20,4 +21,5 @@ rem
2021
rem This is the entry point for running Spark shell. To avoid polluting the
2122
rem environment, it just launches a new cmd to do the real work.
2223

23-
cmd /V /E /C "%~dp0spark-shell2.cmd" %*
24+
cmd /V /E /C spark-shell2.cmd %*
25+
popd

bin/spark-shell2.cmd

Lines changed: 3 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,5 @@
11
@echo off
2+
pushd %~dp0
23

34
rem
45
rem Licensed to the Apache Software Foundation (ASF) under one or more
@@ -17,7 +18,6 @@ rem See the License for the specific language governing permissions and
1718
rem limitations under the License.
1819
rem
1920

20-
set SPARK_HOME=%~dp0..
2121
set _SPARK_CMD_USAGE=Usage: .\bin\spark-shell.cmd [options]
2222

2323
rem SPARK-4161: scala does not assume use of the java classpath,
@@ -32,4 +32,5 @@ if "x%SPARK_SUBMIT_OPTS%"=="x" (
3232
set SPARK_SUBMIT_OPTS="%SPARK_SUBMIT_OPTS% -Dscala.usejavacp=true"
3333

3434
:run_shell
35-
%SPARK_HOME%\bin\spark-submit2.cmd --class org.apache.spark.repl.Main --name "Spark shell" %*
35+
spark-submit2.cmd --class org.apache.spark.repl.Main --name "Spark shell" %*
36+
popd

0 commit comments

Comments (0)