Commit b397a7d

[SPARK-3943] Some scripts bin\*.cmd pollute environment variables in Windows
Modified the bin\*.cmd scripts so they no longer pollute the caller's environment variables: each user-facing script now only launches a companion *2.cmd in a child cmd process, and all variables are set there.
Parent: 186b497
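
The fix relies on standard Windows behaviour: environment variables set inside a child cmd.exe process disappear when that process exits, so nothing leaks back into the shell that invoked the script. A minimal sketch of the wrapper pattern the commit adopts (the names wrapper.cmd and wrapper2.cmd are illustrative only, not files in this commit):

    rem wrapper.cmd -- user-facing entry point. It sets no variables itself;
    rem it only forwards all arguments to the worker script in a child cmd.
    rem /V enables delayed variable expansion, /E enables command extensions,
    rem /C runs the given command and then exits the child shell.
    @echo off
    cmd /V /E /C %~dp0wrapper2.cmd %*

    rem wrapper2.cmd -- does the real work. Variables set here exist only in
    rem the child cmd process and are gone once it returns to the caller.
    @echo off
    set SPARK_HOME=%~dp0..
    echo SPARK_HOME inside the child: %SPARK_HOME%

After wrapper.cmd returns, running echo %SPARK_HOME% in the calling shell still shows whatever value it had beforehand (or nothing at all), which is the behaviour SPARK-3943 asks for.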

7 files changed: 343 additions, 290 deletions

bin/spark-shell.cmd

Lines changed: 3 additions & 2 deletions
@@ -17,6 +17,7 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem
 
-set SPARK_HOME=%~dp0..
+rem This is the entry point for running Spark shell. To avoid polluting the
+rem environment, it just launches a new cmd to do the real work.
 
-cmd /V /E /C %SPARK_HOME%\bin\spark-submit.cmd --class org.apache.spark.repl.Main %* spark-shell
+cmd /V /E /C %~dp0spark-shell2.cmd %*

bin/spark-shell2.cmd

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem    http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+set SPARK_HOME=%~dp0..
+
+cmd /V /E /C %SPARK_HOME%\bin\spark-submit.cmd --class org.apache.spark.repl.Main %* spark-shell

bin/spark-submit.cmd

Lines changed: 3 additions & 48 deletions
@@ -17,52 +17,7 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem
 
-rem NOTE: Any changes in this file must be reflected in SparkSubmitDriverBootstrapper.scala!
+rem This is the entry point for running Spark submit. To avoid polluting the
+rem environment, it just launches a new cmd to do the real work.
 
-set SPARK_HOME=%~dp0..
-set ORIG_ARGS=%*
-
-rem Reset the values of all variables used
-set SPARK_SUBMIT_DEPLOY_MODE=client
-set SPARK_SUBMIT_PROPERTIES_FILE=%SPARK_HOME%\conf\spark-defaults.conf
-set SPARK_SUBMIT_DRIVER_MEMORY=
-set SPARK_SUBMIT_LIBRARY_PATH=
-set SPARK_SUBMIT_CLASSPATH=
-set SPARK_SUBMIT_OPTS=
-set SPARK_SUBMIT_BOOTSTRAP_DRIVER=
-
-:loop
-if [%1] == [] goto continue
-if [%1] == [--deploy-mode] (
-  set SPARK_SUBMIT_DEPLOY_MODE=%2
-) else if [%1] == [--properties-file] (
-  set SPARK_SUBMIT_PROPERTIES_FILE=%2
-) else if [%1] == [--driver-memory] (
-  set SPARK_SUBMIT_DRIVER_MEMORY=%2
-) else if [%1] == [--driver-library-path] (
-  set SPARK_SUBMIT_LIBRARY_PATH=%2
-) else if [%1] == [--driver-class-path] (
-  set SPARK_SUBMIT_CLASSPATH=%2
-) else if [%1] == [--driver-java-options] (
-  set SPARK_SUBMIT_OPTS=%2
-)
-shift
-goto loop
-:continue
-
-rem For client mode, the driver will be launched in the same JVM that launches
-rem SparkSubmit, so we may need to read the properties file for any extra class
-rem paths, library paths, java options and memory early on. Otherwise, it will
-rem be too late by the time the driver JVM has started.
-
-if [%SPARK_SUBMIT_DEPLOY_MODE%] == [client] (
-  if exist %SPARK_SUBMIT_PROPERTIES_FILE% (
-    rem Parse the properties file only if the special configs exist
-    for /f %%i in ('findstr /r /c:"^[\t ]*spark.driver.memory" /c:"^[\t ]*spark.driver.extra" ^
-      %SPARK_SUBMIT_PROPERTIES_FILE%') do (
-      set SPARK_SUBMIT_BOOTSTRAP_DRIVER=1
-    )
-  )
-)
-
-cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.SparkSubmit %ORIG_ARGS%
+cmd /V /E /C %~dp0spark-submit2.cmd %*

bin/spark-submit2.cmd

Lines changed: 68 additions & 0 deletions
@@ -0,0 +1,68 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements. See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License. You may obtain a copy of the License at
+rem
+rem    http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem NOTE: Any changes in this file must be reflected in SparkSubmitDriverBootstrapper.scala!
+
+set SPARK_HOME=%~dp0..
+set ORIG_ARGS=%*
+
+rem Reset the values of all variables used
+set SPARK_SUBMIT_DEPLOY_MODE=client
+set SPARK_SUBMIT_PROPERTIES_FILE=%SPARK_HOME%\conf\spark-defaults.conf
+set SPARK_SUBMIT_DRIVER_MEMORY=
+set SPARK_SUBMIT_LIBRARY_PATH=
+set SPARK_SUBMIT_CLASSPATH=
+set SPARK_SUBMIT_OPTS=
+set SPARK_SUBMIT_BOOTSTRAP_DRIVER=
+
+:loop
+if [%1] == [] goto continue
+if [%1] == [--deploy-mode] (
+  set SPARK_SUBMIT_DEPLOY_MODE=%2
+) else if [%1] == [--properties-file] (
+  set SPARK_SUBMIT_PROPERTIES_FILE=%2
+) else if [%1] == [--driver-memory] (
+  set SPARK_SUBMIT_DRIVER_MEMORY=%2
+) else if [%1] == [--driver-library-path] (
+  set SPARK_SUBMIT_LIBRARY_PATH=%2
+) else if [%1] == [--driver-class-path] (
+  set SPARK_SUBMIT_CLASSPATH=%2
+) else if [%1] == [--driver-java-options] (
+  set SPARK_SUBMIT_OPTS=%2
+)
+shift
+goto loop
+:continue
+
+rem For client mode, the driver will be launched in the same JVM that launches
+rem SparkSubmit, so we may need to read the properties file for any extra class
+rem paths, library paths, java options and memory early on. Otherwise, it will
+rem be too late by the time the driver JVM has started.
+
+if [%SPARK_SUBMIT_DEPLOY_MODE%] == [client] (
+  if exist %SPARK_SUBMIT_PROPERTIES_FILE% (
+    rem Parse the properties file only if the special configs exist
+    for /f %%i in ('findstr /r /c:"^[\t ]*spark.driver.memory" /c:"^[\t ]*spark.driver.extra" ^
+      %SPARK_SUBMIT_PROPERTIES_FILE%') do (
+      set SPARK_SUBMIT_BOOTSTRAP_DRIVER=1
+    )
+  )
+)
+
+cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.spark.deploy.SparkSubmit %ORIG_ARGS%
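
The :loop/:continue block above only inspects the driver-related options; ORIG_ARGS still carries the full, unmodified argument list down to org.apache.spark.deploy.SparkSubmit. As a rough illustration (the application class and jar path below are made up for the example, not part of this commit), an invocation such as

    bin\spark-submit.cmd --deploy-mode client --driver-memory 2g --class org.example.MyApp C:\jobs\myapp.jar

would set SPARK_SUBMIT_DEPLOY_MODE=client and SPARK_SUBMIT_DRIVER_MEMORY=2g inside the child cmd launched by spark-submit.cmd, while the environment of the shell that typed the command is left untouched once the scripts finish.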

python/.gitignore

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 *.pyc
-docs/
+docs/_build/
 pyspark.egg-info
 build/
 dist/
