 #!/usr/bin/env bash
 
-# This file contains environment variables required to run Spark. Copy it as
-# spark-env.sh and edit that to configure Spark for your site.
-#
-# The following variables can be set in this file:
+# This file is sourced when running various Spark classes.
+# Copy it as spark-env.sh and edit that to configure Spark for your site.
+
+# Options read when launching programs locally with
+# ./bin/spark-example or ./bin/spark-submit
 # - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
 # - SPARK_PUBLIC_DNS, to set the public dns name of the driver program
+# - SPARK_CLASSPATH, default classpath entries to append
+
+# Options read by executors and drivers running inside the cluster
+# - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node
+# - SPARK_PUBLIC_DNS, to set the public DNS name of the driver program
 # - SPARK_LOCAL_DIRS, shuffle directories to use on this node
 # - MESOS_NATIVE_LIBRARY, to point to your libmesos.so if you use Mesos
-# - SPARK_JAVA_OPTS, to set node-specific JVM options for Spark. Note that
-#   we recommend setting app-wide options in the application's driver program.
-#   Examples of node-specific options : -Dspark.local.dir, GC options
-#   Examples of app-wide options : -Dspark.serializer
-#
-# If using the standalone deploy mode, you can also set variables for it here:
+# - SPARK_CLASSPATH, default classpath entries to append
+
+# Options for the daemons used in the standalone deploy mode:
 # - SPARK_MASTER_IP, to bind the master to a different IP address or hostname
 # - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports
 # - SPARK_MASTER_OPTS, to set config properties at the master (e.g "-Dx=y")