diff --git a/bin/load-spark-env.cmd b/bin/load-spark-env.cmd
index f946197b02d55..cefa513b6fb77 100644
--- a/bin/load-spark-env.cmd
+++ b/bin/load-spark-env.cmd
@@ -19,15 +19,13 @@ rem
 
 rem This script loads spark-env.cmd if it exists, and ensures it is only loaded once.
 rem spark-env.cmd is loaded from SPARK_CONF_DIR if set, or within the current directory's
-rem conf/ subdirectory.
+rem conf\ subdirectory.
 
 if [%SPARK_ENV_LOADED%] == [] (
   set SPARK_ENV_LOADED=1
 
-  if not [%SPARK_CONF_DIR%] == [] (
-    set user_conf_dir=%SPARK_CONF_DIR%
-  ) else (
-    set user_conf_dir=..\conf
+  if [%SPARK_CONF_DIR%] == [] (
+    set SPARK_CONF_DIR=%~dp0..\conf
   )
 
   call :LoadSparkEnv
@@ -54,6 +52,6 @@ if [%SPARK_SCALA_VERSION%] == [] (
 exit /b 0
 
 :LoadSparkEnv
-if exist "%user_conf_dir%\spark-env.cmd" (
-  call "%user_conf_dir%\spark-env.cmd"
+if exist "%SPARK_CONF_DIR%\spark-env.cmd" (
+  call "%SPARK_CONF_DIR%\spark-env.cmd"
 )
diff --git a/bin/load-spark-env.sh b/bin/load-spark-env.sh
index d05d94e68c81b..0b5006dbd63ac 100644
--- a/bin/load-spark-env.sh
+++ b/bin/load-spark-env.sh
@@ -29,15 +29,12 @@ fi
 if [ -z "$SPARK_ENV_LOADED" ]; then
   export SPARK_ENV_LOADED=1
 
-  # Returns the parent of the directory this script lives in.
-  parent_dir="${SPARK_HOME}"
+  export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"${SPARK_HOME}"/conf}"
 
-  user_conf_dir="${SPARK_CONF_DIR:-"$parent_dir"/conf}"
-
-  if [ -f "${user_conf_dir}/spark-env.sh" ]; then
+  if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
     # Promote all variable declarations to environment (exported) variables
     set -a
-    . "${user_conf_dir}/spark-env.sh"
+    . "${SPARK_CONF_DIR}/spark-env.sh"
     set +a
   fi
 fi
diff --git a/conf/spark-env.sh.template b/conf/spark-env.sh.template
index f8c895f5303b9..bc92c78f0f8f3 100755
--- a/conf/spark-env.sh.template
+++ b/conf/spark-env.sh.template
@@ -32,7 +32,8 @@
 # - SPARK_LOCAL_DIRS, storage directories to use on this node for shuffle and RDD data
 # - MESOS_NATIVE_JAVA_LIBRARY, to point to your libmesos.so if you use Mesos
 
-# Options read in YARN client mode
+# Options read in YARN client/cluster mode
+# - SPARK_CONF_DIR, Alternate conf dir. (Default: ${SPARK_HOME}/conf)
 # - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files
 # - YARN_CONF_DIR, to point Spark towards YARN configuration files when you use YARN
 # - SPARK_EXECUTOR_CORES, Number of cores for the executors (Default: 1).