File fix-spark-home-and-conf.patch of Package spark
Index: src/spark-2.2.3/bin/load-spark-env.sh
===================================================================
--- src/spark-2.2.3.orig/bin/load-spark-env.sh
+++ src/spark-2.2.3/bin/load-spark-env.sh
@@ -26,6 +26,10 @@ if [ -z "${SPARK_HOME}" ]; then
source "$(dirname "$0")"/find-spark-home
fi
+if [ -z "${SPARK_CONF_DIR}" ]; then
+ source "$(dirname "$0")"/find-spark-home
+fi
+
if [ -z "$SPARK_ENV_LOADED" ]; then
export SPARK_ENV_LOADED=1
Index: src/spark-2.2.3/bin/spark-class
===================================================================
--- src/spark-2.2.3.orig/bin/spark-class
+++ src/spark-2.2.3/bin/spark-class
@@ -21,6 +21,10 @@ if [ -z "${SPARK_HOME}" ]; then
source "$(dirname "$0")"/find-spark-home
fi
+if [ -z "${SPARK_CONF_DIR}" ]; then
+ source "$(dirname "$0")"/find-spark-home
+fi
+
. "${SPARK_HOME}"/bin/load-spark-env.sh
# Find the java binary
Index: src/spark-2.2.3/bin/spark-shell
===================================================================
--- src/spark-2.2.3.orig/bin/spark-shell
+++ src/spark-2.2.3/bin/spark-shell
@@ -32,6 +32,10 @@ if [ -z "${SPARK_HOME}" ]; then
source "$(dirname "$0")"/find-spark-home
fi
+if [ -z "${SPARK_CONF_DIR}" ]; then
+ source "$(dirname "$0")"/find-spark-home
+fi
+
export _SPARK_CMD_USAGE="Usage: ./bin/spark-shell [options]"
# SPARK-4161: scala does not assume use of the java classpath,
Index: src/spark-2.2.3/bin/spark-sql
===================================================================
--- src/spark-2.2.3.orig/bin/spark-sql
+++ src/spark-2.2.3/bin/spark-sql
@@ -21,5 +21,9 @@ if [ -z "${SPARK_HOME}" ]; then
source "$(dirname "$0")"/find-spark-home
fi
+if [ -z "${SPARK_CONF_DIR}" ]; then
+ source "$(dirname "$0")"/find-spark-home
+fi
+
export _SPARK_CMD_USAGE="Usage: ./bin/spark-sql [options] [cli option]"
exec "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver "$@"
Index: src/spark-2.2.3/bin/spark-submit
===================================================================
--- src/spark-2.2.3.orig/bin/spark-submit
+++ src/spark-2.2.3/bin/spark-submit
@@ -21,6 +21,10 @@ if [ -z "${SPARK_HOME}" ]; then
source "$(dirname "$0")"/find-spark-home
fi
+if [ -z "${SPARK_CONF_DIR}" ]; then
+ source "$(dirname "$0")"/find-spark-home
+fi
+
# disable randomized hash for string in Python 3.3+
export PYTHONHASHSEED=0