@@ -0,0 +1,28 @@
+
+mkdir -p ${CONFIG_DIR}/rpmnew
+mkdir -p ${CONFIG_DIR}/externals/spark-hadoop
+
+printf "Installing %-44s ..." "spark-env.sh"
+
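+# A pristine copy of the packaged default is kept in ${CONFIG_DIR}/rpmnew.
+# On upgrade it is compared against both the new packaged default and the
+# live config to decide whether the live config can be replaced safely.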
+if [ ! -e ${CONFIG_DIR}/externals/spark-hadoop/spark-env.sh ]; then
+    # Always install new files without comment
+    cp -f ${INSTALL_DIR}/externals/spark-hadoop/conf/spark-env.sh ${CONFIG_DIR}/externals/spark-hadoop/spark-env.sh
+    cp -f ${INSTALL_DIR}/externals/spark-hadoop/conf/spark-env.sh ${CONFIG_DIR}/rpmnew/spark-env.sh
+    log_success_msg
+elif [ -e ${CONFIG_DIR}/rpmnew/spark-env.sh ] && ! diff -q ${CONFIG_DIR}/rpmnew/spark-env.sh ${INSTALL_DIR}/externals/spark-hadoop/conf/spark-env.sh >/dev/null; then
+    # There are changes in the default config since last installed
+    if ! diff -q ${CONFIG_DIR}/rpmnew/spark-env.sh ${CONFIG_DIR}/externals/spark-hadoop/spark-env.sh >/dev/null; then
+        # User has made their own changes too, so don't overwrite
+        log_failure_msg "Not overwriting modified configuration file spark-env.sh"
+    else
+        # User has NOT made their own changes - ok to update
+        cp -f ${INSTALL_DIR}/externals/spark-hadoop/conf/spark-env.sh ${CONFIG_DIR}/externals/spark-hadoop/spark-env.sh
+        cp -f ${INSTALL_DIR}/externals/spark-hadoop/conf/spark-env.sh ${CONFIG_DIR}/rpmnew/spark-env.sh
+        log_success_msg "Updated configuration file spark-env.sh"
+    fi
+else
+    log_success_msg "No changes to configuration file spark-env.sh"
+fi