diff --git a/conf/spark-env.sh.template b/conf/spark-env.sh.template
index df39ad8b0d..3c003f45ed 100755
--- a/conf/spark-env.sh.template
+++ b/conf/spark-env.sh.template
@@ -62,6 +62,7 @@
 # Generic options for the daemons used in the standalone deploy mode
 # - SPARK_CONF_DIR      Alternate conf dir. (Default: ${SPARK_HOME}/conf)
 # - SPARK_LOG_DIR       Where log files are stored. (Default: ${SPARK_HOME}/logs)
+# - SPARK_LOG_MAX_FILES Max log files of Spark daemons can rotate to. Default is 5.
 # - SPARK_PID_DIR       Where the pid file is stored. (Default: /tmp)
 # - SPARK_IDENT_STRING  A string representing this instance of spark. (Default: $USER)
 # - SPARK_NICENESS      The scheduling priority for daemons. (Default: 0)
diff --git a/sbin/spark-daemon.sh b/sbin/spark-daemon.sh
index 81f2fd40a7..e563f7bff1 100755
--- a/sbin/spark-daemon.sh
+++ b/sbin/spark-daemon.sh
@@ -23,6 +23,7 @@
 #
 #   SPARK_CONF_DIR      Alternate conf dir. Default is ${SPARK_HOME}/conf.
 #   SPARK_LOG_DIR       Where log files are stored. ${SPARK_HOME}/logs by default.
+#   SPARK_LOG_MAX_FILES Max log files of Spark daemons can rotate to. Default is 5.
 #   SPARK_MASTER        host:path where spark code should be rsync'd from
 #   SPARK_PID_DIR       The pid files are stored. /tmp by default.
 #   SPARK_IDENT_STRING  A string representing this instance of spark. $USER by default
@@ -74,10 +75,16 @@ shift
 spark_rotate_log ()
 {
     log=$1;
-    num=5;
-    if [ -n "$2" ]; then
-      num=$2
+
+    if [[ -z ${SPARK_LOG_MAX_FILES} ]]; then
+      num=5
+    elif [[ ${SPARK_LOG_MAX_FILES} -gt 0 ]]; then
+      num=${SPARK_LOG_MAX_FILES}
+    else
+      echo "Error: SPARK_LOG_MAX_FILES must be a positive number, but got ${SPARK_LOG_MAX_FILES}"
+      exit -1
     fi
+
     if [ -f "$log" ]; then # rotate logs
       while [ $num -gt 1 ]; do
         prev=`expr $num - 1`
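
For reference, a minimal sketch (not part of the patch) of how the new variable is meant to be used and of the rotation behavior spark_rotate_log implements. The hunk above is truncated before the end of the rotation loop, so the loop body here follows the usual rotate-by-rename pattern and should be read as an illustration; the value 3 and the function name rotate_log_sketch are made up for the example.

    # Hypothetical conf/spark-env.sh entry: keep at most 3 rotated daemon log
    # files instead of the default 5 (illustrative value only).
    export SPARK_LOG_MAX_FILES=3

    # Standalone sketch of the rotation: with num=3, log.2 -> log.3,
    # log.1 -> log.2, log -> log.1, so at most 3 numbered files remain.
    rotate_log_sketch () {
      log=$1
      num=${SPARK_LOG_MAX_FILES:-5}
      if [ -f "$log" ]; then
        while [ "$num" -gt 1 ]; do
          prev=`expr $num - 1`
          [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num"
          num=$prev
        done
        mv "$log" "$log.$num"
      fi
    }

With the patch applied, an invalid setting such as SPARK_LOG_MAX_FILES=0 makes the daemon script exit with the error message shown in the diff rather than silently falling back to the default.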