[SPARK-10317] [CORE] Compatibility between history server script and functionality


The history server has its argument-parsing class, HistoryServerArguments. However, that class is bypassed in the start-history-server.sh codepath, where the first argument ($1) is assigned to spark.history.fs.logDirectory and all other arguments are discarded (e.g. --properties-file).
This prevents the other options from being usable via this script.
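
For illustration, a hypothetical invocation (the properties-file path is a placeholder): before this change, the script treated its first argument as the log directory and dropped everything else, so the option below never reached HistoryServerArguments; after this change the full argument list is forwarded and parsed there.

  sbin/start-history-server.sh --properties-file /path/to/custom-history.conf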

Author: Joshi <rekhajoshm@gmail.com>
Author: Rekha Joshi <rekhajoshm@gmail.com>

Closes #8758 from rekhajoshm/SPARK-10317.
Authored by Joshi on 2015-10-02 15:26:11 -07:00; committed by Marcelo Vanzin
parent b0baa11d3b
commit f85aa06464
3 changed files with 96 additions and 22 deletions

core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala

@@ -30,12 +30,12 @@ private[history] class HistoryServerArguments(conf: SparkConf, args: Array[String])
   parse(args.toList)
 
   private def parse(args: List[String]): Unit = {
+    if (args.length == 1) {
+      setLogDirectory(args.head)
+    } else {
       args match {
         case ("--dir" | "-d") :: value :: tail =>
-          logWarning("Setting log directory through the command line is deprecated as of " +
-            "Spark 1.1.0. Please set this through spark.history.fs.logDirectory instead.")
-          conf.set("spark.history.fs.logDirectory", value)
-          System.setProperty("spark.history.fs.logDirectory", value)
+          setLogDirectory(value)
           parse(tail)
 
         case ("--help" | "-h") :: tail =>
@@ -51,6 +51,13 @@ private[history] class HistoryServerArguments(conf: SparkConf, args: Array[String])
           printUsageAndExit(1)
       }
+    }
   }
 
+  private def setLogDirectory(value: String): Unit = {
+    logWarning("Setting log directory through the command line is deprecated as of " +
+      "Spark 1.1.0. Please set this through spark.history.fs.logDirectory instead.")
+    conf.set("spark.history.fs.logDirectory", value)
+  }
+
   // This mutates the SparkConf, so all accesses to it must be made after this line
   Utils.loadDefaultSparkProperties(conf, propertiesFile)
@@ -62,6 +69,8 @@ private[history] class HistoryServerArguments(conf: SparkConf, args: Array[String])
       |Usage: HistoryServer [options]
       |
       |Options:
+      |  DIR                         Deprecated; set spark.history.fs.logDirectory directly
+      |  --dir DIR (-d DIR)          Deprecated; set spark.history.fs.logDirectory directly
       |  --properties-file FILE      Path to a custom Spark properties file.
       |                              Default is conf/spark-defaults.conf.
       |
@@ -90,3 +99,4 @@ private[history] class HistoryServerArguments(conf: SparkConf, args: Array[String])
}
}

core/src/test/scala/org/apache/spark/deploy/history/HistoryServerArgumentsSuite.scala (new file)

@@ -0,0 +1,70 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.deploy.history

import java.io.File
import java.nio.charset.StandardCharsets._

import com.google.common.io.Files

import org.apache.spark._
import org.apache.spark.util.Utils

class HistoryServerArgumentsSuite extends SparkFunSuite {

  private val logDir = new File("src/test/resources/spark-events")
  private val conf = new SparkConf()
    .set("spark.history.fs.logDirectory", logDir.getAbsolutePath)
    .set("spark.history.fs.updateInterval", "1")
    .set("spark.testing", "true")

  test("No Arguments Parsing") {
    val argStrings = Array[String]()
    val hsa = new HistoryServerArguments(conf, argStrings)
    assert(conf.get("spark.history.fs.logDirectory") === logDir.getAbsolutePath)
    assert(conf.get("spark.history.fs.updateInterval") === "1")
    assert(conf.get("spark.testing") === "true")
  }

  test("Directory Arguments Parsing --dir or -d") {
    val argStrings = Array("--dir", "src/test/resources/spark-events1")
    val hsa = new HistoryServerArguments(conf, argStrings)
    assert(conf.get("spark.history.fs.logDirectory") === "src/test/resources/spark-events1")
  }

  test("Directory Param can also be set directly") {
    val argStrings = Array("src/test/resources/spark-events2")
    val hsa = new HistoryServerArguments(conf, argStrings)
    assert(conf.get("spark.history.fs.logDirectory") === "src/test/resources/spark-events2")
  }

  test("Properties File Arguments Parsing --properties-file") {
    val tmpDir = Utils.createTempDir()
    val outFile = File.createTempFile("test-load-spark-properties", "test", tmpDir)
    try {
      Files.write("spark.test.CustomPropertyA blah\n" +
        "spark.test.CustomPropertyB notblah\n", outFile, UTF_8)
      val argStrings = Array("--properties-file", outFile.getAbsolutePath)
      val hsa = new HistoryServerArguments(conf, argStrings)
      assert(conf.get("spark.test.CustomPropertyA") === "blah")
      assert(conf.get("spark.test.CustomPropertyB") === "notblah")
    } finally {
      Utils.deleteRecursively(tmpDir)
    }
  }
}

sbin/start-history-server.sh

@@ -30,10 +30,4 @@ sbin="`cd "$sbin"; pwd`"
 . "$sbin/spark-config.sh"
 . "$SPARK_PREFIX/bin/load-spark-env.sh"
 
-if [ $# != 0 ]; then
-  echo "Using command line arguments for setting the log directory is deprecated. Please "
-  echo "set the spark.history.fs.logDirectory configuration option instead."
-  export SPARK_HISTORY_OPTS="$SPARK_HISTORY_OPTS -Dspark.history.fs.logDirectory=$1"
-fi
-
-exec "$sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1
+exec "$sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1 $@