[SPARK-25264][K8S] Fix comma-delineated arguments passed into PythonRunner and RRunner

## What changes were proposed in this pull request?

Fixes the issue reported in https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/issues/273, where application arguments were being joined with commas, which is not the delimiter that PythonRunner and RRunner expect.
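
The crux of the fix is the join delimiter: the driver feature steps now join `appArgs` with a space instead of a comma, because (per the code comments added in the diff below) a space-separated string is what is handed to PythonRunner and RRunner. A minimal, standalone Scala sketch (illustrative only, not Spark source; the object name is made up) of why the delimiter matters:

```scala
// Illustrative sketch: join user arguments, then split them back the way a
// whitespace-delimited consumer would.
object ArgDelimiterSketch {
  def main(args: Array[String]): Unit = {
    val appArgs = Seq("5", "7", "9")

    val commaJoined = appArgs.mkString(",") // "5,7,9"
    val spaceJoined = appArgs.mkString(" ") // "5 7 9"

    // Splitting on whitespace recovers the original arguments only from the
    // space-joined form; the comma-joined form remains a single token.
    println(commaJoined.split("\\s+").toSeq) // one element:   "5,7,9"
    println(spaceJoined.split("\\s+").toSeq) // three elements: "5", "7", "9"
  }
}
```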

## How was this patch tested?

Modified the existing unit tests to cover this change.
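
The updated suites now pass a multi-element argument list and assert on the space-joined env var value. A framework-free sketch of that check (the real tests extend SparkFunSuite; the object below is illustrative only):

```scala
// Illustrative restatement of the updated assertions (not the real suite).
object DriverArgsCheck {
  def main(args: Array[String]): Unit = {
    val appArgs = Seq("5", "7", "9")

    // What the feature steps now write into ENV_PYSPARK_ARGS / ENV_R_ARGS:
    val envValue = appArgs.mkString(" ")
    assert(envValue == "5 7 9", s"unexpected env value: $envValue")

    // The previous comma join would have produced "5,7,9" and failed above.
    println("assertions passed")
  }
}
```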

Author: Ilan Filonenko <if56@cornell.edu>

Closes #22257 from ifilonenko/SPARK-25264.
Ilan Filonenko authored on 2018-08-31 15:46:45 -07:00; committed by mcheah
parent 32da87dfa4
commit e1d72f2c07
4 changed files with 8 additions and 6 deletions

@@ -30,11 +30,12 @@ private[spark] class PythonDriverFeatureStep(
   override def configurePod(pod: SparkPod): SparkPod = {
     val roleConf = kubernetesConf.roleSpecificConf
     require(roleConf.mainAppResource.isDefined, "PySpark Main Resource must be defined")
+    // Delineation is done by " " because that is input into PythonRunner
     val maybePythonArgs = Option(roleConf.appArgs).filter(_.nonEmpty).map(
       pyArgs =>
         new EnvVarBuilder()
           .withName(ENV_PYSPARK_ARGS)
-          .withValue(pyArgs.mkString(","))
+          .withValue(pyArgs.mkString(" "))
           .build())
     val maybePythonFiles = kubernetesConf.pyFiles().map(
       // Dilineation by ":" is to append the PySpark Files to the PYTHONPATH

@@ -30,11 +30,12 @@ private[spark] class RDriverFeatureStep(
   override def configurePod(pod: SparkPod): SparkPod = {
     val roleConf = kubernetesConf.roleSpecificConf
     require(roleConf.mainAppResource.isDefined, "R Main Resource must be defined")
+    // Delineation is done by " " because that is input into RRunner
     val maybeRArgs = Option(roleConf.appArgs).filter(_.nonEmpty).map(
       rArgs =>
         new EnvVarBuilder()
           .withName(ENV_R_ARGS)
-          .withValue(rArgs.mkString(","))
+          .withValue(rArgs.mkString(" "))
           .build())
     val envSeq =
       Seq(new EnvVarBuilder()

@@ -44,7 +44,7 @@ class PythonDriverFeatureStepSuite extends SparkFunSuite {
         Some(PythonMainAppResource("local:///main.py")),
         "test-app",
         "python-runner",
-        Seq("5 7")),
+        Seq("5", "7", "9")),
       appResourceNamePrefix = "",
       appId = "",
       roleLabels = Map.empty,
@@ -66,7 +66,7 @@ class PythonDriverFeatureStepSuite extends SparkFunSuite {
       .toMap
     assert(envs(ENV_PYSPARK_PRIMARY) === expectedMainResource)
     assert(envs(ENV_PYSPARK_FILES) === expectedPySparkFiles)
-    assert(envs(ENV_PYSPARK_ARGS) === "5 7")
+    assert(envs(ENV_PYSPARK_ARGS) === "5 7 9")
     assert(envs(ENV_PYSPARK_MAJOR_PYTHON_VERSION) === "2")
   }
   test("Python Step testing empty pyfiles") {

@@ -38,7 +38,7 @@ class RDriverFeatureStepSuite extends SparkFunSuite {
         Some(RMainAppResource(mainResource)),
         "test-app",
         "r-runner",
-        Seq("5 7")),
+        Seq("5", "7", "9")),
       appResourceNamePrefix = "",
       appId = "",
       roleLabels = Map.empty,
@@ -58,6 +58,6 @@ class RDriverFeatureStepSuite extends SparkFunSuite {
       .map(env => (env.getName, env.getValue))
       .toMap
     assert(envs(ENV_R_PRIMARY) === expectedMainResource)
-    assert(envs(ENV_R_ARGS) === "5 7")
+    assert(envs(ENV_R_ARGS) === "5 7 9")
   }
 }