diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index 529ca3faac..7050396e84 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -606,7 +606,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
    * configuration out for debugging.
    */
   def toDebugString: String = {
-    getAll.sorted.map{case (k, v) => k + "=" + v}.mkString("\n")
+    Utils.redact(this, getAll).sorted.map { case (k, v) => k + "=" + v }.mkString("\n")
   }

 }
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index 4ba8a3ab1c..5ca4f9c73f 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -32,7 +32,7 @@ import org.apache.spark.internal.config.Kryo._
 import org.apache.spark.internal.config.Network._
 import org.apache.spark.network.util.ByteUnit
 import org.apache.spark.serializer.{JavaSerializer, KryoRegistrator, KryoSerializer}
-import org.apache.spark.util.{ResetSystemProperties, RpcUtils}
+import org.apache.spark.util.{ResetSystemProperties, RpcUtils, Utils}

 class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSystemProperties {
   test("Test byteString conversion") {
@@ -354,6 +354,14 @@ class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSyst
     }
   }

+  test("SPARK-27244 toDebugString should redact passwords") {
+    val conf = new SparkConf().set("dummy.password", "dummy-password")
+    conf.validateSettings()
+
+    assert(conf.get("dummy.password") === "dummy-password")
+    assert(conf.toDebugString.contains(s"dummy.password=${Utils.REDACTION_REPLACEMENT_TEXT}"))
+  }
+
   val defaultIllegalValue = "SomeIllegalValue"
   val illegalValueTests : Map[String, (SparkConf, String) => Any] = Map(
     "getTimeAsSeconds" -> (_.getTimeAsSeconds(_)),