[SPARK-19038][YARN] Avoid overwriting keytab configuration in yarn-client

## What changes were proposed in this pull request?

`yarn#Client` resets the `spark.yarn.keytab` configuration to point to the keytab's location in the distributed cache. If the user then reuses the same `SparkConf` to create a `SparkSession` with Hive enabled, the Hive client reads the keytab from that distributed-cache path. This is fine in yarn cluster mode, but in yarn client mode the driver runs outside the containers, so fetching the keytab from that path fails.

This change avoids resetting the configuration in `yarn#Client` and overrides it only for the AM, so `spark.yarn.keytab` resolves to the correct keytab path in both client mode (keytab on the local filesystem) and cluster mode (keytab in the distributed cache).
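As a rough illustration of the approach, here is a minimal, self-contained sketch; the class `KeytabSketch` and its methods are hypothetical, and only the `amKeytabFileName` naming follows the diff below:

```scala
import java.io.File
import java.util.{Properties, UUID}

import org.apache.spark.SparkConf

// Minimal sketch (hypothetical class): the user-facing SparkConf keeps the
// local keytab path, while the distributed-cache file name is tracked in a
// private field and written only into the properties shipped to the AM.
class KeytabSketch(sparkConf: SparkConf) {

  // Name of the keytab copy placed in the YARN distributed cache (AM only).
  private var amKeytabFileName: String = null

  def setupCredentials(): Unit = {
    sparkConf.getOption("spark.yarn.keytab").foreach { keytab =>
      // Generate a name that does not conflict with any user file, but do NOT
      // write it back into sparkConf: in yarn-client mode the driver must keep
      // seeing the local filesystem path.
      amKeytabFileName = new File(keytab).getName + "-" + UUID.randomUUID().toString
    }
  }

  def amProperties(): Properties = {
    val props = new Properties()
    sparkConf.getAll.foreach { case (k, v) => props.setProperty(k, v) }
    // Only the configuration handed to the AM points at the localized keytab.
    Option(amKeytabFileName).foreach { name => props.setProperty("spark.yarn.keytab", name) }
    props
  }
}
```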

## How was this patch tested?

Verified on a secure (Kerberized) cluster.

Author: jerryshao <sshao@hortonworks.com>

Closes #16923 from jerryshao/SPARK-19038.
Authored by jerryshao on 2017-02-24 09:31:52 -08:00; committed by Marcelo Vanzin
commit a920a43694, parent b0a8c16fec
2 changed files with 6 additions and 7 deletions


@@ -100,6 +100,7 @@ private[spark] class Client(
   private var principal: String = null
   private var keytab: String = null
   private var credentials: Credentials = null
+  private var amKeytabFileName: String = null
 
   private val launcherBackend = new LauncherBackend() {
     override def onStopRequest(): Unit = {
@@ -471,7 +472,7 @@ private[spark] class Client(
       logInfo("To enable the AM to login from keytab, credentials are being copied over to the AM" +
         " via the YARN Secure Distributed Cache.")
       val (_, localizedPath) = distribute(keytab,
-        destName = sparkConf.get(KEYTAB),
+        destName = Some(amKeytabFileName),
         appMasterOnly = true)
       require(localizedPath != null, "Keytab file already distributed.")
     }
@@ -708,6 +709,9 @@ private[spark] class Client(
     // Save Spark configuration to a file in the archive.
     val props = new Properties()
     sparkConf.getAll.foreach { case (k, v) => props.setProperty(k, v) }
+    // Override spark.yarn.keytab to point to the location in the distributed cache which will be
+    // used by the AM.
+    Option(amKeytabFileName).foreach { k => props.setProperty(KEYTAB.key, k) }
     confStream.putNextEntry(new ZipEntry(SPARK_CONF_FILE))
     val writer = new OutputStreamWriter(confStream, StandardCharsets.UTF_8)
     props.store(writer, "Spark configuration.")
@@ -995,8 +999,7 @@ private[spark] class Client(
       val f = new File(keytab)
       // Generate a file name that can be used for the keytab file, that does not conflict
       // with any user file.
-      val keytabFileName = f.getName + "-" + UUID.randomUUID().toString
-      sparkConf.set(KEYTAB.key, keytabFileName)
+      amKeytabFileName = f.getName + "-" + UUID.randomUUID().toString
       sparkConf.set(PRINCIPAL.key, principal)
     }
     // Defensive copy of the credentials


@@ -106,10 +106,6 @@ private[hive] class HiveClientImpl(
   // Set up kerberos credentials for UserGroupInformation.loginUser within
   // current class loader
-  // Instead of using the spark conf of the current spark context, a new
-  // instance of SparkConf is needed for the original value of spark.yarn.keytab
-  // and spark.yarn.principal set in SparkSubmit, as yarn.Client resets the
-  // keytab configuration for the link name in distributed cache
   if (sparkConf.contains("spark.yarn.principal") && sparkConf.contains("spark.yarn.keytab")) {
     val principalName = sparkConf.get("spark.yarn.principal")
     val keytabFileName = sparkConf.get("spark.yarn.keytab")
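
For reference, a hedged sketch of what a Hive-enabled driver can now do with the unchanged configuration; the object `KerberosLoginSketch` is hypothetical, while `HiveClientImpl` performs an equivalent Kerberos login through `UserGroupInformation`:

```scala
import java.io.File

import org.apache.hadoop.security.UserGroupInformation
import org.apache.spark.SparkConf

// Hypothetical helper: after this change, spark.yarn.keytab in the current
// SparkConf already holds the right path for the process reading it (local
// file in yarn-client mode, distributed-cache name only inside the AM), so a
// fresh SparkConf is no longer needed to recover the original value.
object KerberosLoginSketch {
  def loginIfConfigured(sparkConf: SparkConf): Unit = {
    if (sparkConf.contains("spark.yarn.principal") && sparkConf.contains("spark.yarn.keytab")) {
      val principal = sparkConf.get("spark.yarn.principal")
      val keytab = sparkConf.get("spark.yarn.keytab")
      require(new File(keytab).exists(), s"Keytab file $keytab does not exist")
      // Log in to Kerberos with the configured principal and keytab path.
      UserGroupInformation.loginUserFromKeytab(principal, keytab)
    }
  }
}
```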