[SPARK-25043] print master and appId from spark-sql on startup
## What changes were proposed in this pull request? A small change to print the master and appId from spark-sql, because with logging turned down all the way (`log4j.logger.org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver=WARN`) we may not be able to find this information easily. This adds the following string before the `spark-sql>` prompt shows on the screen. `Spark master: yarn, Application Id: application_123456789_12345` ## How was this patch tested? I ran spark-sql locally and saw the appId displayed as expected. Please review http://spark.apache.org/contributing.html before opening a pull request. Closes #22025 from abellina/SPARK-25043_print_master_and_app_id_from_sparksql. Lead-authored-by: Alessandro Bellina <abellina@gmail.com> Co-authored-by: Alessandro Bellina <abellina@yahoo-inc.com> Signed-off-by: Thomas Graves <tgraves@apache.org>
This commit is contained in:
parent
1024875843
commit
b81e3031fd
|
@ -258,6 +258,8 @@ private[hive] object SparkSQLCLIDriver extends Logging {
|
|||
// Continuation prompt padded to align with the current database name.
// Uses Hive's private static helper CliDriver.spacesForString via reflection,
// since it is not accessible directly from this package.
def continuedPromptWithDBSpaces: String = {
  val dbPadding = ReflectionUtils.invokeStatic(
    classOf[CliDriver], "spacesForString", classOf[String] -> currentDB)
  continuedPrompt + dbPadding
}
|
||||
|
||||
cli.printMasterAndAppId
|
||||
|
||||
var currentPrompt = promptWithCurrentDB
|
||||
var line = reader.readLine(currentPrompt + "> ")
|
||||
|
||||
|
@ -323,6 +325,12 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
|
|||
hiveVariables.asScala.foreach(kv => SparkSQLEnv.sqlContext.conf.setConfString(kv._1, kv._2))
|
||||
}
|
||||
|
||||
/**
 * Prints the Spark master URL and the application id to the CLI console,
 * so the user can find them even when driver logging is turned down
 * (e.g. SparkSQLCLIDriver logger set to WARN).
 */
def printMasterAndAppId(): Unit = {
  val sc = SparkSQLEnv.sparkContext
  // NOTE(review): console.printInfo writes to the Hive session's info stream — TODO confirm
  console.printInfo(s"Spark master: ${sc.master}, Application Id: ${sc.applicationId}")
}
|
||||
|
||||
override def processCmd(cmd: String): Int = {
|
||||
val cmd_trimmed: String = cmd.trim()
|
||||
val cmd_lower = cmd_trimmed.toLowerCase(Locale.ROOT)
|
||||
|
|
Loading…
Reference in a new issue