[SPARK-16122][CORE] Add REST API for job environment
## What changes were proposed in this pull request? Add a REST API endpoint exposing the job environment. ## How was this patch tested? Existing unit tests. Author: uncleGen <hustyugm@gmail.com> Closes #16949 from uncleGen/SPARK-16122.
This commit is contained in:
parent
d3147502e7
commit
66c4b79afd
|
@ -199,6 +199,21 @@ private[v1] class ApiRootResource extends ApiRequestContext {
|
|||
new VersionResource(uiRoot)
|
||||
}
|
||||
|
||||
@Path("applications/{appId}/environment")
|
||||
def getEnvironment(@PathParam("appId") appId: String): ApplicationEnvironmentResource = {
|
||||
withSparkUI(appId, None) { ui =>
|
||||
new ApplicationEnvironmentResource(ui)
|
||||
}
|
||||
}
|
||||
|
||||
@Path("applications/{appId}/{attemptId}/environment")
|
||||
def getEnvironment(
|
||||
@PathParam("appId") appId: String,
|
||||
@PathParam("attemptId") attemptId: String): ApplicationEnvironmentResource = {
|
||||
withSparkUI(appId, Some(attemptId)) { ui =>
|
||||
new ApplicationEnvironmentResource(ui)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private[spark] object ApiRootResource {
|
||||
|
|
|
@ -0,0 +1,45 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.spark.status.api.v1
|
||||
|
||||
import javax.ws.rs._
|
||||
import javax.ws.rs.core.MediaType
|
||||
|
||||
import org.apache.spark.ui.SparkUI
|
||||
|
||||
@Produces(Array(MediaType.APPLICATION_JSON))
private[v1] class ApplicationEnvironmentResource(ui: SparkUI) {

  /**
   * Builds the JSON payload for the `environment` endpoint from the UI's
   * environment listener: JVM runtime details plus the Spark, system, and
   * classpath property lists.
   */
  @GET
  def getEnvironmentInfo(): ApplicationEnvironmentInfo = {
    val envListener = ui.environmentListener
    // Synchronize on the listener so we read a consistent snapshot while
    // events may still be arriving.
    envListener.synchronized {
      // Index the JVM facts by name; the keys below are populated by the
      // environment listener, so a missing key indicates a bug upstream.
      val jvm = envListener.jvmInformation.toMap
      val runtimeInfo = new RuntimeInfo(
        jvm("Java Version"),
        jvm("Java Home"),
        jvm("Scala Version"))

      new ApplicationEnvironmentInfo(
        runtimeInfo,
        envListener.sparkProperties,
        envListener.systemProperties,
        envListener.classpathEntries)
    }
  }

}
|
|
@ -252,3 +252,14 @@ class AccumulableInfo private[spark](
|
|||
|
||||
/** REST API representation of the Spark version serving this endpoint. */
class VersionInfo private[spark](
    val spark: String)
|
||||
|
||||
/**
 * REST API payload describing an application's runtime environment.
 *
 * Each property sequence holds (name, value) pairs as collected by the
 * environment listener.
 */
class ApplicationEnvironmentInfo private[spark] (
    val runtime: RuntimeInfo,
    val sparkProperties: Seq[(String, String)],
    val systemProperties: Seq[(String, String)],
    val classpathEntries: Seq[(String, String)])
|
||||
|
||||
/** JVM and Scala runtime details exposed through the environment REST endpoint. */
class RuntimeInfo private[spark](
    val javaVersion: String,
    val javaHome: String,
    val scalaVersion: String)
|
||||
|
|
Loading…
Reference in a new issue