From a950570f91db56cbae488c82def49cd0da16e996 Mon Sep 17 00:00:00 2001
From: Terry Kim
Date: Sat, 15 Jun 2019 09:42:22 -0500
Subject: [PATCH] [MINOR][CORE] Remove unused variables, unused imports, etc.

## What changes were proposed in this pull request?

- Remove unused variables.
- Remove unused imports.
- Change var to val in a few places.

## How was this patch tested?

Unit tests.

Closes #24857 from imback82/unused_variable.

Authored-by: Terry Kim
Signed-off-by: Sean Owen
---
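Note (illustrative only, not part of the diff below): all three cleanups are mechanical. A minimal Scala sketch of the before/after shape, using a made-up `lookup` helper:

    object CleanupSketch {
      def lookup(id: Long): Long = id * 2   // hypothetical helper

      // Before: a binding that is never read, and a var that is never reassigned.
      def before(id: Long): Long = {
        val unused = lookup(id)             // unused variable: delete the binding
        var result = lookup(id)             // never mutated: prefer val
        result
      }

      // After: same behavior, less noise.
      def after(id: Long): Long = lookup(id)
    }

Unused imports follow the same rule: if no name in the file resolves through the import, the line can be dropped outright.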
 .../spark/ExecutorAllocationManager.scala     |  2 -
 .../org/apache/spark/MapOutputTracker.scala   |  2 +-
 .../org/apache/spark/SecurityManager.scala    |  1 -
 .../apache/spark/api/java/JavaPairRDD.scala   |  2 +-
 .../api/python/PythonWorkerFactory.scala      |  2 +-
 .../spark/api/r/RBackendAuthHandler.scala     |  1 -
 .../scala/org/apache/spark/api/r/SerDe.scala  |  2 +-
 .../org/apache/spark/deploy/SparkSubmit.scala |  5 +--
 .../spark/deploy/worker/DriverWrapper.scala   |  2 +-
 .../CoarseGrainedExecutorBackend.scala        |  5 ---
 .../spark/memory/ExecutionMemoryPool.scala    |  2 +-
 .../spark/metrics/ExecutorMetricType.scala    |  1 -
 .../spark/metrics/sink/GraphiteSink.scala     |  1 -
 .../org/apache/spark/rdd/CoalescedRDD.scala   |  2 +-
 .../scala/org/apache/spark/rdd/PipedRDD.scala |  3 +-
 .../org/apache/spark/rpc/RpcEndpointRef.scala |  2 +-
 .../scheduler/EventLoggingListener.scala      |  1 -
 .../spark/scheduler/SparkListener.scala       |  3 +-
 .../CoarseGrainedSchedulerBackend.scala       |  1 -
 .../apache/spark/status/AppStatusStore.scala  |  2 +-
 .../api/v1/ApplicationListResource.scala      |  2 +-
 .../status/api/v1/JacksonMessageWriter.scala  |  1 -
 .../api/v1/OneApplicationResource.scala       |  1 -
 .../spark/status/api/v1/StagesResource.scala  |  6 +--
 .../storage/ShuffleBlockFetcherIterator.scala |  3 +-
 .../scala/org/apache/spark/ui/SparkUI.scala   |  7 +---
 .../org/apache/spark/ui/jobs/JobPage.scala    |  1 -
 .../org/apache/spark/ui/jobs/JobsTab.scala    |  2 -
 .../org/apache/spark/ui/jobs/PoolPage.scala   |  1 -
 .../org/apache/spark/ui/jobs/StagePage.scala  | 37 -------------------
 .../org/apache/spark/ui/jobs/StageTable.scala |  2 -
 .../org/apache/spark/ui/jobs/StagesTab.scala  |  2 +-
 .../apache/spark/util/CommandLineUtils.scala  |  2 -
 .../collection/PartitionedPairBuffer.scala    |  1 -
 .../util/io/ChunkedByteBufferFileRegion.scala |  4 --
 35 files changed, 19 insertions(+), 95 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala b/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
index 63df7ccf46..bb95feaecc 100644
--- a/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
+++ b/core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala
@@ -567,7 +567,6 @@ private[spark] class ExecutorAllocationManager(
 
     override def onTaskStart(taskStart: SparkListenerTaskStart): Unit = {
       val stageId = taskStart.stageId
-      val taskId = taskStart.taskInfo.taskId
       val taskIndex = taskStart.taskInfo.index
 
       allocationManager.synchronized {
@@ -589,7 +588,6 @@ private[spark] class ExecutorAllocationManager(
     }
 
     override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = {
-      val taskId = taskEnd.taskInfo.taskId
       val taskIndex = taskEnd.taskInfo.index
       val stageId = taskEnd.stageId
       allocationManager.synchronized {
diff --git a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
index c6271d2519..5c820f5a2e 100644
--- a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
+++ b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
@@ -227,7 +227,7 @@ private[spark] class MapOutputTrackerMasterEndpoint(
     case GetMapOutputStatuses(shuffleId: Int) =>
       val hostPort = context.senderAddress.hostPort
       logInfo("Asked to send map output locations for shuffle " + shuffleId + " to " + hostPort)
-      val mapOutputStatuses = tracker.post(new GetMapOutputMessage(shuffleId, context))
+      tracker.post(new GetMapOutputMessage(shuffleId, context))
 
     case StopMapOutputTracker =>
       logInfo("MapOutputTrackerMasterEndpoint stopped!")
diff --git a/core/src/main/scala/org/apache/spark/SecurityManager.scala b/core/src/main/scala/org/apache/spark/SecurityManager.scala
index 5a5c5a403f..77db0f5d0e 100644
--- a/core/src/main/scala/org/apache/spark/SecurityManager.scala
+++ b/core/src/main/scala/org/apache/spark/SecurityManager.scala
@@ -18,7 +18,6 @@
 package org.apache.spark
 
 import java.io.File
-import java.net.{Authenticator, PasswordAuthentication}
 import java.nio.charset.StandardCharsets.UTF_8
 import java.nio.file.Files
 import java.util.Base64
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala
index 50ed8d9bd3..317f3c51d0 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala
@@ -19,7 +19,7 @@ package org.apache.spark.api.java
 
 import java.{lang => jl}
 import java.lang.{Iterable => JIterable}
-import java.util.{Comparator, Iterator => JIterator, List => JList}
+import java.util.{Comparator, List => JList}
 
 import scala.collection.JavaConverters._
 import scala.language.implicitConversions
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
index 97aab7415c..6c37844a08 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
@@ -17,7 +17,7 @@
 package org.apache.spark.api.python
 
-import java.io.{DataInputStream, DataOutputStream, EOFException, InputStream, OutputStreamWriter}
+import java.io.{DataInputStream, DataOutputStream, EOFException, InputStream}
 import java.net.{InetAddress, ServerSocket, Socket, SocketException}
 import java.util.Arrays
 import java.util.concurrent.TimeUnit
diff --git a/core/src/main/scala/org/apache/spark/api/r/RBackendAuthHandler.scala b/core/src/main/scala/org/apache/spark/api/r/RBackendAuthHandler.scala
index 4162e4a6c7..8cd95ee653 100644
--- a/core/src/main/scala/org/apache/spark/api/r/RBackendAuthHandler.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/RBackendAuthHandler.scala
@@ -23,7 +23,6 @@ import java.nio.charset.StandardCharsets.UTF_8
 import io.netty.channel.{Channel, ChannelHandlerContext, SimpleChannelInboundHandler}
 
 import org.apache.spark.internal.Logging
-import org.apache.spark.util.Utils
 
 /**
  * Authentication handler for connections from the R process.
diff --git a/core/src/main/scala/org/apache/spark/api/r/SerDe.scala b/core/src/main/scala/org/apache/spark/api/r/SerDe.scala
index 6e0a3f6398..9172038314 100644
--- a/core/src/main/scala/org/apache/spark/api/r/SerDe.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/SerDe.scala
@@ -102,7 +102,7 @@ private[spark] object SerDe {
   def readBytes(in: DataInputStream): Array[Byte] = {
     val len = readInt(in)
     val out = new Array[Byte](len)
-    val bytesRead = in.readFully(out)
+    in.readFully(out)
     out
   }
 
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 926e2dfd34..12a8473b22 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.deploy
 
 import java.io._
-import java.lang.reflect.{InvocationTargetException, Modifier, UndeclaredThrowableException}
+import java.lang.reflect.{InvocationTargetException, UndeclaredThrowableException}
 import java.net.{URI, URL}
 import java.security.PrivilegedExceptionAction
 import java.text.ParseException
@@ -26,7 +26,7 @@ import java.util.{ServiceLoader, UUID}
 
 import scala.annotation.tailrec
 import scala.collection.JavaConverters._
-import scala.collection.mutable.{ArrayBuffer, HashMap, Map}
+import scala.collection.mutable.ArrayBuffer
 import scala.util.{Properties, Try}
 
 import org.apache.commons.io.FilenameUtils
@@ -317,7 +317,6 @@ private[spark] class SparkSubmit extends Logging {
     val isKubernetesClient = clusterManager == KUBERNETES && deployMode == CLIENT
     val isKubernetesClusterModeDriver = isKubernetesClient &&
       sparkConf.getBoolean("spark.kubernetes.submitInDriver", false)
-    val isMesosClient = clusterManager == MESOS && deployMode == CLIENT
 
     if (!isMesosCluster && !isStandAloneCluster) {
       // Resolve maven dependencies if there are any and add classpath to jars. Add them to py-files
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/DriverWrapper.scala b/core/src/main/scala/org/apache/spark/deploy/worker/DriverWrapper.scala
index 9b51beadb4..56356f5f27 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/DriverWrapper.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/DriverWrapper.scala
@@ -22,7 +22,7 @@ import java.io.File
 
 import org.apache.commons.lang3.StringUtils
 
 import org.apache.spark.{SecurityManager, SparkConf}
-import org.apache.spark.deploy.{DependencyUtils, SparkHadoopUtil, SparkSubmit}
+import org.apache.spark.deploy.{DependencyUtils, SparkHadoopUtil}
 import org.apache.spark.internal.{config, Logging}
 import org.apache.spark.rpc.RpcEnv
 import org.apache.spark.util._
diff --git a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
index bfb7976cfd..9b5f9f5220 100644
--- a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
@@ -17,7 +17,6 @@
 package org.apache.spark.executor
 
-import java.io.{BufferedInputStream, FileInputStream}
 import java.net.URL
 import java.nio.ByteBuffer
 import java.util.Locale
@@ -27,11 +26,7 @@ import scala.collection.mutable
 import scala.util.{Failure, Success}
 import scala.util.control.NonFatal
 
-import com.fasterxml.jackson.databind.exc.MismatchedInputException
 import org.json4s.DefaultFormats
-import org.json4s.JsonAST.JArray
-import org.json4s.MappingException
-import org.json4s.jackson.JsonMethods._
 
 import org.apache.spark._
 import org.apache.spark.TaskState.TaskState
diff --git a/core/src/main/scala/org/apache/spark/memory/ExecutionMemoryPool.scala b/core/src/main/scala/org/apache/spark/memory/ExecutionMemoryPool.scala
index f1915857ea..50055dcd29 100644
--- a/core/src/main/scala/org/apache/spark/memory/ExecutionMemoryPool.scala
+++ b/core/src/main/scala/org/apache/spark/memory/ExecutionMemoryPool.scala
@@ -151,7 +151,7 @@ private[memory] class ExecutionMemoryPool(
    */
   def releaseMemory(numBytes: Long, taskAttemptId: Long): Unit = lock.synchronized {
     val curMem = memoryForTask.getOrElse(taskAttemptId, 0L)
-    var memoryToFree = if (curMem < numBytes) {
+    val memoryToFree = if (curMem < numBytes) {
       logWarning(
         s"Internal error: release called on $numBytes bytes but task only has $curMem bytes " +
           s"of memory from the $poolName pool")
diff --git a/core/src/main/scala/org/apache/spark/metrics/ExecutorMetricType.scala b/core/src/main/scala/org/apache/spark/metrics/ExecutorMetricType.scala
index 6d8e9a8af7..b830f9f402 100644
--- a/core/src/main/scala/org/apache/spark/metrics/ExecutorMetricType.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/ExecutorMetricType.scala
@@ -206,7 +206,6 @@ private[spark] object ExecutorMetricType {
   var numberOfMetrics = 0
   val definedMetricsAndOffset = mutable.LinkedHashMap.empty[String, Int]
   metricGetters.foreach { m =>
-    var metricInSet = 0
     (0 until m.names.length).foreach { idx =>
       definedMetricsAndOffset += (m.names(idx) -> (idx + numberOfMetrics))
     }
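Note on the ExecutionMemoryPool hunk above: the `var` → `val` rewrite works because `if`/`else` is an expression in Scala, so the selected branch's value initializes the binding directly, even when one branch also logs a warning. A standalone sketch with made-up values:

    val curMem = 100L
    val numBytes = 150L
    // The whole if/else evaluates to a single Long; no mutable var is needed.
    val memoryToFree = if (curMem < numBytes) curMem else numBytes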
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
index ac33e68abb..21b4dfb26d 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
@@ -17,7 +17,6 @@
 package org.apache.spark.metrics.sink
 
-import java.net.InetSocketAddress
 import java.util.{Locale, Properties}
 import java.util.concurrent.TimeUnit
diff --git a/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala
index 836d3e2312..55c141c2b8 100644
--- a/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala
@@ -364,7 +364,7 @@ private class DefaultPartitionCoalescer(val balanceSlack: Double = 0.10)
     val partNoLocIter = partitionLocs.partsWithoutLocs.iterator
     groupArr.filter(pg => pg.numPartitions == 0).foreach { pg =>
       while (partNoLocIter.hasNext && pg.numPartitions == 0) {
-        var nxt_part = partNoLocIter.next()
+        val nxt_part = partNoLocIter.next()
         if (!initialHash.contains(nxt_part)) {
           pg.partitions += nxt_part
           initialHash += nxt_part
diff --git a/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala
index f1daf62ad4..3b11e82dab 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala
@@ -32,7 +32,7 @@ import scala.collection.mutable.ArrayBuffer
 import scala.io.Source
 import scala.reflect.ClassTag
 
-import org.apache.spark.{Partition, SparkEnv, TaskContext}
+import org.apache.spark.{Partition, TaskContext}
 import org.apache.spark.util.Utils
 
@@ -109,7 +109,6 @@ private[spark] class PipedRDD[T: ClassTag](
     }
 
     val proc = pb.start()
-    val env = SparkEnv.get
     val childThreadException = new AtomicReference[Throwable](null)
 
     // Start a thread to print the process's stderr to ours
diff --git a/core/src/main/scala/org/apache/spark/rpc/RpcEndpointRef.scala b/core/src/main/scala/org/apache/spark/rpc/RpcEndpointRef.scala
index 4d39f144dd..6c4c0383b3 100644
--- a/core/src/main/scala/org/apache/spark/rpc/RpcEndpointRef.scala
+++ b/core/src/main/scala/org/apache/spark/rpc/RpcEndpointRef.scala
@@ -20,7 +20,7 @@ package org.apache.spark.rpc
 
 import scala.concurrent.Future
 import scala.reflect.ClassTag
 
-import org.apache.spark.{SparkConf, SparkException}
+import org.apache.spark.SparkConf
 import org.apache.spark.internal.Logging
 import org.apache.spark.util.RpcUtils
diff --git a/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala b/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala
index 069a91f1a8..5ce09c4870 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala
@@ -20,7 +20,6 @@ package org.apache.spark.scheduler
 
 import java.io._
 import java.net.URI
 import java.nio.charset.StandardCharsets
-import java.util.Locale
 
 import scala.collection.mutable.{ArrayBuffer, Map}
diff --git a/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
index b332c0f46f..1acfd909da 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
@@ -24,12 +24,11 @@ import scala.collection.Map
 
 import com.fasterxml.jackson.annotation.JsonTypeInfo
 
-import org.apache.spark.{SparkConf, TaskEndReason}
+import org.apache.spark.TaskEndReason
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.executor.{ExecutorMetrics, TaskMetrics}
 import org.apache.spark.scheduler.cluster.ExecutorInfo
 import org.apache.spark.storage.{BlockManagerId, BlockUpdatedInfo}
-import org.apache.spark.ui.SparkUI
 
 @DeveloperApi
 @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY, property = "Event")
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
index 5f8c2774c5..d81070c362 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
@@ -21,7 +21,6 @@ import java.util.concurrent.TimeUnit
 import java.util.concurrent.atomic.{AtomicInteger, AtomicReference}
 import javax.annotation.concurrent.GuardedBy
 
-import scala.collection.mutable
 import scala.collection.mutable.{HashMap, HashSet}
 import scala.concurrent.Future
diff --git a/core/src/main/scala/org/apache/spark/status/AppStatusStore.scala b/core/src/main/scala/org/apache/spark/status/AppStatusStore.scala
index 62402564c2..6a9677834d 100644
--- a/core/src/main/scala/org/apache/spark/status/AppStatusStore.scala
+++ b/core/src/main/scala/org/apache/spark/status/AppStatusStore.scala
@@ -25,7 +25,7 @@ import scala.collection.mutable.HashMap
 import org.apache.spark.{JobExecutionStatus, SparkConf}
 import org.apache.spark.status.api.v1
 import org.apache.spark.ui.scope._
-import org.apache.spark.util.{Distribution, Utils}
+import org.apache.spark.util.Utils
 import org.apache.spark.util.kvstore.{InMemoryStore, KVStore}
 
 /**
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
index 69054f2b77..197cf64ebd 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.status.api.v1
 
-import java.util.{Date, List => JList}
+import java.util.{List => JList}
 import javax.ws.rs.{DefaultValue, GET, Produces, QueryParam}
 import javax.ws.rs.core.MediaType
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/JacksonMessageWriter.scala b/core/src/main/scala/org/apache/spark/status/api/v1/JacksonMessageWriter.scala
index 50a286d0d3..846e4f9cc9 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/JacksonMessageWriter.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/JacksonMessageWriter.scala
@@ -19,7 +19,6 @@ package org.apache.spark.status.api.v1
 import java.io.OutputStream
 import java.lang.annotation.Annotation
 import java.lang.reflect.Type
-import java.nio.charset.StandardCharsets
 import java.text.SimpleDateFormat
 import java.util.{Calendar, Locale, SimpleTimeZone}
 import javax.ws.rs.Produces
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/OneApplicationResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/OneApplicationResource.scala
index 1f4082cac8..2ee9d3d081 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/OneApplicationResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/OneApplicationResource.scala
@@ -25,7 +25,6 @@ import javax.ws.rs.core.{MediaType, Response, StreamingOutput}
 import scala.util.control.NonFatal
 
 import org.apache.spark.{JobExecutionStatus, SparkContext}
-import org.apache.spark.ui.UIUtils
 
 @Produces(Array(MediaType.APPLICATION_JSON))
 private[v1] class AbstractApplicationResource extends BaseAppResource {
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/StagesResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/StagesResource.scala
index db53a400ed..44ee322a22 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/StagesResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/StagesResource.scala
@@ -20,11 +20,7 @@ import java.util.{HashMap, List => JList, Locale}
 import javax.ws.rs._
 import javax.ws.rs.core.{Context, MediaType, MultivaluedMap, UriInfo}
 
-import org.apache.spark.SparkException
-import org.apache.spark.scheduler.StageInfo
-import org.apache.spark.status.api.v1.StageStatus._
-import org.apache.spark.status.api.v1.TaskSorting._
-import org.apache.spark.ui.{SparkUI, UIUtils}
+import org.apache.spark.ui.UIUtils
 import org.apache.spark.ui.jobs.ApiHelper._
 import org.apache.spark.util.Utils
diff --git a/core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala b/core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala
index c89d5cc971..a4d91a71f3 100644
--- a/core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala
+++ b/core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala
@@ -17,8 +17,7 @@
 package org.apache.spark.storage
 
-import java.io.{InputStream, IOException, SequenceInputStream}
-import java.nio.ByteBuffer
+import java.io.{InputStream, IOException}
 import java.util.concurrent.{LinkedBlockingQueue, TimeUnit}
 import javax.annotation.concurrent.GuardedBy
diff --git a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
index 7378801f7b..1175bc25de 100644
--- a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -17,11 +17,9 @@
 package org.apache.spark.ui
 
-import java.util.{Date, List => JList, ServiceLoader}
+import java.util.Date
 
-import scala.collection.JavaConverters._
-
-import org.apache.spark.{JobExecutionStatus, SecurityManager, SparkConf, SparkContext}
+import org.apache.spark.{SecurityManager, SparkConf, SparkContext}
 import org.apache.spark.internal.Logging
 import org.apache.spark.internal.config.UI._
 import org.apache.spark.scheduler._
@@ -32,7 +30,6 @@ import org.apache.spark.ui.env.EnvironmentTab
 import org.apache.spark.ui.exec.ExecutorsTab
 import org.apache.spark.ui.jobs.{JobsTab, StagesTab}
 import org.apache.spark.ui.storage.StorageTab
-import org.apache.spark.util.Utils
 
 /**
  * Top level user interface for a Spark application.
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala
index 6e373cfdde..02d5365fb3 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala
@@ -26,7 +26,6 @@ import scala.xml.{Node, NodeSeq, Unparsed, Utility}
 import org.apache.commons.lang3.StringEscapeUtils
 
 import org.apache.spark.JobExecutionStatus
-import org.apache.spark.scheduler._
 import org.apache.spark.status.AppStatusStore
 import org.apache.spark.status.api.v1
 import org.apache.spark.ui._
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/JobsTab.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobsTab.scala
index 1c1915e7e2..c2644a8eea 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/JobsTab.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobsTab.scala
@@ -19,8 +19,6 @@ package org.apache.spark.ui.jobs
 
 import javax.servlet.http.HttpServletRequest
 
-import scala.collection.JavaConverters._
-
 import org.apache.spark.JobExecutionStatus
 import org.apache.spark.internal.config.SCHEDULER_MODE
 import org.apache.spark.scheduler.SchedulingMode
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
index 6d2710385d..e10385cbca 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
@@ -22,7 +22,6 @@ import javax.servlet.http.HttpServletRequest
 
 import scala.xml.Node
 
 import org.apache.spark.status.PoolData
-import org.apache.spark.status.api.v1._
 import org.apache.spark.ui.{UIUtils, WebUIPage}
 
 /** Page showing specific pool details */
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
index e399f7ee19..838fc82d2e 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
@@ -87,7 +87,6 @@ private[ui] class StagePage(parent: StagesTab, store: AppStatusStore) extends We
     val parameterAttempt = request.getParameter("attempt")
     require(parameterAttempt != null && parameterAttempt.nonEmpty, "Missing attempt parameter")
 
-    val parameterTaskPage = request.getParameter("task.page")
     val parameterTaskSortColumn = request.getParameter("task.sort")
     val parameterTaskSortDesc = request.getParameter("task.desc")
     val parameterTaskPageSize = request.getParameter("task.pageSize")
@@ -98,7 +97,6 @@ private[ui] class StagePage(parent: StagesTab, store: AppStatusStore) extends We
     var eventTimelineTaskPageSize = Option(
       eventTimelineParameterTaskPageSize).map(_.toInt).getOrElse(100)
 
-    val taskPage = Option(parameterTaskPage).map(_.toInt).getOrElse(1)
     val taskSortColumn = Option(parameterTaskSortColumn).map { sortColumn =>
       UIUtils.decodeURLParameter(sortColumn)
     }.getOrElse("Index")
@@ -131,13 +129,6 @@ private[ui] class StagePage(parent: StagesTab, store: AppStatusStore) extends We
       return UIUtils.headerSparkPage(request, stageHeader, content, parent)
     }
 
-    val storedTasks = store.taskCount(stageData.stageId, stageData.attemptId)
-    val numCompleted = stageData.numCompleteTasks
-    val totalTasksNumStr = if (totalTasks == storedTasks) {
-      s"$totalTasks"
-    } else {
-      s"$totalTasks, showing $storedTasks"
-    }
     if (eventTimelineTaskPageSize < 1 || eventTimelineTaskPageSize > totalTasks) {
       eventTimelineTaskPageSize = totalTasks
     }
@@ -210,19 +201,6 @@ private[ui] class StagePage(parent: StagesTab, store: AppStatusStore) extends We
     val stageGraph = parent.store.asOption(parent.store.operationGraphForStage(stageId))
     val dagViz = UIUtils.showDagVizForStage(stageId, stageGraph)
 
-    val accumulableHeaders: Seq[String] = Seq("Accumulable", "Value")
-    def accumulableRow(acc: AccumulableInfo): Seq[Node] = {
-      if (acc.name != null && acc.value != null) {
-        <tr><td>{acc.name}</td><td>{acc.value}</td></tr>
-      } else {
-        Nil
-      }
-    }
-    val accumulableTable = UIUtils.listingTable(
-      accumulableHeaders,
-      accumulableRow,
-      stageData.accumulatorUpdates.toSeq)
-
     val currentTime = System.currentTimeMillis()
     val taskTable = try {
       val _taskTable = new TaskPagedTable(
@@ -241,21 +219,6 @@ private[ui] class StagePage(parent: StagesTab, store: AppStatusStore) extends We
         null
     }
 
-    val jsForScrollingDownToTaskTable =
-
     val content =
       summary ++
       dagViz ++
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala
index 330b6422a1..72cedb1e60 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala
@@ -412,8 +412,6 @@ private[ui] class StageDataSource(
   }
 
   private def stageRow(stageData: v1.StageData): StageTableRowData = {
-    val description = stageData.description.getOrElse("")
-
     val formattedSubmissionTime = stageData.submissionTime match {
       case Some(t) => UIUtils.formatDate(t)
       case None => "Unknown"
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/StagesTab.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StagesTab.scala
index b74f3dbe88..2d222b842b 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/StagesTab.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StagesTab.scala
@@ -23,7 +23,7 @@ import org.apache.spark.internal.config.SCHEDULER_MODE
 import org.apache.spark.scheduler.SchedulingMode
 import org.apache.spark.status.AppStatusStore
 import org.apache.spark.status.api.v1.StageStatus
-import org.apache.spark.ui.{SparkUI, SparkUITab, UIUtils}
+import org.apache.spark.ui.{SparkUI, SparkUITab}
 
 /** Web UI showing progress status of all stages in the given SparkContext. */
 private[ui] class StagesTab(val parent: SparkUI, val store: AppStatusStore)
diff --git a/core/src/main/scala/org/apache/spark/util/CommandLineUtils.scala b/core/src/main/scala/org/apache/spark/util/CommandLineUtils.scala
index add1146c90..651993b057 100644
--- a/core/src/main/scala/org/apache/spark/util/CommandLineUtils.scala
+++ b/core/src/main/scala/org/apache/spark/util/CommandLineUtils.scala
@@ -19,8 +19,6 @@ package org.apache.spark.util
 
 import java.io.PrintStream
 
-import org.apache.spark.SparkException
-
 /**
  * Contains basic command line parsing functionality and methods to parse some common Spark CLI
 * options.
diff --git a/core/src/main/scala/org/apache/spark/util/collection/PartitionedPairBuffer.scala b/core/src/main/scala/org/apache/spark/util/collection/PartitionedPairBuffer.scala
index e17a9de97e..652d8c001e 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/PartitionedPairBuffer.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/PartitionedPairBuffer.scala
@@ -19,7 +19,6 @@ package org.apache.spark.util.collection
 
 import java.util.Comparator
 
-import org.apache.spark.unsafe.Platform
 import org.apache.spark.unsafe.array.ByteArrayMethods
 import org.apache.spark.util.collection.WritablePartitionedPairCollection._
diff --git a/core/src/main/scala/org/apache/spark/util/io/ChunkedByteBufferFileRegion.scala b/core/src/main/scala/org/apache/spark/util/io/ChunkedByteBufferFileRegion.scala
index 9622d0ac05..23fc0f88f0 100644
--- a/core/src/main/scala/org/apache/spark/util/io/ChunkedByteBufferFileRegion.scala
+++ b/core/src/main/scala/org/apache/spark/util/io/ChunkedByteBufferFileRegion.scala
@@ -18,10 +18,6 @@ package org.apache.spark.util.io
 
 import java.nio.channels.WritableByteChannel
 
-import io.netty.channel.FileRegion
-import io.netty.util.AbstractReferenceCounted
-
-import org.apache.spark.internal.Logging
 import org.apache.spark.network.util.AbstractFileRegion
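Cleanups like the above can be caught mechanically. Assuming an sbt build on Scala 2.12 (an illustrative configuration, not part of this patch), the compiler itself can warn on dead imports and locals:

    // build.sbt (sketch)
    scalacOptions ++= Seq(
      "-Ywarn-unused:imports",  // warn on imports never referenced
      "-Ywarn-unused:locals"    // warn on local vals/vars never read
    )

Adding `-Xfatal-warnings` turns these warnings into errors so unused code cannot accumulate.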