[MINOR][CORE] Remove unused variables, unused imports, etc.

## What changes were proposed in this pull request?

- Remove unused variables.
- Remove unused imports.
- Change `var` to `val` in a few places (an illustrative sketch of these cleanups follows below).
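
For illustration only, here is a minimal, hypothetical Scala sketch of the kinds of edits this covers (the names below do not come from the patch itself):

```scala
// Hypothetical example: an unused import, an unused value, and a var that is
// never reassigned -- the three patterns cleaned up throughout this change.
import scala.collection.mutable.ArrayBuffer // unused import: delete it

object CleanupSketch {
  // Before the cleanup.
  def countNonEmptyBefore(names: Seq[String]): Int = {
    val first = names.headOption        // unused value: delete it
    var count = names.count(_.nonEmpty) // never reassigned: make it a val
    count
  }

  // After the cleanup: only what is actually used remains.
  def countNonEmptyAfter(names: Seq[String]): Int = {
    val count = names.count(_.nonEmpty)
    count
  }
}
```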

## How was this patch tested?

Unit tests.

Closes #24857 from imback82/unused_variable.

Authored-by: Terry Kim <yuminkim@gmail.com>
Signed-off-by: Sean Owen <sean.owen@databricks.com>
Terry Kim, 2019-06-15 09:42:22 -05:00, committed by Sean Owen
parent 26998b86c1
commit a950570f91
35 changed files with 19 additions and 95 deletions

@@ -567,7 +567,6 @@ private[spark] class ExecutorAllocationManager(
override def onTaskStart(taskStart: SparkListenerTaskStart): Unit = {
val stageId = taskStart.stageId
val taskId = taskStart.taskInfo.taskId
- val taskIndex = taskStart.taskInfo.index
allocationManager.synchronized {
@@ -589,7 +588,6 @@ private[spark] class ExecutorAllocationManager(
}
override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = {
val taskId = taskEnd.taskInfo.taskId
- val taskIndex = taskEnd.taskInfo.index
val stageId = taskEnd.stageId
allocationManager.synchronized {

@@ -227,7 +227,7 @@ private[spark] class MapOutputTrackerMasterEndpoint(
case GetMapOutputStatuses(shuffleId: Int) =>
val hostPort = context.senderAddress.hostPort
logInfo("Asked to send map output locations for shuffle " + shuffleId + " to " + hostPort)
- val mapOutputStatuses = tracker.post(new GetMapOutputMessage(shuffleId, context))
+ tracker.post(new GetMapOutputMessage(shuffleId, context))
case StopMapOutputTracker =>
logInfo("MapOutputTrackerMasterEndpoint stopped!")

@@ -18,7 +18,6 @@
package org.apache.spark
import java.io.File
import java.net.{Authenticator, PasswordAuthentication}
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.Files
import java.util.Base64

@@ -19,7 +19,7 @@ package org.apache.spark.api.java
import java.{lang => jl}
import java.lang.{Iterable => JIterable}
- import java.util.{Comparator, Iterator => JIterator, List => JList}
+ import java.util.{Comparator, List => JList}
import scala.collection.JavaConverters._
import scala.language.implicitConversions

@@ -17,7 +17,7 @@
package org.apache.spark.api.python
- import java.io.{DataInputStream, DataOutputStream, EOFException, InputStream, OutputStreamWriter}
+ import java.io.{DataInputStream, DataOutputStream, EOFException, InputStream}
import java.net.{InetAddress, ServerSocket, Socket, SocketException}
import java.util.Arrays
import java.util.concurrent.TimeUnit

@@ -23,7 +23,6 @@ import java.nio.charset.StandardCharsets.UTF_8
import io.netty.channel.{Channel, ChannelHandlerContext, SimpleChannelInboundHandler}
import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
* Authentication handler for connections from the R process.

@@ -102,7 +102,7 @@ private[spark] object SerDe {
def readBytes(in: DataInputStream): Array[Byte] = {
val len = readInt(in)
val out = new Array[Byte](len)
- val bytesRead = in.readFully(out)
+ in.readFully(out)
out
}

@@ -18,7 +18,7 @@
package org.apache.spark.deploy
import java.io._
- import java.lang.reflect.{InvocationTargetException, Modifier, UndeclaredThrowableException}
+ import java.lang.reflect.{InvocationTargetException, UndeclaredThrowableException}
import java.net.{URI, URL}
import java.security.PrivilegedExceptionAction
import java.text.ParseException
@@ -26,7 +26,7 @@ import java.util.{ServiceLoader, UUID}
import scala.annotation.tailrec
import scala.collection.JavaConverters._
- import scala.collection.mutable.{ArrayBuffer, HashMap, Map}
+ import scala.collection.mutable.ArrayBuffer
import scala.util.{Properties, Try}
import org.apache.commons.io.FilenameUtils
@@ -317,7 +317,6 @@ private[spark] class SparkSubmit extends Logging {
val isKubernetesClient = clusterManager == KUBERNETES && deployMode == CLIENT
val isKubernetesClusterModeDriver = isKubernetesClient &&
sparkConf.getBoolean("spark.kubernetes.submitInDriver", false)
- val isMesosClient = clusterManager == MESOS && deployMode == CLIENT
if (!isMesosCluster && !isStandAloneCluster) {
// Resolve maven dependencies if there are any and add classpath to jars. Add them to py-files

@@ -22,7 +22,7 @@ import java.io.File
import org.apache.commons.lang3.StringUtils
import org.apache.spark.{SecurityManager, SparkConf}
- import org.apache.spark.deploy.{DependencyUtils, SparkHadoopUtil, SparkSubmit}
+ import org.apache.spark.deploy.{DependencyUtils, SparkHadoopUtil}
import org.apache.spark.internal.{config, Logging}
import org.apache.spark.rpc.RpcEnv
import org.apache.spark.util._

@@ -17,7 +17,6 @@
package org.apache.spark.executor
import java.io.{BufferedInputStream, FileInputStream}
import java.net.URL
import java.nio.ByteBuffer
import java.util.Locale
@@ -27,11 +26,7 @@ import scala.collection.mutable
import scala.util.{Failure, Success}
import scala.util.control.NonFatal
import com.fasterxml.jackson.databind.exc.MismatchedInputException
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JArray
import org.json4s.MappingException
import org.json4s.jackson.JsonMethods._
import org.apache.spark._
import org.apache.spark.TaskState.TaskState

@@ -151,7 +151,7 @@ private[memory] class ExecutionMemoryPool(
*/
def releaseMemory(numBytes: Long, taskAttemptId: Long): Unit = lock.synchronized {
val curMem = memoryForTask.getOrElse(taskAttemptId, 0L)
- var memoryToFree = if (curMem < numBytes) {
+ val memoryToFree = if (curMem < numBytes) {
logWarning(
s"Internal error: release called on $numBytes bytes but task only has $curMem bytes " +
s"of memory from the $poolName pool")

@@ -206,7 +206,6 @@ private[spark] object ExecutorMetricType {
var numberOfMetrics = 0
val definedMetricsAndOffset = mutable.LinkedHashMap.empty[String, Int]
metricGetters.foreach { m =>
- var metricInSet = 0
(0 until m.names.length).foreach { idx =>
definedMetricsAndOffset += (m.names(idx) -> (idx + numberOfMetrics))
}

@@ -17,7 +17,6 @@
package org.apache.spark.metrics.sink
import java.net.InetSocketAddress
import java.util.{Locale, Properties}
import java.util.concurrent.TimeUnit

@@ -364,7 +364,7 @@ private class DefaultPartitionCoalescer(val balanceSlack: Double = 0.10)
val partNoLocIter = partitionLocs.partsWithoutLocs.iterator
groupArr.filter(pg => pg.numPartitions == 0).foreach { pg =>
while (partNoLocIter.hasNext && pg.numPartitions == 0) {
- var nxt_part = partNoLocIter.next()
+ val nxt_part = partNoLocIter.next()
if (!initialHash.contains(nxt_part)) {
pg.partitions += nxt_part
initialHash += nxt_part

@@ -32,7 +32,7 @@ import scala.collection.mutable.ArrayBuffer
import scala.io.Source
import scala.reflect.ClassTag
- import org.apache.spark.{Partition, SparkEnv, TaskContext}
+ import org.apache.spark.{Partition, TaskContext}
import org.apache.spark.util.Utils
@@ -109,7 +109,6 @@ private[spark] class PipedRDD[T: ClassTag](
}
val proc = pb.start()
- val env = SparkEnv.get
val childThreadException = new AtomicReference[Throwable](null)
// Start a thread to print the process's stderr to ours

@@ -20,7 +20,7 @@ package org.apache.spark.rpc
import scala.concurrent.Future
import scala.reflect.ClassTag
- import org.apache.spark.{SparkConf, SparkException}
+ import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.util.RpcUtils

@@ -20,7 +20,6 @@ package org.apache.spark.scheduler
import java.io._
import java.net.URI
import java.nio.charset.StandardCharsets
import java.util.Locale
import scala.collection.mutable.{ArrayBuffer, Map}

@@ -24,12 +24,11 @@ import scala.collection.Map
import com.fasterxml.jackson.annotation.JsonTypeInfo
- import org.apache.spark.{SparkConf, TaskEndReason}
+ import org.apache.spark.TaskEndReason
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.executor.{ExecutorMetrics, TaskMetrics}
import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.storage.{BlockManagerId, BlockUpdatedInfo}
- import org.apache.spark.ui.SparkUI
@DeveloperApi
@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY, property = "Event")

@@ -21,7 +21,6 @@ import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.{AtomicInteger, AtomicReference}
import javax.annotation.concurrent.GuardedBy
import scala.collection.mutable
import scala.collection.mutable.{HashMap, HashSet}
import scala.concurrent.Future

@@ -25,7 +25,7 @@ import scala.collection.mutable.HashMap
import org.apache.spark.{JobExecutionStatus, SparkConf}
import org.apache.spark.status.api.v1
import org.apache.spark.ui.scope._
- import org.apache.spark.util.{Distribution, Utils}
+ import org.apache.spark.util.Utils
import org.apache.spark.util.kvstore.{InMemoryStore, KVStore}
/**

@@ -16,7 +16,7 @@
*/
package org.apache.spark.status.api.v1
- import java.util.{Date, List => JList}
+ import java.util.{List => JList}
import javax.ws.rs.{DefaultValue, GET, Produces, QueryParam}
import javax.ws.rs.core.MediaType

@@ -19,7 +19,6 @@ package org.apache.spark.status.api.v1
import java.io.OutputStream
import java.lang.annotation.Annotation
import java.lang.reflect.Type
import java.nio.charset.StandardCharsets
import java.text.SimpleDateFormat
import java.util.{Calendar, Locale, SimpleTimeZone}
import javax.ws.rs.Produces

@@ -25,7 +25,6 @@ import javax.ws.rs.core.{MediaType, Response, StreamingOutput}
import scala.util.control.NonFatal
import org.apache.spark.{JobExecutionStatus, SparkContext}
import org.apache.spark.ui.UIUtils
@Produces(Array(MediaType.APPLICATION_JSON))
private[v1] class AbstractApplicationResource extends BaseAppResource {

@@ -20,11 +20,7 @@ import java.util.{HashMap, List => JList, Locale}
import javax.ws.rs._
import javax.ws.rs.core.{Context, MediaType, MultivaluedMap, UriInfo}
import org.apache.spark.SparkException
import org.apache.spark.scheduler.StageInfo
import org.apache.spark.status.api.v1.StageStatus._
import org.apache.spark.status.api.v1.TaskSorting._
- import org.apache.spark.ui.{SparkUI, UIUtils}
+ import org.apache.spark.ui.UIUtils
import org.apache.spark.ui.jobs.ApiHelper._
import org.apache.spark.util.Utils

@@ -17,8 +17,7 @@
package org.apache.spark.storage
- import java.io.{InputStream, IOException, SequenceInputStream}
- import java.nio.ByteBuffer
+ import java.io.{InputStream, IOException}
import java.util.concurrent.{LinkedBlockingQueue, TimeUnit}
import javax.annotation.concurrent.GuardedBy

@@ -17,11 +17,9 @@
package org.apache.spark.ui
- import java.util.{Date, List => JList, ServiceLoader}
+ import java.util.Date
import scala.collection.JavaConverters._
- import org.apache.spark.{JobExecutionStatus, SecurityManager, SparkConf, SparkContext}
+ import org.apache.spark.{SecurityManager, SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.UI._
import org.apache.spark.scheduler._
@@ -32,7 +30,6 @@ import org.apache.spark.ui.env.EnvironmentTab
import org.apache.spark.ui.exec.ExecutorsTab
import org.apache.spark.ui.jobs.{JobsTab, StagesTab}
import org.apache.spark.ui.storage.StorageTab
- import org.apache.spark.util.Utils
/**
* Top level user interface for a Spark application.

@@ -26,7 +26,6 @@ import scala.xml.{Node, NodeSeq, Unparsed, Utility}
import org.apache.commons.lang3.StringEscapeUtils
import org.apache.spark.JobExecutionStatus
import org.apache.spark.scheduler._
import org.apache.spark.status.AppStatusStore
import org.apache.spark.status.api.v1
import org.apache.spark.ui._

@@ -19,8 +19,6 @@ package org.apache.spark.ui.jobs
import javax.servlet.http.HttpServletRequest
import scala.collection.JavaConverters._
import org.apache.spark.JobExecutionStatus
import org.apache.spark.internal.config.SCHEDULER_MODE
import org.apache.spark.scheduler.SchedulingMode

@@ -22,7 +22,6 @@ import javax.servlet.http.HttpServletRequest
import scala.xml.Node
import org.apache.spark.status.PoolData
import org.apache.spark.status.api.v1._
import org.apache.spark.ui.{UIUtils, WebUIPage}
/** Page showing specific pool details */

@@ -87,7 +87,6 @@ private[ui] class StagePage(parent: StagesTab, store: AppStatusStore) extends We
val parameterAttempt = request.getParameter("attempt")
require(parameterAttempt != null && parameterAttempt.nonEmpty, "Missing attempt parameter")
val parameterTaskPage = request.getParameter("task.page")
val parameterTaskSortColumn = request.getParameter("task.sort")
val parameterTaskSortDesc = request.getParameter("task.desc")
val parameterTaskPageSize = request.getParameter("task.pageSize")
@@ -98,7 +97,6 @@ private[ui] class StagePage(parent: StagesTab, store: AppStatusStore) extends We
var eventTimelineTaskPageSize = Option(
eventTimelineParameterTaskPageSize).map(_.toInt).getOrElse(100)
val taskPage = Option(parameterTaskPage).map(_.toInt).getOrElse(1)
val taskSortColumn = Option(parameterTaskSortColumn).map { sortColumn =>
UIUtils.decodeURLParameter(sortColumn)
}.getOrElse("Index")
@@ -131,13 +129,6 @@ private[ui] class StagePage(parent: StagesTab, store: AppStatusStore) extends We
return UIUtils.headerSparkPage(request, stageHeader, content, parent)
}
- val storedTasks = store.taskCount(stageData.stageId, stageData.attemptId)
- val numCompleted = stageData.numCompleteTasks
- val totalTasksNumStr = if (totalTasks == storedTasks) {
- s"$totalTasks"
- } else {
- s"$totalTasks, showing $storedTasks"
- }
if (eventTimelineTaskPageSize < 1 || eventTimelineTaskPageSize > totalTasks) {
eventTimelineTaskPageSize = totalTasks
}
@@ -210,19 +201,6 @@ private[ui] class StagePage(parent: StagesTab, store: AppStatusStore) extends We
val stageGraph = parent.store.asOption(parent.store.operationGraphForStage(stageId))
val dagViz = UIUtils.showDagVizForStage(stageId, stageGraph)
- val accumulableHeaders: Seq[String] = Seq("Accumulable", "Value")
- def accumulableRow(acc: AccumulableInfo): Seq[Node] = {
- if (acc.name != null && acc.value != null) {
- <tr><td>{acc.name}</td><td>{acc.value}</td></tr>
- } else {
- Nil
- }
- }
- val accumulableTable = UIUtils.listingTable(
- accumulableHeaders,
- accumulableRow,
- stageData.accumulatorUpdates.toSeq)
val currentTime = System.currentTimeMillis()
val taskTable = try {
val _taskTable = new TaskPagedTable(
@@ -241,21 +219,6 @@ private[ui] class StagePage(parent: StagesTab, store: AppStatusStore) extends We
null
}
- val jsForScrollingDownToTaskTable =
- <script>
- {Unparsed {
- """
- |$(function() {
- | if (/.*&task.sort=.*$/.test(location.search)) {
- | var topOffset = $("#tasks-section").offset().top;
- | $("html,body").animate({scrollTop: topOffset}, 200);
- | }
- |});
- """.stripMargin
- }
- }
- </script>
val content =
summary ++
dagViz ++ <div id="showAdditionalMetrics"></div> ++

@@ -412,8 +412,6 @@ private[ui] class StageDataSource(
}
private def stageRow(stageData: v1.StageData): StageTableRowData = {
val description = stageData.description.getOrElse("")
val formattedSubmissionTime = stageData.submissionTime match {
case Some(t) => UIUtils.formatDate(t)
case None => "Unknown"

@@ -23,7 +23,7 @@ import org.apache.spark.internal.config.SCHEDULER_MODE
import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.status.AppStatusStore
import org.apache.spark.status.api.v1.StageStatus
- import org.apache.spark.ui.{SparkUI, SparkUITab, UIUtils}
+ import org.apache.spark.ui.{SparkUI, SparkUITab}
/** Web UI showing progress status of all stages in the given SparkContext. */
private[ui] class StagesTab(val parent: SparkUI, val store: AppStatusStore)

@@ -19,8 +19,6 @@ package org.apache.spark.util
import java.io.PrintStream
import org.apache.spark.SparkException
/**
* Contains basic command line parsing functionality and methods to parse some common Spark CLI
* options.

@@ -19,7 +19,6 @@ package org.apache.spark.util.collection
import java.util.Comparator
import org.apache.spark.unsafe.Platform
import org.apache.spark.unsafe.array.ByteArrayMethods
import org.apache.spark.util.collection.WritablePartitionedPairCollection._

@@ -18,10 +18,6 @@ package org.apache.spark.util.io
import java.nio.channels.WritableByteChannel
import io.netty.channel.FileRegion
import io.netty.util.AbstractReferenceCounted
import org.apache.spark.internal.Logging
import org.apache.spark.network.util.AbstractFileRegion