[SPARK-17308] Improved the Spark core code by replacing all pattern matching on boolean values with if/else blocks.

## What changes were proposed in this pull request?
Improved the code quality of Spark by replacing all pattern matching on boolean values with if/else blocks.
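
Matching on a bare `Boolean` adds pattern-matching ceremony where a plain conditional expresses the branch directly. As a minimal sketch of the transformation applied throughout this patch (the names here are illustrative, not taken from the diff):

```scala
// Before: a two-case pattern match on a Boolean scrutinee.
def label(flag: Boolean): String = flag match {
  case true => "enabled"
  case false => "disabled"
}

// After: the equivalent, more direct if/else expression.
def labelRefactored(flag: Boolean): String = if (flag) "enabled" else "disabled"
```

Both forms return the same value; the if/else version reads as ordinary control flow and sidesteps pattern-matching machinery for a simple two-way branch.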

## How was this patch tested?

By running the tests

Author: Shivansh <shiv4nsh@gmail.com>

Closes #14873 from shiv4nsh/SPARK-17308.
Commit e75c162e9e (parent 6b156e2fcf), authored by Shivansh on 2016-09-04 12:39:26 +01:00 and committed by Sean Owen.
7 changed files with 44 additions and 43 deletions.


```diff
@@ -124,27 +124,26 @@ private class ClientEndpoint(
     logInfo("... polling master for driver state")
     val statusResponse =
       activeMasterEndpoint.askWithRetry[DriverStatusResponse](RequestDriverStatus(driverId))
-    statusResponse.found match {
-      case false =>
-        logError(s"ERROR: Cluster master did not recognize $driverId")
-        System.exit(-1)
-      case true =>
-        logInfo(s"State of $driverId is ${statusResponse.state.get}")
-        // Worker node, if present
-        (statusResponse.workerId, statusResponse.workerHostPort, statusResponse.state) match {
-          case (Some(id), Some(hostPort), Some(DriverState.RUNNING)) =>
-            logInfo(s"Driver running on $hostPort ($id)")
-          case _ =>
-        }
-        // Exception, if present
-        statusResponse.exception match {
-          case Some(e) =>
-            logError(s"Exception from cluster was: $e")
-            e.printStackTrace()
-            System.exit(-1)
-          case _ =>
-            System.exit(0)
-        }
+    if (statusResponse.found) {
+      logInfo(s"State of $driverId is ${statusResponse.state.get}")
+      // Worker node, if present
+      (statusResponse.workerId, statusResponse.workerHostPort, statusResponse.state) match {
+        case (Some(id), Some(hostPort), Some(DriverState.RUNNING)) =>
+          logInfo(s"Driver running on $hostPort ($id)")
+        case _ =>
+      }
+      // Exception, if present
+      statusResponse.exception match {
+        case Some(e) =>
+          logError(s"Exception from cluster was: $e")
+          e.printStackTrace()
+          System.exit(-1)
+        case _ =>
+          System.exit(0)
+      }
+    } else {
+      logError(s"ERROR: Cluster master did not recognize $driverId")
+      System.exit(-1)
     }
   }
```


```diff
@@ -304,11 +304,10 @@ class KMeansSuite extends SparkFunSuite with MLlibTestSparkContext {
 object KMeansSuite extends SparkFunSuite {
   def createModel(dim: Int, k: Int, isSparse: Boolean): KMeansModel = {
-    val singlePoint = isSparse match {
-      case true =>
-        Vectors.sparse(dim, Array.empty[Int], Array.empty[Double])
-      case _ =>
-        Vectors.dense(Array.fill[Double](dim)(0.0))
+    val singlePoint = if (isSparse) {
+      Vectors.sparse(dim, Array.empty[Int], Array.empty[Double])
+    } else {
+      Vectors.dense(Array.fill[Double](dim)(0.0))
     }
     new KMeansModel(Array.fill[Vector](k)(singlePoint))
   }
```


```diff
@@ -212,9 +212,10 @@ object SparkBuild extends PomBuild {
     cachedFun(findFiles(scalaSource.in(config).value))
   }

-  private def findFiles(file: File): Set[File] = file.isDirectory match {
-    case true => file.listFiles().toSet.flatMap(findFiles) + file
-    case false => Set(file)
+  private def findFiles(file: File): Set[File] = if (file.isDirectory) {
+    file.listFiles().toSet.flatMap(findFiles) + file
+  } else {
+    Set(file)
   }

   def enableScalaStyle: Seq[sbt.Def.Setting[_]] = Seq(
```


```diff
@@ -103,9 +103,10 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
    * Find the first [[TreeNode]] that satisfies the condition specified by `f`.
    * The condition is recursively applied to this node and all of its children (pre-order).
    */
-  def find(f: BaseType => Boolean): Option[BaseType] = f(this) match {
-    case true => Some(this)
-    case false => children.foldLeft(Option.empty[BaseType]) { (l, r) => l.orElse(r.find(f)) }
+  def find(f: BaseType => Boolean): Option[BaseType] = if (f(this)) {
+    Some(this)
+  } else {
+    children.foldLeft(Option.empty[BaseType]) { (l, r) => l.orElse(r.find(f)) }
   }

   /**
```
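
The refactored `find` preserves its pre-order semantics: the predicate is tested on the node itself first, and only on failure does the fold consult the children; because `Option.orElse` takes its argument by name, subtrees after the first match are never searched. A self-contained toy version, using a hypothetical `Node` class in place of Catalyst's `TreeNode`, behaves the same way:

```scala
// Hypothetical stand-in for TreeNode, to illustrate pre-order find.
case class Node(value: Int, children: Seq[Node] = Nil) {
  def find(f: Node => Boolean): Option[Node] = if (f(this)) {
    Some(this)
  } else {
    // orElse's argument is by-name, so deeper subtrees are only
    // searched while no match has been found yet.
    children.foldLeft(Option.empty[Node]) { (l, r) => l.orElse(r.find(f)) }
  }
}

val tree = Node(1, Seq(Node(2), Node(3, Seq(Node(4)))))
assert(tree.find(_.value == 4).contains(Node(4)))
assert(tree.find(_.value == 9).isEmpty)
```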


```diff
@@ -22,9 +22,10 @@ package org.apache.spark.sql.catalyst.util
  * sensitive or insensitive.
  */
 object StringKeyHashMap {
-  def apply[T](caseSensitive: Boolean): StringKeyHashMap[T] = caseSensitive match {
-    case false => new StringKeyHashMap[T](_.toLowerCase)
-    case true => new StringKeyHashMap[T](identity)
+  def apply[T](caseSensitive: Boolean): StringKeyHashMap[T] = if (caseSensitive) {
+    new StringKeyHashMap[T](identity)
+  } else {
+    new StringKeyHashMap[T](_.toLowerCase)
   }
 }
```
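
Note that the branches were also reordered so the `true` case leads, which is what lets the if/else read naturally: a case-sensitive map keeps keys as-is via `identity`, while an insensitive one folds them with `_.toLowerCase`. A rough sketch of the idea with a simplified stand-in for Catalyst's class (the `put`/`get` methods here are assumptions for illustration, not the real API):

```scala
import scala.collection.mutable

// Simplified stand-in: every key is passed through `normalizer`
// before it touches the underlying map.
class StringKeyHashMap[T](normalizer: String => String) {
  private val base = mutable.HashMap.empty[String, T]
  def put(key: String, value: T): Unit = base(normalizer(key)) = value
  def get(key: String): Option[T] = base.get(normalizer(key))
}

def makeMap[T](caseSensitive: Boolean): StringKeyHashMap[T] = if (caseSensitive) {
  new StringKeyHashMap[T](identity)
} else {
  new StringKeyHashMap[T](_.toLowerCase)
}

val m = makeMap[Int](caseSensitive = false)
m.put("Count", 1)
assert(m.get("COUNT").contains(1))  // case-insensitive lookup succeeds
```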


```diff
@@ -414,11 +414,10 @@ class AnalysisErrorSuite extends AnalysisTest {
         AttributeReference("a", dataType)(exprId = ExprId(2)),
         AttributeReference("b", IntegerType)(exprId = ExprId(1))))

-    shouldSuccess match {
-      case true =>
-        assertAnalysisSuccess(plan, true)
-      case false =>
-        assertAnalysisError(plan, "expression `a` cannot be used as a grouping expression" :: Nil)
+    if (shouldSuccess) {
+      assertAnalysisSuccess(plan, true)
+    } else {
+      assertAnalysisError(plan, "expression `a` cannot be used as a grouping expression" :: Nil)
     }
   }
```


```diff
@@ -471,9 +471,10 @@ class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
       val list: JList[V] = values.asJava
       val scalaState: Optional[S] = JavaUtils.optionToOptional(state)
       val result: Optional[S] = in.apply(list, scalaState)
-      result.isPresent match {
-        case true => Some(result.get())
-        case _ => None
+      if (result.isPresent) {
+        Some(result.get())
+      } else {
+        None
       }
     }
     scalaFunc
```
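
The same shape appears wherever a Java-style `Optional` is bridged back to a Scala `Option`. A minimal standalone sketch, written against `java.util.Optional` rather than the `Optional` type used in this file:

```scala
import java.util.Optional

// Convert a Java Optional to a Scala Option with the same
// if/else shape the patch uses above.
def toScalaOption[T](result: Optional[T]): Option[T] = if (result.isPresent) {
  Some(result.get())
} else {
  None
}

assert(toScalaOption(Optional.of(42)).contains(42))
assert(toScalaOption(Optional.empty[Int]()).isEmpty)
```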