[SPARK-17308] Improved the Spark core code by replacing all pattern matches on boolean values with if/else blocks.

## What changes were proposed in this pull request?
Improved the code quality of Spark by replacing all pattern matches on boolean values with if/else blocks.
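
The refactoring applies one mechanical pattern across the codebase. As a generic sketch of the before/after shape (the `label` helper below is invented for illustration, not taken from the patch):

```scala
// Before: pattern matching on a bare Boolean, which reads as a
// roundabout if/else and is discouraged by common Scala style guides.
def label(flag: Boolean): String = flag match {
  case true => "enabled"
  case false => "disabled"
}

// After: a plain if/else expression says the same thing directly.
def labelRefactored(flag: Boolean): String =
  if (flag) "enabled" else "disabled"
```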

## How was this patch tested?

By running the existing tests.

Author: Shivansh <shiv4nsh@gmail.com>

Closes #14873 from shiv4nsh/SPARK-17308.
Committed 2016-09-04 12:39:26 +01:00 by Sean Owen
commit e75c162e9e (parent 6b156e2fcf)
7 changed files with 44 additions and 43 deletions


```diff
@@ -124,11 +124,7 @@ private class ClientEndpoint(
     logInfo("... polling master for driver state")
     val statusResponse =
       activeMasterEndpoint.askWithRetry[DriverStatusResponse](RequestDriverStatus(driverId))
-    statusResponse.found match {
-      case false =>
-        logError(s"ERROR: Cluster master did not recognize $driverId")
-        System.exit(-1)
-      case true =>
+    if (statusResponse.found) {
       logInfo(s"State of $driverId is ${statusResponse.state.get}")
       // Worker node, if present
       (statusResponse.workerId, statusResponse.workerHostPort, statusResponse.state) match {
@@ -145,6 +141,9 @@ private class ClientEndpoint(
         case _ =>
           System.exit(0)
       }
+    } else {
+      logError(s"ERROR: Cluster master did not recognize $driverId")
+      System.exit(-1)
     }
   }
```
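
Note that only the match on the bare Boolean `statusResponse.found` was rewritten; the tuple match over the worker fields stays, since matching remains idiomatic for structured values. A standalone sketch of that distinction (the `describe` helper is invented for illustration):

```scala
// Pattern matching still earns its keep on tuples of Options;
// only a Boolean scrutinee is better served by if/else.
def describe(workerId: Option[String], hostPort: Option[String]): String =
  (workerId, hostPort) match {
    case (Some(id), Some(hp)) => s"driver running on $hp ($id)"
    case _ => "driver location unknown"
  }
```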


```diff
@@ -304,10 +304,9 @@ class KMeansSuite extends SparkFunSuite with MLlibTestSparkContext {
 
 object KMeansSuite extends SparkFunSuite {
   def createModel(dim: Int, k: Int, isSparse: Boolean): KMeansModel = {
-    val singlePoint = isSparse match {
-      case true =>
+    val singlePoint = if (isSparse) {
       Vectors.sparse(dim, Array.empty[Int], Array.empty[Double])
-      case _ =>
+    } else {
       Vectors.dense(Array.fill[Double](dim)(0.0))
     }
     new KMeansModel(Array.fill[Vector](k)(singlePoint))
```
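
For context, both branches construct the same zero point in different storage formats. A minimal check using the same `Vectors` factory methods as the test (this snippet is illustrative, not part of the patch):

```scala
import org.apache.spark.mllib.linalg.Vectors

val dim = 3
// A sparse vector with no explicit entries and a dense vector of zeros
// describe the same point; only the representation differs.
val sparseZero = Vectors.sparse(dim, Array.empty[Int], Array.empty[Double])
val denseZero = Vectors.dense(Array.fill[Double](dim)(0.0))
assert(sparseZero.toArray.sameElements(denseZero.toArray))
```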


```diff
@@ -212,9 +212,10 @@ object SparkBuild extends PomBuild {
     cachedFun(findFiles(scalaSource.in(config).value))
   }
 
-  private def findFiles(file: File): Set[File] = file.isDirectory match {
-    case true => file.listFiles().toSet.flatMap(findFiles) + file
-    case false => Set(file)
+  private def findFiles(file: File): Set[File] = if (file.isDirectory) {
+    file.listFiles().toSet.flatMap(findFiles) + file
+  } else {
+    Set(file)
   }
 
   def enableScalaStyle: Seq[sbt.Def.Setting[_]] = Seq(
```
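
The rewritten helper is a plain recursive directory walk. Extracted as a self-contained sketch (same logic as the patched helper; note it assumes readable directories, since `File.listFiles` returns null when listing fails):

```scala
import java.io.File

// A directory contributes itself plus everything beneath it,
// recursively; a plain file contributes only itself.
def findFiles(file: File): Set[File] = if (file.isDirectory) {
  file.listFiles().toSet.flatMap(findFiles) + file
} else {
  Set(file)
}
```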


```diff
@@ -103,9 +103,10 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
    * Find the first [[TreeNode]] that satisfies the condition specified by `f`.
    * The condition is recursively applied to this node and all of its children (pre-order).
    */
-  def find(f: BaseType => Boolean): Option[BaseType] = f(this) match {
-    case true => Some(this)
-    case false => children.foldLeft(Option.empty[BaseType]) { (l, r) => l.orElse(r.find(f)) }
+  def find(f: BaseType => Boolean): Option[BaseType] = if (f(this)) {
+    Some(this)
+  } else {
+    children.foldLeft(Option.empty[BaseType]) { (l, r) => l.orElse(r.find(f)) }
   }
 
   /**
```
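
The pre-order behavior described in the doc comment is easy to verify on a toy tree. A standalone sketch (the `Node` class is invented here, not catalyst code) that mirrors the rewritten `find`:

```scala
// Test this node first, then fold across the children left to right,
// keeping the first match: a pre-order search.
case class Node(value: Int, children: Seq[Node] = Nil) {
  def find(f: Node => Boolean): Option[Node] = if (f(this)) {
    Some(this)
  } else {
    children.foldLeft(Option.empty[Node]) { (l, r) => l.orElse(r.find(f)) }
  }
}

val tree = Node(1, Seq(Node(2), Node(3, Seq(Node(4)))))
assert(tree.find(_.value >= 3).map(_.value) == Some(3)) // 3 precedes 4 in pre-order
```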


```diff
@@ -22,9 +22,10 @@ package org.apache.spark.sql.catalyst.util
  * sensitive or insensitive.
  */
 object StringKeyHashMap {
-  def apply[T](caseSensitive: Boolean): StringKeyHashMap[T] = caseSensitive match {
-    case false => new StringKeyHashMap[T](_.toLowerCase)
-    case true => new StringKeyHashMap[T](identity)
+  def apply[T](caseSensitive: Boolean): StringKeyHashMap[T] = if (caseSensitive) {
+    new StringKeyHashMap[T](identity)
+  } else {
+    new StringKeyHashMap[T](_.toLowerCase)
   }
 }
```
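
The factory fixes the key normalization at construction time: `identity` when case matters, `toLowerCase` when it does not. A minimal standalone sketch of that idea (`NormalizedKeyMap` and its `put`/`get` are invented here; the real catalyst class may expose a different surface):

```scala
import scala.collection.mutable

// Every key is normalized both on insert and on lookup, so a
// case-insensitive map just normalizes with toLowerCase.
class NormalizedKeyMap[T](normalize: String => String) {
  private val base = mutable.HashMap.empty[String, T]
  def put(key: String, value: T): Unit = base(normalize(key)) = value
  def get(key: String): Option[T] = base.get(normalize(key))
}

object NormalizedKeyMap {
  def apply[T](caseSensitive: Boolean): NormalizedKeyMap[T] =
    if (caseSensitive) new NormalizedKeyMap[T](identity)
    else new NormalizedKeyMap[T](_.toLowerCase)
}

val m = NormalizedKeyMap[Int](caseSensitive = false)
m.put("Count", 1)
assert(m.get("COUNT") == Some(1))
```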


```diff
@@ -414,10 +414,9 @@ class AnalysisErrorSuite extends AnalysisTest {
       AttributeReference("a", dataType)(exprId = ExprId(2)),
       AttributeReference("b", IntegerType)(exprId = ExprId(1))))
 
-    shouldSuccess match {
-      case true =>
+    if (shouldSuccess) {
       assertAnalysisSuccess(plan, true)
-      case false =>
+    } else {
       assertAnalysisError(plan, "expression `a` cannot be used as a grouping expression" :: Nil)
     }
   }
```


```diff
@@ -471,9 +471,10 @@ class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
       val list: JList[V] = values.asJava
       val scalaState: Optional[S] = JavaUtils.optionToOptional(state)
       val result: Optional[S] = in.apply(list, scalaState)
-      result.isPresent match {
-        case true => Some(result.get())
-        case _ => None
+      if (result.isPresent) {
+        Some(result.get())
+      } else {
+        None
       }
     }
     scalaFunc
```
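
The tail of this block is the usual Optional-to-Option bridge. A standalone sketch for illustration (the class above converts via `JavaUtils.optionToOptional`; the sketch uses `java.util.Optional`, which has the same `isPresent`/`get` surface):

```scala
import java.util.Optional

// Bridge a Java Optional back to a Scala Option.
def toScalaOption[S](result: Optional[S]): Option[S] =
  if (result.isPresent) Some(result.get()) else None

assert(toScalaOption(Optional.of(42)) == Some(42))
assert(toScalaOption(Optional.empty[Int]()) == None)
```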