From 7f3c6ff4ff0a501cc7f1fb53a90ea7b5787f68e1 Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Tue, 5 Sep 2017 19:40:05 +0900
Subject: [PATCH] [SPARK-21903][BUILD] Upgrade scalastyle to 1.0.0.

## What changes were proposed in this pull request?

Scalastyle 1.0.0 fixes issues in its checks for import order, explicit types on public methods, line length and comment validation, so upgrading surfaces new violations in the build:

```
[error] .../spark/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala:50:16: Are you sure you want to println? If yes, wrap the code block with
[error]   // scalastyle:off println
[error]   println(...)
[error]   // scalastyle:on println
[error] .../spark/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala:49: File line length exceeds 100 characters
[error] .../spark/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala:22:21: Are you sure you want to println? If yes, wrap the code block with
[error]   // scalastyle:off println
[error]   println(...)
[error]   // scalastyle:on println
[error] .../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:35:6: Public method must have explicit type
[error] .../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:51:6: Public method must have explicit type
[error] .../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:93:15: Public method must have explicit type
[error] .../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:98:15: Public method must have explicit type
[error] .../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:47:2: Insert a space after the start of the comment
[error] .../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:26:43: JavaDStream should come before JavaDStreamLike.
```

This PR also removes the workaround added in SPARK-16877 in favour of the `org.scalastyle.scalariform.OverrideJavaChecker` rule, available since 0.9.0.

## How was this patch tested?

Manually tested.

Author: hyukjinkwon

Closes #19116 from HyukjinKwon/scalastyle-1.0.0.
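For context, two of the rules above dictate specific source patterns: deliberate `println` calls must be wrapped in suppression comments, and public members must declare explicit result types. A minimal standalone sketch of both fixes (illustrative only, not code from this patch):

```scala
object StyleExamples {
  // Without the explicit ": Int", scalastyle 1.0.0 reports
  // "Public method must have explicit type".
  def maxWaitTimeMillis: Int = 20000

  def report(msg: String): Unit = {
    // Intentional console output is fenced off so the println rule
    // ("Are you sure you want to println?") does not fire.
    // scalastyle:off println
    println(msg)
    // scalastyle:on println
  }
}
```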
---
 project/SparkBuild.scala                                   |  5 +++--
 project/plugins.sbt                                        |  3 +--
 .../src/main/scala/org/apache/spark/repl/Main.scala        |  2 ++
 .../scala/org/apache/spark/repl/SparkILoop.scala           |  5 ++++-
 scalastyle-config.xml                                      |  5 +----
 .../org/apache/spark/streaming/JavaTestUtils.scala         | 12 ++++++------
 6 files changed, 17 insertions(+), 15 deletions(-)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 9d903edbf9..20848f0617 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -163,14 +163,15 @@ object SparkBuild extends PomBuild {
         val configUrlV = scalastyleConfigUrl.in(config).value
         val streamsV = streams.in(config).value
         val failOnErrorV = true
+        val failOnWarningV = false
         val scalastyleTargetV = scalastyleTarget.in(config).value
         val configRefreshHoursV = scalastyleConfigRefreshHours.in(config).value
         val targetV = target.in(config).value
         val configCacheFileV = scalastyleConfigUrlCacheFile.in(config).value
 
         logger.info(s"Running scalastyle on ${name.value} in ${config.name}")
-        Tasks.doScalastyle(args, configV, configUrlV, failOnErrorV, scalaSourceV, scalastyleTargetV,
-          streamsV, configRefreshHoursV, targetV, configCacheFileV)
+        Tasks.doScalastyle(args, configV, configUrlV, failOnErrorV, failOnWarningV, scalaSourceV,
+          scalastyleTargetV, streamsV, configRefreshHoursV, targetV, configCacheFileV)
 
         Set.empty
       }
diff --git a/project/plugins.sbt b/project/plugins.sbt
index f67e0a14a8..3c5442b04b 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -7,8 +7,7 @@ addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.1.0")
 // sbt 1.0.0 support: https://github.com/jrudolph/sbt-dependency-graph/issues/134
 addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.8.2")
 
-// need to make changes to uptake sbt 1.0 support in "org.scalastyle" %% "scalastyle-sbt-plugin" % "0.9.0"
-addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.9.0")
+addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "1.0.0")
 
 addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.17")
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
index 0b16e1b073..cc76a703bd 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
@@ -47,7 +47,9 @@ object Main extends Logging {
 
   private def scalaOptionError(msg: String): Unit = {
     hasErrors = true
+    // scalastyle:off println
     Console.err.println(msg)
+    // scalastyle:on println
   }
 
   def main(args: Array[String]) {
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index d1d25b7bf0..ea279e4f0e 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -19,7 +19,9 @@ package org.apache.spark.repl
 
 import java.io.BufferedReader
 
+// scalastyle:off println
 import scala.Predef.{println => _, _}
+// scalastyle:on println
 import scala.tools.nsc.Settings
 import scala.tools.nsc.interpreter.{ILoop, JPrintWriter}
 import scala.tools.nsc.util.stringFromStream
@@ -46,7 +48,8 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
     if (_sc.getConf.getBoolean("spark.ui.reverseProxy", false)) {
       val proxyUrl = _sc.getConf.get("spark.ui.reverseProxyUrl", null)
       if (proxyUrl != null) {
-        println(s"Spark Context Web UI is available at ${proxyUrl}/proxy/${_sc.applicationId}")
+        println(
+          s"Spark Context Web UI is available at ${proxyUrl}/proxy/${_sc.applicationId}")
       } else {
         println(s"Spark Context Web UI is available at Spark Master Public URL")
       }
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
index 0a4073b039..bd7f462b72 100644
--- a/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -268,10 +268,7 @@ This file is divided into 3 sections:
-  <check customId="OverrideJavaCase" level="error" class="org.scalastyle.scalariform.TokenChecker" enabled="true">
-    <parameters><parameter name="regex">^Override$</parameter></parameters>
-    <customMessage>override modifier should be used instead of @java.lang.Override.</customMessage>
-  </check>
+  <check level="error" class="org.scalastyle.scalariform.OverrideJavaChecker" enabled="true"></check>
diff --git a/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala b/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
index ae44fd07ac..0c4a64ccc5 100644
--- a/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
+++ b/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
@@ -23,7 +23,7 @@ import scala.collection.JavaConverters._
 import scala.reflect.ClassTag
 
 import org.apache.spark.api.java.JavaRDDLike
-import org.apache.spark.streaming.api.java.{JavaDStreamLike, JavaDStream, JavaStreamingContext}
+import org.apache.spark.streaming.api.java.{JavaDStream, JavaDStreamLike, JavaStreamingContext}
 
 /** Exposes streaming test functionality in a Java-friendly way. */
 trait JavaTestBase extends TestSuiteBase {
@@ -35,7 +35,7 @@ trait JavaTestBase extends TestSuiteBase {
   def attachTestInputStream[T](
       ssc: JavaStreamingContext,
       data: JList[JList[T]],
-      numPartitions: Int) = {
+      numPartitions: Int): JavaDStream[T] = {
     val seqData = data.asScala.map(_.asScala)
 
     implicit val cm: ClassTag[T] =
@@ -47,9 +47,9 @@
   /**
    * Attach a provided stream to it's associated StreamingContext as a
    * [[org.apache.spark.streaming.TestOutputStream]].
-   **/
+   */
   def attachTestOutputStream[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T, R]](
-      dstream: JavaDStreamLike[T, This, R]) = {
+      dstream: JavaDStreamLike[T, This, R]): Unit = {
     implicit val cm: ClassTag[T] =
       implicitly[ClassTag[AnyRef]].asInstanceOf[ClassTag[T]]
     val ostream = new TestOutputStreamWithPartitions(dstream.dstream)
@@ -90,10 +90,10 @@
 }
 
 object JavaTestUtils extends JavaTestBase {
-  override def maxWaitTimeMillis = 20000
+  override def maxWaitTimeMillis: Int = 20000
 }
 
 object JavaCheckpointTestUtils extends JavaTestBase {
-  override def actuallyWait = true
+  override def actuallyWait: Boolean = true
 }
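For reference, the `org.scalastyle.scalariform.OverrideJavaChecker` rule that replaces the SPARK-16877 regex workaround enforces Scala's `override` modifier in place of the Java annotation. A minimal sketch of what the rule accepts and rejects (illustrative only, not part of this patch):

```scala
trait Greeter {
  def greet(): String
}

class EnglishGreeter extends Greeter {
  // Rejected by the rule, which reports "override modifier should be
  // used instead of @java.lang.Override.":
  //   @java.lang.Override
  //   def greet(): String = "hello"

  // Accepted: the Scala override modifier.
  override def greet(): String = "hello"
}
```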