[SPARK-21903][BUILD] Upgrade scalastyle to 1.0.0.
## What changes were proposed in this pull request? 1.0.0 fixes an issue with import order, explicit type for public methods, line length limitation and comment validation: ``` [error] .../spark/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala:50:16: Are you sure you want to println? If yes, wrap the code block with [error] // scalastyle:off println [error] println(...) [error] // scalastyle:on println [error] .../spark/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala:49: File line length exceeds 100 characters [error] .../spark/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala:22:21: Are you sure you want to println? If yes, wrap the code block with [error] // scalastyle:off println [error] println(...) [error] // scalastyle:on println [error] .../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:35:6: Public method must have explicit type [error] .../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:51:6: Public method must have explicit type [error] .../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:93:15: Public method must have explicit type [error] .../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:98:15: Public method must have explicit type [error] .../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:47:2: Insert a space after the start of the comment [error] .../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:26:43: JavaDStream should come before JavaDStreamLike. ``` This PR also fixes the workaround added in SPARK-16877 for `org.scalastyle.scalariform.OverrideJavaChecker` feature, added from 0.9.0. ## How was this patch tested? Manually tested. Author: hyukjinkwon <gurwls223@gmail.com> Closes #19116 from HyukjinKwon/scalastyle-1.0.0.
This commit is contained in:
parent
4e7a29efdb
commit
7f3c6ff4ff
|
@ -163,14 +163,15 @@ object SparkBuild extends PomBuild {
|
|||
val configUrlV = scalastyleConfigUrl.in(config).value
|
||||
val streamsV = streams.in(config).value
|
||||
val failOnErrorV = true
|
||||
val failOnWarningV = false
|
||||
val scalastyleTargetV = scalastyleTarget.in(config).value
|
||||
val configRefreshHoursV = scalastyleConfigRefreshHours.in(config).value
|
||||
val targetV = target.in(config).value
|
||||
val configCacheFileV = scalastyleConfigUrlCacheFile.in(config).value
|
||||
|
||||
logger.info(s"Running scalastyle on ${name.value} in ${config.name}")
|
||||
Tasks.doScalastyle(args, configV, configUrlV, failOnErrorV, scalaSourceV, scalastyleTargetV,
|
||||
streamsV, configRefreshHoursV, targetV, configCacheFileV)
|
||||
Tasks.doScalastyle(args, configV, configUrlV, failOnErrorV, failOnWarningV, scalaSourceV,
|
||||
scalastyleTargetV, streamsV, configRefreshHoursV, targetV, configCacheFileV)
|
||||
|
||||
Set.empty
|
||||
}
|
||||
|
|
|
@ -7,8 +7,7 @@ addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.1.0")
|
|||
// sbt 1.0.0 support: https://github.com/jrudolph/sbt-dependency-graph/issues/134
|
||||
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.8.2")
|
||||
|
||||
// need to make changes to uptake sbt 1.0 support in "org.scalastyle" %% "scalastyle-sbt-plugin" % "0.9.0"
|
||||
addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.9.0")
|
||||
addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "1.0.0")
|
||||
|
||||
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.17")
|
||||
|
||||
|
|
|
@ -47,7 +47,9 @@ object Main extends Logging {
|
|||
|
||||
private def scalaOptionError(msg: String): Unit = {
|
||||
hasErrors = true
|
||||
// scalastyle:off println
|
||||
Console.err.println(msg)
|
||||
// scalastyle:on println
|
||||
}
|
||||
|
||||
def main(args: Array[String]) {
|
||||
|
|
|
@ -19,7 +19,9 @@ package org.apache.spark.repl
|
|||
|
||||
import java.io.BufferedReader
|
||||
|
||||
// scalastyle:off println
|
||||
import scala.Predef.{println => _, _}
|
||||
// scalastyle:on println
|
||||
import scala.tools.nsc.Settings
|
||||
import scala.tools.nsc.interpreter.{ILoop, JPrintWriter}
|
||||
import scala.tools.nsc.util.stringFromStream
|
||||
|
@ -46,7 +48,8 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
|
|||
if (_sc.getConf.getBoolean("spark.ui.reverseProxy", false)) {
|
||||
val proxyUrl = _sc.getConf.get("spark.ui.reverseProxyUrl", null)
|
||||
if (proxyUrl != null) {
|
||||
println(s"Spark Context Web UI is available at ${proxyUrl}/proxy/${_sc.applicationId}")
|
||||
println(
|
||||
s"Spark Context Web UI is available at ${proxyUrl}/proxy/${_sc.applicationId}")
|
||||
} else {
|
||||
println(s"Spark Context Web UI is available at Spark Master Public URL")
|
||||
}
|
||||
|
|
|
@ -268,10 +268,7 @@ This file is divided into 3 sections:
|
|||
</check>
|
||||
|
||||
<!-- SPARK-16877: Avoid Java annotations -->
|
||||
<check customId="OverrideJavaCase" level="error" class="org.scalastyle.scalariform.TokenChecker" enabled="true">
|
||||
<parameters><parameter name="regex">^Override$</parameter></parameters>
|
||||
<customMessage>override modifier should be used instead of @java.lang.Override.</customMessage>
|
||||
</check>
|
||||
<check level="error" class="org.scalastyle.scalariform.OverrideJavaChecker" enabled="true"></check>
|
||||
|
||||
<check level="error" class="org.scalastyle.scalariform.DeprecatedJavaChecker" enabled="true"></check>
|
||||
|
||||
|
|
|
@ -23,7 +23,7 @@ import scala.collection.JavaConverters._
|
|||
import scala.reflect.ClassTag
|
||||
|
||||
import org.apache.spark.api.java.JavaRDDLike
|
||||
import org.apache.spark.streaming.api.java.{JavaDStreamLike, JavaDStream, JavaStreamingContext}
|
||||
import org.apache.spark.streaming.api.java.{JavaDStream, JavaDStreamLike, JavaStreamingContext}
|
||||
|
||||
/** Exposes streaming test functionality in a Java-friendly way. */
|
||||
trait JavaTestBase extends TestSuiteBase {
|
||||
|
@ -35,7 +35,7 @@ trait JavaTestBase extends TestSuiteBase {
|
|||
def attachTestInputStream[T](
|
||||
ssc: JavaStreamingContext,
|
||||
data: JList[JList[T]],
|
||||
numPartitions: Int) = {
|
||||
numPartitions: Int): JavaDStream[T] = {
|
||||
val seqData = data.asScala.map(_.asScala)
|
||||
|
||||
implicit val cm: ClassTag[T] =
|
||||
|
@ -47,9 +47,9 @@ trait JavaTestBase extends TestSuiteBase {
|
|||
/**
|
||||
* Attach a provided stream to its associated StreamingContext as a
|
||||
* [[org.apache.spark.streaming.TestOutputStream]].
|
||||
**/
|
||||
*/
|
||||
def attachTestOutputStream[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T, R]](
|
||||
dstream: JavaDStreamLike[T, This, R]) = {
|
||||
dstream: JavaDStreamLike[T, This, R]): Unit = {
|
||||
implicit val cm: ClassTag[T] =
|
||||
implicitly[ClassTag[AnyRef]].asInstanceOf[ClassTag[T]]
|
||||
val ostream = new TestOutputStreamWithPartitions(dstream.dstream)
|
||||
|
@ -90,10 +90,10 @@ trait JavaTestBase extends TestSuiteBase {
|
|||
}
|
||||
|
||||
object JavaTestUtils extends JavaTestBase {
|
||||
override def maxWaitTimeMillis = 20000
|
||||
override def maxWaitTimeMillis: Int = 20000
|
||||
|
||||
}
|
||||
|
||||
object JavaCheckpointTestUtils extends JavaTestBase {
|
||||
override def actuallyWait = true
|
||||
override def actuallyWait: Boolean = true
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue