[SPARK-21939][TEST] Use TimeLimits instead of Timeouts

Since ScalaTest 3.0.0, `org.scalatest.concurrent.Timeouts` has been deprecated.
This PR replaces the deprecated trait with `org.scalatest.concurrent.TimeLimits`:

```diff
-import org.scalatest.concurrent.Timeouts._
+import org.scalatest.concurrent.TimeLimits._
```
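
For reference, here is a minimal sketch of a suite written against the renamed trait (the suite and test names are hypothetical, not taken from this commit). In ScalaTest 3.0, `failAfter` is provided by `TimeLimits` and resolves an implicit `Signaler` that defaults to `DoNotSignal`:

```scala
import org.scalatest.FunSuite
import org.scalatest.concurrent.TimeLimits
import org.scalatest.time.SpanSugar._

// Hypothetical example suite, not part of this commit.
class ExampleSuite extends FunSuite with TimeLimits {
  test("finishes within the time limit") {
    // Throws TestFailedDueToTimeoutException if the block exceeds 10 seconds.
    failAfter(10.seconds) {
      // code under test
    }
  }
}
```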

Tested by running the existing test suites.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #19150 from dongjoon-hyun/SPARK-21939.

Change-Id: I1a1b07f1b97e51e2263dfb34b7eaaa099b2ded5e
Authored by Dongjoon Hyun on 2017-09-08 09:31:13 +08:00; committed by jerryshao
parent e00f1a1da1
commit c26976fe14
13 changed files with 24 additions and 21 deletions


@@ -18,7 +18,7 @@
package org.apache.spark
import org.scalatest.Matchers
-import org.scalatest.concurrent.Timeouts._
+import org.scalatest.concurrent.TimeLimits._
import org.scalatest.time.{Millis, Span}
import org.apache.spark.security.EncryptionFunSuite


@@ -19,13 +19,13 @@ package org.apache.spark
import java.io.File
-import org.scalatest.concurrent.Timeouts
+import org.scalatest.concurrent.TimeLimits
import org.scalatest.prop.TableDrivenPropertyChecks._
import org.scalatest.time.SpanSugar._
import org.apache.spark.util.Utils
-class DriverSuite extends SparkFunSuite with Timeouts {
+class DriverSuite extends SparkFunSuite with TimeLimits {
ignore("driver should exit after finishing without cleanup (SPARK-530)") {
val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))


@@ -17,7 +17,7 @@
package org.apache.spark
-import org.scalatest.concurrent.Timeouts._
+import org.scalatest.concurrent.TimeLimits._
import org.scalatest.time.{Millis, Span}
class UnpersistSuite extends SparkFunSuite with LocalSparkContext {


@@ -31,7 +31,7 @@ import org.apache.commons.io.FileUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, FSDataInputStream, Path}
import org.scalatest.{BeforeAndAfterEach, Matchers}
-import org.scalatest.concurrent.Timeouts
+import org.scalatest.concurrent.TimeLimits
import org.scalatest.time.SpanSugar._
import org.apache.spark._
@@ -97,7 +97,7 @@ class SparkSubmitSuite
with Matchers
with BeforeAndAfterEach
with ResetSystemProperties
-with Timeouts
+with TimeLimits
with TestPrematureExit {
override def beforeEach() {


@@ -24,13 +24,13 @@ import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import org.scalatest.BeforeAndAfterAll
-import org.scalatest.concurrent.Timeouts
+import org.scalatest.concurrent.TimeLimits
import org.scalatest.time.SpanSugar._
import org.apache.spark._
import org.apache.spark.util.ThreadUtils
-class AsyncRDDActionsSuite extends SparkFunSuite with BeforeAndAfterAll with Timeouts {
+class AsyncRDDActionsSuite extends SparkFunSuite with BeforeAndAfterAll with TimeLimits {
@transient private var sc: SparkContext = _


@@ -25,7 +25,7 @@ import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet, Map}
import scala.language.reflectiveCalls
import scala.util.control.NonFatal
-import org.scalatest.concurrent.Timeouts
+import org.scalatest.concurrent.TimeLimits
import org.scalatest.time.SpanSugar._
import org.apache.spark._
@@ -98,7 +98,7 @@ class MyRDD(
class DAGSchedulerSuiteDummyException extends Exception
-class DAGSchedulerSuite extends SparkFunSuite with LocalSparkContext with Timeouts {
+class DAGSchedulerSuite extends SparkFunSuite with LocalSparkContext with TimeLimits {
import DAGSchedulerSuite._


@@ -18,7 +18,7 @@
package org.apache.spark.scheduler
import org.apache.hadoop.mapred.{FileOutputCommitter, TaskAttemptContext}
-import org.scalatest.concurrent.Timeouts
+import org.scalatest.concurrent.TimeLimits
import org.scalatest.time.{Seconds, Span}
import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite, TaskContext}
@@ -32,7 +32,7 @@ import org.apache.spark.util.Utils
class OutputCommitCoordinatorIntegrationSuite
extends SparkFunSuite
with LocalSparkContext
-with Timeouts {
+with TimeLimits {
override def beforeAll(): Unit = {
super.beforeAll()


@@ -33,7 +33,7 @@ import org.mockito.{Matchers => mc}
import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest._
import org.scalatest.concurrent.Eventually._
-import org.scalatest.concurrent.Timeouts._
+import org.scalatest.concurrent.TimeLimits._
import org.apache.spark._
import org.apache.spark.broadcast.BroadcastManager


@@ -24,11 +24,11 @@ import scala.concurrent.duration._
import scala.language.postfixOps
import org.scalatest.concurrent.Eventually._
-import org.scalatest.concurrent.Timeouts
+import org.scalatest.concurrent.TimeLimits
import org.apache.spark.SparkFunSuite
-class EventLoopSuite extends SparkFunSuite with Timeouts {
+class EventLoopSuite extends SparkFunSuite with TimeLimits {
test("EventLoop") {
val buffer = new ConcurrentLinkedQueue[Int]


@@ -24,7 +24,7 @@ import scala.collection.mutable
import org.eclipse.jetty.util.ConcurrentHashSet
import org.scalatest.concurrent.Eventually
import org.scalatest.concurrent.PatienceConfiguration.Timeout
-import org.scalatest.concurrent.Timeouts._
+import org.scalatest.concurrent.TimeLimits._
import org.scalatest.time.SpanSugar._
import org.apache.spark.SparkFunSuite


@@ -27,7 +27,7 @@ import scala.util.Random
import scala.util.control.NonFatal
import org.scalatest.{Assertions, BeforeAndAfterAll}
-import org.scalatest.concurrent.{Eventually, Timeouts}
+import org.scalatest.concurrent.{Eventually, Signaler, ThreadSignaler, TimeLimits}
import org.scalatest.concurrent.PatienceConfiguration.Timeout
import org.scalatest.exceptions.TestFailedDueToTimeoutException
import org.scalatest.time.Span
@@ -67,8 +67,9 @@ import org.apache.spark.util.{Clock, SystemClock, Utils}
* avoid hanging forever in the case of failures. However, individual suites can change this
* by overriding `streamingTimeout`.
*/
-trait StreamTest extends QueryTest with SharedSQLContext with Timeouts with BeforeAndAfterAll {
+trait StreamTest extends QueryTest with SharedSQLContext with TimeLimits with BeforeAndAfterAll {
+implicit val defaultSignaler: Signaler = ThreadSignaler
override def afterAll(): Unit = {
super.afterAll()
StateStore.stop() // stop the state store maintenance thread and unload store providers
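
A note on the `defaultSignaler` line added above (my reading of the ScalaTest 3.0 API, not wording from the commit): the deprecated `Timeouts.failAfter` interrupted the test thread by default, while `TimeLimits.failAfter` defaults to `DoNotSignal`, so a blocked test body would have to finish on its own before the timeout could be reported. Declaring an implicit `ThreadSignaler` restores the interrupting behavior, as this hypothetical sketch illustrates:

```scala
import org.scalatest.FunSuite
import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits}
import org.scalatest.exceptions.TestFailedDueToTimeoutException
import org.scalatest.time.SpanSugar._

// Hypothetical suite, not part of this commit.
class SignalerDemoSuite extends FunSuite with TimeLimits {
  // With the default DoNotSignal, failAfter could not return until the
  // blocked call below finished on its own; ThreadSignaler interrupts it.
  implicit val defaultSignaler: Signaler = ThreadSignaler

  test("a blocked call is interrupted at the time limit") {
    intercept[TestFailedDueToTimeoutException] {
      failAfter(100.millis) {
        Thread.sleep(60 * 1000) // interrupted by ThreadSignaler
      }
    }
  }
}
```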


@@ -26,7 +26,7 @@ import scala.tools.nsc.Properties
import org.apache.hadoop.fs.Path
import org.scalatest.{BeforeAndAfterEach, Matchers}
-import org.scalatest.concurrent.Timeouts
+import org.scalatest.concurrent.TimeLimits
import org.scalatest.exceptions.TestFailedDueToTimeoutException
import org.scalatest.time.SpanSugar._
@@ -50,7 +50,7 @@ class HiveSparkSubmitSuite
with Matchers
with BeforeAndAfterEach
with ResetSystemProperties
-with Timeouts {
+with TimeLimits {
// TODO: rewrite these or mark them as slow tests to be run sparingly


@@ -24,8 +24,9 @@ import scala.collection.mutable
import org.scalatest.BeforeAndAfter
import org.scalatest.Matchers._
+import org.scalatest.concurrent.{Signaler, ThreadSignaler}
import org.scalatest.concurrent.Eventually._
-import org.scalatest.concurrent.Timeouts._
+import org.scalatest.concurrent.TimeLimits._
import org.scalatest.time.SpanSugar._
import org.apache.spark.{SparkConf, SparkException, SparkFunSuite}
@@ -34,6 +35,7 @@ import org.apache.spark.util.ManualClock
class BlockGeneratorSuite extends SparkFunSuite with BeforeAndAfter {
+implicit val defaultSignaler: Signaler = ThreadSignaler
private val blockIntervalMs = 10
private val conf = new SparkConf().set("spark.streaming.blockInterval", s"${blockIntervalMs}ms")
@volatile private var blockGenerator: BlockGenerator = null