[SPARK-7558] Demarcate tests in unit-tests.log

Right now `unit-tests.log` is not of much value because we can't easily tell where the test boundaries are. This patch adds log statements before and after each test to outline the test boundaries, e.g.:

```
===== TEST OUTPUT FOR o.a.s.serializer.KryoSerializerSuite: 'kryo with parallelize for primitive arrays' =====

15/05/27 12:36:39.596 pool-1-thread-1-ScalaTest-running-KryoSerializerSuite INFO SparkContext: Starting job: count at KryoSerializerSuite.scala:230
15/05/27 12:36:39.596 dag-scheduler-event-loop INFO DAGScheduler: Got job 3 (count at KryoSerializerSuite.scala:230) with 4 output partitions (allowLocal=false)
15/05/27 12:36:39.596 dag-scheduler-event-loop INFO DAGScheduler: Final stage: ResultStage 3(count at KryoSerializerSuite.scala:230)
15/05/27 12:36:39.596 dag-scheduler-event-loop INFO DAGScheduler: Parents of final stage: List()
15/05/27 12:36:39.597 dag-scheduler-event-loop INFO DAGScheduler: Missing parents: List()
15/05/27 12:36:39.597 dag-scheduler-event-loop INFO DAGScheduler: Submitting ResultStage 3 (ParallelCollectionRDD[5] at parallelize at KryoSerializerSuite.scala:230), which has no missing parents

...

15/05/27 12:36:39.624 pool-1-thread-1-ScalaTest-running-KryoSerializerSuite INFO DAGScheduler: Job 3 finished: count at KryoSerializerSuite.scala:230, took 0.028563 s
15/05/27 12:36:39.625 pool-1-thread-1-ScalaTest-running-KryoSerializerSuite INFO KryoSerializerSuite:

***** FINISHED o.a.s.serializer.KryoSerializerSuite: 'kryo with parallelize for primitive arrays' *****

...
```
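
The mechanism is small: a new `SparkFunSuite` base class overrides ScalaTest's `withFixture` to emit these banners, and every test suite extends it instead of `FunSuite`. The full class and the per-suite changes appear in the diff below; a condensed sketch:

```scala
package org.apache.spark

import org.scalatest.{FunSuite, Outcome}

// Condensed from the SparkFunSuite introduced by this patch: wrap every test
// body in log banners so unit-tests.log shows where each test starts and ends.
private[spark] abstract class SparkFunSuite extends FunSuite with Logging {
  final protected override def withFixture(test: NoArgTest): Outcome = {
    val shortSuiteName = this.getClass.getName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '${test.text}' =====\n")
      test() // run the actual test body
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '${test.text}' =====\n")
    }
  }
}

// Existing suites then switch their parent class, e.g.:
// class AccumulatorSuite extends SparkFunSuite with Matchers with LocalSparkContext
```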

Author: Andrew Or <andrew@databricks.com>

Closes #6441 from andrewor14/demarcate-tests and squashes the following commits:

879b060 [Andrew Or] Fix compile after rebase
d622af7 [Andrew Or] Merge branch 'master' of github.com:apache/spark into demarcate-tests
017c8ba [Andrew Or] Merge branch 'master' of github.com:apache/spark into demarcate-tests
7790b6c [Andrew Or] Fix tests after logical merge conflict
c7460c0 [Andrew Or] Merge branch 'master' of github.com:apache/spark into demarcate-tests
c43ffc4 [Andrew Or] Fix tests?
8882581 [Andrew Or] Fix tests
ee22cda [Andrew Or] Fix log message
fa9450e [Andrew Or] Merge branch 'master' of github.com:apache/spark into demarcate-tests
12d1e1b [Andrew Or] Various whitespace changes (minor)
69cbb24 [Andrew Or] Make all test suites extend SparkFunSuite instead of FunSuite
bbce12e [Andrew Or] Fix manual things that cannot be covered through automation
da0b12f [Andrew Or] Add core tests as dependencies in all modules
f7d29ce [Andrew Or] Introduce base abstract class for all test suites
Committed by Andrew Or on 2015-05-29 14:03:12 -07:00
commit 9eb222c139
parent 94f62a4979
364 changed files with 953 additions and 968 deletions

View file

@@ -40,6 +40,13 @@
       <artifactId>spark-core_${scala.binary.version}</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-core_${scala.binary.version}</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.scalacheck</groupId>
       <artifactId>scalacheck_${scala.binary.version}</artifactId>

View file

@@ -17,7 +17,7 @@
 package org.apache.spark.bagel
-import org.scalatest.{BeforeAndAfter, FunSuite, Assertions}
+import org.scalatest.{BeforeAndAfter, Assertions}
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.time.SpanSugar._
@@ -27,7 +27,7 @@ import org.apache.spark.storage.StorageLevel
 class TestVertex(val active: Boolean, val age: Int) extends Vertex with Serializable
 class TestMessage(val targetId: String) extends Message[String] with Serializable
-class BagelSuite extends FunSuite with Assertions with BeforeAndAfter with Timeouts {
+class BagelSuite extends SparkFunSuite with Assertions with BeforeAndAfter with Timeouts {
   var sc: SparkContext = _

View file

@@ -338,6 +338,12 @@
     <dependency>
       <groupId>org.seleniumhq.selenium</groupId>
       <artifactId>selenium-java</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
+      </exclusions>
       <scope>test</scope>
     </dependency>
     <!-- Added for selenium: -->

View file

@@ -20,11 +20,10 @@ package org.apache.spark
 import scala.collection.mutable
 import scala.ref.WeakReference
-import org.scalatest.FunSuite
 import org.scalatest.Matchers
-class AccumulatorSuite extends FunSuite with Matchers with LocalSparkContext {
+class AccumulatorSuite extends SparkFunSuite with Matchers with LocalSparkContext {
   implicit def setAccum[A]: AccumulableParam[mutable.Set[A], A] =

View file

@@ -18,7 +18,7 @@
 package org.apache.spark
 import org.mockito.Mockito._
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 import org.scalatest.mock.MockitoSugar
 import org.apache.spark.executor.DataReadMethod
@@ -26,7 +26,7 @@ import org.apache.spark.rdd.RDD
 import org.apache.spark.storage._
 // TODO: Test the CacheManager's thread-safety aspects
-class CacheManagerSuite extends FunSuite with LocalSparkContext with BeforeAndAfter
+class CacheManagerSuite extends SparkFunSuite with LocalSparkContext with BeforeAndAfter
   with MockitoSugar {
   var blockManager: BlockManager = _

View file

@@ -21,13 +21,11 @@ import java.io.File
 import scala.reflect.ClassTag
-import org.scalatest.FunSuite
 import org.apache.spark.rdd._
 import org.apache.spark.storage.{BlockId, StorageLevel, TestBlockId}
 import org.apache.spark.util.Utils
-class CheckpointSuite extends FunSuite with LocalSparkContext with Logging {
+class CheckpointSuite extends SparkFunSuite with LocalSparkContext with Logging {
   var checkpointDir: File = _
   val partitioner = new HashPartitioner(2)

View file

@@ -23,7 +23,7 @@ import scala.collection.mutable.{HashSet, SynchronizedSet}
 import scala.language.existentials
 import scala.util.Random
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 import org.scalatest.concurrent.{PatienceConfiguration, Eventually}
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._
@@ -44,7 +44,7 @@ import org.apache.spark.storage.ShuffleIndexBlockId
  * config options, in particular, a different shuffle manager class
  */
 abstract class ContextCleanerSuiteBase(val shuffleManager: Class[_] = classOf[HashShuffleManager])
-  extends FunSuite with BeforeAndAfter with LocalSparkContext
+  extends SparkFunSuite with BeforeAndAfter with LocalSparkContext
 {
   implicit val defaultTimeout = timeout(10000 millis)
   val conf = new SparkConf()

View file

@@ -17,7 +17,6 @@
 package org.apache.spark
-import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts._
 import org.scalatest.Matchers
 import org.scalatest.time.{Millis, Span}
@@ -28,7 +27,7 @@ class NotSerializableClass
 class NotSerializableExn(val notSer: NotSerializableClass) extends Throwable() {}
-class DistributedSuite extends FunSuite with Matchers with LocalSparkContext {
+class DistributedSuite extends SparkFunSuite with Matchers with LocalSparkContext {
   val clusterUrl = "local-cluster[2,1,512]"

View file

@@ -19,14 +19,13 @@ package org.apache.spark
 import java.io.File
-import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.prop.TableDrivenPropertyChecks._
 import org.scalatest.time.SpanSugar._
 import org.apache.spark.util.Utils
-class DriverSuite extends FunSuite with Timeouts {
+class DriverSuite extends SparkFunSuite with Timeouts {
   ignore("driver should exit after finishing without cleanup (SPARK-530)") {
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))

View file

@@ -19,7 +19,7 @@ package org.apache.spark
 import scala.collection.mutable
-import org.scalatest.{BeforeAndAfter, FunSuite, PrivateMethodTester}
+import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
 import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.scheduler._
 import org.apache.spark.scheduler.cluster.ExecutorInfo
@@ -28,7 +28,11 @@ import org.apache.spark.util.ManualClock
 /**
  * Test add and remove behavior of ExecutorAllocationManager.
  */
-class ExecutorAllocationManagerSuite extends FunSuite with LocalSparkContext with BeforeAndAfter {
+class ExecutorAllocationManagerSuite
+  extends SparkFunSuite
+  with LocalSparkContext
+  with BeforeAndAfter {
   import ExecutorAllocationManager._
   import ExecutorAllocationManagerSuite._

View file

@@ -17,8 +17,6 @@
 package org.apache.spark
-import org.scalatest.FunSuite
 import org.apache.spark.util.NonSerializable
 import java.io.NotSerializableException
@@ -38,7 +36,7 @@ object FailureSuiteState {
   }
 }
-class FailureSuite extends FunSuite with LocalSparkContext {
+class FailureSuite extends SparkFunSuite with LocalSparkContext {
   // Run a 3-task map job in which task 1 deterministically fails once, and check
   // whether the job completes successfully and we ran 4 tasks in total.

View file

@@ -24,13 +24,12 @@ import javax.net.ssl.SSLException
 import com.google.common.io.{ByteStreams, Files}
 import org.apache.commons.lang3.RandomUtils
-import org.scalatest.FunSuite
 import org.apache.spark.util.Utils
 import SSLSampleConfigs._
-class FileServerSuite extends FunSuite with LocalSparkContext {
+class FileServerSuite extends SparkFunSuite with LocalSparkContext {
   @transient var tmpDir: File = _
   @transient var tmpFile: File = _

View file

@@ -30,12 +30,11 @@ import org.apache.hadoop.mapred.{JobConf, FileAlreadyExistsException, FileSplit,
 import org.apache.hadoop.mapreduce.Job
 import org.apache.hadoop.mapreduce.lib.input.{FileSplit => NewFileSplit, TextInputFormat => NewTextInputFormat}
 import org.apache.hadoop.mapreduce.lib.output.{TextOutputFormat => NewTextOutputFormat}
-import org.scalatest.FunSuite
 import org.apache.spark.rdd.{NewHadoopRDD, HadoopRDD}
 import org.apache.spark.util.Utils
-class FileSuite extends FunSuite with LocalSparkContext {
+class FileSuite extends SparkFunSuite with LocalSparkContext {
   var tempDir: File = _
   override def beforeEach() {

View file

@@ -20,10 +20,14 @@ package org.apache.spark
 import scala.concurrent.Await
 import scala.concurrent.duration.Duration
-import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+import org.scalatest.{BeforeAndAfter, Matchers}
-class FutureActionSuite extends FunSuite with BeforeAndAfter with Matchers with LocalSparkContext {
+class FutureActionSuite
+  extends SparkFunSuite
+  with BeforeAndAfter
+  with Matchers
+  with LocalSparkContext {
   before {
     sc = new SparkContext("local", "FutureActionSuite")

View file

@@ -22,7 +22,6 @@ import scala.language.postfixOps
 import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.storage.BlockManagerId
-import org.scalatest.FunSuite
 import org.mockito.Mockito.{mock, spy, verify, when}
 import org.mockito.Matchers
 import org.mockito.Matchers._
@@ -31,7 +30,7 @@ import org.apache.spark.scheduler.TaskScheduler
 import org.apache.spark.util.RpcUtils
 import org.scalatest.concurrent.Eventually._
-class HeartbeatReceiverSuite extends FunSuite with LocalSparkContext {
+class HeartbeatReceiverSuite extends SparkFunSuite with LocalSparkContext {
   test("HeartbeatReceiver") {
     sc = spy(new SparkContext("local[2]", "test"))

View file

@@ -17,11 +17,9 @@
 package org.apache.spark
-import org.scalatest.FunSuite
 import org.apache.spark.rdd.RDD
-class ImplicitOrderingSuite extends FunSuite with LocalSparkContext {
+class ImplicitOrderingSuite extends SparkFunSuite with LocalSparkContext {
   // Tests that PairRDDFunctions grabs an implicit Ordering in various cases where it should.
   test("basic inference of Orderings"){
     sc = new SparkContext("local", "test")

View file

@@ -24,7 +24,7 @@ import scala.concurrent.ExecutionContext.Implicits.global
 import scala.concurrent.duration._
 import scala.concurrent.future
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 import org.scalatest.Matchers
 import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskStart}
@@ -34,7 +34,7 @@ import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskStart}
  * (e.g. count) as well as multi-job action (e.g. take). We test the local and cluster schedulers
  * in both FIFO and fair scheduling modes.
  */
-class JobCancellationSuite extends FunSuite with Matchers with BeforeAndAfter
+class JobCancellationSuite extends SparkFunSuite with Matchers with BeforeAndAfter
   with LocalSparkContext {
   override def afterEach() {

View file

@@ -19,14 +19,13 @@ package org.apache.spark
 import org.mockito.Mockito._
 import org.mockito.Matchers.{any, isA}
-import org.scalatest.FunSuite
 import org.apache.spark.rpc.{RpcAddress, RpcEndpointRef, RpcCallContext, RpcEnv}
 import org.apache.spark.scheduler.{CompressedMapStatus, MapStatus}
 import org.apache.spark.shuffle.FetchFailedException
 import org.apache.spark.storage.BlockManagerId
-class MapOutputTrackerSuite extends FunSuite {
+class MapOutputTrackerSuite extends SparkFunSuite {
   private val conf = new SparkConf
   def createRpcEnv(name: String, host: String = "localhost", port: Int = 0,

View file

@@ -20,12 +20,12 @@ package org.apache.spark
 import scala.collection.mutable.ArrayBuffer
 import scala.math.abs
-import org.scalatest.{FunSuite, PrivateMethodTester}
+import org.scalatest.PrivateMethodTester
 import org.apache.spark.rdd.RDD
 import org.apache.spark.util.StatCounter
-class PartitioningSuite extends FunSuite with SharedSparkContext with PrivateMethodTester {
+class PartitioningSuite extends SparkFunSuite with SharedSparkContext with PrivateMethodTester {
   test("HashPartitioner equality") {
     val p2 = new HashPartitioner(2)

View file

@@ -21,9 +21,9 @@ import java.io.File
 import com.google.common.io.Files
 import org.apache.spark.util.Utils
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.BeforeAndAfterAll
-class SSLOptionsSuite extends FunSuite with BeforeAndAfterAll {
+class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
   test("test resolving property file as spark conf ") {
     val keyStorePath = new File(this.getClass.getResource("/keystore").toURI).getAbsolutePath

View file

@@ -19,11 +19,9 @@ package org.apache.spark
 import java.io.File
-import org.scalatest.FunSuite
 import org.apache.spark.util.Utils
-class SecurityManagerSuite extends FunSuite {
+class SecurityManagerSuite extends SparkFunSuite {
   test("set security with conf") {
     val conf = new SparkConf

View file

@@ -17,7 +17,6 @@
 package org.apache.spark
-import org.scalatest.FunSuite
 import org.scalatest.Matchers
 import org.apache.spark.ShuffleSuite.NonJavaSerializableClass
@@ -26,7 +25,7 @@ import org.apache.spark.serializer.KryoSerializer
 import org.apache.spark.storage.{ShuffleDataBlockId, ShuffleBlockId}
 import org.apache.spark.util.MutablePair
-abstract class ShuffleSuite extends FunSuite with Matchers with LocalSparkContext {
+abstract class ShuffleSuite extends SparkFunSuite with Matchers with LocalSparkContext {
   val conf = new SparkConf(loadDefaults = false)

View file

@@ -23,13 +23,12 @@ import scala.concurrent.duration._
 import scala.language.postfixOps
 import scala.util.{Try, Random}
-import org.scalatest.FunSuite
 import org.apache.spark.network.util.ByteUnit
 import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}
 import org.apache.spark.util.{RpcUtils, ResetSystemProperties}
 import com.esotericsoftware.kryo.Kryo
-class SparkConfSuite extends FunSuite with LocalSparkContext with ResetSystemProperties {
+class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSystemProperties {
   test("Test byteString conversion") {
     val conf = new SparkConf()
     // Simply exercise the API, we don't need a complete conversion test since that's handled in

View file

@@ -17,10 +17,10 @@
 package org.apache.spark
-import org.scalatest.{Assertions, FunSuite}
+import org.scalatest.Assertions
 import org.apache.spark.storage.StorageLevel
-class SparkContextInfoSuite extends FunSuite with LocalSparkContext {
+class SparkContextInfoSuite extends SparkFunSuite with LocalSparkContext {
   test("getPersistentRDDs only returns RDDs that are marked as cached") {
     sc = new SparkContext("local", "test")
     assert(sc.getPersistentRDDs.isEmpty === true)

View file

@@ -17,7 +17,7 @@
 package org.apache.spark
-import org.scalatest.{FunSuite, PrivateMethodTester}
+import org.scalatest.PrivateMethodTester
 import org.apache.spark.scheduler.{SchedulerBackend, TaskScheduler, TaskSchedulerImpl}
 import org.apache.spark.scheduler.cluster.{SimrSchedulerBackend, SparkDeploySchedulerBackend}
@@ -25,7 +25,7 @@ import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, Me
 import org.apache.spark.scheduler.local.LocalBackend
 class SparkContextSchedulerCreationSuite
-  extends FunSuite with LocalSparkContext with PrivateMethodTester with Logging {
+  extends SparkFunSuite with LocalSparkContext with PrivateMethodTester with Logging {
   def createTaskScheduler(master: String): TaskSchedulerImpl =
     createTaskScheduler(master, new SparkConf())

View file

@@ -23,8 +23,6 @@ import java.util.concurrent.TimeUnit
 import com.google.common.base.Charsets._
 import com.google.common.io.Files
-import org.scalatest.FunSuite
 import org.apache.hadoop.io.{BytesWritable, LongWritable, Text}
 import org.apache.hadoop.mapred.TextInputFormat
 import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
@@ -33,7 +31,7 @@ import org.apache.spark.util.Utils
 import scala.concurrent.Await
 import scala.concurrent.duration.Duration
-class SparkContextSuite extends FunSuite with LocalSparkContext {
+class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
   test("Only one SparkContext may be active at a time") {
     // Regression test for SPARK-4180

View file

@@ -0,0 +1,46 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
 */

package org.apache.spark

import org.scalatest.{FunSuite, Outcome}

/**
* Base abstract class for all unit tests in Spark for handling common functionality.
*/
private[spark] abstract class SparkFunSuite extends FunSuite with Logging {
/**
* Log the suite name and the test name before and after each test.
*
* Subclasses should never override this method. If they wish to run
* custom code before and after each test, they should mix in
* the {{org.scalatest.BeforeAndAfter}} trait instead.
*/
final protected override def withFixture(test: NoArgTest): Outcome = {
val testName = test.text
val suiteName = this.getClass.getName
val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
try {
logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
test()
} finally {
logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
}
}
}
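
Because `withFixture` is declared `final` here, suites keep using ScalaTest's own hooks for per-test setup, as the comment above notes. A minimal sketch of how a migrated suite combines the two (`ExampleSuite` is a hypothetical name; the pattern mirrors the suites changed in this commit):

```scala
package org.apache.spark

import org.scalatest.BeforeAndAfter

// Hypothetical suite: per-test setup/teardown goes through BeforeAndAfter
// rather than overriding the final withFixture; the log banners still wrap it.
class ExampleSuite extends SparkFunSuite with BeforeAndAfter with LocalSparkContext {

  before {
    sc = new SparkContext("local", "ExampleSuite")
  }

  test("spark context is usable") {
    assert(sc.parallelize(1 to 10).count() === 10L)
  }
}
```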

View file

@@ -21,12 +21,12 @@ import scala.concurrent.duration._
 import scala.language.implicitConversions
 import scala.language.postfixOps
-import org.scalatest.{Matchers, FunSuite}
+import org.scalatest.Matchers
 import org.scalatest.concurrent.Eventually._
 import org.apache.spark.JobExecutionStatus._
-class StatusTrackerSuite extends FunSuite with Matchers with LocalSparkContext {
+class StatusTrackerSuite extends SparkFunSuite with Matchers with LocalSparkContext {
   test("basic status API usage") {
     sc = new SparkContext("local", "test", new SparkConf(false))

View file

@@ -22,7 +22,6 @@ import java.util.concurrent.atomic.AtomicBoolean
 import java.util.concurrent.atomic.AtomicInteger
 import org.apache.spark.scheduler._
-import org.scalatest.FunSuite
 /**
  * Holds state shared across task threads in some ThreadingSuite tests.
@@ -37,7 +36,7 @@ object ThreadingSuiteState {
   }
 }
-class ThreadingSuite extends FunSuite with LocalSparkContext {
+class ThreadingSuite extends SparkFunSuite with LocalSparkContext {
   test("accessing SparkContext form a different thread") {
     sc = new SparkContext("local", "test")

View file

@@ -17,11 +17,10 @@
 package org.apache.spark
-import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts._
 import org.scalatest.time.{Millis, Span}
-class UnpersistSuite extends FunSuite with LocalSparkContext {
+class UnpersistSuite extends SparkFunSuite with LocalSparkContext {
   test("unpersist RDD") {
     sc = new SparkContext("local", "test")
     val rdd = sc.makeRDD(Array(1, 2, 3, 4), 2).cache()

View file

@@ -21,15 +21,15 @@ import scala.io.Source
 import java.io.{PrintWriter, File}
-import org.scalatest.{Matchers, FunSuite}
+import org.scalatest.Matchers
-import org.apache.spark.{SharedSparkContext, SparkConf}
+import org.apache.spark.{SharedSparkContext, SparkConf, SparkFunSuite}
 import org.apache.spark.serializer.KryoSerializer
 import org.apache.spark.util.Utils
 // This test suite uses SharedSparkContext because we need a SparkEnv in order to deserialize
 // a PythonBroadcast:
-class PythonBroadcastSuite extends FunSuite with Matchers with SharedSparkContext {
+class PythonBroadcastSuite extends SparkFunSuite with Matchers with SharedSparkContext {
   test("PythonBroadcast can be serialized with Kryo (SPARK-4882)") {
     val tempDir = Utils.createTempDir()
     val broadcastedString = "Hello, world!"

View file

@@ -19,9 +19,9 @@ package org.apache.spark.api.python
 import java.io.{ByteArrayOutputStream, DataOutputStream}
-import org.scalatest.FunSuite
+import org.apache.spark.SparkFunSuite
-class PythonRDDSuite extends FunSuite {
+class PythonRDDSuite extends SparkFunSuite {
   test("Writing large strings to the worker") {
     val input: List[String] = List("a"*100000)

View file

@@ -17,11 +17,9 @@
 package org.apache.spark.api.python
-import org.scalatest.FunSuite
-import org.apache.spark.SharedSparkContext
+import org.apache.spark.{SharedSparkContext, SparkFunSuite}
-class SerDeUtilSuite extends FunSuite with SharedSparkContext {
+class SerDeUtilSuite extends SparkFunSuite with SharedSparkContext {
   test("Converting an empty pair RDD to python does not throw an exception (SPARK-5441)") {
     val emptyRdd = sc.makeRDD(Seq[(Any, Any)]())

View file

@@ -20,10 +20,10 @@ package org.apache.spark.broadcast
 import scala.concurrent.duration._
 import scala.util.Random
-import org.scalatest.{Assertions, FunSuite}
+import org.scalatest.Assertions
 import org.scalatest.concurrent.Eventually._
-import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkException, SparkEnv}
+import org.apache.spark._
 import org.apache.spark.io.SnappyCompressionCodec
 import org.apache.spark.rdd.RDD
 import org.apache.spark.serializer.JavaSerializer
@@ -45,7 +45,7 @@ class DummyBroadcastClass(rdd: RDD[Int]) extends Serializable {
   }
 }
-class BroadcastSuite extends FunSuite with LocalSparkContext {
+class BroadcastSuite extends SparkFunSuite with LocalSparkContext {
   private val httpConf = broadcastConf("HttpBroadcastFactory")
   private val torrentConf = broadcastConf("TorrentBroadcastFactory")

View file

@@ -17,10 +17,11 @@
 package org.apache.spark.deploy
-import org.scalatest.FunSuite
 import org.scalatest.Matchers
+import org.apache.spark.SparkFunSuite
-class ClientSuite extends FunSuite with Matchers {
+class ClientSuite extends SparkFunSuite with Matchers {
   test("correctly validates driver jar URL's") {
     ClientArguments.isValidJarUrl("http://someHost:8080/foo.jar") should be (true)
     ClientArguments.isValidJarUrl("https://someHost:8080/foo.jar") should be (true)

View file

@@ -23,14 +23,13 @@ import java.util.Date
 import com.fasterxml.jackson.core.JsonParseException
 import org.json4s._
 import org.json4s.jackson.JsonMethods
-import org.scalatest.FunSuite
 import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
 import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, RecoveryState, WorkerInfo}
 import org.apache.spark.deploy.worker.{DriverRunner, ExecutorRunner}
-import org.apache.spark.{JsonTestUtils, SecurityManager, SparkConf}
+import org.apache.spark.{JsonTestUtils, SecurityManager, SparkConf, SparkFunSuite}
-class JsonProtocolSuite extends FunSuite with JsonTestUtils {
+class JsonProtocolSuite extends SparkFunSuite with JsonTestUtils {
   test("writeApplicationInfo") {
     val output = JsonProtocol.writeApplicationInfo(createAppInfo())

View file

@@ -23,13 +23,11 @@ import scala.collection.JavaConversions._
 import scala.collection.mutable
 import scala.io.Source
-import org.scalatest.FunSuite
 import org.apache.spark.scheduler.cluster.ExecutorInfo
 import org.apache.spark.scheduler.{SparkListenerExecutorAdded, SparkListener}
-import org.apache.spark.{SparkConf, SparkContext, LocalSparkContext}
+import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
-class LogUrlsStandaloneSuite extends FunSuite with LocalSparkContext {
+class LogUrlsStandaloneSuite extends SparkFunSuite with LocalSparkContext {
   /** Length of time to wait while draining listener events. */
   private val WAIT_TIMEOUT_MILLIS = 10000

View file

@@ -17,11 +17,10 @@
 package org.apache.spark.deploy
-import org.scalatest.FunSuite
+import org.apache.spark.SparkFunSuite
 import org.apache.spark.util.Utils
-class PythonRunnerSuite extends FunSuite {
+class PythonRunnerSuite extends SparkFunSuite {
   // Test formatting a single path to be added to the PYTHONPATH
   test("format path") {

View file

@@ -23,7 +23,6 @@ import scala.collection.mutable.ArrayBuffer
 import com.google.common.base.Charsets.UTF_8
 import com.google.common.io.ByteStreams
-import org.scalatest.FunSuite
 import org.scalatest.Matchers
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.time.SpanSugar._
@@ -35,7 +34,12 @@ import org.apache.spark.util.{ResetSystemProperties, Utils}
 // Note: this suite mixes in ResetSystemProperties because SparkSubmit.main() sets a bunch
 // of properties that neeed to be cleared after tests.
-class SparkSubmitSuite extends FunSuite with Matchers with ResetSystemProperties with Timeouts {
+class SparkSubmitSuite
+  extends SparkFunSuite
+  with Matchers
+  with ResetSystemProperties
+  with Timeouts {
   def beforeAll() {
     System.setProperty("spark.testing", "true")
   }

View file

@@ -20,15 +20,16 @@ package org.apache.spark.deploy
 import java.io.{File, PrintStream, OutputStream}
 import scala.collection.mutable.ArrayBuffer
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.BeforeAndAfterAll
 import org.apache.ivy.core.module.descriptor.MDArtifact
 import org.apache.ivy.core.settings.IvySettings
 import org.apache.ivy.plugins.resolver.IBiblioResolver
+import org.apache.spark.SparkFunSuite
 import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
-class SparkSubmitUtilsSuite extends FunSuite with BeforeAndAfterAll {
+class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
   private val noOpOutputStream = new OutputStream {
     def write(b: Int) = {}

View file

@@ -25,15 +25,15 @@ import scala.io.Source
 import org.apache.hadoop.fs.Path
 import org.json4s.jackson.JsonMethods._
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
 import org.scalatest.Matchers
-import org.apache.spark.{Logging, SparkConf}
+import org.apache.spark.{Logging, SparkConf, SparkFunSuite}
 import org.apache.spark.io._
 import org.apache.spark.scheduler._
 import org.apache.spark.util.{JsonProtocol, ManualClock, Utils}
-class FsHistoryProviderSuite extends FunSuite with BeforeAndAfter with Matchers with Logging {
+class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
   private var testDir: File = null

View file

@@ -22,10 +22,10 @@ import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
 import org.apache.commons.io.{FileUtils, IOUtils}
 import org.mockito.Mockito.when
-import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
+import org.scalatest.{BeforeAndAfter, Matchers}
 import org.scalatest.mock.MockitoSugar
-import org.apache.spark.{JsonTestUtils, SecurityManager, SparkConf}
+import org.apache.spark.{JsonTestUtils, SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.ui.SparkUI
@@ -39,7 +39,7 @@ import org.apache.spark.ui.SparkUI
  * expectations. However, in general this should be done with extreme caution, as the metrics
  * are considered part of Spark's public api.
  */
-class HistoryServerSuite extends FunSuite with BeforeAndAfter with Matchers with MockitoSugar
+class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers with MockitoSugar
   with JsonTestUtils {
   private val logDir = new File("src/test/resources/spark-events")

View file

@@ -27,14 +27,14 @@ import scala.language.postfixOps
 import akka.actor.Address
 import org.json4s._
 import org.json4s.jackson.JsonMethods._
-import org.scalatest.{FunSuite, Matchers}
+import org.scalatest.Matchers
 import org.scalatest.concurrent.Eventually
 import other.supplier.{CustomPersistenceEngine, CustomRecoveryModeFactory}
-import org.apache.spark.{SparkConf, SparkException}
+import org.apache.spark.{SparkConf, SparkException, SparkFunSuite}
 import org.apache.spark.deploy._
-class MasterSuite extends FunSuite with Matchers with Eventually {
+class MasterSuite extends SparkFunSuite with Matchers with Eventually {
   test("toAkkaUrl") {
     val conf = new SparkConf(loadDefaults = false)

View file

@@ -25,7 +25,7 @@ import scala.collection.mutable
 import akka.actor.{Actor, ActorRef, ActorSystem, Props}
 import com.google.common.base.Charsets
-import org.scalatest.{BeforeAndAfterEach, FunSuite}
+import org.scalatest.BeforeAndAfterEach
 import org.json4s.JsonAST._
 import org.json4s.jackson.JsonMethods._
@@ -38,7 +38,7 @@ import org.apache.spark.deploy.master.DriverState._
 /**
  * Tests for the REST application submission protocol used in standalone cluster mode.
  */
-class StandaloneRestSubmitSuite extends FunSuite with BeforeAndAfterEach {
+class StandaloneRestSubmitSuite extends SparkFunSuite with BeforeAndAfterEach {
   private var actorSystem: Option[ActorSystem] = None
   private var server: Option[RestSubmissionServer] = None

View file

@@ -21,14 +21,13 @@ import java.lang.Boolean
 import java.lang.Integer
 import org.json4s.jackson.JsonMethods._
-import org.scalatest.FunSuite
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
 /**
  * Tests for the REST application submission protocol.
  */
-class SubmitRestProtocolSuite extends FunSuite {
+class SubmitRestProtocolSuite extends SparkFunSuite {
   test("validate") {
     val request = new DummyRequest

View file

@@ -17,11 +17,12 @@
 package org.apache.spark.deploy.worker
+import org.apache.spark.SparkFunSuite
 import org.apache.spark.deploy.Command
 import org.apache.spark.util.Utils
-import org.scalatest.{FunSuite, Matchers}
+import org.scalatest.Matchers
-class CommandUtilsSuite extends FunSuite with Matchers {
+class CommandUtilsSuite extends SparkFunSuite with Matchers {
   test("set libraryPath correctly") {
     val appId = "12345-worker321-9876"

View file

@@ -23,13 +23,12 @@ import org.mockito.Mockito._
 import org.mockito.Matchers._
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
-import org.scalatest.FunSuite
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.deploy.{Command, DriverDescription}
 import org.apache.spark.util.Clock
-class DriverRunnerTest extends FunSuite {
+class DriverRunnerTest extends SparkFunSuite {
   private def createDriverRunner() = {
     val command = new Command("mainClass", Seq(), Map(), Seq(), Seq(), Seq())
     val driverDescription = new DriverDescription("jarUrl", 512, 1, true, command)

View file

@@ -21,12 +21,10 @@ import java.io.File
 import scala.collection.JavaConversions._
-import org.scalatest.FunSuite
 import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState}
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
-class ExecutorRunnerTest extends FunSuite {
+class ExecutorRunnerTest extends SparkFunSuite {
   test("command includes appId") {
     val appId = "12345-worker321-9876"
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))

View file

@@ -18,11 +18,10 @@
 package org.apache.spark.deploy.worker
-import org.apache.spark.SparkConf
-import org.scalatest.FunSuite
+import org.apache.spark.{SparkConf, SparkFunSuite}
-class WorkerArgumentsTest extends FunSuite {
+class WorkerArgumentsTest extends SparkFunSuite {
   test("Memory can't be set to 0 when cmd line args leave off M or G") {
     val conf = new SparkConf

View file

@@ -17,12 +17,12 @@
 package org.apache.spark.deploy.worker
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
 import org.apache.spark.deploy.Command
-import org.scalatest.{Matchers, FunSuite}
+import org.scalatest.Matchers
-class WorkerSuite extends FunSuite with Matchers {
+class WorkerSuite extends SparkFunSuite with Matchers {
   def cmd(javaOpts: String*): Command = {
     Command("", Seq.empty, Map.empty, Seq.empty, Seq.empty, Seq(javaOpts : _*))

View file

@@ -18,12 +18,11 @@
 package org.apache.spark.deploy.worker
 import akka.actor.AddressFromURIString
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
 import org.apache.spark.SecurityManager
 import org.apache.spark.rpc.{RpcAddress, RpcEnv}
-import org.scalatest.FunSuite
-class WorkerWatcherSuite extends FunSuite {
+class WorkerWatcherSuite extends SparkFunSuite {
   test("WorkerWatcher shuts down on valid disassociation") {
     val conf = new SparkConf()
     val rpcEnv = RpcEnv.create("test", "localhost", 12345, conf, new SecurityManager(conf))

View file

@@ -17,9 +17,9 @@
 package org.apache.spark.executor
-import org.scalatest.FunSuite
+import org.apache.spark.SparkFunSuite
-class TaskMetricsSuite extends FunSuite {
+class TaskMetricsSuite extends SparkFunSuite {
   test("[SPARK-5701] updateShuffleReadMetrics: ShuffleReadMetrics not added when no shuffle deps") {
     val taskMetrics = new TaskMetrics()
     taskMetrics.updateShuffleReadMetrics()

View file

@@ -24,11 +24,10 @@ import java.io.FileOutputStream
 import scala.collection.immutable.IndexedSeq
 import org.scalatest.BeforeAndAfterAll
-import org.scalatest.FunSuite
 import org.apache.hadoop.io.Text
-import org.apache.spark.{SparkConf, SparkContext}
+import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
 import org.apache.spark.util.Utils
 import org.apache.hadoop.io.compress.{DefaultCodec, CompressionCodecFactory, GzipCodec}
@@ -37,7 +36,7 @@ import org.apache.hadoop.io.compress.{DefaultCodec, CompressionCodecFactory, Gzi
  * [[org.apache.spark.input.WholeTextFileRecordReader WholeTextFileRecordReader]]. A temporary
  * directory is created as fake input. Temporal storage would be deleted in the end.
  */
-class WholeTextFileRecordReaderSuite extends FunSuite with BeforeAndAfterAll {
+class WholeTextFileRecordReaderSuite extends SparkFunSuite with BeforeAndAfterAll {
   private var sc: SparkContext = _
   private var factory: CompressionCodecFactory = _

View file

@@ -20,11 +20,10 @@ package org.apache.spark.io
 import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
 import com.google.common.io.ByteStreams
-import org.scalatest.FunSuite
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkFunSuite}
-class CompressionCodecSuite extends FunSuite {
+class CompressionCodecSuite extends SparkFunSuite {
   val conf = new SparkConf(false)
   def testCodec(codec: CompressionCodec) {

View file

@@ -36,14 +36,14 @@ import org.apache.hadoop.mapreduce.lib.input.{CombineFileInputFormat => NewCombi
 import org.apache.hadoop.mapreduce.lib.output.{TextOutputFormat => NewTextOutputFormat}
 import org.apache.hadoop.mapreduce.{TaskAttemptContext, InputSplit => NewInputSplit,
   RecordReader => NewRecordReader}
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
-import org.apache.spark.SharedSparkContext
+import org.apache.spark.{SharedSparkContext, SparkFunSuite}
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskEnd}
 import org.apache.spark.util.Utils
-class InputOutputMetricsSuite extends FunSuite with SharedSparkContext
+class InputOutputMetricsSuite extends SparkFunSuite with SharedSparkContext
   with BeforeAndAfter {
   @transient var tmpDir: File = _

View file

@@ -17,9 +17,11 @@
 package org.apache.spark.metrics
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.BeforeAndAfter
+import org.apache.spark.SparkFunSuite
-class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
+class MetricsConfigSuite extends SparkFunSuite with BeforeAndAfter {
   var filePath: String = _
   before {

View file

@@ -17,9 +17,9 @@
 package org.apache.spark.metrics
-import org.scalatest.{BeforeAndAfter, FunSuite, PrivateMethodTester}
+import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.deploy.master.MasterSource
 import org.apache.spark.metrics.source.Source
@@ -27,7 +27,7 @@ import com.codahale.metrics.MetricRegistry
 import scala.collection.mutable.ArrayBuffer
-class MetricsSystemSuite extends FunSuite with BeforeAndAfter with PrivateMethodTester{
+class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateMethodTester{
   var filePath: String = _
   var conf: SparkConf = null
   var securityMgr: SecurityManager = null

View file

@@ -31,12 +31,12 @@ import org.apache.spark.network.buffer.{ManagedBuffer, NioManagedBuffer}
 import org.apache.spark.network.shuffle.BlockFetchingListener
 import org.apache.spark.network.{BlockDataManager, BlockTransferService}
 import org.apache.spark.storage.{BlockId, ShuffleBlockId}
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.mockito.Mockito._
 import org.scalatest.mock.MockitoSugar
-import org.scalatest.{FunSuite, ShouldMatchers}
+import org.scalatest.ShouldMatchers
-class NettyBlockTransferSecuritySuite extends FunSuite with MockitoSugar with ShouldMatchers {
+class NettyBlockTransferSecuritySuite extends SparkFunSuite with MockitoSugar with ShouldMatchers {
   test("security default off") {
     val conf = new SparkConf()
       .set("spark.app.id", "app-id")

View file

@@ -18,11 +18,15 @@
 package org.apache.spark.network.netty
 import org.apache.spark.network.BlockDataManager
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.mockito.Mockito.mock
 import org.scalatest._
-class NettyBlockTransferServiceSuite extends FunSuite with BeforeAndAfterEach with ShouldMatchers {
+class NettyBlockTransferServiceSuite
+  extends SparkFunSuite
+  with BeforeAndAfterEach
+  with ShouldMatchers {
   private var service0: NettyBlockTransferService = _
   private var service1: NettyBlockTransferService = _

View file

@@ -24,15 +24,13 @@ import scala.concurrent.duration._
 import scala.concurrent.{Await, TimeoutException}
 import scala.language.postfixOps
-import org.scalatest.FunSuite
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.util.Utils
 /**
  * Test the ConnectionManager with various security settings.
  */
-class ConnectionManagerSuite extends FunSuite {
+class ConnectionManagerSuite extends SparkFunSuite {
   test("security default off") {
     val conf = new SparkConf

View file

@@ -23,13 +23,13 @@ import scala.concurrent.{Await, TimeoutException}
 import scala.concurrent.duration.Duration
 import scala.concurrent.ExecutionContext.Implicits.global
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.BeforeAndAfterAll
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.time.SpanSugar._
-import org.apache.spark.{SparkContext, SparkException, LocalSparkContext}
+import org.apache.spark.{LocalSparkContext, SparkContext, SparkException, SparkFunSuite}
-class AsyncRDDActionsSuite extends FunSuite with BeforeAndAfterAll with Timeouts {
+class AsyncRDDActionsSuite extends SparkFunSuite with BeforeAndAfterAll with Timeouts {
   @transient private var sc: SparkContext = _


@ -17,11 +17,9 @@
package org.apache.spark.rdd package org.apache.spark.rdd
import org.scalatest.FunSuite
import org.apache.spark._ import org.apache.spark._
class DoubleRDDSuite extends FunSuite with SharedSparkContext { class DoubleRDDSuite extends SparkFunSuite with SharedSparkContext {
test("sum") { test("sum") {
assert(sc.parallelize(Seq.empty[Double]).sum() === 0.0) assert(sc.parallelize(Seq.empty[Double]).sum() === 0.0)
assert(sc.parallelize(Seq(1.0)).sum() === 1.0) assert(sc.parallelize(Seq(1.0)).sum() === 1.0)


@ -19,11 +19,11 @@ package org.apache.spark.rdd
import java.sql._ import java.sql._
import org.scalatest.{BeforeAndAfter, FunSuite} import org.scalatest.BeforeAndAfter
import org.apache.spark.{LocalSparkContext, SparkContext} import org.apache.spark.{LocalSparkContext, SparkContext, SparkFunSuite}
class JdbcRDDSuite extends FunSuite with BeforeAndAfter with LocalSparkContext { class JdbcRDDSuite extends SparkFunSuite with BeforeAndAfter with LocalSparkContext {
before { before {
Class.forName("org.apache.derby.jdbc.EmbeddedDriver") Class.forName("org.apache.derby.jdbc.EmbeddedDriver")


@ -28,12 +28,10 @@ import org.apache.hadoop.conf.{Configurable, Configuration}
import org.apache.hadoop.mapreduce.{JobContext => NewJobContext, OutputCommitter => NewOutputCommitter, import org.apache.hadoop.mapreduce.{JobContext => NewJobContext, OutputCommitter => NewOutputCommitter,
OutputFormat => NewOutputFormat, RecordWriter => NewRecordWriter, OutputFormat => NewOutputFormat, RecordWriter => NewRecordWriter,
TaskAttemptContext => NewTaskAttempContext} TaskAttemptContext => NewTaskAttempContext}
import org.apache.spark.{Partitioner, SharedSparkContext} import org.apache.spark.{Partitioner, SharedSparkContext, SparkFunSuite}
import org.apache.spark.util.Utils import org.apache.spark.util.Utils
import org.scalatest.FunSuite class PairRDDFunctionsSuite extends SparkFunSuite with SharedSparkContext {
class PairRDDFunctionsSuite extends FunSuite with SharedSparkContext {
test("aggregateByKey") { test("aggregateByKey") {
val pairs = sc.parallelize(Array((1, 1), (1, 1), (3, 2), (5, 1), (5, 3)), 2) val pairs = sc.parallelize(Array((1, 1), (1, 1), (3, 2), (5, 1), (5, 3)), 2)


@ -22,10 +22,11 @@ import scala.collection.immutable.NumericRange
import org.scalacheck.Arbitrary._ import org.scalacheck.Arbitrary._
import org.scalacheck.Gen import org.scalacheck.Gen
import org.scalacheck.Prop._ import org.scalacheck.Prop._
import org.scalatest.FunSuite
import org.scalatest.prop.Checkers import org.scalatest.prop.Checkers
class ParallelCollectionSplitSuite extends FunSuite with Checkers { import org.apache.spark.SparkFunSuite
class ParallelCollectionSplitSuite extends SparkFunSuite with Checkers {
test("one element per slice") { test("one element per slice") {
val data = Array(1, 2, 3) val data = Array(1, 2, 3)
val slices = ParallelCollectionRDD.slice(data, 3) val slices = ParallelCollectionRDD.slice(data, 3)


@ -17,11 +17,9 @@
package org.apache.spark.rdd package org.apache.spark.rdd
import org.scalatest.FunSuite import org.apache.spark.{Partition, SharedSparkContext, SparkFunSuite, TaskContext}
import org.apache.spark.{Partition, SharedSparkContext, TaskContext} class PartitionPruningRDDSuite extends SparkFunSuite with SharedSparkContext {
class PartitionPruningRDDSuite extends FunSuite with SharedSparkContext {
test("Pruned Partitions inherit locality prefs correctly") { test("Pruned Partitions inherit locality prefs correctly") {


@ -17,9 +17,7 @@
package org.apache.spark.rdd package org.apache.spark.rdd
import org.scalatest.FunSuite import org.apache.spark.{SharedSparkContext, SparkFunSuite}
import org.apache.spark.SharedSparkContext
import org.apache.spark.util.random.{BernoulliSampler, PoissonSampler, RandomSampler} import org.apache.spark.util.random.{BernoulliSampler, PoissonSampler, RandomSampler}
/** a sampler that outputs its seed */ /** a sampler that outputs its seed */
@ -38,7 +36,7 @@ class MockSampler extends RandomSampler[Long, Long] {
override def clone: MockSampler = new MockSampler override def clone: MockSampler = new MockSampler
} }
class PartitionwiseSampledRDDSuite extends FunSuite with SharedSparkContext { class PartitionwiseSampledRDDSuite extends SparkFunSuite with SharedSparkContext {
test("seed distribution") { test("seed distribution") {
val rdd = sc.makeRDD(Array(1L, 2L, 3L, 4L), 2) val rdd = sc.makeRDD(Array(1L, 2L, 3L, 4L), 2)


@ -22,7 +22,6 @@ import java.io.File
import org.apache.hadoop.fs.Path import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.{LongWritable, Text} import org.apache.hadoop.io.{LongWritable, Text}
import org.apache.hadoop.mapred.{FileSplit, JobConf, TextInputFormat} import org.apache.hadoop.mapred.{FileSplit, JobConf, TextInputFormat}
import org.scalatest.FunSuite
import scala.collection.Map import scala.collection.Map
import scala.language.postfixOps import scala.language.postfixOps
@ -32,7 +31,7 @@ import scala.util.Try
import org.apache.spark._ import org.apache.spark._
import org.apache.spark.util.Utils import org.apache.spark.util.Utils
class PipedRDDSuite extends FunSuite with SharedSparkContext { class PipedRDDSuite extends SparkFunSuite with SharedSparkContext {
test("basic pipe") { test("basic pipe") {
if (testCommandAvailable("cat")) { if (testCommandAvailable("cat")) {


@ -17,14 +17,14 @@
package org.apache.spark.rdd package org.apache.spark.rdd
import org.scalatest.{BeforeAndAfter, FunSuite} import org.scalatest.BeforeAndAfter
import org.apache.spark.{TaskContext, Partition, SparkContext} import org.apache.spark.{Partition, SparkContext, SparkFunSuite, TaskContext}
/** /**
* Tests whether scopes are passed from the RDD operation to the RDDs correctly. * Tests whether scopes are passed from the RDD operation to the RDDs correctly.
*/ */
class RDDOperationScopeSuite extends FunSuite with BeforeAndAfter { class RDDOperationScopeSuite extends SparkFunSuite with BeforeAndAfter {
private var sc: SparkContext = null private var sc: SparkContext = null
private val scope1 = new RDDOperationScope("scope1") private val scope1 = new RDDOperationScope("scope1")
private val scope2 = new RDDOperationScope("scope2", Some(scope1)) private val scope2 = new RDDOperationScope("scope2", Some(scope1))


@ -25,14 +25,12 @@ import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.collection.JavaConverters._ import scala.collection.JavaConverters._
import scala.reflect.ClassTag import scala.reflect.ClassTag
import org.scalatest.FunSuite
import org.apache.spark._ import org.apache.spark._
import org.apache.spark.api.java.{JavaRDD, JavaSparkContext} import org.apache.spark.api.java.{JavaRDD, JavaSparkContext}
import org.apache.spark.rdd.RDDSuiteUtils._ import org.apache.spark.rdd.RDDSuiteUtils._
import org.apache.spark.util.Utils import org.apache.spark.util.Utils
class RDDSuite extends FunSuite with SharedSparkContext { class RDDSuite extends SparkFunSuite with SharedSparkContext {
test("basic operations") { test("basic operations") {
val nums = sc.makeRDD(Array(1, 2, 3, 4), 2) val nums = sc.makeRDD(Array(1, 2, 3, 4), 2)


@ -17,12 +17,11 @@
package org.apache.spark.rdd package org.apache.spark.rdd
import org.scalatest.FunSuite
import org.scalatest.Matchers import org.scalatest.Matchers
import org.apache.spark.{Logging, SharedSparkContext} import org.apache.spark.{Logging, SharedSparkContext, SparkFunSuite}
class SortingSuite extends FunSuite with SharedSparkContext with Matchers with Logging { class SortingSuite extends SparkFunSuite with SharedSparkContext with Matchers with Logging {
test("sortByKey") { test("sortByKey") {
val pairs = sc.parallelize(Array((1, 0), (2, 0), (0, 0), (3, 0)), 2) val pairs = sc.parallelize(Array((1, 0), (2, 0), (0, 0), (3, 0)), 2)


@ -17,8 +17,7 @@
package org.apache.spark.rdd package org.apache.spark.rdd
import org.apache.spark.SharedSparkContext import org.apache.spark.{SharedSparkContext, SparkFunSuite}
import org.scalatest.FunSuite
object ZippedPartitionsSuite { object ZippedPartitionsSuite {
def procZippedData(i: Iterator[Int], s: Iterator[String], d: Iterator[Double]) : Iterator[Int] = { def procZippedData(i: Iterator[Int], s: Iterator[String], d: Iterator[Double]) : Iterator[Int] = {
@ -26,7 +25,7 @@ object ZippedPartitionsSuite {
} }
} }
class ZippedPartitionsSuite extends FunSuite with SharedSparkContext { class ZippedPartitionsSuite extends SparkFunSuite with SharedSparkContext {
test("print sizes") { test("print sizes") {
val data1 = sc.makeRDD(Array(1, 2, 3, 4), 2) val data1 = sc.makeRDD(Array(1, 2, 3, 4), 2)
val data2 = sc.makeRDD(Array("1", "2", "3", "4", "5", "6"), 2) val data2 = sc.makeRDD(Array("1", "2", "3", "4", "5", "6"), 2)


@ -24,15 +24,15 @@ import scala.concurrent.Await
import scala.concurrent.duration._ import scala.concurrent.duration._
import scala.language.postfixOps import scala.language.postfixOps
import org.scalatest.{BeforeAndAfterAll, FunSuite} import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.Eventually._ import org.scalatest.concurrent.Eventually._
import org.apache.spark.{SparkException, SparkConf} import org.apache.spark.{SparkConf, SparkException, SparkFunSuite}
/** /**
* Common tests for an RpcEnv implementation. * Common tests for an RpcEnv implementation.
*/ */
abstract class RpcEnvSuite extends FunSuite with BeforeAndAfterAll { abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
var env: RpcEnv = _ var env: RpcEnv = _


@ -17,12 +17,10 @@
package org.apache.spark.scheduler package org.apache.spark.scheduler
import org.apache.spark.{LocalSparkContext, SparkConf, SparkException, SparkContext} import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkException, SparkFunSuite}
import org.apache.spark.util.{SerializableBuffer, AkkaUtils} import org.apache.spark.util.{SerializableBuffer, AkkaUtils}
import org.scalatest.FunSuite class CoarseGrainedSchedulerBackendSuite extends SparkFunSuite with LocalSparkContext {
class CoarseGrainedSchedulerBackendSuite extends FunSuite with LocalSparkContext {
test("serialized task larger than akka frame size") { test("serialized task larger than akka frame size") {
val conf = new SparkConf val conf = new SparkConf


@ -21,7 +21,7 @@ import scala.collection.mutable.{ArrayBuffer, HashSet, HashMap, Map}
import scala.language.reflectiveCalls import scala.language.reflectiveCalls
import scala.util.control.NonFatal import scala.util.control.NonFatal
import org.scalatest.{BeforeAndAfter, FunSuiteLike} import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.Timeouts import org.scalatest.concurrent.Timeouts
import org.scalatest.time.SpanSugar._ import org.scalatest.time.SpanSugar._
@ -68,7 +68,7 @@ class MyRDD(
class DAGSchedulerSuiteDummyException extends Exception class DAGSchedulerSuiteDummyException extends Exception
class DAGSchedulerSuite class DAGSchedulerSuite
extends FunSuiteLike with BeforeAndAfter with LocalSparkContext with Timeouts { extends SparkFunSuite with BeforeAndAfter with LocalSparkContext with Timeouts {
val conf = new SparkConf val conf = new SparkConf
/** Set of TaskSets the DAGScheduler has requested executed. */ /** Set of TaskSets the DAGScheduler has requested executed. */


@ -25,7 +25,7 @@ import scala.io.Source
import org.apache.hadoop.fs.Path import org.apache.hadoop.fs.Path
import org.json4s.jackson.JsonMethods._ import org.json4s.jackson.JsonMethods._
import org.scalatest.{FunSuiteLike, BeforeAndAfter, FunSuite} import org.scalatest.BeforeAndAfter
import org.apache.spark._ import org.apache.spark._
import org.apache.spark.deploy.SparkHadoopUtil import org.apache.spark.deploy.SparkHadoopUtil
@ -39,7 +39,7 @@ import org.apache.spark.util.{JsonProtocol, Utils}
* logging events, whether the parsing of the file names is correct, and whether the logged events * logging events, whether the parsing of the file names is correct, and whether the logged events
* can be read and deserialized into actual SparkListenerEvents. * can be read and deserialized into actual SparkListenerEvents.
*/ */
class EventLoggingListenerSuite extends FunSuite with LocalSparkContext with BeforeAndAfter class EventLoggingListenerSuite extends SparkFunSuite with LocalSparkContext with BeforeAndAfter
with Logging { with Logging {
import EventLoggingListenerSuite._ import EventLoggingListenerSuite._


@ -18,14 +18,13 @@
package org.apache.spark.scheduler package org.apache.spark.scheduler
import org.apache.spark.storage.BlockManagerId import org.apache.spark.storage.BlockManagerId
import org.scalatest.FunSuite
import org.apache.spark.SparkConf import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.serializer.JavaSerializer import org.apache.spark.serializer.JavaSerializer
import scala.util.Random import scala.util.Random
class MapStatusSuite extends FunSuite { class MapStatusSuite extends SparkFunSuite {
test("compressSize") { test("compressSize") {
assert(MapStatus.compressSize(0L) === 0) assert(MapStatus.compressSize(0L) === 0)


@ -24,7 +24,7 @@ import org.mockito.Matchers
import org.mockito.Mockito._ import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer import org.mockito.stubbing.Answer
import org.scalatest.{BeforeAndAfter, FunSuite} import org.scalatest.BeforeAndAfter
import org.apache.hadoop.mapred.{TaskAttemptID, JobConf, TaskAttemptContext, OutputCommitter} import org.apache.hadoop.mapred.{TaskAttemptID, JobConf, TaskAttemptContext, OutputCommitter}
@ -64,7 +64,7 @@ import scala.language.postfixOps
* increments would be captured even though the commit in both tasks was executed * increments would be captured even though the commit in both tasks was executed
* erroneously. * erroneously.
*/ */
class OutputCommitCoordinatorSuite extends FunSuite with BeforeAndAfter { class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
var outputCommitCoordinator: OutputCommitCoordinator = null var outputCommitCoordinator: OutputCommitCoordinator = null
var tempDir: File = null var tempDir: File = null


@ -19,15 +19,13 @@ package org.apache.spark.scheduler
import java.util.Properties import java.util.Properties
import org.scalatest.FunSuite import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext}
/** /**
* Tests that pools and the associated scheduling algorithms for FIFO and fair scheduling work * Tests that pools and the associated scheduling algorithms for FIFO and fair scheduling work
* correctly. * correctly.
*/ */
class PoolSuite extends FunSuite with LocalSparkContext { class PoolSuite extends SparkFunSuite with LocalSparkContext {
def createTaskSetManager(stageId: Int, numTasks: Int, taskScheduler: TaskSchedulerImpl) def createTaskSetManager(stageId: Int, numTasks: Int, taskScheduler: TaskSchedulerImpl)
: TaskSetManager = { : TaskSetManager = {


@ -21,10 +21,10 @@ import java.io.{File, PrintWriter}
import java.net.URI import java.net.URI
import org.json4s.jackson.JsonMethods._ import org.json4s.jackson.JsonMethods._
import org.scalatest.{BeforeAndAfter, FunSuite} import org.scalatest.BeforeAndAfter
import org.apache.spark.{SparkConf, SparkContext, SPARK_VERSION} import org.apache.spark.{SparkConf, SparkContext, SPARK_VERSION}
import org.apache.spark.{SparkConf, SparkContext} import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.deploy.SparkHadoopUtil import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.io.CompressionCodec import org.apache.spark.io.CompressionCodec
import org.apache.spark.util.{JsonProtocol, Utils} import org.apache.spark.util.{JsonProtocol, Utils}
@ -32,7 +32,7 @@ import org.apache.spark.util.{JsonProtocol, Utils}
/** /**
* Test whether ReplayListenerBus replays events from logs correctly. * Test whether ReplayListenerBus replays events from logs correctly.
*/ */
class ReplayListenerSuite extends FunSuite with BeforeAndAfter { class ReplayListenerSuite extends SparkFunSuite with BeforeAndAfter {
private val fileSystem = Utils.getHadoopFileSystem("/", private val fileSystem = Utils.getHadoopFileSystem("/",
SparkHadoopUtil.get.newConfiguration(new SparkConf())) SparkHadoopUtil.get.newConfiguration(new SparkConf()))
private var testDir: File = _ private var testDir: File = _


@ -22,13 +22,13 @@ import java.util.concurrent.Semaphore
import scala.collection.mutable import scala.collection.mutable
import scala.collection.JavaConversions._ import scala.collection.JavaConversions._
import org.scalatest.{FunSuite, Matchers} import org.scalatest.Matchers
import org.apache.spark.executor.TaskMetrics import org.apache.spark.executor.TaskMetrics
import org.apache.spark.util.ResetSystemProperties import org.apache.spark.util.ResetSystemProperties
import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext} import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
class SparkListenerSuite extends FunSuite with LocalSparkContext with Matchers class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Matchers
with ResetSystemProperties { with ResetSystemProperties {
/** Length of time to wait while draining listener events. */ /** Length of time to wait while draining listener events. */


@ -18,16 +18,16 @@
package org.apache.spark.scheduler package org.apache.spark.scheduler
import org.apache.spark.scheduler.cluster.ExecutorInfo import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.{SparkContext, LocalSparkContext} import org.apache.spark.{LocalSparkContext, SparkContext, SparkFunSuite}
import org.scalatest.{FunSuite, BeforeAndAfter, BeforeAndAfterAll} import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}
import scala.collection.mutable import scala.collection.mutable
/** /**
* Unit tests for SparkListener that require a local cluster. * Unit tests for SparkListener that require a local cluster.
*/ */
class SparkListenerWithClusterSuite extends FunSuite with LocalSparkContext class SparkListenerWithClusterSuite extends SparkFunSuite with LocalSparkContext
with BeforeAndAfter with BeforeAndAfterAll { with BeforeAndAfter with BeforeAndAfterAll {
/** Length of time to wait while draining listener events. */ /** Length of time to wait while draining listener events. */


@ -20,7 +20,6 @@ package org.apache.spark.scheduler
import org.mockito.Mockito._ import org.mockito.Mockito._
import org.mockito.Matchers.any import org.mockito.Matchers.any
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter import org.scalatest.BeforeAndAfter
import org.apache.spark._ import org.apache.spark._
@ -28,7 +27,7 @@ import org.apache.spark.rdd.RDD
import org.apache.spark.util.{TaskCompletionListenerException, TaskCompletionListener} import org.apache.spark.util.{TaskCompletionListenerException, TaskCompletionListener}
class TaskContextSuite extends FunSuite with BeforeAndAfter with LocalSparkContext { class TaskContextSuite extends SparkFunSuite with BeforeAndAfter with LocalSparkContext {
test("calls TaskCompletionListener after failure") { test("calls TaskCompletionListener after failure") {
TaskContextSuite.completed = false TaskContextSuite.completed = false


@ -23,10 +23,10 @@ import scala.concurrent.duration._
import scala.language.postfixOps import scala.language.postfixOps
import scala.util.control.NonFatal import scala.util.control.NonFatal
import org.scalatest.{BeforeAndAfter, FunSuite} import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.Eventually._ import org.scalatest.concurrent.Eventually._
import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkEnv} import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkEnv, SparkFunSuite}
import org.apache.spark.storage.TaskResultBlockId import org.apache.spark.storage.TaskResultBlockId
/** /**
@ -71,7 +71,7 @@ class ResultDeletingTaskResultGetter(sparkEnv: SparkEnv, scheduler: TaskSchedule
/** /**
* Tests related to handling task results (both direct and indirect). * Tests related to handling task results (both direct and indirect).
*/ */
class TaskResultGetterSuite extends FunSuite with BeforeAndAfter with LocalSparkContext { class TaskResultGetterSuite extends SparkFunSuite with BeforeAndAfter with LocalSparkContext {
// Set the Akka frame size to be as small as possible (it must be an integer, so 1 is as small // Set the Akka frame size to be as small as possible (it must be an integer, so 1 is as small
// as we can make it) so the tests don't take too long. // as we can make it) so the tests don't take too long.


@ -17,8 +17,6 @@
package org.apache.spark.scheduler package org.apache.spark.scheduler
import org.scalatest.FunSuite
import org.apache.spark._ import org.apache.spark._
class FakeSchedulerBackend extends SchedulerBackend { class FakeSchedulerBackend extends SchedulerBackend {
@ -28,7 +26,7 @@ class FakeSchedulerBackend extends SchedulerBackend {
def defaultParallelism(): Int = 1 def defaultParallelism(): Int = 1
} }
class TaskSchedulerImplSuite extends FunSuite with LocalSparkContext with Logging { class TaskSchedulerImplSuite extends SparkFunSuite with LocalSparkContext with Logging {
test("Scheduler does not always schedule tasks on the same workers") { test("Scheduler does not always schedule tasks on the same workers") {
sc = new SparkContext("local", "TaskSchedulerImplSuite") sc = new SparkContext("local", "TaskSchedulerImplSuite")


@ -22,8 +22,6 @@ import java.util.Random
import scala.collection.mutable.ArrayBuffer import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable import scala.collection.mutable
import org.scalatest.FunSuite
import org.apache.spark._ import org.apache.spark._
import org.apache.spark.executor.TaskMetrics import org.apache.spark.executor.TaskMetrics
import org.apache.spark.util.{ManualClock, Utils} import org.apache.spark.util.{ManualClock, Utils}
@ -146,7 +144,7 @@ class LargeTask(stageId: Int) extends Task[Array[Byte]](stageId, 0) {
override def preferredLocations: Seq[TaskLocation] = Seq[TaskLocation]() override def preferredLocations: Seq[TaskLocation] = Seq[TaskLocation]()
} }
class TaskSetManagerSuite extends FunSuite with LocalSparkContext with Logging { class TaskSetManagerSuite extends SparkFunSuite with LocalSparkContext with Logging {
import TaskLocality.{ANY, PROCESS_LOCAL, NO_PREF, NODE_LOCAL, RACK_LOCAL} import TaskLocality.{ANY, PROCESS_LOCAL, NO_PREF, NODE_LOCAL, RACK_LOCAL}
private val conf = new SparkConf private val conf = new SparkConf


@ -18,12 +18,11 @@
package org.apache.spark.scheduler.cluster.mesos package org.apache.spark.scheduler.cluster.mesos
import org.mockito.Mockito._ import org.mockito.Mockito._
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar import org.scalatest.mock.MockitoSugar
import org.apache.spark.{SparkConf, SparkContext} import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
class MemoryUtilsSuite extends FunSuite with MockitoSugar { class MemoryUtilsSuite extends SparkFunSuite with MockitoSugar {
test("MesosMemoryUtils should always override memoryOverhead when it's set") { test("MesosMemoryUtils should always override memoryOverhead when it's set") {
val sparkConf = new SparkConf val sparkConf = new SparkConf


@ -30,16 +30,15 @@ import org.apache.mesos.SchedulerDriver
import org.mockito.Matchers._ import org.mockito.Matchers._
import org.mockito.Mockito._ import org.mockito.Mockito._
import org.mockito.{ArgumentCaptor, Matchers} import org.mockito.{ArgumentCaptor, Matchers}
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar import org.scalatest.mock.MockitoSugar
import org.apache.spark.executor.MesosExecutorBackend import org.apache.spark.executor.MesosExecutorBackend
import org.apache.spark.scheduler.cluster.ExecutorInfo import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.scheduler.{LiveListenerBus, SparkListenerExecutorAdded, import org.apache.spark.scheduler.{LiveListenerBus, SparkListenerExecutorAdded,
TaskDescription, TaskSchedulerImpl, WorkerOffer} TaskDescription, TaskSchedulerImpl, WorkerOffer}
import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext} import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
class MesosSchedulerBackendSuite extends FunSuite with LocalSparkContext with MockitoSugar { class MesosSchedulerBackendSuite extends SparkFunSuite with LocalSparkContext with MockitoSugar {
test("check spark-class location correctly") { test("check spark-class location correctly") {
val conf = new SparkConf val conf = new SparkConf


@ -19,9 +19,9 @@ package org.apache.spark.scheduler.cluster.mesos
import java.nio.ByteBuffer import java.nio.ByteBuffer
import org.scalatest.FunSuite import org.apache.spark.SparkFunSuite
class MesosTaskLaunchDataSuite extends FunSuite { class MesosTaskLaunchDataSuite extends SparkFunSuite {
test("serialize and deserialize data must be same") { test("serialize and deserialize data must be same") {
val serializedTask = ByteBuffer.allocate(40) val serializedTask = ByteBuffer.allocate(40)
(Range(100, 110).map(serializedTask.putInt(_))) (Range(100, 110).map(serializedTask.putInt(_)))


@ -19,16 +19,15 @@ package org.apache.spark.scheduler.mesos
import java.util.Date import java.util.Date
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar import org.scalatest.mock.MockitoSugar
import org.apache.spark.deploy.Command import org.apache.spark.deploy.Command
import org.apache.spark.deploy.mesos.MesosDriverDescription import org.apache.spark.deploy.mesos.MesosDriverDescription
import org.apache.spark.scheduler.cluster.mesos._ import org.apache.spark.scheduler.cluster.mesos._
import org.apache.spark.{LocalSparkContext, SparkConf} import org.apache.spark.{LocalSparkContext, SparkConf, SparkFunSuite}
class MesosClusterSchedulerSuite extends FunSuite with LocalSparkContext with MockitoSugar { class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext with MockitoSugar {
private val command = new Command("mainClass", Seq("arg"), null, null, null, null) private val command = new Command("mainClass", Seq("arg"), null, null, null, null)


@ -17,10 +17,9 @@
package org.apache.spark.serializer package org.apache.spark.serializer
import org.apache.spark.SparkConf import org.apache.spark.{SparkConf, SparkFunSuite}
import org.scalatest.FunSuite
class JavaSerializerSuite extends FunSuite { class JavaSerializerSuite extends SparkFunSuite {
test("JavaSerializer instances are serializable") { test("JavaSerializer instances are serializable") {
val serializer = new JavaSerializer(new SparkConf()) val serializer = new JavaSerializer(new SparkConf())
val instance = serializer.newInstance() val instance = serializer.newInstance()


@ -20,12 +20,11 @@ package org.apache.spark.serializer
import org.apache.spark.util.Utils import org.apache.spark.util.Utils
import com.esotericsoftware.kryo.Kryo import com.esotericsoftware.kryo.Kryo
import org.scalatest.FunSuite
import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkEnv, TestUtils} import org.apache.spark._
import org.apache.spark.serializer.KryoDistributedTest._ import org.apache.spark.serializer.KryoDistributedTest._
class KryoSerializerDistributedSuite extends FunSuite { class KryoSerializerDistributedSuite extends SparkFunSuite {
test("kryo objects are serialised consistently in different processes") { test("kryo objects are serialised consistently in different processes") {
val conf = new SparkConf(false) val conf = new SparkConf(false)


@ -17,15 +17,13 @@
package org.apache.spark.serializer package org.apache.spark.serializer
import org.scalatest.FunSuite import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext import org.apache.spark.SparkContext
import org.apache.spark.LocalSparkContext import org.apache.spark.LocalSparkContext
import org.apache.spark.SparkException import org.apache.spark.SparkException
class KryoSerializerResizableOutputSuite extends FunSuite { class KryoSerializerResizableOutputSuite extends SparkFunSuite {
// trial and error showed this will not serialize with 1mb buffer // trial and error showed this will not serialize with 1mb buffer
val x = (1 to 400000).toArray val x = (1 to 400000).toArray


@ -23,14 +23,13 @@ import scala.collection.mutable
import scala.reflect.ClassTag import scala.reflect.ClassTag
import com.esotericsoftware.kryo.Kryo import com.esotericsoftware.kryo.Kryo
import org.scalatest.FunSuite
import org.apache.spark.{SharedSparkContext, SparkConf} import org.apache.spark.{SharedSparkContext, SparkConf, SparkFunSuite}
import org.apache.spark.scheduler.HighlyCompressedMapStatus import org.apache.spark.scheduler.HighlyCompressedMapStatus
import org.apache.spark.serializer.KryoTest._ import org.apache.spark.serializer.KryoTest._
import org.apache.spark.storage.BlockManagerId import org.apache.spark.storage.BlockManagerId
class KryoSerializerSuite extends FunSuite with SharedSparkContext { class KryoSerializerSuite extends SparkFunSuite with SharedSparkContext {
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer") conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
conf.set("spark.kryo.registrator", classOf[MyRegistrator].getName) conf.set("spark.kryo.registrator", classOf[MyRegistrator].getName)
@ -361,7 +360,7 @@ class KryoSerializerSuite extends FunSuite with SharedSparkContext {
} }
} }
class KryoSerializerAutoResetDisabledSuite extends FunSuite with SharedSparkContext { class KryoSerializerAutoResetDisabledSuite extends SparkFunSuite with SharedSparkContext {
conf.set("spark.serializer", classOf[KryoSerializer].getName) conf.set("spark.serializer", classOf[KryoSerializer].getName)
conf.set("spark.kryo.registrator", classOf[RegistratorWithoutAutoReset].getName) conf.set("spark.kryo.registrator", classOf[RegistratorWithoutAutoReset].getName)
conf.set("spark.kryo.referenceTracking", "true") conf.set("spark.kryo.referenceTracking", "true")


@ -17,9 +17,7 @@
package org.apache.spark.serializer package org.apache.spark.serializer
import org.scalatest.FunSuite import org.apache.spark.{SharedSparkContext, SparkException, SparkFunSuite}
import org.apache.spark.{SharedSparkContext, SparkException}
import org.apache.spark.rdd.RDD import org.apache.spark.rdd.RDD
/* A trivial (but unserializable) container for trivial functions */ /* A trivial (but unserializable) container for trivial functions */
@ -29,7 +27,7 @@ class UnserializableClass {
def pred[T](x: T): Boolean = x.toString.length % 2 == 0 def pred[T](x: T): Boolean = x.toString.length % 2 == 0
} }
class ProactiveClosureSerializationSuite extends FunSuite with SharedSparkContext { class ProactiveClosureSerializationSuite extends SparkFunSuite with SharedSparkContext {
def fixture: (RDD[String], UnserializableClass) = { def fixture: (RDD[String], UnserializableClass) = {
(sc.parallelize(0 until 1000).map(_.toString), new UnserializableClass) (sc.parallelize(0 until 1000).map(_.toString), new UnserializableClass)


@ -19,10 +19,12 @@ package org.apache.spark.serializer
import java.io.{ObjectOutput, ObjectInput} import java.io.{ObjectOutput, ObjectInput}
import org.scalatest.{BeforeAndAfterEach, FunSuite} import org.scalatest.BeforeAndAfterEach
import org.apache.spark.SparkFunSuite
class SerializationDebuggerSuite extends FunSuite with BeforeAndAfterEach { class SerializationDebuggerSuite extends SparkFunSuite with BeforeAndAfterEach {
import SerializationDebugger.find import SerializationDebugger.find


@ -21,9 +21,9 @@ import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import scala.util.Random import scala.util.Random
import org.scalatest.{Assertions, FunSuite} import org.scalatest.Assertions
import org.apache.spark.SparkConf import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.serializer.KryoTest.RegistratorWithoutAutoReset import org.apache.spark.serializer.KryoTest.RegistratorWithoutAutoReset
/** /**
@ -31,7 +31,7 @@ import org.apache.spark.serializer.KryoTest.RegistratorWithoutAutoReset
* describe properties of the serialized stream, such as * describe properties of the serialized stream, such as
* [[Serializer.supportsRelocationOfSerializedObjects]]. * [[Serializer.supportsRelocationOfSerializedObjects]].
*/ */
class SerializerPropertiesSuite extends FunSuite { class SerializerPropertiesSuite extends SparkFunSuite {
import SerializerPropertiesSuite._ import SerializerPropertiesSuite._


@ -17,13 +17,14 @@
package org.apache.spark.shuffle package org.apache.spark.shuffle
import org.scalatest.FunSuite
import org.scalatest.concurrent.Timeouts import org.scalatest.concurrent.Timeouts
import org.scalatest.time.SpanSugar._ import org.scalatest.time.SpanSugar._
import java.util.concurrent.atomic.AtomicBoolean import java.util.concurrent.atomic.AtomicBoolean
import java.util.concurrent.CountDownLatch import java.util.concurrent.CountDownLatch
class ShuffleMemoryManagerSuite extends FunSuite with Timeouts { import org.apache.spark.SparkFunSuite
class ShuffleMemoryManagerSuite extends SparkFunSuite with Timeouts {
/** Launch a thread with the given body block and return it. */ /** Launch a thread with the given body block and return it. */
private def startThread(name: String)(body: => Unit): Thread = { private def startThread(name: String)(body: => Unit): Thread = {
val thread = new Thread("ShuffleMemorySuite " + name) { val thread = new Thread("ShuffleMemorySuite " + name) {


@ -21,16 +21,14 @@ import java.io.{File, FileWriter}
import scala.language.reflectiveCalls import scala.language.reflectiveCalls
import org.scalatest.FunSuite import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkEnv, SparkFunSuite}
import org.apache.spark.{SparkEnv, SparkContext, LocalSparkContext, SparkConf}
import org.apache.spark.executor.ShuffleWriteMetrics import org.apache.spark.executor.ShuffleWriteMetrics
import org.apache.spark.network.buffer.{FileSegmentManagedBuffer, ManagedBuffer} import org.apache.spark.network.buffer.{FileSegmentManagedBuffer, ManagedBuffer}
import org.apache.spark.serializer.JavaSerializer import org.apache.spark.serializer.JavaSerializer
import org.apache.spark.shuffle.FileShuffleBlockResolver import org.apache.spark.shuffle.FileShuffleBlockResolver
import org.apache.spark.storage.{ShuffleBlockId, FileSegment} import org.apache.spark.storage.{ShuffleBlockId, FileSegment}
class HashShuffleManagerSuite extends FunSuite with LocalSparkContext { class HashShuffleManagerSuite extends SparkFunSuite with LocalSparkContext {
private val testConf = new SparkConf(false) private val testConf = new SparkConf(false)
private def checkSegments(expected: FileSegment, buffer: ManagedBuffer) { private def checkSegments(expected: FileSegment, buffer: ManagedBuffer) {


@ -20,7 +20,7 @@ package org.apache.spark.shuffle.unsafe
import org.mockito.Mockito._ import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer import org.mockito.stubbing.Answer
import org.scalatest.{FunSuite, Matchers} import org.scalatest.Matchers
import org.apache.spark._ import org.apache.spark._
import org.apache.spark.serializer.{JavaSerializer, KryoSerializer, Serializer} import org.apache.spark.serializer.{JavaSerializer, KryoSerializer, Serializer}
@ -29,7 +29,7 @@ import org.apache.spark.serializer.{JavaSerializer, KryoSerializer, Serializer}
* Tests for the fallback logic in UnsafeShuffleManager. Actual tests of shuffling data are * Tests for the fallback logic in UnsafeShuffleManager. Actual tests of shuffling data are
* performed in other suites. * performed in other suites.
*/ */
class UnsafeShuffleManagerSuite extends FunSuite with Matchers { class UnsafeShuffleManagerSuite extends SparkFunSuite with Matchers {
import UnsafeShuffleManager.canUseUnsafeShuffle import UnsafeShuffleManager.canUseUnsafeShuffle

Some files were not shown because too many files have changed in this diff.
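Every fragment above makes the same mechanical change: the `org.scalatest.FunSuite` import is dropped and the suite extends `org.apache.spark.SparkFunSuite` instead, keeping its other mixed-in traits. The base class itself is not among the files shown, so the sketch below is only an assumption of what such a shared suite could look like, built on ScalaTest's standard `withFixture` hook and the `org.apache.spark.Logging` trait that several of the suites above already mix in; everything inside the class body is illustrative rather than taken from the diff.

```scala
package org.apache.spark

import org.scalatest.{FunSuite, Outcome}

// Hypothetical sketch only: the diff shows suites extending SparkFunSuite, not the class
// itself. The body below is an assumed implementation, not the actual Spark source.
abstract class SparkFunSuite extends FunSuite with Logging {

  // Route every test through a single fixture so behavior common to all suites
  // (for example, noting which test is starting and finishing) lives in one place.
  protected override def withFixture(test: NoArgTest): Outcome = {
    val suiteName = this.getClass.getName
    val testName = test.text
    try {
      logInfo(s"Running $suiteName: '$testName'")   // runs before the test body
      test()                                        // execute the test and return its Outcome
    } finally {
      logInfo(s"Finished $suiteName: '$testName'")  // runs whether the test passes or fails
    }
  }
}
```

Centralizing the per-test hook in one abstract class keeps each individual suite's change down to swapping its parent class, which is exactly the pattern repeated across the fragments above.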