[SPARK-32398][TESTS][CORE][STREAMING][SQL][ML] Update to scalatest 3.2.0 for Scala 2.13.3+

### What changes were proposed in this pull request?

Updates to scalatest 3.2.0. Though it looks large, it is 99% changes to the new location of scalatest classes.

### Why are the changes needed?

3.2.0+ has a fix that is required for Scala 2.13.3+ compatibility.

### Does this PR introduce _any_ user-facing change?

No, only affects tests.

### How was this patch tested?

Existing tests.

Closes #29196 from srowen/SPARK-32398.

Authored-by: Sean Owen <srowen@gmail.com>
Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
This commit was authored by Sean Owen on 2020-07-23 16:20:17 -07:00 and committed by Dongjoon Hyun.
parent e7fb67cd88
commit be2eca22e9
116 changed files with 270 additions and 145 deletions

View file

@@ -19,9 +19,9 @@ package org.apache.spark.util.sketch
import scala.util.Random
import org.scalatest.FunSuite // scalastyle:ignore funsuite
import org.scalatest.funsuite.AnyFunSuite // scalastyle:ignore funsuite
class BitArraySuite extends FunSuite { // scalastyle:ignore funsuite
class BitArraySuite extends AnyFunSuite { // scalastyle:ignore funsuite
test("error case when create BitArray") {
intercept[IllegalArgumentException](new BitArray(0))

View file

@@ -22,9 +22,9 @@ import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import scala.reflect.ClassTag
import scala.util.Random
import org.scalatest.FunSuite // scalastyle:ignore funsuite
import org.scalatest.funsuite.AnyFunSuite // scalastyle:ignore funsuite
class BloomFilterSuite extends FunSuite { // scalastyle:ignore funsuite
class BloomFilterSuite extends AnyFunSuite { // scalastyle:ignore funsuite
private final val EPSILON = 0.01
// Serializes and deserializes a given `BloomFilter`, then checks whether the deserialized

View file

@@ -22,9 +22,9 @@ import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import scala.reflect.ClassTag
import scala.util.Random
import org.scalatest.FunSuite // scalastyle:ignore funsuite
import org.scalatest.funsuite.AnyFunSuite // scalastyle:ignore funsuite
class CountMinSketchSuite extends FunSuite { // scalastyle:ignore funsuite
class CountMinSketchSuite extends AnyFunSuite { // scalastyle:ignore funsuite
private val epsOfTotalCount = 0.01
private val confidence = 0.9

View file

@@ -21,14 +21,15 @@ import org.apache.commons.text.similarity.LevenshteinDistance
import org.scalacheck.{Arbitrary, Gen}
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
// scalastyle:off
import org.scalatest.{FunSuite, Matchers}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.must.Matchers
import org.apache.spark.unsafe.types.UTF8String.{fromString => toUTF8}
/**
* This TestSuite utilize ScalaCheck to generate randomized inputs for UTF8String testing.
*/
class UTF8StringPropertyCheckSuite extends FunSuite with ScalaCheckDrivenPropertyChecks with Matchers {
class UTF8StringPropertyCheckSuite extends AnyFunSuite with ScalaCheckDrivenPropertyChecks with Matchers {
// scalastyle:on
test("toString") {

View file

@@ -727,7 +727,7 @@ class JavaSparkContext(val sc: SparkContext) extends Closeable {
* @note This does not necessarily mean the caching or computation was successful.
*/
def getPersistentRDDs: JMap[java.lang.Integer, JavaRDD[_]] = {
sc.getPersistentRDDs.mapValues(s => JavaRDD.fromRDD(s))
sc.getPersistentRDDs.mapValues(s => JavaRDD.fromRDD(s)).toMap
.asJava.asInstanceOf[JMap[java.lang.Integer, JavaRDD[_]]]
}

View file

@ -25,8 +25,8 @@ import scala.collection.mutable.ArrayBuffer
import scala.ref.WeakReference
import scala.util.control.NonFatal
import org.scalatest.Matchers
import org.scalatest.exceptions.TestFailedException
import org.scalatest.matchers.must.Matchers
import org.apache.spark.scheduler._
import org.apache.spark.serializer.JavaSerializer

View file

@ -18,8 +18,8 @@
package org.apache.spark
import org.scalatest.Assertions._
import org.scalatest.Matchers
import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits}
import org.scalatest.matchers.must.Matchers
import org.scalatest.time.{Millis, Span}
import org.apache.spark.internal.config

View file

@ -19,6 +19,7 @@ package org.apache.spark
import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers._
import org.scalatest.time.SpanSugar._
import org.apache.spark.internal.config

View file

@ -19,7 +19,9 @@ package org.apache.spark
import scala.concurrent.duration.Duration
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.util.ThreadUtils

View file

@ -25,7 +25,7 @@ import scala.concurrent.Future
import scala.concurrent.duration._
import org.scalatest.BeforeAndAfter
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.apache.spark.internal.config._
import org.apache.spark.internal.config.Deploy._

View file

@ -20,7 +20,8 @@ package org.apache.spark
import java.util.{Locale, Properties}
import java.util.concurrent.{Callable, CyclicBarrier, Executors, ExecutorService}
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.ShuffleSuite.NonJavaSerializableClass
import org.apache.spark.internal.config

View file

@ -24,6 +24,7 @@ import scala.collection.JavaConverters._
import org.apache.commons.io.FileUtils
import org.apache.commons.io.filefilter.TrueFileFilter
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.internal.config
import org.apache.spark.rdd.ShuffledRDD

View file

@ -31,8 +31,8 @@ import org.apache.hadoop.io.{BytesWritable, LongWritable, Text}
import org.apache.hadoop.mapred.TextInputFormat
import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
import org.json4s.{DefaultFormats, Extraction}
import org.scalatest.Matchers._
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.must.Matchers._
import org.apache.spark.TestUtils._
import org.apache.spark.internal.config._

View file

@ -25,7 +25,8 @@ import org.apache.log4j.spi.LoggingEvent
import scala.annotation.tailrec
import org.apache.log4j.{Appender, AppenderSkeleton, Level, Logger}
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, BeforeAndAfterEach, FunSuite, Outcome}
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, BeforeAndAfterEach, Outcome}
import org.scalatest.funsuite.AnyFunSuite
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.Tests.IS_TESTING
import org.apache.spark.util.{AccumulatorContext, Utils}
@@ -57,7 +58,7 @@ import scala.collection.mutable.ArrayBuffer
* }
*/
abstract class SparkFunSuite
extends FunSuite
extends AnyFunSuite
with BeforeAndAfterAll
with BeforeAndAfterEach
with ThreadAudit

View file

@ -20,8 +20,9 @@ package org.apache.spark
import scala.concurrent.duration._
import scala.language.implicitConversions
import org.scalatest.Matchers
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.JobExecutionStatus._

View file

@ -21,7 +21,8 @@ import java.io.{File, PrintWriter}
import scala.io.Source
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SharedSparkContext, SparkConf, SparkFunSuite}
import org.apache.spark.internal.config.Kryo._

View file

@ -17,7 +17,8 @@
package org.apache.spark.deploy
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite

View file

@ -29,8 +29,10 @@ import com.google.common.io.ByteStreams
import org.apache.commons.io.FileUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, FSDataInputStream, Path}
import org.scalatest.{BeforeAndAfterEach, Matchers}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits}
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatest.time.Span
import org.scalatest.time.SpanSugar._

View file

@ -27,7 +27,8 @@ import org.eclipse.jetty.servlet.ServletContextHandler
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatestplus.mockito.MockitoSugar
import org.apache.spark.SparkFunSuite

View file

@ -34,8 +34,9 @@ import org.apache.hadoop.security.AccessControlException
import org.json4s.jackson.JsonMethods._
import org.mockito.ArgumentMatchers.{any, argThat}
import org.mockito.Mockito.{doThrow, mock, spy, verify, when}
import org.scalatest.Matchers
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{JobExecutionStatus, SecurityManager, SPARK_VERSION, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.SparkHadoopUtil

View file

@ -35,8 +35,10 @@ import org.json4s.jackson.JsonMethods._
import org.mockito.Mockito._
import org.openqa.selenium.WebDriver
import org.openqa.selenium.htmlunit.HtmlUnitDriver
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatestplus.mockito.MockitoSugar
import org.scalatestplus.selenium.WebBrowser
@@ -309,14 +311,18 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
val urlsThroughKnox = responseThroughKnox \\ "@href" map (_.toString)
val siteRelativeLinksThroughKnox = urlsThroughKnox filter (_.startsWith("/"))
all (siteRelativeLinksThroughKnox) should startWith (knoxBaseUrl)
for (link <- siteRelativeLinksThroughKnox) {
link should startWith (knoxBaseUrl)
}
val directRequest = mock[HttpServletRequest]
val directResponse = page.render(directRequest)
val directUrls = directResponse \\ "@href" map (_.toString)
val directSiteRelativeLinks = directUrls filter (_.startsWith("/"))
all (directSiteRelativeLinks) should not startWith (knoxBaseUrl)
for (link <- directSiteRelativeLinks) {
link should not startWith (knoxBaseUrl)
}
}
test("static relative links are prefixed with uiRoot (spark.ui.proxyBase)") {
@@ -331,7 +337,9 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
// then
val urls = response \\ "@href" map (_.toString)
val siteRelativeLinks = urls filter (_.startsWith("/"))
all (siteRelativeLinks) should startWith (uiRoot)
for (link <- siteRelativeLinks) {
link should startWith (uiRoot)
}
}
test("/version api endpoint") {

View file

@ -22,7 +22,9 @@ import javax.servlet.http.HttpServletRequest
import org.eclipse.jetty.proxy.ProxyServlet
import org.eclipse.jetty.servlet.{ServletContextHandler, ServletHolder}
import org.openqa.selenium.WebDriver
import org.scalatest._
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatestplus.selenium.WebBrowser
import org.apache.spark._
@@ -146,7 +148,9 @@ abstract class RealBrowserUIHistoryServerSuite(val driverProp: String)
// there are at least some URL links that were generated via javascript,
// and they all contain the spark.ui.proxyBase (uiRoot)
links.length should be > 4
all(links) should startWith(url + uiRoot)
for (link <- links) {
link should startWith(url + uiRoot)
}
} finally {
contextHandler.stop()
quit()

View file

@ -31,8 +31,10 @@ import scala.reflect.ClassTag
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.mockito.Mockito.{mock, when}
import org.scalatest.{BeforeAndAfter, Matchers, PrivateMethodTester}
import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import other.supplier.{CustomPersistenceEngine, CustomRecoveryModeFactory}
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}

View file

@ -19,7 +19,8 @@ package org.apache.spark.deploy.security
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.internal.config.{STAGING_DIR, SUBMIT_DEPLOY_MODE}

View file

@ -17,7 +17,9 @@
package org.apache.spark.deploy.worker
import org.scalatest.{Matchers, PrivateMethodTester}
import org.scalatest.PrivateMethodTester
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.Command

View file

@ -29,8 +29,10 @@ import org.mockito.Answers.RETURNS_SMART_NULLS
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.Eventually.{eventually, interval, timeout}
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.TestUtils.{createTempJsonFile, createTempScriptWithExpectedOutput}

View file

@ -21,8 +21,9 @@ import java.util.concurrent.TimeUnit
import scala.concurrent.duration._
import org.scalatest.Matchers
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark._
import org.apache.spark.internal.config.UI.UI_ENABLED

View file

@ -28,7 +28,8 @@ import scala.util.{Failure, Success, Try}
import com.google.common.io.CharStreams
import org.mockito.Mockito._
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatestplus.mockito.MockitoSugar
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}

View file

@ -25,7 +25,9 @@ import scala.util.Random
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest._
import org.scalatest.BeforeAndAfterEach
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{ExecutorDeadException, SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.network.BlockDataManager

View file

@ -17,7 +17,8 @@
package org.apache.spark.rdd
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SharedSparkContext, SparkFunSuite}
import org.apache.spark.internal.Logging

View file

@@ -2281,7 +2281,7 @@ class DAGSchedulerSuite extends SparkFunSuite with LocalSparkContext with TimeLi
assert(stackTraceString.contains("org.apache.spark.rdd.RDD.count"))
// should include the FunSuite setup:
assert(stackTraceString.contains("org.scalatest.FunSuite"))
assert(stackTraceString.contains("org.scalatest.funsuite.AnyFunSuite"))
}
test("catch errors in event loop") {

View file

@ -24,7 +24,8 @@ import scala.collection.JavaConverters._
import scala.collection.mutable
import org.mockito.Mockito
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark._
import org.apache.spark.executor.TaskMetrics

View file

@ -20,7 +20,7 @@ package org.apache.spark.shuffle.sort
import org.mockito.Mockito.{mock, when}
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.apache.spark._
import org.apache.spark.serializer.{JavaSerializer, KryoSerializer, Serializer}

View file

@ -20,7 +20,7 @@ package org.apache.spark.shuffle.sort
import org.mockito.{Mock, MockitoAnnotations}
import org.mockito.Answers.RETURNS_SMART_NULLS
import org.mockito.Mockito._
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.apache.spark.{Partitioner, SharedSparkContext, ShuffleDependency, SparkFunSuite}
import org.apache.spark.memory.MemoryTestingUtils

View file

@ -20,8 +20,8 @@ package org.apache.spark.status
import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger}
import org.mockito.Mockito._
import org.scalatest.Matchers._
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.internal.config.Status._

View file

@ -18,7 +18,8 @@ package org.apache.spark.status.api.v1
import javax.ws.rs.WebApplicationException
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite

View file

@ -21,8 +21,8 @@ import scala.concurrent.duration._
import org.mockito.{ArgumentMatchers => mc}
import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest._
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.apache.spark._
import org.apache.spark.internal.config

View file

@ -26,8 +26,9 @@ import scala.language.implicitConversions
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{doAnswer, mock, spy, when}
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.apache.spark._
import org.apache.spark.broadcast.BroadcastManager

View file

@ -32,9 +32,11 @@ import scala.reflect.ClassTag
import org.apache.commons.lang3.RandomUtils
import org.mockito.{ArgumentCaptor, ArgumentMatchers => mc}
import org.mockito.Mockito.{doAnswer, mock, never, spy, times, verify, when}
import org.scalatest._
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, PrivateMethodTester}
import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits}
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark._
import org.apache.spark.broadcast.BroadcastManager

View file

@ -21,7 +21,8 @@ import scala.collection.mutable
import scala.language.implicitConversions
import scala.util.Random
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.must.Matchers
import org.apache.spark.{LocalSparkContext, SparkFunSuite}

View file

@ -19,7 +19,8 @@ package org.apache.spark.storage
import java.io.{File, FileOutputStream}
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.must.Matchers
import org.apache.spark._
import org.apache.spark.internal.config.STORAGE_REPLICATION_TOPOLOGY_FILE

View file

@ -18,8 +18,10 @@
package org.apache.spark.ui
import org.openqa.selenium.{By, WebDriver}
import org.scalatest._
import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatest.time.SpanSugar._
import org.scalatestplus.selenium.WebBrowser

View file

@ -30,8 +30,10 @@ import org.json4s._
import org.json4s.jackson.JsonMethods
import org.openqa.selenium.{By, WebDriver}
import org.openqa.selenium.htmlunit.HtmlUnitDriver
import org.scalatest._
import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatest.time.SpanSugar._
import org.scalatestplus.selenium.WebBrowser

View file

@ -17,7 +17,8 @@
package org.apache.spark.util
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite

View file

@ -21,7 +21,8 @@ import java.net.URLClassLoader
import scala.collection.JavaConverters._
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SparkContext, SparkException, SparkFunSuite, TestUtils}

View file

@ -21,7 +21,8 @@ import java.util.NoSuchElementException
import scala.collection.mutable.Buffer
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite

View file

@ -21,8 +21,8 @@ import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
import scala.ref.WeakReference
import org.scalatest.Matchers
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.must.Matchers
import org.apache.spark._
import org.apache.spark.internal.config._

View file

@ -19,7 +19,8 @@ package org.apache.spark.util.collection
import scala.collection.mutable.HashSet
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite
import org.apache.spark.util.SizeEstimator

View file

@ -17,7 +17,8 @@
package org.apache.spark.util.collection
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite
import org.apache.spark.util.SizeEstimator

View file

@ -19,7 +19,8 @@ package org.apache.spark.util.collection
import scala.collection.mutable.HashSet
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite
import org.apache.spark.util.SizeEstimator

View file

@ -22,7 +22,8 @@ import java.util.Random
import scala.collection.mutable.ArrayBuffer
import org.apache.commons.math3.distribution.PoissonDistribution
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite

View file

@ -18,7 +18,8 @@
package org.apache.spark.util.random
import org.apache.commons.math3.stat.inference.ChiSquareTest
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite
import org.apache.spark.util.Utils.times

View file

@@ -157,9 +157,9 @@ metrics-json/4.1.1//metrics-json-4.1.1.jar
metrics-jvm/4.1.1//metrics-jvm-4.1.1.jar
minlog/1.3.0//minlog-1.3.0.jar
netty-all/4.1.47.Final//netty-all-4.1.47.Final.jar
objenesis/2.5.1//objenesis-2.5.1.jar
objenesis/2.6//objenesis-2.6.jar
okhttp/3.12.6//okhttp-3.12.6.jar
okio/1.15.0//okio-1.15.0.jar
okio/1.14.0//okio-1.14.0.jar
opencsv/2.3//opencsv-2.3.jar
orc-core/1.5.10/nohive/orc-core-1.5.10-nohive.jar
orc-mapreduce/1.5.10/nohive/orc-mapreduce-1.5.10-nohive.jar

View file

@ -172,9 +172,9 @@ metrics-json/4.1.1//metrics-json-4.1.1.jar
metrics-jvm/4.1.1//metrics-jvm-4.1.1.jar
minlog/1.3.0//minlog-1.3.0.jar
netty-all/4.1.47.Final//netty-all-4.1.47.Final.jar
objenesis/2.5.1//objenesis-2.5.1.jar
objenesis/2.6//objenesis-2.6.jar
okhttp/3.12.6//okhttp-3.12.6.jar
okio/1.15.0//okio-1.15.0.jar
okio/1.14.0//okio-1.14.0.jar
opencsv/2.3//opencsv-2.3.jar
orc-core/1.5.10//orc-core-1.5.10.jar
orc-mapreduce/1.5.10//orc-mapreduce-1.5.10.jar

View file

@ -185,10 +185,10 @@ metrics-jvm/4.1.1//metrics-jvm-4.1.1.jar
minlog/1.3.0//minlog-1.3.0.jar
netty-all/4.1.47.Final//netty-all-4.1.47.Final.jar
nimbus-jose-jwt/4.41.1//nimbus-jose-jwt-4.41.1.jar
objenesis/2.5.1//objenesis-2.5.1.jar
objenesis/2.6//objenesis-2.6.jar
okhttp/2.7.5//okhttp-2.7.5.jar
okhttp/3.12.6//okhttp-3.12.6.jar
okio/1.15.0//okio-1.15.0.jar
okio/1.14.0//okio-1.14.0.jar
opencsv/2.3//opencsv-2.3.jar
orc-core/1.5.10//orc-core-1.5.10.jar
orc-mapreduce/1.5.10//orc-mapreduce-1.5.10.jar

View file

@ -26,7 +26,9 @@ import com.amazonaws.services.kinesis.clientlibrary.lib.worker.ShutdownReason
import com.amazonaws.services.kinesis.model.Record
import org.mockito.ArgumentMatchers.{anyList, anyString, eq => meq}
import org.mockito.Mockito.{never, times, verify, when}
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatestplus.mockito.MockitoSugar
import org.apache.spark.streaming.{Duration, TestSuiteBase}

View file

@ -23,8 +23,8 @@ import scala.util.Random
import com.amazonaws.services.kinesis.model.Record
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}
import org.scalatest.Matchers._
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.network.util.JavaUtils

View file

@ -34,7 +34,7 @@ class ShortestPathsSuite extends SparkFunSuite with LocalSparkContext {
val graph = Graph.fromEdgeTuples(edges, 1)
val landmarks = Seq(1, 4).map(_.toLong)
val results = ShortestPaths.run(graph, landmarks).vertices.collect.map {
case (v, spMap) => (v, spMap.mapValues(i => i))
case (v, spMap) => (v, spMap.mapValues(i => i).toMap)
}
assert(results.toSet === shortestPaths)
}

View file

@ -18,13 +18,14 @@
package org.apache.spark.ml
// scalastyle:off
import org.scalatest.{BeforeAndAfterAll, FunSuite}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite
/**
* Base abstract class for all unit tests in Spark for handling common functionality.
*/
private[spark] abstract class SparkMLFunSuite
extends FunSuite
extends AnyFunSuite
with BeforeAndAfterAll {
// scalastyle:on
}

View file

@ -21,8 +21,8 @@ import scala.collection.JavaConverters._
import scala.util.Random
import scala.util.control.Breaks._
import org.scalatest.Assertions._
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.{Vector, Vectors}

View file

@ -20,7 +20,7 @@ package org.apache.spark.mllib.optimization
import scala.collection.JavaConverters._
import scala.util.Random
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.Vectors

View file

@ -19,7 +19,7 @@ package org.apache.spark.mllib.optimization
import scala.util.Random
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.Vectors

View file

@ -17,7 +17,7 @@
package org.apache.spark.mllib.regression
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.apache.spark.{SparkException, SparkFunSuite}
import org.apache.spark.mllib.util.MLlibTestSparkContext

pom.xml — 35 changed lines
View file

@@ -334,6 +334,21 @@
<artifactId>scalatest_${scala.binary.version}</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
<artifactId>scalatestplus-scalacheck_${scala.binary.version}</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
<artifactId>scalatestplus-mockito_${scala.binary.version}</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
<artifactId>scalatestplus-selenium_${scala.binary.version}</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
@@ -892,7 +907,25 @@
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.binary.version}</artifactId>
<version>3.0.8</version>
<version>3.2.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
<artifactId>scalatestplus-scalacheck_${scala.binary.version}</artifactId>
<version>3.1.0.0-RC2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
<artifactId>scalatestplus-mockito_${scala.binary.version}</artifactId>
<version>1.0.0-SNAP5</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalatestplus</groupId>
<artifactId>scalatestplus-selenium_${scala.binary.version}</artifactId>
<version>1.0.0-SNAP5</version>
<scope>test</scope>
</dependency>
<dependency>

View file

@ -28,9 +28,10 @@ import io.fabric8.kubernetes.api.model.Pod
import io.fabric8.kubernetes.client.{KubernetesClientException, Watcher}
import io.fabric8.kubernetes.client.Watcher.Action
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, Tag}
import org.scalatest.Matchers
import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
import org.scalatest.concurrent.PatienceConfiguration.{Interval, Timeout}
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatest.time.{Minutes, Seconds, Span}
import org.apache.spark.SparkFunSuite

View file

@ -22,8 +22,8 @@ import scala.collection.JavaConverters._
import io.fabric8.kubernetes.api.model.{Pod, SecretBuilder}
import org.apache.commons.codec.binary.Base64
import org.apache.commons.io.output.ByteArrayOutputStream
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.deploy.k8s.integrationtest.KubernetesSuite._

View file

@ -25,7 +25,8 @@ import scala.collection.JavaConverters._
import com.google.common.io.Files
import org.apache.mesos.Protos.{FrameworkInfo, Resource, Value}
import org.mockito.Mockito._
import org.scalatest._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatestplus.mockito.MockitoSugar
import org.apache.spark.{SparkConf, SparkContext, SparkException, SparkFunSuite}

View file

@ -28,8 +28,9 @@ import scala.concurrent.duration._
import com.google.common.io.Files
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.apache.hadoop.yarn.server.MiniYARNCluster
import org.scalatest.{BeforeAndAfterAll, Matchers}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.apache.spark._
import org.apache.spark.deploy.yarn.config._

View file

@ -43,7 +43,8 @@ import org.apache.hadoop.yarn.util.Records
import org.mockito.ArgumentMatchers.{any, anyBoolean, anyShort, eq => meq}
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SparkConf, SparkException, SparkFunSuite, TestUtils}
import org.apache.spark.deploy.yarn.ResourceRequestHelper._

View file

@ -19,9 +19,9 @@ package org.apache.spark.deploy.yarn
import org.apache.hadoop.yarn.api.records._
import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest
import org.scalatest.{BeforeAndAfterEach, Matchers}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.matchers.must.Matchers
import org.apache.spark.SparkConf
import org.apache.spark.SparkFunSuite
import org.apache.spark.resource.ResourceProfile

View file

@ -16,7 +16,8 @@
*/
package org.apache.spark.deploy.yarn
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.ManualClock

View file

@ -19,7 +19,8 @@ package org.apache.spark.deploy.yarn
import org.apache.hadoop.yarn.api.records.Resource
import org.apache.hadoop.yarn.util.Records
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SparkConf, SparkException, SparkFunSuite}
import org.apache.spark.deploy.yarn.ResourceRequestHelper._

View file

@ -22,7 +22,8 @@ import java.util.Collections
import org.apache.hadoop.yarn.client.api.AMRMClient
import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest
import org.mockito.Mockito._
import org.scalatest.{BeforeAndAfterEach, Matchers}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.matchers.must.Matchers
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.yarn.config.{YARN_EXCLUDE_NODES, YARN_EXECUTOR_LAUNCH_BLACKLIST_ENABLED}

View file

@ -28,7 +28,9 @@ import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.mockito.ArgumentCaptor
import org.mockito.Mockito._
import org.scalatest.{BeforeAndAfterEach, Matchers}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.SparkHadoopUtil

View file

@ -28,8 +28,9 @@ import scala.io.Source
import com.google.common.io.{ByteStreams, Files}
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.apache.hadoop.yarn.util.ConverterUtils
import org.scalatest.Matchers
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark._
import org.apache.spark.api.python.PythonUtils

View file

@ -23,7 +23,8 @@ import java.nio.charset.StandardCharsets
import com.google.common.io.Files
import org.apache.commons.io.FileUtils
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark._
import org.apache.spark.deploy.yarn.config._

View file

@ -23,7 +23,8 @@ import java.nio.charset.StandardCharsets
import com.google.common.io.{ByteStreams, Files}
import org.apache.hadoop.yarn.api.records.ApplicationAccessType
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.SparkHadoopUtil

View file

@ -21,7 +21,8 @@ import scala.collection.JavaConverters._
import org.apache.hadoop.metrics2.MetricsRecordBuilder
import org.mockito.ArgumentMatchers.{any, anyDouble, anyInt, anyLong}
import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite
import org.apache.spark.network.server.OneForOneStreamManager

View file

@ -34,8 +34,10 @@ import org.apache.hadoop.service.ServiceStateException
import org.apache.hadoop.yarn.api.records.ApplicationId
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.apache.hadoop.yarn.server.api.{ApplicationInitializationContext, ApplicationTerminationContext}
import org.scalatest.{BeforeAndAfterEach, Matchers}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SecurityManager
import org.apache.spark.SparkFunSuite

View file

@ -17,13 +17,15 @@
package org.apache.spark.sql
import org.scalatest.{FunSpec, Matchers}
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{GenericRow, GenericRowWithSchema}
import org.apache.spark.sql.types._
class RowTest extends FunSpec with Matchers {
class RowTest extends AnyFunSpec with Matchers {
val schema = StructType(
StructField("col1", StringType) ::

View file

@ -21,7 +21,7 @@ import java.io.File
import java.net.URI
import org.mockito.Mockito._
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogStorageFormat, CatalogTable, CatalogTableType, ExternalCatalog, InMemoryCatalog, SessionCatalog}
@ -29,7 +29,6 @@ import org.apache.spark.sql.catalyst.expressions.{Alias, AttributeReference}
import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, Project}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
class AnalysisExternalCatalogSuite extends AnalysisTest with Matchers {
private def getAnalyzer(externCatalog: ExternalCatalog, databasePath: File): Analyzer = {

View file

@ -23,7 +23,7 @@ import scala.reflect.ClassTag
import scala.reflect.runtime.universe.TypeTag
import org.apache.log4j.Level
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.apache.spark.api.python.PythonEvalType
import org.apache.spark.sql.catalyst.{AliasIdentifier, TableIdentifier}

View file

@ -22,7 +22,7 @@ import java.io.File
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogStorageFormat, CatalogTable, CatalogTableType, ExternalCatalog, InMemoryCatalog, SessionCatalog}

View file

@ -20,7 +20,8 @@ package org.apache.spark.sql.catalyst.expressions
import java.nio.charset.StandardCharsets
import java.sql.{Date, Timestamp}
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.InternalRow

View file

@ -17,7 +17,8 @@
package org.apache.spark.sql.catalyst.expressions.codegen
import org.scalatest.{Assertions, BeforeAndAfterEach, Matchers}
import org.scalatest.{Assertions, BeforeAndAfterEach}
import org.scalatest.matchers.must.Matchers
import org.apache.spark.{SparkFunSuite, TestUtils}
import org.apache.spark.deploy.SparkSubmitSuite

View file

@ -17,7 +17,7 @@
package org.apache.spark.sql.catalyst.optimizer
import org.scalatest.Matchers._
import org.scalatest.matchers.must.Matchers._
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.dsl.expressions._

View file

@ -23,7 +23,8 @@ import java.time.{Instant, LocalDate, LocalDateTime, LocalTime, ZoneId}
import java.util.Locale
import java.util.concurrent.TimeUnit
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.plans.SQLHelper

View file

@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.util
import java.time.DateTimeException
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.apache.spark.{SparkFunSuite, SparkUpgradeException}
import org.apache.spark.sql.catalyst.plans.SQLHelper

View file

@ -21,7 +21,7 @@ import java.sql.{Date, Timestamp}
import java.time.{Instant, LocalDate, LocalDateTime, ZoneId}
import java.util.TimeZone
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.plans.SQLHelper

View file

@ -20,6 +20,8 @@ package org.apache.spark.sql.catalyst.util
import java.time.{DateTimeException, Instant, LocalDateTime, LocalTime}
import java.util.concurrent.TimeUnit
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkUpgradeException
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
import org.apache.spark.sql.catalyst.util.DateTimeUtils._

View file

@ -20,7 +20,7 @@ import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{mock, when}
import org.mockito.invocation.InvocationOnMock
import org.scalatest.Inside
import org.scalatest.Matchers._
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.TableIdentifier

View file

@ -20,11 +20,9 @@ package org.apache.spark.sql
import java.sql.{Date, Timestamp}
import java.util.Locale
import scala.collection.JavaConverters._
import org.apache.hadoop.io.{LongWritable, Text}
import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
import org.scalatest.Matchers._
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.sql.catalyst.expressions.{InSet, Literal, NamedExpression}
import org.apache.spark.sql.execution.ProjectExec

View file

@ -19,7 +19,7 @@ package org.apache.spark.sql
import scala.util.Random
import org.scalatest.Matchers.the
import org.scalatest.matchers.must.Matchers.the
import org.apache.spark.sql.execution.WholeStageCodegenExec
import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
@ -31,7 +31,6 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.test.SQLTestData.DecimalData
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
case class Fact(date: Int, hour: Int, minute: Int, room_name: String, temp: Double)

View file

@ -19,7 +19,7 @@ package org.apache.spark.sql
import java.util.Random
import org.scalatest.Matchers._
import org.scalatest.matchers.must.Matchers._
import org.apache.spark.internal.Logging
import org.apache.spark.sql.execution.stat.StatFunctions

View file

@ -26,7 +26,8 @@ import java.util.concurrent.atomic.AtomicLong
import scala.reflect.runtime.universe.TypeTag
import scala.util.Random
import org.scalatest.Matchers._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkException
import org.apache.spark.scheduler.{SparkListener, SparkListenerJobEnd}

View file

@ -17,7 +17,7 @@
package org.apache.spark.sql
import org.scalatest.Matchers.the
import org.scalatest.matchers.must.Matchers.the
import org.apache.spark.TestUtils.{assertNotSpilled, assertSpilled}
import org.apache.spark.sql.catalyst.optimizer.TransposeWindow

View file

@ -24,7 +24,8 @@ import scala.util.{Random, Try}
import scala.util.control.NonFatal
import org.mockito.Mockito._
import org.scalatest.Matchers
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SparkConf, SparkFunSuite, TaskContext, TaskContextImpl}
import org.apache.spark.internal.config.MEMORY_OFFHEAP_ENABLED

View file

@ -17,14 +17,14 @@
package org.apache.spark.sql.execution
import org.scalatest.{Assertions, BeforeAndAfterEach, Matchers}
import org.scalatest.concurrent.TimeLimits
import org.scalatest.{Assertions, BeforeAndAfterEach}
import org.scalatest.matchers.must.Matchers
import org.scalatest.time.SpanSugar._
import org.apache.spark.{SparkFunSuite, TestUtils}
import org.apache.spark.deploy.SparkSubmitSuite
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{LocalSparkSession, QueryTest, Row, SparkSession}
import org.apache.spark.sql.{QueryTest, Row, SparkSession}
import org.apache.spark.sql.functions.{array, col, count, lit}
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.unsafe.Platform

View file

@ -24,7 +24,9 @@ import java.util.{Calendar, Date, Locale}
import java.util.concurrent.TimeUnit._
import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{AnalysisException, Dataset}

View file

@ -19,8 +19,10 @@ package org.apache.spark.sql.streaming.ui
import org.openqa.selenium.WebDriver
import org.openqa.selenium.htmlunit.HtmlUnitDriver
import org.scalatest._
import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.scalatest.time.SpanSugar._
import org.scalatestplus.selenium.WebBrowser

Some files were not shown because too many files have changed in this diff Show more