[SPARK-26536][BUILD][TEST] Upgrade Mockito to 2.23.4

## What changes were proposed in this pull request?

This PR upgrades Mockito from 1.10.19 to 2.23.4. The following changes are required.

- Replace `org.mockito.Matchers` with `org.mockito.ArgumentMatchers`.
- Replace `anyObject` with `any`.
- Replace `getArgumentAt` with `getArgument` and add a type annotation (see the examples below).
- Use the `isNull` matcher where `null` is actually passed, since Mockito 2's class-based `any(...)` matchers no longer match `null`.
```java
     saslHandler.channelInactive(null);
-    verify(handler).channelInactive(any(TransportClient.class));
+    verify(handler).channelInactive(isNull());
```
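
The `anyObject` and `getArgumentAt` replacements follow the same mechanical pattern; the before/after pairs below are taken verbatim from the test suites touched in this PR.

```scala
-import org.mockito.Matchers.{anyInt, anyObject, anyString, eq => meq}
+import org.mockito.ArgumentMatchers.{any, anyInt, anyString, eq => meq}

-verify(tsm).abort(anyString(), anyObject())
+verify(tsm).abort(anyString(), any())

-val task = invocationOnMock.getArgumentAt(0, classOf[Int])
+val task: Int = invocationOnMock.getArgument(0)
```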

- Define and use a `doReturn` wrapper to avoid [SI-4775](https://issues.scala-lang.org/browse/SI-4775).
```scala
private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
```
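
Mockito 2 adds the overload `doReturn(Object toBeReturned, Object... toBeReturnedNext)`, and under SI-4775 scalac reports every plain `doReturn(x)` call as an ambiguous reference between it and `doReturn(Object)`. The wrapper pins the varargs overload once per suite. A minimal, self-contained sketch of the pattern (the `Coordinator` trait here is hypothetical, not from this diff):

```scala
import org.mockito.ArgumentMatchers.anyInt
import org.mockito.Mockito.mock

object DoReturnWrapperExample {
  // Pin the (Object, Object*) overload so scalac sees one applicable method.
  private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)

  // Hypothetical collaborator, used only to demonstrate the stubbing call site.
  trait Coordinator { def canCommit(stage: Int, task: Int): Boolean }

  def main(args: Array[String]): Unit = {
    val coordinator = mock(classOf[Coordinator])
    // Reads exactly like Mockito 1.x stubbing at the call site.
    doReturn(false).when(coordinator).canCommit(anyInt(), anyInt())
    assert(!coordinator.canCommit(0, 0))
  }
}
```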

## How was this patch tested?

Passes Jenkins with the existing tests.

Closes #23452 from dongjoon-hyun/SPARK-26536.

Authored-by: Dongjoon Hyun <dongjoon@apache.org>
Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
Dongjoon Hyun, 2019-01-04 19:23:38 -08:00
parent bccb8602d7
commit e15a319ccd
56 changed files with 131 additions and 111 deletions

@@ -347,10 +347,10 @@ public class SparkSaslSuite {
 verify(handler).getStreamManager();
 saslHandler.channelInactive(null);
-verify(handler).channelInactive(any(TransportClient.class));
+verify(handler).channelInactive(isNull());
 saslHandler.exceptionCaught(null, null);
-verify(handler).exceptionCaught(any(Throwable.class), any(TransportClient.class));
+verify(handler).exceptionCaught(isNull(), isNull());
 }
 @Test

@@ -27,7 +27,7 @@ import org.junit.Test;
 import org.mockito.ArgumentCaptor;
 import static org.junit.Assert.*;
-import static org.mockito.Matchers.any;
+import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.*;
 import org.apache.spark.network.buffer.ManagedBuffer;
@@ -79,6 +79,8 @@ public class ExternalShuffleBlockHandlerSuite {
 @SuppressWarnings("unchecked")
 @Test
 public void testOpenShuffleBlocks() {
+when(client.getClientId()).thenReturn("app0");
 RpcResponseCallback callback = mock(RpcResponseCallback.class);
 ManagedBuffer block0Marker = new NioManagedBuffer(ByteBuffer.wrap(new byte[3]));

@@ -28,10 +28,10 @@ import org.junit.Test;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyInt;
-import static org.mockito.Matchers.anyLong;
-import static org.mockito.Matchers.eq;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;

@@ -50,8 +50,8 @@ import static org.hamcrest.Matchers.greaterThan;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.mockito.Answers.RETURNS_SMART_NULLS;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyInt;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyInt;
 import static org.mockito.Mockito.when;

@@ -19,7 +19,7 @@ package org.apache.spark
 import scala.collection.mutable
-import org.mockito.Matchers.{any, eq => meq}
+import org.mockito.ArgumentMatchers.{any, eq => meq}
 import org.mockito.Mockito.{mock, never, verify, when}
 import org.scalatest.{BeforeAndAfter, PrivateMethodTester}

@@ -23,8 +23,7 @@ import scala.collection.mutable
 import scala.concurrent.Future
 import scala.concurrent.duration._
-import org.mockito.Matchers
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.{any, eq => meq}
 import org.mockito.Mockito.{mock, spy, verify, when}
 import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
@@ -151,7 +150,7 @@ class HeartbeatReceiverSuite
 heartbeatReceiverClock.advance(executorTimeout)
 heartbeatReceiverRef.askSync[Boolean](ExpireDeadHosts)
 // Only the second executor should be expired as a dead host
-verify(scheduler).executorLost(Matchers.eq(executorId2), any())
+verify(scheduler).executorLost(meq(executorId2), any())
 val trackedExecutors = getTrackedExecutors
 assert(trackedExecutors.size === 1)
 assert(trackedExecutors.contains(executorId1))
@@ -223,10 +222,10 @@ class HeartbeatReceiverSuite
 assert(!response.reregisterBlockManager)
 // Additionally verify that the scheduler callback is called with the correct parameters
 verify(scheduler).executorHeartbeatReceived(
-Matchers.eq(executorId),
-Matchers.eq(Array(1L -> metrics.accumulators())),
-Matchers.eq(blockManagerId),
-Matchers.eq(executorUpdates))
+meq(executorId),
+meq(Array(1L -> metrics.accumulators())),
+meq(blockManagerId),
+meq(executorUpdates))
 }
 }

@@ -19,7 +19,7 @@ package org.apache.spark
 import scala.collection.mutable.ArrayBuffer
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito._
 import org.apache.spark.LocalSparkContext._

@@ -20,7 +20,7 @@ package org.apache.spark.deploy
 import scala.collection.mutable
 import scala.concurrent.duration._
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito.{mock, verify, when}
 import org.scalatest.{BeforeAndAfterAll, PrivateMethodTester}
 import org.scalatest.concurrent.Eventually._

@@ -24,7 +24,7 @@ import scala.collection.mutable
 import com.codahale.metrics.Counter
 import org.eclipse.jetty.servlet.ServletContextHandler
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito._
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer

@@ -34,7 +34,7 @@ import org.apache.hadoop.hdfs.{DFSInputStream, DistributedFileSystem}
 import org.apache.hadoop.security.AccessControlException
 import org.json4s.jackson.JsonMethods._
 import org.mockito.ArgumentMatcher
-import org.mockito.Matchers.{any, argThat}
+import org.mockito.ArgumentMatchers.{any, argThat}
 import org.mockito.Mockito.{doThrow, mock, spy, verify, when}
 import org.scalatest.BeforeAndAfter
 import org.scalatest.Matchers
@@ -933,7 +933,7 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
 val mockedFs = spy(provider.fs)
 doThrow(new AccessControlException("Cannot read accessDenied file")).when(mockedFs).open(
 argThat(new ArgumentMatcher[Path]() {
-override def matches(path: Any): Boolean = {
+override def matches(path: Path): Boolean = {
 path.asInstanceOf[Path].getName.toLowerCase(Locale.ROOT) == "accessdenied"
 }
 }))

@@ -20,8 +20,8 @@ package org.apache.spark.deploy.history
 import java.io.File
 import org.mockito.AdditionalAnswers
-import org.mockito.Matchers.{any, anyBoolean, anyLong, eq => meq}
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{anyBoolean, anyLong, eq => meq}
+import org.mockito.Mockito.{doAnswer, spy}
 import org.scalatest.BeforeAndAfter
 import org.apache.spark.{SparkConf, SparkFunSuite}
@@ -32,6 +32,8 @@ import org.apache.spark.util.kvstore.KVStore
 class HistoryServerDiskManagerSuite extends SparkFunSuite with BeforeAndAfter {
+private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
 private val MAX_USAGE = 3L
 private var testDir: File = _

@@ -21,7 +21,7 @@ import java.io.File
 import scala.concurrent.duration._
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.{any, anyInt}
 import org.mockito.Mockito._
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer

@@ -22,7 +22,7 @@ import java.util.function.Supplier
 import org.mockito.{Mock, MockitoAnnotations}
 import org.mockito.Answers.RETURNS_SMART_NULLS
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito._
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer

@@ -30,7 +30,7 @@ import scala.concurrent.duration._
 import scala.language.postfixOps
 import org.mockito.ArgumentCaptor
-import org.mockito.Matchers.{any, eq => meq}
+import org.mockito.ArgumentMatchers.{any, eq => meq}
 import org.mockito.Mockito.{inOrder, verify, when}
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer

@@ -23,7 +23,7 @@ import scala.collection.mutable
 import scala.concurrent.{ExecutionContext, Future}
 import scala.concurrent.duration.Duration
-import org.mockito.Matchers.{any, anyLong}
+import org.mockito.ArgumentMatchers.{any, anyLong}
 import org.mockito.Mockito.{mock, when, RETURNS_SMART_NULLS}
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer

@@ -29,7 +29,7 @@ import scala.concurrent.duration._
 import scala.language.postfixOps
 import com.google.common.io.Files
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito.{mock, never, verify, when}
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.concurrent.Eventually._

@@ -21,7 +21,7 @@ import java.net.InetSocketAddress
 import java.nio.ByteBuffer
 import io.netty.channel.Channel
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito._
 import org.apache.spark.SparkFunSuite

@@ -17,7 +17,7 @@
 package org.apache.spark.scheduler
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito.{never, verify, when}
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer

@@ -21,7 +21,7 @@ import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream,
 import scala.util.Random
-import org.mockito.Mockito._
+import org.mockito.Mockito.mock
 import org.roaringbitmap.RoaringBitmap
 import org.apache.spark.{SparkConf, SparkContext, SparkEnv, SparkFunSuite}
@@ -31,6 +31,7 @@ import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
 import org.apache.spark.storage.BlockManagerId
 class MapStatusSuite extends SparkFunSuite {
+private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
 test("compressSize") {
 assert(MapStatus.compressSize(0L) === 0)

@@ -26,8 +26,8 @@ import scala.language.postfixOps
 import org.apache.hadoop.mapred._
 import org.apache.hadoop.mapreduce.TaskType
-import org.mockito.Matchers
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{any, eq => meq}
+import org.mockito.Mockito.{doAnswer, spy, times, verify}
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
 import org.scalatest.BeforeAndAfter
@@ -71,6 +71,8 @@ import org.apache.spark.util.{ThreadUtils, Utils}
 */
 class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
+private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
 var outputCommitCoordinator: OutputCommitCoordinator = null
 var tempDir: File = null
 var sc: SparkContext = null
@@ -103,7 +105,7 @@ class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
 invoke.callRealMethod()
 mockTaskScheduler.backend.reviveOffers()
 }
-}).when(mockTaskScheduler).submitTasks(Matchers.any())
+}).when(mockTaskScheduler).submitTasks(any())
 doAnswer(new Answer[TaskSetManager]() {
 override def answer(invoke: InvocationOnMock): TaskSetManager = {
@@ -123,7 +125,7 @@ class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
 }
 }
 }
-}).when(mockTaskScheduler).createTaskSetManager(Matchers.any(), Matchers.any())
+}).when(mockTaskScheduler).createTaskSetManager(any(), any())
 sc.taskScheduler = mockTaskScheduler
 val dagSchedulerWithMockTaskScheduler = new DAGScheduler(sc, mockTaskScheduler)
@@ -154,7 +156,7 @@ class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
 test("Job should not complete if all commits are denied") {
 // Create a mock OutputCommitCoordinator that denies all attempts to commit
 doReturn(false).when(outputCommitCoordinator).handleAskPermissionToCommit(
-Matchers.any(), Matchers.any(), Matchers.any(), Matchers.any())
+any(), any(), any(), any())
 val rdd: RDD[Int] = sc.parallelize(Seq(1), 1)
 def resultHandler(x: Int, y: Unit): Unit = {}
 val futureAction: SimpleFutureAction[Unit] = sc.submitJob[Int, Unit, Unit](rdd,
@@ -268,8 +270,8 @@ class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
 assert(retriedStage.size === 1)
 assert(sc.dagScheduler.outputCommitCoordinator.isEmpty)
 verify(sc.env.outputCommitCoordinator, times(2))
-.stageStart(Matchers.eq(retriedStage.head), Matchers.any())
-verify(sc.env.outputCommitCoordinator).stageEnd(Matchers.eq(retriedStage.head))
+.stageStart(meq(retriedStage.head), any())
+verify(sc.env.outputCommitCoordinator).stageEnd(meq(retriedStage.head))
 }
 }

@@ -19,7 +19,7 @@ package org.apache.spark.scheduler
 import java.util.Properties
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito._
 import org.scalatest.BeforeAndAfter

@@ -28,7 +28,7 @@ import scala.util.control.NonFatal
 import com.google.common.util.concurrent.MoreExecutors
 import org.mockito.ArgumentCaptor
-import org.mockito.Matchers.{any, anyLong}
+import org.mockito.ArgumentMatchers.{any, anyLong}
 import org.mockito.Mockito.{spy, times, verify}
 import org.scalatest.BeforeAndAfter
 import org.scalatest.concurrent.Eventually._

@@ -22,7 +22,7 @@ import java.nio.ByteBuffer
 import scala.collection.mutable.HashMap
 import scala.concurrent.duration._
-import org.mockito.Matchers.{anyInt, anyObject, anyString, eq => meq}
+import org.mockito.ArgumentMatchers.{any, anyInt, anyString, eq => meq}
 import org.mockito.Mockito.{atLeast, atMost, never, spy, times, verify, when}
 import org.scalatest.BeforeAndAfterEach
 import org.scalatest.concurrent.Eventually
@@ -430,7 +430,7 @@ class TaskSchedulerImplSuite extends SparkFunSuite with LocalSparkContext with B
 verify(blacklist, never).updateBlacklistForSuccessfulTaskSet(
 stageId = meq(2),
 stageAttemptId = anyInt(),
-failuresByExec = anyObject())
+failuresByExec = any())
 }
 test("scheduled tasks obey node and executor blacklists") {
@@ -504,7 +504,7 @@ class TaskSchedulerImplSuite extends SparkFunSuite with LocalSparkContext with B
 WorkerOffer("executor3", "host1", 2)
 )).flatten.size === 0)
 assert(tsm.isZombie)
-verify(tsm).abort(anyString(), anyObject())
+verify(tsm).abort(anyString(), any())
 }
 test("SPARK-22148 abort timer should kick in when task is completely blacklisted & no new " +
@@ -1184,7 +1184,7 @@ class TaskSchedulerImplSuite extends SparkFunSuite with LocalSparkContext with B
 assert(finalTsm.isZombie)
 // no taskset has completed all of its tasks, so no updates to the blacklist tracker yet
-verify(blacklist, never).updateBlacklistForSuccessfulTaskSet(anyInt(), anyInt(), anyObject())
+verify(blacklist, never).updateBlacklistForSuccessfulTaskSet(anyInt(), anyInt(), any())
 // finally, lets complete all the tasks. We simulate failures in attempt 1, but everything
 // else succeeds, to make sure we get the right updates to the blacklist in all cases.
@@ -1202,7 +1202,7 @@ class TaskSchedulerImplSuite extends SparkFunSuite with LocalSparkContext with B
 // we update the blacklist for the stage attempts with all successful tasks. Even though
 // some tasksets had failures, we still consider them all successful from a blacklisting
 // perspective, as the failures weren't from a problem w/ the tasks themselves.
-verify(blacklist).updateBlacklistForSuccessfulTaskSet(meq(0), meq(stageAttempt), anyObject())
+verify(blacklist).updateBlacklistForSuccessfulTaskSet(meq(0), meq(stageAttempt), any())
 }
 }

@@ -16,7 +16,7 @@
 */
 package org.apache.spark.scheduler
-import org.mockito.Matchers.isA
+import org.mockito.ArgumentMatchers.isA
 import org.mockito.Mockito.{never, verify}
 import org.scalatest.BeforeAndAfterEach
 import org.scalatest.mockito.MockitoSugar

@@ -22,7 +22,7 @@ import java.util.{Properties, Random}
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
-import org.mockito.Matchers.{any, anyInt, anyString}
+import org.mockito.ArgumentMatchers.{any, anyInt, anyString}
 import org.mockito.Mockito.{mock, never, spy, times, verify, when}
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
@@ -1319,7 +1319,7 @@ class TaskSetManagerSuite extends SparkFunSuite with LocalSparkContext with Logg
 when(taskSetManagerSpy.addPendingTask(anyInt())).thenAnswer(
 new Answer[Unit] {
 override def answer(invocationOnMock: InvocationOnMock): Unit = {
-val task = invocationOnMock.getArgumentAt(0, classOf[Int])
+val task: Int = invocationOnMock.getArgument(0)
 assert(taskSetManager.taskSetBlacklistHelperOpt.get.
 isExecutorBlacklistedForTask(exec, task))
 }

@@ -24,7 +24,7 @@ import java.nio.file.Files
 import java.util.{Arrays, Random, UUID}
 import com.google.common.io.ByteStreams
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito._
 import org.apache.spark._

@@ -25,7 +25,7 @@ import scala.collection.mutable.ArrayBuffer
 import org.mockito.{Mock, MockitoAnnotations}
 import org.mockito.Answers.RETURNS_SMART_NULLS
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.{any, anyInt}
 import org.mockito.Mockito._
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer

@@ -21,7 +21,7 @@ import java.io.{DataInputStream, File, FileInputStream, FileOutputStream}
 import org.mockito.{Mock, MockitoAnnotations}
 import org.mockito.Answers.RETURNS_SMART_NULLS
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito._
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer

@@ -17,7 +17,7 @@
 package org.apache.spark.shuffle.sort
-import org.mockito.Mockito._
+import org.mockito.Mockito.{mock, when}
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
 import org.scalatest.Matchers
@@ -31,6 +31,8 @@ import org.apache.spark.serializer.{JavaSerializer, KryoSerializer, Serializer}
 */
 class SortShuffleManagerSuite extends SparkFunSuite with Matchers {
+private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
 import SortShuffleManager.canUseSerializedShuffle
 private class RuntimeExceptionAnswer extends Answer[Object] {

@@ -27,7 +27,7 @@ import scala.language.{implicitConversions, postfixOps}
 import scala.reflect.ClassTag
 import org.apache.commons.lang3.RandomUtils
-import org.mockito.{Matchers => mc}
+import org.mockito.{ArgumentMatchers => mc}
 import org.mockito.Mockito.{mock, times, verify, when}
 import org.scalatest._
 import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits}

@@ -17,7 +17,7 @@
 package org.apache.spark.storage
-import org.mockito.Matchers
+import org.mockito.ArgumentMatchers.{eq => meq}
 import org.mockito.Mockito._
 import org.scalatest.mockito.MockitoSugar
@@ -45,7 +45,7 @@ class PartiallyUnrolledIteratorSuite extends SparkFunSuite with MockitoSugar {
 joinIterator.hasNext
 joinIterator.hasNext
 verify(memoryStore, times(1))
-.releaseUnrollMemoryForThisTask(Matchers.eq(ON_HEAP), Matchers.eq(unrollSize.toLong))
+.releaseUnrollMemoryForThisTask(meq(ON_HEAP), meq(unrollSize.toLong))
 // Secondly, iterate over rest iterator
 (unrollSize until unrollSize + restSize).foreach { value =>

@@ -24,8 +24,8 @@ import java.util.concurrent.Semaphore
 import scala.concurrent.ExecutionContext.Implicits.global
 import scala.concurrent.Future
-import org.mockito.Matchers.{any, eq => meq}
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{any, eq => meq}
+import org.mockito.Mockito.{mock, times, verify, when}
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
 import org.scalatest.PrivateMethodTester
@@ -40,6 +40,9 @@ import org.apache.spark.util.Utils
 class ShuffleBlockFetcherIteratorSuite extends SparkFunSuite with PrivateMethodTester {
+private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
 // Some of the tests are quite tricky because we are testing the cleanup behavior
 // in the presence of faults.

@@ -22,7 +22,7 @@ import javax.security.auth.login.{AppConfigurationEntry, Configuration}
 import org.apache.hadoop.security.{Credentials, UserGroupInformation}
 import org.apache.hadoop.security.token.Token
-import org.mockito.Mockito.{doReturn, mock}
+import org.mockito.Mockito.mock
 import org.scalatest.BeforeAndAfterEach
 import org.apache.spark.{SparkConf, SparkEnv, SparkFunSuite}
@@ -35,6 +35,8 @@ import org.apache.spark.deploy.security.KafkaTokenUtil.KafkaDelegationTokenIdent
 trait KafkaDelegationTokenTest extends BeforeAndAfterEach {
 self: SparkFunSuite =>
+private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
 protected val tokenId = "tokenId" + ju.UUID.randomUUID().toString
 protected val tokenPassword = "tokenPassword" + ju.UUID.randomUUID().toString

@@ -24,7 +24,7 @@ import scala.concurrent.duration._
 import scala.language.postfixOps
 import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorCheckpointer
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers._
 import org.mockito.Mockito._
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer

@@ -24,9 +24,8 @@ import com.amazonaws.services.kinesis.clientlibrary.exceptions._
 import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorCheckpointer
 import com.amazonaws.services.kinesis.clientlibrary.lib.worker.ShutdownReason
 import com.amazonaws.services.kinesis.model.Record
-import org.mockito.Matchers._
-import org.mockito.Matchers.{eq => meq}
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{anyListOf, anyString, eq => meq}
+import org.mockito.Mockito.{never, times, verify, when}
 import org.scalatest.{BeforeAndAfter, Matchers}
 import org.scalatest.mockito.MockitoSugar

@@ -23,6 +23,7 @@ import java.util.List;
 import org.junit.Before;
 import org.junit.Test;
+import static org.mockito.ArgumentMatchers.isNull;
 import static org.mockito.Mockito.*;
 public class SparkSubmitOptionParserSuite extends BaseSuite {
@@ -48,14 +49,17 @@ public class SparkSubmitOptionParserSuite extends BaseSuite {
 }
 }
+int nullCount = 0;
 for (String[] switchNames : parser.switches) {
 int switchCount = 0;
 for (String name : switchNames) {
 parser.parse(Arrays.asList(name));
 count++;
+nullCount++;
 switchCount++;
 verify(parser, times(switchCount)).handle(eq(switchNames[0]), same(null));
-verify(parser, times(count)).handle(anyString(), any(String.class));
+verify(parser, times(nullCount)).handle(anyString(), isNull());
+verify(parser, times(count - nullCount)).handle(anyString(), any(String.class));
 verify(parser, times(count)).handleExtraArgs(eq(Collections.emptyList()));
 }
 }

@@ -20,7 +20,7 @@ package org.apache.spark.ml
 import scala.collection.JavaConverters._
 import org.apache.hadoop.fs.Path
-import org.mockito.Matchers.{any, eq => meq}
+import org.mockito.ArgumentMatchers.{any, eq => meq}
 import org.mockito.Mockito.when
 import org.scalatest.mockito.MockitoSugar.mock

@@ -764,7 +764,7 @@
 <dependency>
 <groupId>org.mockito</groupId>
 <artifactId>mockito-core</artifactId>
-<version>1.10.19</version>
+<version>2.23.4</version>
 <scope>test</scope>
 </dependency>
 <dependency>

@@ -30,7 +30,7 @@ import scala.io.Source
 import scala.language.implicitConversions
 import com.google.common.io.Files
-import org.mockito.Matchers.anyString
+import org.mockito.ArgumentMatchers.anyString
 import org.mockito.Mockito._
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer

@@ -21,7 +21,7 @@ import java.io.File
 import io.fabric8.kubernetes.api.model.{Config => _, _}
 import io.fabric8.kubernetes.client.KubernetesClient
 import io.fabric8.kubernetes.client.dsl.{MixedOperation, PodResource}
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito.{mock, never, verify, when}
 import scala.collection.JavaConverters._

@@ -20,8 +20,8 @@ import scala.collection.JavaConverters._
 import scala.reflect.ClassTag
 import io.fabric8.kubernetes.api.model.{Container, HasMetadata, PodBuilder, SecretBuilder}
-import org.mockito.Matchers
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.any
+import org.mockito.Mockito.{mock, when}
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
@@ -37,10 +37,10 @@ object KubernetesFeaturesTestUtils {
 when(mockStep.getAdditionalPodSystemProperties())
 .thenReturn(Map(stepType -> stepType))
-when(mockStep.configurePod(Matchers.any(classOf[SparkPod])))
+when(mockStep.configurePod(any(classOf[SparkPod])))
 .thenAnswer(new Answer[SparkPod]() {
 override def answer(invocation: InvocationOnMock): SparkPod = {
-val originalPod = invocation.getArgumentAt(0, classOf[SparkPod])
+val originalPod: SparkPod = invocation.getArgument(0)
 val configuredPod = new PodBuilder(originalPod.pod)
 .editOrNewMetadata()
 .addToLabels(stepType, stepType)

@@ -20,7 +20,7 @@ import io.fabric8.kubernetes.api.model._
 import io.fabric8.kubernetes.client.{KubernetesClient, Watch}
 import io.fabric8.kubernetes.client.dsl.PodResource
 import org.mockito.{ArgumentCaptor, Mock, MockitoAnnotations}
-import org.mockito.Mockito.{doReturn, verify, when}
+import org.mockito.Mockito.{verify, when}
 import org.scalatest.BeforeAndAfter
 import org.scalatest.mockito.MockitoSugar._
@@ -31,6 +31,8 @@ import org.apache.spark.deploy.k8s.Fabric8Aliases._
 class ClientSuite extends SparkFunSuite with BeforeAndAfter {
+private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
 private val DRIVER_POD_UID = "pod-id"
 private val DRIVER_POD_API_VERSION = "v1"
 private val DRIVER_POD_KIND = "pod"

@@ -20,7 +20,7 @@ import io.fabric8.kubernetes.api.model.{DoneablePod, Pod, PodBuilder}
 import io.fabric8.kubernetes.client.KubernetesClient
 import io.fabric8.kubernetes.client.dsl.PodResource
 import org.mockito.{ArgumentMatcher, Matchers, Mock, MockitoAnnotations}
-import org.mockito.Matchers.{any, eq => meq}
+import org.mockito.ArgumentMatchers.{any, eq => meq}
 import org.mockito.Mockito.{never, times, verify, when}
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
@@ -156,7 +156,7 @@ class ExecutorPodsAllocatorSuite extends SparkFunSuite with BeforeAndAfter {
 private def executorPodAnswer(): Answer[SparkPod] = {
 new Answer[SparkPod] {
 override def answer(invocation: InvocationOnMock): SparkPod = {
-val k8sConf = invocation.getArgumentAt(0, classOf[KubernetesExecutorConf])
+val k8sConf: KubernetesExecutorConf = invocation.getArgument(0)
 executorPodWithId(k8sConf.executorId.toInt)
 }
 }

@@ -21,7 +21,7 @@ import io.fabric8.kubernetes.api.model.{DoneablePod, Pod}
 import io.fabric8.kubernetes.client.KubernetesClient
 import io.fabric8.kubernetes.client.dsl.PodResource
 import org.mockito.{Mock, MockitoAnnotations}
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito.{mock, never, times, verify, when}
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
@@ -128,7 +128,7 @@ class ExecutorPodsLifecycleManagerSuite extends SparkFunSuite with BeforeAndAfte
 private def namedPodsAnswer(): Answer[PodResource[Pod, DoneablePod]] = {
 new Answer[PodResource[Pod, DoneablePod]] {
 override def answer(invocation: InvocationOnMock): PodResource[Pod, DoneablePod] = {
-val podName = invocation.getArgumentAt(0, classOf[String])
+val podName: String = invocation.getArgument(0)
 namedExecutorPods.getOrElseUpdate(
 podName, mock(classOf[PodResource[Pod, DoneablePod]]))
 }

@@ -19,7 +19,7 @@ package org.apache.spark.scheduler.cluster.k8s
 import io.fabric8.kubernetes.client.KubernetesClient
 import org.jmock.lib.concurrent.DeterministicScheduler
 import org.mockito.{ArgumentCaptor, Mock, MockitoAnnotations}
-import org.mockito.Matchers.{eq => mockitoEq}
+import org.mockito.ArgumentMatchers.{eq => mockitoEq}
 import org.mockito.Mockito.{never, verify, when}
 import org.scalatest.BeforeAndAfter

@@ -24,7 +24,8 @@ import scala.collection.JavaConverters._
 import org.apache.mesos.Protos.{TaskState => MesosTaskState, _}
 import org.apache.mesos.Protos.Value.{Scalar, Type}
 import org.apache.mesos.SchedulerDriver
-import org.mockito.{ArgumentCaptor, Matchers}
+import org.mockito.ArgumentCaptor
+import org.mockito.ArgumentMatchers.{eq => meq}
 import org.mockito.Mockito._
 import org.scalatest.mockito.MockitoSugar
@@ -133,7 +134,7 @@ class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext wi
 when(
 driver.launchTasks(
-Matchers.eq(Collections.singleton(offer.getId)),
+meq(Collections.singleton(offer.getId)),
 capture.capture())
 ).thenReturn(Status.valueOf(1))
@@ -156,7 +157,7 @@ class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext wi
 assert(mem.exists(_.getRole() == "*"))
 verify(driver, times(1)).launchTasks(
-Matchers.eq(Collections.singleton(offer.getId)),
+meq(Collections.singleton(offer.getId)),
 capture.capture()
 )
 }

@@ -24,9 +24,8 @@ import scala.concurrent.duration._
 import org.apache.mesos.{Protos, Scheduler, SchedulerDriver}
 import org.apache.mesos.Protos._
-import org.mockito.Matchers
-import org.mockito.Matchers._
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{any, anyInt, anyLong, anyString, eq => meq}
+import org.mockito.Mockito.{times, verify, when}
 import org.scalatest.BeforeAndAfter
 import org.scalatest.concurrent.ScalaFutures
 import org.scalatest.mockito.MockitoSugar
@@ -697,9 +696,9 @@ class MesosCoarseGrainedSchedulerBackendSuite extends SparkFunSuite
 offerId: OfferID,
 filter: Boolean = false): Unit = {
 if (filter) {
-verify(driver, times(1)).declineOffer(Matchers.eq(offerId), anyObject[Filters])
+verify(driver, times(1)).declineOffer(meq(offerId), any[Filters]())
 } else {
-verify(driver, times(1)).declineOffer(Matchers.eq(offerId))
+verify(driver, times(1)).declineOffer(meq(offerId))
 }
 }

@@ -30,8 +30,8 @@ import scala.collection.mutable.ArrayBuffer
 import org.apache.mesos.{Protos, Scheduler, SchedulerDriver}
 import org.apache.mesos.Protos._
 import org.apache.mesos.Protos.Value.Scalar
-import org.mockito.{ArgumentCaptor, Matchers}
-import org.mockito.Matchers._
+import org.mockito.ArgumentCaptor
+import org.mockito.ArgumentMatchers.{any, anyLong, eq => meq}
 import org.mockito.Mockito._
 import org.scalatest.mockito.MockitoSugar
@@ -264,7 +264,7 @@ class MesosFineGrainedSchedulerBackendSuite
 val capture = ArgumentCaptor.forClass(classOf[Collection[TaskInfo]])
 when(
 driver.launchTasks(
-Matchers.eq(Collections.singleton(mesosOffers.get(0).getId)),
+meq(Collections.singleton(mesosOffers.get(0).getId)),
 capture.capture(),
 any(classOf[Filters])
 )
@@ -275,7 +275,7 @@ class MesosFineGrainedSchedulerBackendSuite
 backend.resourceOffers(driver, mesosOffers)
 verify(driver, times(1)).launchTasks(
-Matchers.eq(Collections.singleton(mesosOffers.get(0).getId)),
+meq(Collections.singleton(mesosOffers.get(0).getId)),
 capture.capture(),
 any(classOf[Filters])
 )
@@ -373,7 +373,7 @@ class MesosFineGrainedSchedulerBackendSuite
 val capture = ArgumentCaptor.forClass(classOf[Collection[TaskInfo]])
 when(
 driver.launchTasks(
-Matchers.eq(Collections.singleton(mesosOffers.get(0).getId)),
+meq(Collections.singleton(mesosOffers.get(0).getId)),
 capture.capture(),
 any(classOf[Filters])
 )
@@ -382,7 +382,7 @@ class MesosFineGrainedSchedulerBackendSuite
 backend.resourceOffers(driver, mesosOffers)
 verify(driver, times(1)).launchTasks(
-Matchers.eq(Collections.singleton(mesosOffers.get(0).getId)),
+meq(Collections.singleton(mesosOffers.get(0).getId)),
 capture.capture(),
 any(classOf[Filters])
 )

@@ -25,8 +25,9 @@ import org.apache.mesos.Protos._
 import org.apache.mesos.Protos.Value.{Range => MesosRange, Ranges, Scalar}
 import org.apache.mesos.SchedulerDriver
 import org.apache.mesos.protobuf.ByteString
-import org.mockito.{ArgumentCaptor, Matchers}
-import org.mockito.Mockito._
+import org.mockito.ArgumentCaptor
+import org.mockito.ArgumentMatchers.{any, eq => meq}
+import org.mockito.Mockito.{times, verify}
 import org.apache.spark.deploy.mesos.config.MesosSecretConfig
@@ -84,15 +85,15 @@ object Utils {
 def verifyTaskLaunched(driver: SchedulerDriver, offerId: String): List[TaskInfo] = {
 val captor = ArgumentCaptor.forClass(classOf[java.util.Collection[TaskInfo]])
 verify(driver, times(1)).launchTasks(
-Matchers.eq(Collections.singleton(createOfferId(offerId))),
+meq(Collections.singleton(createOfferId(offerId))),
 captor.capture())
 captor.getValue.asScala.toList
 }
 def verifyTaskNotLaunched(driver: SchedulerDriver, offerId: String): Unit = {
 verify(driver, times(0)).launchTasks(
-Matchers.eq(Collections.singleton(createOfferId(offerId))),
-Matchers.any(classOf[java.util.Collection[TaskInfo]]))
+meq(Collections.singleton(createOfferId(offerId))),
+any(classOf[java.util.Collection[TaskInfo]]))
 }
 def createOfferId(offerId: String): OfferID = {

@@ -34,8 +34,8 @@ import org.apache.hadoop.yarn.api.records._
 import org.apache.hadoop.yarn.client.api.YarnClientApplication
 import org.apache.hadoop.yarn.conf.YarnConfiguration
 import org.apache.hadoop.yarn.util.Records
-import org.mockito.Matchers.{eq => meq, _}
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{any, anyBoolean, anyShort, eq => meq}
+import org.mockito.Mockito.{spy, verify}
 import org.scalatest.Matchers
 import org.apache.spark.{SparkConf, SparkFunSuite, TestUtils}
@@ -43,6 +43,7 @@ import org.apache.spark.deploy.yarn.config._
 import org.apache.spark.util.{SparkConfWithEnv, Utils}
 class ClientSuite extends SparkFunSuite with Matchers {
+private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
 import Client._

@@ -19,7 +19,7 @@ package org.apache.spark.network.yarn
 import scala.collection.JavaConverters._
 import org.apache.hadoop.metrics2.MetricsRecordBuilder
-import org.mockito.Matchers._
+import org.mockito.ArgumentMatchers.{any, anyDouble, anyInt, anyLong}
 import org.mockito.Mockito.{mock, times, verify, when}
 import org.scalatest.Matchers
@@ -56,8 +56,8 @@ class YarnShuffleServiceMetricsSuite extends SparkFunSuite with Matchers {
 YarnShuffleServiceMetrics.collectMetric(builder, testname,
 metrics.getMetrics.get(testname))
-verify(builder).addCounter(anyObject(), anyLong())
-verify(builder, times(4)).addGauge(anyObject(), anyDouble())
+verify(builder).addCounter(any(), anyLong())
+verify(builder, times(4)).addGauge(any(), anyDouble())
 }
 }
@@ -69,6 +69,6 @@ class YarnShuffleServiceMetricsSuite extends SparkFunSuite with Matchers {
 metrics.getMetrics.get("registeredExecutorsSize"))
 // only one
-verify(builder).addGauge(anyObject(), anyInt())
+verify(builder).addGauge(any(), anyInt())
 }
 }

@@ -17,9 +17,9 @@
 package org.apache.spark.sql.streaming.continuous
+import org.mockito.ArgumentMatchers.{any, eq => eqTo}
 import org.mockito.InOrder
-import org.mockito.Matchers.{any, eq => eqTo}
-import org.mockito.Mockito._
+import org.mockito.Mockito.{inOrder, never, verify}
 import org.scalatest.BeforeAndAfterEach
 import org.scalatest.mockito.MockitoSugar

@@ -24,7 +24,7 @@ import java.util.concurrent.TimeUnit
 import scala.concurrent.duration._
 import org.apache.hadoop.fs.Path
-import org.mockito.Matchers.{any, eq => meq}
+import org.mockito.ArgumentMatchers.{any, eq => meq}
 import org.mockito.Mockito._
 import org.scalatest.BeforeAndAfter

@@ -26,7 +26,7 @@ import scala.language.{implicitConversions, postfixOps}
 import scala.util.Random
 import org.apache.hadoop.conf.Configuration
-import org.mockito.Matchers.any
+import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito.{doThrow, reset, spy}
 import org.scalatest.{BeforeAndAfter, Matchers}
 import org.scalatest.concurrent.Eventually._

@@ -17,8 +17,8 @@
 package org.apache.spark.streaming.scheduler
-import org.mockito.Matchers.{eq => meq}
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{eq => meq}
+import org.mockito.Mockito.{never, reset, times, verify, when}
 import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, PrivateMethodTester}
 import org.scalatest.concurrent.Eventually.{eventually, timeout}
 import org.scalatest.mockito.MockitoSugar

@@ -31,8 +31,8 @@ import scala.language.{implicitConversions, postfixOps}
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 import org.mockito.ArgumentCaptor
-import org.mockito.Matchers.{eq => meq, _}
-import org.mockito.Mockito._
+import org.mockito.ArgumentMatchers.{any, anyLong, eq => meq}
+import org.mockito.Mockito.{times, verify, when}
 import org.scalatest.{BeforeAndAfter, BeforeAndAfterEach, PrivateMethodTester}
 import org.scalatest.concurrent.Eventually
 import org.scalatest.concurrent.Eventually._