[MINOR][BUILD] Fix about 15 misc build warnings

### What changes were proposed in this pull request?

This addresses about 15 miscellaneous warnings that appear in the current build.

### Why are the changes needed?

No functional changes, it just slightly reduces the amount of extra warning output.

### Does this PR introduce any user-facing change?

No.

### How was this patch tested?

Existing tests, run manually.

Closes #25852 from srowen/BuildWarnings.

Authored-by: Sean Owen <sean.owen@databricks.com>
Signed-off-by: Dongjoon Hyun <dhyun@apple.com>
This commit is contained in:
Sean Owen 2019-09-19 11:37:42 -07:00 committed by Dongjoon Hyun
parent e97b55d322
commit c5d8a51f3b
7 changed files with 16 additions and 17 deletions

View file

@@ -252,7 +252,7 @@ public class InMemoryStore implements KVStore {
return (value) -> set.contains(indexValueForEntity(getter, value));
} else {
HashSet<Comparable> set = new HashSet<>(values.size());
HashSet<Comparable<?>> set = new HashSet<>(values.size());
for (Object key : values) {
set.add(asKey(key));
}

View file

@@ -124,7 +124,7 @@ public class KVTypeInfo {
Object get(Object instance) throws ReflectiveOperationException;
Class getType();
Class<?> getType();
}
private class FieldAccessor implements Accessor {
@@ -141,7 +141,7 @@ public class KVTypeInfo {
}
@Override
public Class getType() {
public Class<?> getType() {
return field.getType();
}
}
@@ -160,7 +160,7 @@ public class KVTypeInfo {
}
@Override
public Class getType() {
public Class<?> getType() {
return method.getReturnType();
}
}

View file

@@ -172,9 +172,9 @@ public class UnsafeShuffleWriterSuite {
when(taskContext.taskMemoryManager()).thenReturn(taskMemoryManager);
}
private UnsafeShuffleWriter createWriter(boolean transferToEnabled) {
private UnsafeShuffleWriter<Object, Object> createWriter(boolean transferToEnabled) {
conf.set("spark.file.transferTo", String.valueOf(transferToEnabled));
return new UnsafeShuffleWriter(
return new UnsafeShuffleWriter<>(
blockManager,
taskMemoryManager,
new SerializedShuffleHandle<>(0, 1, shuffleDep),
@@ -533,7 +533,7 @@ public class UnsafeShuffleWriterSuite {
final long numRecordsPerPage = pageSizeBytes / recordLengthBytes;
taskMemoryManager = spy(taskMemoryManager);
when(taskMemoryManager.pageSizeBytes()).thenReturn(pageSizeBytes);
final UnsafeShuffleWriter writer = new UnsafeShuffleWriter(
final UnsafeShuffleWriter<Object, Object> writer = new UnsafeShuffleWriter<>(
blockManager,
taskMemoryManager,
new SerializedShuffleHandle<>(0, 1, shuffleDep),

View file

@@ -26,7 +26,6 @@ import javax.servlet.http.{HttpServletRequest, HttpServletRequestWrapper, HttpSe
import scala.collection.JavaConverters._
import scala.concurrent.duration._
import com.gargoylesoftware.htmlunit.BrowserVersion
import com.google.common.io.{ByteStreams, Files}
import org.apache.commons.io.{FileUtils, IOUtils}
import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
@@ -365,8 +364,7 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers
contextHandler.addServlet(holder, "/")
server.attachHandler(contextHandler)
implicit val webDriver: WebDriver =
new HtmlUnitDriver(BrowserVersion.INTERNET_EXPLORER_11, true)
implicit val webDriver: WebDriver = new HtmlUnitDriver(true)
try {
val url = s"http://localhost:$port"

View file

@@ -49,7 +49,7 @@ class KeyLockSuite extends SparkFunSuite with TimeLimits {
@volatile var e: Throwable = null
val threads = (0 until numThreads).map { i =>
new Thread() {
override def run(): Unit = try {
override def run(): Unit = {
latch.await(foreverMs, TimeUnit.MILLISECONDS)
keyLock.withLock(keys(i)) {
var cur = numThreadsHoldingLock.get()

View file

@@ -272,7 +272,8 @@ class ExecutorClassLoaderSuite
assert(e.getMessage.contains("ThisIsAClassName"))
// RemoteClassLoaderError must not be LinkageError nor ClassNotFoundException. Otherwise,
// JVM will cache it and doesn't retry to load a class.
assert(!e.isInstanceOf[LinkageError] && !e.isInstanceOf[ClassNotFoundException])
assert(!(classOf[LinkageError].isAssignableFrom(e.getClass)))
assert(!(classOf[ClassNotFoundException].isAssignableFrom(e.getClass)))
} finally {
rpcEnv.shutdown()
rpcEnv.awaitTermination()

View file

@@ -680,12 +680,12 @@ class TreeNodeSuite extends SparkFunSuite with SQLHelper {
}
test("clone") {
def assertDifferentInstance(before: AnyRef, after: AnyRef): Unit = {
def assertDifferentInstance[T <: TreeNode[T]](before: TreeNode[T], after: TreeNode[T]): Unit = {
assert(before.ne(after) && before == after)
before.asInstanceOf[TreeNode[_]].children.zip(
after.asInstanceOf[TreeNode[_]].children).foreach {
case (beforeChild: AnyRef, afterChild: AnyRef) =>
assertDifferentInstance(beforeChild, afterChild)
before.children.zip(after.children).foreach { case (beforeChild, afterChild) =>
assertDifferentInstance(
beforeChild.asInstanceOf[TreeNode[T]],
afterChild.asInstanceOf[TreeNode[T]])
}
}