From de7af295c2047f1b508cb02e735e0e743395f181 Mon Sep 17 00:00:00 2001
From: Sean Owen <sowen@cloudera.com>
Date: Fri, 25 Aug 2017 16:07:13 +0100
Subject: [PATCH] [MINOR][BUILD] Fix build warnings and Java lint errors
## What changes were proposed in this pull request?
Fix build warnings and Java lint errors. This just helps a bit in evaluating (new) warnings in another PR I have open.
## How was this patch tested?
Existing tests
Author: Sean Owen
Closes #19051 from srowen/JavaWarnings.
---
.../java/org/apache/spark/util/kvstore/InMemoryStore.java | 2 +-
.../org/apache/spark/util/kvstore/KVStoreIterator.java | 3 ++-
.../apache/spark/network/TransportRequestHandlerSuite.java | 7 +++++--
.../java/org/apache/spark/launcher/SparkLauncherSuite.java | 1 -
.../org/apache/spark/launcher/ChildProcAppHandleSuite.java | 1 -
.../org/apache/spark/ml/tuning/CrossValidatorSuite.scala | 7 +++----
.../apache/spark/ml/tuning/TrainValidationSplitSuite.scala | 7 +++----
pom.xml | 2 +-
.../datasources/parquet/VectorizedColumnReader.java | 3 ++-
.../spark/sql/execution/vectorized/AggregateHashMap.java | 1 -
.../spark/sql/execution/vectorized/ArrowColumnVector.java | 1 -
11 files changed, 17 insertions(+), 18 deletions(-)
diff --git a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/InMemoryStore.java b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/InMemoryStore.java
index 9cae5da5d2..5ca4371285 100644
--- a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/InMemoryStore.java
+++ b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/InMemoryStore.java
@@ -171,7 +171,7 @@ public class InMemoryStore implements KVStore {
public InMemoryView<T> view(Class<T> type) {
Preconditions.checkArgument(ti.type().equals(type), "Unexpected type: %s", type);
Collection<T> all = (Collection<T>) data.values();
- return new InMemoryView(type, all, ti);
+ return new InMemoryView<>(type, all, ti);
}
}
diff --git a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVStoreIterator.java b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVStoreIterator.java
index 28a432b26d..e6254a9368 100644
--- a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVStoreIterator.java
+++ b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVStoreIterator.java
@@ -17,6 +17,7 @@
package org.apache.spark.util.kvstore;
+import java.io.Closeable;
import java.util.Iterator;
import java.util.List;
@@ -31,7 +32,7 @@ import org.apache.spark.annotation.Private;
*
*/
@Private
-public interface KVStoreIterator<T> extends Iterator<T>, AutoCloseable {
+public interface KVStoreIterator<T> extends Iterator<T>, Closeable {
/**
* Retrieve multiple elements from the store.
diff --git a/common/network-common/src/test/java/org/apache/spark/network/TransportRequestHandlerSuite.java b/common/network-common/src/test/java/org/apache/spark/network/TransportRequestHandlerSuite.java
index 1ed57116bc..2656cbee95 100644
--- a/common/network-common/src/test/java/org/apache/spark/network/TransportRequestHandlerSuite.java
+++ b/common/network-common/src/test/java/org/apache/spark/network/TransportRequestHandlerSuite.java
@@ -102,7 +102,7 @@ public class TransportRequestHandlerSuite {
private class ExtendedChannelPromise extends DefaultChannelPromise {
- private List<GenericFutureListener> listeners = new ArrayList<>();
+ private List<GenericFutureListener<Future<Void>>> listeners = new ArrayList<>();
private boolean success;
ExtendedChannelPromise(Channel channel) {
@@ -113,7 +113,10 @@ public class TransportRequestHandlerSuite {
@Override
public ChannelPromise addListener(
GenericFutureListener<? extends Future<? super Void>> listener) {
- listeners.add(listener);
+ @SuppressWarnings("unchecked")
+ GenericFutureListener> gfListener =
+ (GenericFutureListener>) listener;
+ listeners.add(gfListener);
return super.addListener(listener);
}
diff --git a/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java b/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
index db4fc26cdf..ac4391e3ef 100644
--- a/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
+++ b/core/src/test/java/org/apache/spark/launcher/SparkLauncherSuite.java
@@ -21,7 +21,6 @@ import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
-import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git a/launcher/src/test/java/org/apache/spark/launcher/ChildProcAppHandleSuite.java b/launcher/src/test/java/org/apache/spark/launcher/ChildProcAppHandleSuite.java
index 64a87b365d..602f55a505 100644
--- a/launcher/src/test/java/org/apache/spark/launcher/ChildProcAppHandleSuite.java
+++ b/launcher/src/test/java/org/apache/spark/launcher/ChildProcAppHandleSuite.java
@@ -18,7 +18,6 @@
package org.apache.spark.launcher;
import java.io.File;
-import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
diff --git a/mllib/src/test/scala/org/apache/spark/ml/tuning/CrossValidatorSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/tuning/CrossValidatorSuite.scala
index dc6043ef19..90778d7890 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/tuning/CrossValidatorSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/tuning/CrossValidatorSuite.scala
@@ -187,14 +187,13 @@ class CrossValidatorSuite
cv2.getEstimator match {
case ova2: OneVsRest =>
assert(ova.uid === ova2.uid)
- val classifier = ova2.getClassifier
- classifier match {
+ ova2.getClassifier match {
case lr: LogisticRegression =>
assert(ova.getClassifier.asInstanceOf[LogisticRegression].getMaxIter
=== lr.getMaxIter)
- case _ =>
+ case other =>
throw new AssertionError(s"Loaded CrossValidator expected estimator of type" +
- s" LogisticREgression but found ${classifier.getClass.getName}")
+ s" LogisticRegression but found ${other.getClass.getName}")
}
case other =>
diff --git a/mllib/src/test/scala/org/apache/spark/ml/tuning/TrainValidationSplitSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/tuning/TrainValidationSplitSuite.scala
index 7c97865e45..aa8b4cf173 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/tuning/TrainValidationSplitSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/tuning/TrainValidationSplitSuite.scala
@@ -173,14 +173,13 @@ class TrainValidationSplitSuite
tvs2.getEstimator match {
case ova2: OneVsRest =>
assert(ova.uid === ova2.uid)
- val classifier = ova2.getClassifier
- classifier match {
+ ova2.getClassifier match {
case lr: LogisticRegression =>
assert(ova.getClassifier.asInstanceOf[LogisticRegression].getMaxIter
=== lr.getMaxIter)
- case _ =>
+ case other =>
throw new AssertionError(s"Loaded TrainValidationSplit expected estimator of type" +
- s" LogisticREgression but found ${classifier.getClass.getName}")
+ s" LogisticRegression but found ${other.getClass.getName}")
}
case other =>
diff --git a/pom.xml b/pom.xml
index 8b4a6c5425..fffd70ec1d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2058,7 +2058,7 @@
<javacArg>${java.version}</javacArg>
<javacArg>-target</javacArg>
<javacArg>${java.version}</javacArg>
- <javacArg>-Xlint:all,-serial,-path</javacArg>
+ <javacArg>-Xlint:all,-serial,-path,-try</javacArg>
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java
index f37864a0f5..2173bbce3e 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java
@@ -350,7 +350,8 @@ public class VectorizedColumnReader {
* is guaranteed that num is smaller than the number of values left in the current page.
*/
- private void readBooleanBatch(int rowId, int num, WritableColumnVector column) throws IOException {
+ private void readBooleanBatch(int rowId, int num, WritableColumnVector column)
+ throws IOException {
assert(column.dataType() == DataTypes.BooleanType);
defColumn.readBooleans(
num, column, rowId, maxDefLevel, (VectorizedValuesReader) dataColumn);
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/AggregateHashMap.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/AggregateHashMap.java
index 1c94f706dc..cb3ad4eab1 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/AggregateHashMap.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/AggregateHashMap.java
@@ -21,7 +21,6 @@ import java.util.Arrays;
import com.google.common.annotations.VisibleForTesting;
-import org.apache.spark.memory.MemoryMode;
import org.apache.spark.sql.types.StructType;
import static org.apache.spark.sql.types.DataTypes.LongType;
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ArrowColumnVector.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ArrowColumnVector.java
index be2a9c2467..1f17104982 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ArrowColumnVector.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ArrowColumnVector.java
@@ -21,7 +21,6 @@ import org.apache.arrow.vector.*;
import org.apache.arrow.vector.complex.*;
import org.apache.arrow.vector.holders.NullableVarCharHolder;
-import org.apache.spark.memory.MemoryMode;
import org.apache.spark.sql.execution.arrow.ArrowUtils;
import org.apache.spark.sql.types.*;
import org.apache.spark.unsafe.types.UTF8String;