[MINOR][BUILD] Fix build warnings and Java lint errors

## What changes were proposed in this pull request?

Fix build warnings and Java lint errors. This just helps a bit in evaluating (new) warnings in another PR I have open.

## How was this patch tested?

Existing tests

Author: Sean Owen <sowen@cloudera.com>

Closes #19051 from srowen/JavaWarnings.
Sean Owen 2017-08-25 16:07:13 +01:00
parent 574ef6c987
commit de7af295c2
11 changed files with 17 additions and 18 deletions

View file

@@ -171,7 +171,7 @@ public class InMemoryStore implements KVStore {
public <T> InMemoryView<T> view(Class<T> type) {
Preconditions.checkArgument(ti.type().equals(type), "Unexpected type: %s", type);
Collection<T> all = (Collection<T>) data.values();
- return new InMemoryView(type, all, ti);
+ return new InMemoryView<>(type, all, ti);
}
}
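For context, the change above swaps a raw-type constructor call for the diamond operator, which is what silences javac's rawtypes/unchecked lint warning here. A minimal, self-contained sketch of the same pattern (the `Box` class below is invented for illustration and is not part of Spark):

```java
import java.util.ArrayList;
import java.util.List;

public class DiamondExample {
  // A tiny generic class standing in for InMemoryView<T>.
  static class Box<T> {
    private final List<T> items;
    Box(List<T> items) {
      this.items = items;
    }
    int size() {
      return items.size();
    }
  }

  public static void main(String[] args) {
    List<String> values = new ArrayList<>();
    values.add("a");
    // Raw type -- javac flags this with a rawtypes/unchecked warning:
    //   Box<String> box = new Box(values);
    // Diamond operator -- the type argument is inferred, no warning:
    Box<String> box = new Box<>(values);
    System.out.println(box.size());
  }
}
```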

View file

@@ -17,6 +17,7 @@
package org.apache.spark.util.kvstore;
+ import java.io.Closeable;
import java.util.Iterator;
import java.util.List;
@@ -31,7 +32,7 @@ import org.apache.spark.annotation.Private;
* </p>
*/
@Private
- public interface KVStoreIterator<T> extends Iterator<T>, AutoCloseable {
+ public interface KVStoreIterator<T> extends Iterator<T>, Closeable {
/**
* Retrieve multiple elements from the store.
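For context on the change above: `java.io.Closeable` extends `AutoCloseable` but narrows `close()` to throw only `IOException`, so callers of the iterator can use try-with-resources without handling a bare `Exception`. A rough sketch of that usage pattern, using made-up classes rather than the real KVStore types:

```java
import java.io.Closeable;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;

// Hypothetical analogue of KVStoreIterator<T>: iteration plus Closeable cleanup.
interface CloseableIterator<T> extends Iterator<T>, Closeable {
}

class ListBackedIterator<T> implements CloseableIterator<T> {
  private final Iterator<T> delegate;

  ListBackedIterator(List<T> items) {
    this.delegate = items.iterator();
  }

  @Override
  public boolean hasNext() {
    return delegate.hasNext();
  }

  @Override
  public T next() {
    return delegate.next();
  }

  // Closeable narrows the signature to "throws IOException"
  // (AutoCloseable's close() is declared to throw Exception).
  @Override
  public void close() throws IOException {
    // release underlying resources here
  }
}

class CloseableIteratorDemo {
  static void printAll(List<String> items) throws IOException {
    // try-with-resources works with Closeable; only IOException
    // needs to be handled or declared by the caller.
    try (CloseableIterator<String> it = new ListBackedIterator<>(items)) {
      while (it.hasNext()) {
        System.out.println(it.next());
      }
    }
  }
}
```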

View file

@@ -102,7 +102,7 @@ public class TransportRequestHandlerSuite {
private class ExtendedChannelPromise extends DefaultChannelPromise {
- private List<GenericFutureListener> listeners = new ArrayList<>();
+ private List<GenericFutureListener<Future<Void>>> listeners = new ArrayList<>();
private boolean success;
ExtendedChannelPromise(Channel channel) {
@@ -113,7 +113,10 @@ public class TransportRequestHandlerSuite {
@Override
public ChannelPromise addListener(
GenericFutureListener<? extends Future<? super Void>> listener) {
- listeners.add(listener);
+ @SuppressWarnings("unchecked")
+ GenericFutureListener<Future<Void>> gfListener =
+     (GenericFutureListener<Future<Void>>) listener;
+ listeners.add(gfListener);
return super.addListener(listener);
}
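For context, the test change above replaces a raw `List<GenericFutureListener>` with a parameterized list and narrows the wildcard-typed listener through a single unchecked cast whose `@SuppressWarnings` is scoped to one local variable. A generic sketch of that pattern with invented types (this is not Netty's API):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

class ListenerRegistry {
  // Fully parameterized field instead of a raw List<Consumer>, so no rawtypes warning.
  private final List<Consumer<Number>> listeners = new ArrayList<>();

  // The public signature keeps the wider wildcard type that callers supply.
  void addListener(Consumer<? super Number> listener) {
    // One unchecked cast, with the suppression scoped to a single local
    // variable rather than the whole method or class.
    @SuppressWarnings("unchecked")
    Consumer<Number> narrowed = (Consumer<Number>) listener;
    listeners.add(narrowed);
  }

  void fire(Number value) {
    listeners.forEach(l -> l.accept(value));
  }
}
```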

View file

@@ -21,7 +21,6 @@ import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View file

@@ -18,7 +18,6 @@
package org.apache.spark.launcher;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;

View file

@@ -187,14 +187,13 @@ class CrossValidatorSuite
cv2.getEstimator match {
case ova2: OneVsRest =>
assert(ova.uid === ova2.uid)
- val classifier = ova2.getClassifier
- classifier match {
+ ova2.getClassifier match {
case lr: LogisticRegression =>
assert(ova.getClassifier.asInstanceOf[LogisticRegression].getMaxIter
=== lr.getMaxIter)
- case _ =>
+ case other =>
throw new AssertionError(s"Loaded CrossValidator expected estimator of type" +
s" LogisticREgression but found ${classifier.getClass.getName}")
s" LogisticRegression but found ${other.getClass.getName}")
}
case other =>

View file

@@ -173,14 +173,13 @@ class TrainValidationSplitSuite
tvs2.getEstimator match {
case ova2: OneVsRest =>
assert(ova.uid === ova2.uid)
- val classifier = ova2.getClassifier
- classifier match {
+ ova2.getClassifier match {
case lr: LogisticRegression =>
assert(ova.getClassifier.asInstanceOf[LogisticRegression].getMaxIter
=== lr.getMaxIter)
- case _ =>
+ case other =>
throw new AssertionError(s"Loaded TrainValidationSplit expected estimator of type" +
s" LogisticREgression but found ${classifier.getClass.getName}")
s" LogisticRegression but found ${other.getClass.getName}")
}
case other =>

View file

@@ -2058,7 +2058,7 @@
<javacArg>${java.version}</javacArg>
<javacArg>-target</javacArg>
<javacArg>${java.version}</javacArg>
- <javacArg>-Xlint:all,-serial,-path</javacArg>
+ <javacArg>-Xlint:all,-serial,-path,-try</javacArg>
</javacArgs>
</configuration>
</plugin>
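For reference, `-Xlint:try` is javac's lint category for suspicious try-with-resources code, such as a resource that is declared but never referenced inside the try body; adding `-try` to the exclusion list keeps that class of warning out of the build output. A small illustrative sketch (file name and logic invented) of code that this lint category would typically flag:

```java
import java.io.FileWriter;
import java.io.IOException;

public class TryLintExample {
  public static void main(String[] args) throws IOException {
    // With -Xlint:try enabled, javac warns that 'writer' is never
    // referenced in the body of the corresponding try statement.
    try (FileWriter writer = new FileWriter("example.txt")) {
      System.out.println("resource opened and auto-closed, but never used");
    }
  }
}
```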

View file

@@ -350,7 +350,8 @@ public class VectorizedColumnReader {
* is guaranteed that num is smaller than the number of values left in the current page.
*/
- private void readBooleanBatch(int rowId, int num, WritableColumnVector column) throws IOException {
+ private void readBooleanBatch(int rowId, int num, WritableColumnVector column)
+     throws IOException {
assert(column.dataType() == DataTypes.BooleanType);
defColumn.readBooleans(
num, column, rowId, maxDefLevel, (VectorizedValuesReader) dataColumn);

View file

@@ -21,7 +21,6 @@ import java.util.Arrays;
import com.google.common.annotations.VisibleForTesting;
import org.apache.spark.memory.MemoryMode;
import org.apache.spark.sql.types.StructType;
import static org.apache.spark.sql.types.DataTypes.LongType;

View file

@@ -21,7 +21,6 @@ import org.apache.arrow.vector.*;
import org.apache.arrow.vector.complex.*;
import org.apache.arrow.vector.holders.NullableVarCharHolder;
import org.apache.spark.memory.MemoryMode;
import org.apache.spark.sql.execution.arrow.ArrowUtils;
import org.apache.spark.sql.types.*;
import org.apache.spark.unsafe.types.UTF8String;