[MINOR] Resolve a number of miscellaneous build warnings
## What changes were proposed in this pull request? This change resolves a number of build warnings that have accumulated before 2.x. It does not address a large number of deprecation warnings, especially related to the Accumulator API. That will happen separately. ## How was this patch tested? Jenkins Author: Sean Owen <sowen@cloudera.com> Closes #13377 from srowen/BuildWarnings.
This commit is contained in:
parent
472f16181d
commit
ce1572d16f
|
@@ -155,8 +155,8 @@ public final class Platform {
|
|||
@SuppressWarnings("unchecked")
|
||||
public static ByteBuffer allocateDirectBuffer(int size) {
|
||||
try {
|
||||
Class cls = Class.forName("java.nio.DirectByteBuffer");
|
||||
Constructor constructor = cls.getDeclaredConstructor(Long.TYPE, Integer.TYPE);
|
||||
Class<?> cls = Class.forName("java.nio.DirectByteBuffer");
|
||||
Constructor<?> constructor = cls.getDeclaredConstructor(Long.TYPE, Integer.TYPE);
|
||||
constructor.setAccessible(true);
|
||||
Field cleanerField = cls.getDeclaredField("cleaner");
|
||||
cleanerField.setAccessible(true);
|
||||
|
|
|
@@ -72,7 +72,9 @@ public class JavaPrefixSpanSuite extends SharedSparkSession {
|
|||
|
||||
try {
|
||||
model.save(spark.sparkContext(), outputPath);
|
||||
PrefixSpanModel newModel = PrefixSpanModel.load(spark.sparkContext(), outputPath);
|
||||
@SuppressWarnings("unchecked")
|
||||
PrefixSpanModel<Integer> newModel =
|
||||
(PrefixSpanModel<Integer>) PrefixSpanModel.load(spark.sparkContext(), outputPath);
|
||||
JavaRDD<FreqSequence<Integer>> freqSeqs = newModel.freqSequences().toJavaRDD();
|
||||
List<FreqSequence<Integer>> localFreqSeqs = freqSeqs.collect();
|
||||
Assert.assertEquals(5, localFreqSeqs.size());
|
||||
|
|
|
@@ -16,6 +16,8 @@
|
|||
*/
|
||||
package org.apache.spark.mllib.fpm
|
||||
|
||||
import scala.language.existentials
|
||||
|
||||
import org.apache.spark.SparkFunSuite
|
||||
import org.apache.spark.mllib.util.MLlibTestSparkContext
|
||||
import org.apache.spark.util.Utils
|
||||
|
|
|
@@ -288,7 +288,7 @@ class ScalaReflectionSuite extends SparkFunSuite {
|
|||
assert(serializer.children.head.asInstanceOf[Literal].value === UTF8String.fromString("value"))
|
||||
assert(serializer.children.last.isInstanceOf[NewInstance])
|
||||
assert(serializer.children.last.asInstanceOf[NewInstance]
|
||||
.cls.isInstanceOf[Class[org.apache.spark.sql.catalyst.util.GenericArrayData]])
|
||||
.cls.isAssignableFrom(classOf[org.apache.spark.sql.catalyst.util.GenericArrayData]))
|
||||
}
|
||||
|
||||
private val dataTypeForComplexData = dataTypeFor[ComplexData]
|
||||
|
|
|
@@ -365,7 +365,8 @@ class ExpressionEncoderSuite extends PlanTest with AnalysisTest {
|
|||
Arrays.deepEquals(b1.asInstanceOf[Array[AnyRef]], b2.asInstanceOf[Array[AnyRef]])
|
||||
case (b1: Array[_], b2: Array[_]) =>
|
||||
Arrays.equals(b1.asInstanceOf[Array[AnyRef]], b2.asInstanceOf[Array[AnyRef]])
|
||||
case (left: Comparable[Any], right: Comparable[Any]) => left.compareTo(right) == 0
|
||||
case (left: Comparable[_], right: Comparable[_]) =>
|
||||
left.asInstanceOf[Comparable[Any]].compareTo(right) == 0
|
||||
case _ => input == convertedBack
|
||||
}
|
||||
|
||||
|
|
|
@@ -17,6 +17,8 @@
|
|||
|
||||
package org.apache.spark.sql
|
||||
|
||||
import scala.language.existentials
|
||||
|
||||
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
|
||||
import org.apache.spark.sql.catalyst.TableIdentifier
|
||||
import org.apache.spark.sql.execution.joins._
|
||||
|
|
|
@@ -19,6 +19,8 @@ package org.apache.spark.sql.execution.datasources
|
|||
|
||||
import java.io.File
|
||||
|
||||
import scala.language.reflectiveCalls
|
||||
|
||||
import org.apache.hadoop.fs.Path
|
||||
|
||||
import org.apache.spark.sql.catalyst.util._
|
||||
|
|
|
@@ -20,6 +20,7 @@ import java.io.{DataOutputStream, File, FileOutputStream}
|
|||
|
||||
import scala.annotation.tailrec
|
||||
import scala.concurrent.duration._
|
||||
import scala.language.postfixOps
|
||||
|
||||
import org.apache.hadoop.fs.Path
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId
|
||||
|
@@ -27,7 +28,6 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration
|
|||
import org.apache.hadoop.yarn.server.api.{ApplicationInitializationContext, ApplicationTerminationContext}
|
||||
import org.scalatest.{BeforeAndAfterEach, Matchers}
|
||||
import org.scalatest.concurrent.Eventually._
|
||||
import org.scalatest.concurrent.Timeouts
|
||||
|
||||
import org.apache.spark.SparkFunSuite
|
||||
import org.apache.spark.network.shuffle.ShuffleTestAccessor
|
||||
|
|
Loading…
Reference in a new issue