diff --git a/common/unsafe/src/main/java/org/apache/spark/unsafe/Platform.java b/common/unsafe/src/main/java/org/apache/spark/unsafe/Platform.java
index bdf52f32c6fe111c05e28174bcc4b69c0e160cb3..77c8c398be955533c260ef3071cd6f5f8933bef3 100644
--- a/common/unsafe/src/main/java/org/apache/spark/unsafe/Platform.java
+++ b/common/unsafe/src/main/java/org/apache/spark/unsafe/Platform.java
@@ -155,8 +155,8 @@ public final class Platform {
   @SuppressWarnings("unchecked")
   public static ByteBuffer allocateDirectBuffer(int size) {
     try {
-      Class cls = Class.forName("java.nio.DirectByteBuffer");
-      Constructor constructor = cls.getDeclaredConstructor(Long.TYPE, Integer.TYPE);
+      Class<?> cls = Class.forName("java.nio.DirectByteBuffer");
+      Constructor<?> constructor = cls.getDeclaredConstructor(Long.TYPE, Integer.TYPE);
       constructor.setAccessible(true);
       Field cleanerField = cls.getDeclaredField("cleaner");
       cleanerField.setAccessible(true);
diff --git a/mllib/src/test/java/org/apache/spark/mllib/fpm/JavaPrefixSpanSuite.java b/mllib/src/test/java/org/apache/spark/mllib/fpm/JavaPrefixSpanSuite.java
index 75b0ec6480196d1b3a0bd4c3f1ac19e872df4dc4..32d3141149a74492ee7696ea7d10f147259d68bb 100644
--- a/mllib/src/test/java/org/apache/spark/mllib/fpm/JavaPrefixSpanSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/fpm/JavaPrefixSpanSuite.java
@@ -72,7 +72,9 @@ public class JavaPrefixSpanSuite extends SharedSparkSession {
 
     try {
       model.save(spark.sparkContext(), outputPath);
-      PrefixSpanModel newModel = PrefixSpanModel.load(spark.sparkContext(), outputPath);
+      @SuppressWarnings("unchecked")
+      PrefixSpanModel<Integer> newModel =
+        (PrefixSpanModel<Integer>) PrefixSpanModel.load(spark.sparkContext(), outputPath);
       JavaRDD<FreqSequence<Integer>> freqSeqs = newModel.freqSequences().toJavaRDD();
       List<FreqSequence<Integer>> localFreqSeqs = freqSeqs.collect();
       Assert.assertEquals(5, localFreqSeqs.size());
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/fpm/PrefixSpanSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/fpm/PrefixSpanSuite.scala
index 6d8c7b47d8373c93a239d72b6cef2b550a8a6716..4c2376376dd2af6f332d509aba41ce649cae6e02 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/fpm/PrefixSpanSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/fpm/PrefixSpanSuite.scala
@@ -16,6 +16,8 @@
  */
 package org.apache.spark.mllib.fpm
 
+import scala.language.existentials
+
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.mllib.util.MLlibTestSparkContext
 import org.apache.spark.util.Utils
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
index 1b25f5d7d0cc5544d152f2451d8633c2f0fe02b0..85563ddedc1651ddf8c0d1c7c48580a25f26b3d2 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
@@ -288,7 +288,7 @@ class ScalaReflectionSuite extends SparkFunSuite {
     assert(serializer.children.head.asInstanceOf[Literal].value === UTF8String.fromString("value"))
     assert(serializer.children.last.isInstanceOf[NewInstance])
     assert(serializer.children.last.asInstanceOf[NewInstance]
-      .cls.isInstanceOf[Class[org.apache.spark.sql.catalyst.util.GenericArrayData]])
+      .cls.isAssignableFrom(classOf[org.apache.spark.sql.catalyst.util.GenericArrayData]))
   }
 
   private val dataTypeForComplexData = dataTypeFor[ComplexData]
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
index 3d97113b52e393f61f176b8c14221e4b6c777e49..232dcc9ee51ca1bb77247c0ba7705cc810a19931 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
@@ -365,7 +365,8 @@ class ExpressionEncoderSuite extends PlanTest with AnalysisTest {
         Arrays.deepEquals(b1.asInstanceOf[Array[AnyRef]], b2.asInstanceOf[Array[AnyRef]])
       case (b1: Array[_], b2: Array[_]) =>
         Arrays.equals(b1.asInstanceOf[Array[AnyRef]], b2.asInstanceOf[Array[AnyRef]])
-      case (left: Comparable[Any], right: Comparable[Any]) => left.compareTo(right) == 0
+      case (left: Comparable[_], right: Comparable[_]) =>
+        left.asInstanceOf[Comparable[Any]].compareTo(right) == 0
       case _ => input == convertedBack
     }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
index cbf4a8a6125940a2d5578471f7a5a5c037acccb5..44889d92ee306a63d3dcc7209e3f87ca26821c47 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
@@ -17,6 +17,8 @@
 
 package org.apache.spark.sql
 
+import scala.language.existentials
+
 import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.execution.joins._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileCatalogSuite.scala
index 85c2e8ba5529dff654d0cc2b05e7b723a667f166..0d9ea512729bd0a6b2066ff11a2cbd6e1da681af 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileCatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileCatalogSuite.scala
@@ -19,6 +19,8 @@ package org.apache.spark.sql.execution.datasources
 
 import java.io.File
 
+import scala.language.reflectiveCalls
+
 import org.apache.hadoop.fs.Path
 
 import org.apache.spark.sql.catalyst.util._
diff --git a/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala b/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala
index 749e656e6dde8bc474e45841c5219ce9a91817e4..5458fb9d2e75e291bf6d2b8d7694f61ba9f0a3de 100644
--- a/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/network/yarn/YarnShuffleServiceSuite.scala
@@ -20,6 +20,7 @@ import java.io.{DataOutputStream, File, FileOutputStream}
 
 import scala.annotation.tailrec
 import scala.concurrent.duration._
+import scala.language.postfixOps
 
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.yarn.api.records.ApplicationId
@@ -27,7 +28,6 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration
 import org.apache.hadoop.yarn.server.api.{ApplicationInitializationContext, ApplicationTerminationContext}
 import org.scalatest.{BeforeAndAfterEach, Matchers}
 import org.scalatest.concurrent.Eventually._
-import org.scalatest.concurrent.Timeouts
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.network.shuffle.ShuffleTestAccessor
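A note on the ScalaReflectionSuite hunk above: because of JVM type erasure, `x.isInstanceOf[Class[GenericArrayData]]` only tests that `x` is some `Class` (the type argument is erased), so the original assertion was vacuously true; `isAssignableFrom(classOf[...])` compares the actual runtime class tokens instead. A minimal standalone Scala sketch of the difference, using java.util.ArrayList as a stand-in class (not part of the patch):

    object ErasureCheckDemo {
      def main(args: Array[String]): Unit = {
        val cls: Class[_] = classOf[java.util.ArrayList[_]]

        // Erased test: Class[String] erases to Class at runtime, so this is
        // true for ANY Class value; an assertion built on it cannot fail.
        println(cls.isInstanceOf[Class[String]]) // true (unchecked warning)

        // Runtime-class test: compares the actual class tokens.
        println(cls.isAssignableFrom(classOf[java.util.ArrayList[_]])) // true
        println(cls.isAssignableFrom(classOf[String]))                 // false
      }
    }

The ExpressionEncoderSuite hunk is the same idea from the other direction: `Comparable[Any]` cannot be distinguished from `Comparable[_]` at runtime, so the pattern now matches the erased type and makes the unchecked cast explicit instead of hiding it in the pattern.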