diff --git a/core/pom.xml b/core/pom.xml
index 0c746175afa73a33f801baa58b74f0404b40536f..c3d6b00a443f16e8b9997661ec83a801be2bff12 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -235,7 +235,7 @@
     </dependency>
     <dependency>
       <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
+      <artifactId>easymockclassextension</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
diff --git a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
index 5a8310090890d7ac16e8c53411a53102fadd4865..dc2db66df60e0792be0879f7e8d0df97c4c155be 100644
--- a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
@@ -25,7 +25,7 @@ import scala.language.postfixOps
 import scala.util.Random
 
 import org.scalatest.{BeforeAndAfter, FunSuite}
-import org.scalatest.concurrent.Eventually
+import org.scalatest.concurrent.{PatienceConfiguration, Eventually}
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._
 
@@ -76,7 +76,7 @@ class ContextCleanerSuite extends FunSuite with BeforeAndAfter with LocalSparkCo
     tester.assertCleanup()
 
     // Verify that shuffles can be re-executed after cleaning up
-    assert(rdd.collect().toList === collected)
+    assert(rdd.collect().toList.equals(collected))
   }
 
   test("cleanup broadcast") {
@@ -285,7 +285,7 @@
   sc.cleaner.get.attachListener(cleanerListener)
 
   /** Assert that all the stuff has been cleaned up */
-  def assertCleanup()(implicit waitTimeout: Eventually.Timeout) {
+  def assertCleanup()(implicit waitTimeout: PatienceConfiguration.Timeout) {
     try {
       eventually(waitTimeout, interval(100 millis)) {
         assert(isAllCleanedUp)
diff --git a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
index 29d428aa7dc41aeb4b3a5085ebb84e2be35d6370..47df00050c1e25f6129c1cab2798e774628a17a7 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
@@ -23,11 +23,11 @@ class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll {
 
   // This test suite should run all tests in ShuffleSuite with Netty shuffle mode.
 
-  override def beforeAll(configMap: Map[String, Any]) {
+  override def beforeAll() {
     System.setProperty("spark.shuffle.use.netty", "true")
   }
 
-  override def afterAll(configMap: Map[String, Any]) {
+  override def afterAll() {
     System.setProperty("spark.shuffle.use.netty", "false")
   }
 }
diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
index 286e221e33b78019b86469991420a880678e96b1..55af1666df662c35cfeec2d2531018d2f3d69fa5 100644
--- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -266,8 +266,9 @@ class RDDSuite extends FunSuite with SharedSparkContext {
 
     // we can optionally shuffle to keep the upstream parallel
     val coalesced5 = data.coalesce(1, shuffle = true)
-    assert(coalesced5.dependencies.head.rdd.dependencies.head.rdd.asInstanceOf[ShuffledRDD[_, _, _]] !=
-      null)
+    val isEquals = coalesced5.dependencies.head.rdd.dependencies.head.rdd.
+      asInstanceOf[ShuffledRDD[_, _, _]] != null
+    assert(isEquals)
 
     // when shuffling, we can increase the number of partitions
     val coalesced6 = data.coalesce(20, shuffle = true)
diff --git a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index 81e64c1846ed5dceb30a8647213cd1c6a91ce90a..7506d56d7e26dfc16fe95edfdb6761e0d3f3b2a5 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -23,7 +23,7 @@ import scala.language.reflectiveCalls
 
 import akka.actor._
 import akka.testkit.{ImplicitSender, TestKit, TestActorRef}
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.{BeforeAndAfter, FunSuiteLike}
 
 import org.apache.spark._
 import org.apache.spark.rdd.RDD
@@ -37,7 +37,7 @@ class BuggyDAGEventProcessActor extends Actor {
   }
 }
 
-class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with FunSuite
+class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with FunSuiteLike
   with ImplicitSender with BeforeAndAfter with LocalSparkContext {
 
   val conf = new SparkConf
diff --git a/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala b/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
index 6a5653ed2fb5453ec7ff0ab827f80d28df9e1602..c1c605cdb487b6a7bac677766e7b1ecc7608a42a 100644
--- a/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
@@ -105,7 +105,8 @@ class TimeStampedHashMapSuite extends FunSuite {
     map("k1") = strongRef
     map("k2") = "v2"
     map("k3") = "v3"
-    assert(map("k1") === strongRef)
+    val isEquals = map("k1") == strongRef
+    assert(isEquals)
 
     // clear strong reference to "k1"
     strongRef = null
diff --git a/pom.xml b/pom.xml
index 87c8e29ad10693ff53f0552a18c34a58cfc07ed2..891468b21bfff2b0c8a40173953d7c8345f75846 100644
--- a/pom.xml
+++ b/pom.xml
@@ -459,25 +459,31 @@
       <dependency>
         <groupId>org.scalatest</groupId>
         <artifactId>scalatest_${scala.binary.version}</artifactId>
-        <version>1.9.1</version>
+        <version>2.1.5</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.easymock</groupId>
-        <artifactId>easymock</artifactId>
+        <artifactId>easymockclassextension</artifactId>
         <version>3.1</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.mockito</groupId>
         <artifactId>mockito-all</artifactId>
-        <version>1.8.5</version>
+        <version>1.9.0</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.scalacheck</groupId>
         <artifactId>scalacheck_${scala.binary.version}</artifactId>
-        <version>1.10.0</version>
+        <version>1.11.3</version>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
+        <groupId>junit</groupId>
+        <artifactId>junit</artifactId>
+        <version>4.10</version>
         <scope>test</scope>
       </dependency>
       <dependency>
@@ -779,6 +785,7 @@
             <arg>-unchecked</arg>
             <arg>-deprecation</arg>
             <arg>-feature</arg>
+            <arg>-language:postfixOps</arg>
           </args>
           <jvmArgs>
             <jvmArg>-Xms1024m</jvmArg>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index d0049a8ac43aa965b291a56dc0ed623f945b55bc..069913dbaac568015bb10f1352cf097746134b37 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -270,16 +270,17 @@ object SparkBuild extends Build {
     */
 
     libraryDependencies ++= Seq(
-        "io.netty" % "netty-all" % "4.0.17.Final",
-        "org.eclipse.jetty" % "jetty-server" % jettyVersion,
-        "org.eclipse.jetty" % "jetty-util" % jettyVersion,
-        "org.eclipse.jetty" % "jetty-plus" % jettyVersion,
-        "org.eclipse.jetty" % "jetty-security" % jettyVersion,
-        "org.scalatest" %% "scalatest" % "1.9.1" % "test",
-        "org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
-        "com.novocode" % "junit-interface" % "0.10" % "test",
-        "org.easymock" % "easymock" % "3.1" % "test",
-        "org.mockito" % "mockito-all" % "1.8.5" % "test"
+        "io.netty"          % "netty-all"               % "4.0.17.Final",
+        "org.eclipse.jetty" % "jetty-server"            % jettyVersion,
+        "org.eclipse.jetty" % "jetty-util"              % jettyVersion,
+        "org.eclipse.jetty" % "jetty-plus"              % jettyVersion,
+        "org.eclipse.jetty" % "jetty-security"          % jettyVersion,
+        "org.scalatest"    %% "scalatest"               % "2.1.5"  % "test",
+        "org.scalacheck"   %% "scalacheck"              % "1.11.3" % "test",
+        "com.novocode"      % "junit-interface"         % "0.10"   % "test",
+        "org.easymock"      % "easymockclassextension"  % "3.1"    % "test",
+        "org.mockito"       % "mockito-all"             % "1.9.0"  % "test",
+        "junit"             % "junit"                   % "4.10"   % "test"
       ),
 
     testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
@@ -478,7 +479,6 @@ object SparkBuild extends Build {
     // this non-deterministically. TODO: FIX THIS.
     parallelExecution in Test := false,
     libraryDependencies ++= Seq(
-      "org.scalatest" %% "scalatest" % "1.9.1" % "test",
       "com.typesafe" %% "scalalogging-slf4j" % "1.0.1"
     )
   )
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 7c765edd55027b70ab3978d81bfedc062f1e85a7..f4ba8d9cc079ba284b166ea6a64ef231d4e9c8e2 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -67,12 +67,14 @@ class ReplSuite extends FunSuite {
   }
 
   def assertContains(message: String, output: String) {
-    assert(output.contains(message),
+    val isContain = output.contains(message)
+    assert(isContain,
       "Interpreter output did not contain '" + message + "':\n" + output)
   }
 
   def assertDoesNotContain(message: String, output: String) {
-    assert(!output.contains(message),
+    val isContain = output.contains(message)
+    assert(!isContain,
       "Interpreter output contained '" + message + "':\n" + output)
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
index b973ceba5fec007d03a9269d047ef088377920b9..9810520bb9ae60835495efda99618a3218cdc2d2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.parquet
 
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.{BeforeAndAfterAll, FunSuiteLike}
 
 import org.apache.hadoop.fs.{Path, FileSystem}
 import org.apache.hadoop.mapreduce.Job
@@ -56,7 +56,7 @@ case class OptionalReflectData(
     doubleField: Option[Double],
     booleanField: Option[Boolean])
 
-class ParquetQuerySuite extends QueryTest with FunSuite with BeforeAndAfterAll {
+class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
   import TestData._
   TestData // Load test data tables.
 
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
index 04925886c39e479eea88c0283e8bffc50ce7027d..ff6d86c8f81ac0eb6deaa5c59bf608ccc25f836c 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
@@ -92,9 +92,9 @@ class BasicOperationsSuite extends TestSuiteBase {
     assert(second.size === 5)
     assert(third.size === 5)
 
-    assert(first.flatten.toSet === (1 to 100).toSet)
-    assert(second.flatten.toSet === (101 to 200).toSet)
-    assert(third.flatten.toSet === (201 to 300).toSet)
+    assert(first.flatten.toSet.equals((1 to 100).toSet))
+    assert(second.flatten.toSet.equals((101 to 200).toSet))
+    assert(third.flatten.toSet.equals((201 to 300).toSet))
   }
 
   test("repartition (fewer partitions)") {
@@ -111,9 +111,9 @@ class BasicOperationsSuite extends TestSuiteBase {
     assert(second.size === 2)
     assert(third.size === 2)
 
-    assert(first.flatten.toSet === (1 to 100).toSet)
-    assert(second.flatten.toSet === (101 to 200).toSet)
-    assert(third.flatten.toSet === (201 to 300).toSet)
+    assert(first.flatten.toSet.equals((1 to 100).toSet))
+    assert(second.flatten.toSet.equals((101 to 200).toSet))
+    assert(third.flatten.toSet.equals((201 to 300).toSet))
   }
 
   test("groupByKey") {
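
The assertCleanup signature change in ContextCleanerSuite reflects ScalaTest 2.x moving the Timeout type from Eventually onto PatienceConfiguration (Eventually extends that trait, so timeout(...) and interval(...) stay in scope via Eventually._). A minimal sketch of the resulting pattern, assuming ScalaTest 2.1.5; the suite, helper, and timeout names are illustrative, not from this patch:

import scala.language.postfixOps

import org.scalatest.FunSuite
import org.scalatest.concurrent.Eventually._
import org.scalatest.concurrent.PatienceConfiguration
import org.scalatest.time.SpanSugar._

class PatienceTimeoutExample extends FunSuite {
  @volatile private var done = false

  // Mirrors assertCleanup above: the implicit timeout is typed against
  // PatienceConfiguration.Timeout and handed to eventually explicitly.
  private def assertDone()(implicit waitTimeout: PatienceConfiguration.Timeout) {
    eventually(waitTimeout, interval(10 millis)) {
      assert(done)
    }
  }

  test("the helper picks up the implicit timeout") {
    implicit val defaultTimeout = timeout(1 second)
    new Thread(new Runnable {
      def run() { Thread.sleep(50); done = true }
    }).start()
    assertDone()
  }
}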