From 23f3e0f117a7cc4fab0c710ea3583f2aee68d663 Mon Sep 17 00:00:00 2001
From: Dmitriy Lyubimov <dlyubimov@apache.org>
Date: Fri, 26 Jul 2013 19:15:11 -0700
Subject: [PATCH] Mix in SharedSparkContext for the kryo-collect test

---
 .../scala/spark/KryoSerializerSuite.scala     | 24 +++++++++----------
 project/SparkBuild.scala                      | 13 +++++-----
 2 files changed, 19 insertions(+), 18 deletions(-)

diff --git a/core/src/test/scala/spark/KryoSerializerSuite.scala b/core/src/test/scala/spark/KryoSerializerSuite.scala
index 83df0af1ae..793b0b66c4 100644
--- a/core/src/test/scala/spark/KryoSerializerSuite.scala
+++ b/core/src/test/scala/spark/KryoSerializerSuite.scala
@@ -26,7 +26,7 @@ import com.esotericsoftware.kryo._
 import SparkContext._
 import spark.test.{ClassWithoutNoArgConstructor, MyRegistrator}
 
-class KryoSerializerSuite extends FunSuite {
+class KryoSerializerSuite extends FunSuite with SharedSparkContext {
   test("basic types") {
     val ser = (new KryoSerializer).newInstance()
     def check[T](t: T) {
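
Note: SharedSparkContext is the core test helper that supplies the shared
`sc` field used by the rewritten test below. A minimal sketch of its shape
(simplified, assuming the ScalaTest 1.x BeforeAndAfterAll lifecycle and the
`spark` package of this suite; not the verbatim trait):

    import org.scalatest.{BeforeAndAfterAll, Suite}

    trait SharedSparkContext extends BeforeAndAfterAll { self: Suite =>
      @transient private var _sc: SparkContext = _

      // The shared context that every test in the suite runs against.
      def sc: SparkContext = _sc

      override def beforeAll() {
        // Created here, so system properties set by a subclass *before* its
        // super.beforeAll() call take effect when the context starts.
        _sc = new SparkContext("local", "test")
        super.beforeAll()
      }

      override def afterAll() {
        if (_sc != null) {
          _sc.stop()
          _sc = null
        }
        super.afterAll()
      }
    }
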
@@ -128,23 +128,23 @@ class KryoSerializerSuite extends FunSuite {
     System.clearProperty("spark.kryo.registrator")
   }
 
-  test("kryo-collect") {
+  test("kryo with collect") {
+    val control = 1 :: 2 :: Nil
+    val result = sc.parallelize(control, 2).map(new ClassWithoutNoArgConstructor(_)).collect().map(_.x)
+    assert(control == result.toSeq)
+  }
+
+  override def beforeAll() {
     System.setProperty("spark.serializer", "spark.KryoSerializer")
     System.setProperty("spark.kryo.registrator", classOf[MyRegistrator].getName)
+    super.beforeAll()
+  }
 
-    val sc = new SparkContext("local", "kryoTest")
-    try {
-      val control = 1 :: 2 :: Nil
-      val result = sc.parallelize(control, 2).map(new ClassWithoutNoArgConstructor(_)).collect().map(_.x)
-      assert(control == result.toSeq)
-    } finally {
-      sc.stop()
-    }
-
+  override def afterAll() {
+    super.afterAll()
     System.clearProperty("spark.kryo.registrator")
     System.clearProperty("spark.serializer")
   }
-
 }
 
 package test {
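
Note: the overrides above rely on call ordering. beforeAll() sets the
serializer properties before super.beforeAll() creates the shared
SparkContext, and afterAll() lets super.afterAll() stop that context before
the properties are cleared, so no other suite inherits them. A self-contained
ScalaTest sketch of that ordering (the suite name and printlns are
illustrative only):

    import org.scalatest.{BeforeAndAfterAll, FunSuite}

    class LifecycleOrderingDemo extends FunSuite with BeforeAndAfterAll {
      override def beforeAll() {
        println("1. configure system properties")  // subclass body runs first
        super.beforeAll()                          // then mixed-in setup (context creation)
      }

      override def afterAll() {
        super.afterAll()                           // mixed-in teardown (context stop) first
        println("2. clear system properties")      // then property cleanup
      }

      test("runs between beforeAll and afterAll") { assert(true) }
    }
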
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index f3f67b57c8..998747c68f 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -26,14 +26,14 @@ import AssemblyKeys._
 object SparkBuild extends Build {
   // Hadoop version to build against. For example, "0.20.2", "0.20.205.0", or
   // "1.0.4" for Apache releases, or "0.20.2-cdh3u5" for Cloudera Hadoop.
-  val HADOOP_VERSION = "1.0.4"
-  val HADOOP_MAJOR_VERSION = "1"
-  val HADOOP_YARN = false
+  //val HADOOP_VERSION = "1.0.4"
+  //val HADOOP_MAJOR_VERSION = "1"
+  //val HADOOP_YARN = false
 
   // For Hadoop 2 versions such as "2.0.0-mr1-cdh4.1.1", set the HADOOP_MAJOR_VERSION to "2"
-  //val HADOOP_VERSION = "2.0.0-mr1-cdh4.1.1"
-  //val HADOOP_MAJOR_VERSION = "2"
-  //val HADOOP_YARN = false
+  val HADOOP_VERSION = "2.0.0-cdh4.3.0"
+  val HADOOP_MAJOR_VERSION = "2"
+  val HADOOP_YARN = true
 
   // For Hadoop 2 YARN support
   //val HADOOP_VERSION = "2.0.2-alpha"
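
Note: the hunk above flips the hardcoded build constants from Apache Hadoop
1.0.4 to a CDH 4.3.0 YARN profile. One hypothetical way to avoid editing
SparkBuild.scala per deployment would be to read the values from the
environment; the SPARK_* variable names here are illustrative, not part of
this patch:

    // Sketch only: derive the Hadoop settings from environment variables,
    // falling back to the Apache Hadoop 1 defaults.
    val HADOOP_VERSION = sys.env.getOrElse("SPARK_HADOOP_VERSION", "1.0.4")
    val HADOOP_MAJOR_VERSION = sys.env.getOrElse("SPARK_HADOOP_MAJOR_VERSION", "1")
    val HADOOP_YARN = sys.env.get("SPARK_WITH_YARN").exists(_.toBoolean)
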
@@ -185,6 +185,7 @@ object SparkBuild extends Build {
         if (HADOOP_YARN) {
           Seq(
             // Exclude rule required for all ?
+            "org.apache.hadoop" % "hadoop-common" % HADOOP_VERSION excludeAll(excludeJackson, excludeNetty, excludeAsm),
             "org.apache.hadoop" % "hadoop-client" % HADOOP_VERSION excludeAll(excludeJackson, excludeNetty, excludeAsm),
             "org.apache.hadoop" % "hadoop-yarn-api" % HADOOP_VERSION excludeAll(excludeJackson, excludeNetty, excludeAsm),
             "org.apache.hadoop" % "hadoop-yarn-common" % HADOOP_VERSION excludeAll(excludeJackson, excludeNetty, excludeAsm),
-- 
GitLab