diff --git a/bagel/src/test/scala/bagel/BagelSuite.scala b/bagel/src/test/scala/bagel/BagelSuite.scala
index d2189169d23c2136f8c0e0a6be012a5e07cf695d..3da7152a093fe74f7776371f0efbd3d73e88c153 100644
--- a/bagel/src/test/scala/bagel/BagelSuite.scala
+++ b/bagel/src/test/scala/bagel/BagelSuite.scala
@@ -18,7 +18,10 @@ class BagelSuite extends FunSuite with Assertions with BeforeAndAfter {
   var sc: SparkContext = _

   after {
-    sc.stop()
+    if (sc != null) {
+      sc.stop()
+      sc = null
+    }
   }

   test("halting by voting") {
diff --git a/core/src/main/scala/spark/SparkContext.scala b/core/src/main/scala/spark/SparkContext.scala
index 43414d2e412a24781a831134d8386b450111ef06..538e057926191ffad805eeb456524f1e8e63a59f 100644
--- a/core/src/main/scala/spark/SparkContext.scala
+++ b/core/src/main/scala/spark/SparkContext.scala
@@ -65,7 +65,7 @@ class SparkContext(
     System.setProperty("spark.master.port", "0")
   }

-  private val isLocal = (master == "local" || master.startsWith("local[")) && !master.startsWith("localhost")
+  private val isLocal = (master == "local" || master.startsWith("local["))

   // Create the Spark execution environment (cache, map output tracker, etc)
   val env = SparkEnv.createFromSystemProperties(
diff --git a/core/src/main/scala/spark/broadcast/BitTorrentBroadcast.scala b/core/src/main/scala/spark/broadcast/BitTorrentBroadcast.scala
index 473d080044349a5e9ef6342b64efdd5afb39b79e..016dc00fb0d2c3ac26f921e6684595174ad515bf 100644
--- a/core/src/main/scala/spark/broadcast/BitTorrentBroadcast.scala
+++ b/core/src/main/scala/spark/broadcast/BitTorrentBroadcast.scala
@@ -17,7 +17,8 @@ extends Broadcast[T] with Logging with Serializable {
   def value = value_

   MultiTracker.synchronized {
-    SparkEnv.get.blockManager.putSingle(uuid.toString, value_, StorageLevel.MEMORY_ONLY, false)
+    SparkEnv.get.blockManager.putSingle(
+      uuid.toString, value_, StorageLevel.MEMORY_ONLY_DESER, false)
   }

   @transient var arrayOfBlocks: Array[BroadcastBlock] = null
@@ -133,7 +134,8 @@ extends Broadcast[T] with Logging with Serializable {
       val receptionSucceeded = receiveBroadcast(uuid)
       if (receptionSucceeded) {
         value_ = MultiTracker.unBlockifyObject[T](arrayOfBlocks, totalBytes, totalBlocks)
-        SparkEnv.get.blockManager.putSingle(uuid.toString, value_, StorageLevel.MEMORY_ONLY, false)
+        SparkEnv.get.blockManager.putSingle(
+          uuid.toString, value_, StorageLevel.MEMORY_ONLY_DESER, false)
       } else {
         logError("Reading Broadcasted variable " + uuid + " failed")
       }
diff --git a/core/src/main/scala/spark/broadcast/HttpBroadcast.scala b/core/src/main/scala/spark/broadcast/HttpBroadcast.scala
index aa8bb77f414f7b83d556ac6ef822ab83adf44a04..03986ea756302d00d20e03f971d4a9d8c0d443b2 100644
--- a/core/src/main/scala/spark/broadcast/HttpBroadcast.scala
+++ b/core/src/main/scala/spark/broadcast/HttpBroadcast.scala
@@ -18,7 +18,8 @@ extends Broadcast[T] with Logging with Serializable {
   def value = value_

   HttpBroadcast.synchronized {
-    SparkEnv.get.blockManager.putSingle(uuid.toString, value_, StorageLevel.MEMORY_ONLY, false)
+    SparkEnv.get.blockManager.putSingle(
+      uuid.toString, value_, StorageLevel.MEMORY_ONLY_DESER, false)
   }

   if (!isLocal) {
@@ -35,7 +36,8 @@ extends Broadcast[T] with Logging with Serializable {
     logInfo("Started reading broadcast variable " + uuid)
     val start = System.nanoTime
     value_ = HttpBroadcast.read[T](uuid)
-    SparkEnv.get.blockManager.putSingle(uuid.toString, value_, StorageLevel.MEMORY_ONLY, false)
+    SparkEnv.get.blockManager.putSingle(
+      uuid.toString, value_, StorageLevel.MEMORY_ONLY_DESER, false)
     val time = (System.nanoTime - start) / 1e9
     logInfo("Reading broadcast variable " + uuid + " took " + time + " s")
   }
diff --git a/core/src/main/scala/spark/broadcast/TreeBroadcast.scala b/core/src/main/scala/spark/broadcast/TreeBroadcast.scala
index 692825353778d03d1695b974d2ec0ba5e4a8cb3b..c9e1e67d87206241c1d5a45f6b40f57af3151814 100644
--- a/core/src/main/scala/spark/broadcast/TreeBroadcast.scala
+++ b/core/src/main/scala/spark/broadcast/TreeBroadcast.scala
@@ -16,7 +16,8 @@ extends Broadcast[T] with Logging with Serializable {
   def value = value_

   MultiTracker.synchronized {
-    SparkEnv.get.blockManager.putSingle(uuid.toString, value_, StorageLevel.MEMORY_ONLY, false)
+    SparkEnv.get.blockManager.putSingle(
+      uuid.toString, value_, StorageLevel.MEMORY_ONLY_DESER, false)
   }

   @transient var arrayOfBlocks: Array[BroadcastBlock] = null
@@ -110,7 +111,8 @@ extends Broadcast[T] with Logging with Serializable {
       val receptionSucceeded = receiveBroadcast(uuid)
       if (receptionSucceeded) {
         value_ = MultiTracker.unBlockifyObject[T](arrayOfBlocks, totalBytes, totalBlocks)
-        SparkEnv.get.blockManager.putSingle(uuid.toString, value_, StorageLevel.MEMORY_ONLY, false)
+        SparkEnv.get.blockManager.putSingle(
+          uuid.toString, value_, StorageLevel.MEMORY_ONLY_DESER, false)
       } else {
         logError("Reading Broadcasted variable " + uuid + " failed")
       }
diff --git a/core/src/test/scala/spark/BroadcastSuite.scala b/core/src/test/scala/spark/BroadcastSuite.scala
index 1e0b587421b904d3c161d68589c83eec5eca6651..0738a2725b417d4ab40af34449f37fe7276040c3 100644
--- a/core/src/test/scala/spark/BroadcastSuite.scala
+++ b/core/src/test/scala/spark/BroadcastSuite.scala
@@ -8,8 +8,9 @@ class BroadcastSuite extends FunSuite with BeforeAndAfter {
   var sc: SparkContext = _

   after {
-    if(sc != null) {
+    if (sc != null) {
       sc.stop()
+      sc = null
     }
   }

diff --git a/core/src/test/scala/spark/FailureSuite.scala b/core/src/test/scala/spark/FailureSuite.scala
index 0aaa16dca4efad89a8139b020045d5ba2c766bb7..440582916111835bcfbc6f0635c68901202934a4 100644
--- a/core/src/test/scala/spark/FailureSuite.scala
+++ b/core/src/test/scala/spark/FailureSuite.scala
@@ -28,8 +28,9 @@ class FailureSuite extends FunSuite with BeforeAndAfter {
   var sc: SparkContext = _

   after {
-    if(sc != null) {
+    if (sc != null) {
       sc.stop()
+      sc = null
     }
   }

diff --git a/core/src/test/scala/spark/FileSuite.scala b/core/src/test/scala/spark/FileSuite.scala
index 4cb9c7802f8686679e0cb3405d3dc1bdebbf1b20..17c7a8de43b40e088d918bd355275cdb7e2640e3 100644
--- a/core/src/test/scala/spark/FileSuite.scala
+++ b/core/src/test/scala/spark/FileSuite.scala
@@ -16,8 +16,9 @@ class FileSuite extends FunSuite with BeforeAndAfter {
   var sc: SparkContext = _

   after {
-    if(sc != null) {
+    if (sc != null) {
       sc.stop()
+      sc = null
     }
   }

diff --git a/core/src/test/scala/spark/KryoSerializerSuite.scala b/core/src/test/scala/spark/KryoSerializerSuite.scala
index e889769b9acdd905fbed097b758aa0ee29042359..06d446ea246724675ab79cc3b33deb2952606fb7 100644
--- a/core/src/test/scala/spark/KryoSerializerSuite.scala
+++ b/core/src/test/scala/spark/KryoSerializerSuite.scala
@@ -8,8 +8,7 @@ import com.esotericsoftware.kryo._

 import SparkContext._

-class KryoSerializerSuite extends FunSuite{
-
+class KryoSerializerSuite extends FunSuite {
   test("basic types") {
     val ser = (new KryoSerializer).newInstance()
     def check[T](t: T) {
diff --git a/core/src/test/scala/spark/PartitioningSuite.scala b/core/src/test/scala/spark/PartitioningSuite.scala
index cf2ffeb9b1a131267936ec523d4d38c2476ee8ff..5000fa7307aa3a7ccb0d193721241c6465902b5b 100644
--- a/core/src/test/scala/spark/PartitioningSuite.scala
+++ b/core/src/test/scala/spark/PartitioningSuite.scala
@@ -14,6 +14,7 @@ class PartitioningSuite extends FunSuite with BeforeAndAfter {
   after {
     if(sc != null) {
       sc.stop()
+      sc = null
     }
   }

diff --git a/core/src/test/scala/spark/PipedRDDSuite.scala b/core/src/test/scala/spark/PipedRDDSuite.scala
index d010a9be7ae89e7f68ffed4347b3eee3805e569a..426652dc15cd1ef99b1470d1ce36196bd19b41f9 100644
--- a/core/src/test/scala/spark/PipedRDDSuite.scala
+++ b/core/src/test/scala/spark/PipedRDDSuite.scala
@@ -9,8 +9,9 @@ class PipedRDDSuite extends FunSuite with BeforeAndAfter {
   var sc: SparkContext = _

   after {
-    if(sc != null) {
+    if (sc != null) {
       sc.stop()
+      sc = null
     }
   }

diff --git a/core/src/test/scala/spark/RDDSuite.scala b/core/src/test/scala/spark/RDDSuite.scala
index c467a7f916a296641243c487befbe3514a933e13..ba9b36adb7cdc53ba5750482c1d7d3a2ecbfadef 100644
--- a/core/src/test/scala/spark/RDDSuite.scala
+++ b/core/src/test/scala/spark/RDDSuite.scala
@@ -10,8 +10,9 @@ class RDDSuite extends FunSuite with BeforeAndAfter {
   var sc: SparkContext = _

   after {
-    if(sc != null) {
+    if (sc != null) {
       sc.stop()
+      sc = null
     }
   }

diff --git a/core/src/test/scala/spark/ShuffleSuite.scala b/core/src/test/scala/spark/ShuffleSuite.scala
index 5fa494160f06fe10f67be612c51f1747f635aff3..99d13b31ef62cadf756fc6318dfd309ef3e43c47 100644
--- a/core/src/test/scala/spark/ShuffleSuite.scala
+++ b/core/src/test/scala/spark/ShuffleSuite.scala
@@ -18,8 +18,9 @@ class ShuffleSuite extends FunSuite with BeforeAndAfter {
   var sc: SparkContext = _

   after {
-    if(sc != null) {
+    if (sc != null) {
       sc.stop()
+      sc = null
     }
   }

diff --git a/core/src/test/scala/spark/ThreadingSuite.scala b/core/src/test/scala/spark/ThreadingSuite.scala
index 90409a54ecb7c7cc278076d96910076d9a56351c..302f73118786dda09463d5db838b1ad5a2100815 100644
--- a/core/src/test/scala/spark/ThreadingSuite.scala
+++ b/core/src/test/scala/spark/ThreadingSuite.scala
@@ -29,6 +29,7 @@ class ThreadingSuite extends FunSuite with BeforeAndAfter {
   after {
     if(sc != null) {
       sc.stop()
+      sc = null
     }
   }
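
Note: the same guarded teardown is repeated verbatim in every test suite touched above. As a minimal sketch of how it could be factored out (the LocalSparkContext trait below is hypothetical and not part of this patch), a shared ScalaTest mixin would keep the suites from drifting apart:

import org.scalatest.{BeforeAndAfter, Suite}

import spark.SparkContext

// Hypothetical shared mixin: after each test, stop the SparkContext and
// null the reference so no later test or suite sees a stale context.
trait LocalSparkContext extends BeforeAndAfter { self: Suite =>
  var sc: SparkContext = _

  after {
    if (sc != null) {
      sc.stop()
      sc = null
    }
  }
}

A suite would then be declared as "class RDDSuite extends FunSuite with LocalSparkContext" and drop its hand-written after block.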