From f0d3b58d91f43697397cdd7a7e7f38cbb7daaa31 Mon Sep 17 00:00:00 2001
From: Shixiong Zhu <shixiong@databricks.com>
Date: Thu, 12 Nov 2015 14:52:03 -0800
Subject: [PATCH] [SPARK-11290][STREAMING][TEST-MAVEN] Fix the test for maven build

Should not create SparkContext in the constructor of `TrackStateRDDSuite`.

This is a follow up PR for #9256 to fix the test for maven build.

Author: Shixiong Zhu <shixiong@databricks.com>

Closes #9668 from zsxwing/hotfix.
---
 .../spark/streaming/rdd/TrackStateRDDSuite.scala | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/streaming/src/test/scala/org/apache/spark/streaming/rdd/TrackStateRDDSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/rdd/TrackStateRDDSuite.scala
index fc5f26607e..f396b76e8d 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/rdd/TrackStateRDDSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/rdd/TrackStateRDDSuite.scala
@@ -28,11 +28,17 @@ import org.apache.spark.{HashPartitioner, SparkConf, SparkContext, SparkFunSuite
 
 class TrackStateRDDSuite extends SparkFunSuite with BeforeAndAfterAll {
 
-  private var sc = new SparkContext(
-    new SparkConf().setMaster("local").setAppName("TrackStateRDDSuite"))
+  private var sc: SparkContext = null
+
+  override def beforeAll(): Unit = {
+    sc = new SparkContext(
+      new SparkConf().setMaster("local").setAppName("TrackStateRDDSuite"))
+  }
 
   override def afterAll(): Unit = {
-    sc.stop()
+    if (sc != null) {
+      sc.stop()
+    }
   }
 
   test("creation from pair RDD") {
-- 
GitLab
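
Editor's note: below is a minimal sketch of what the suite looks like once this patch is applied, for readers who want the full class rather than the hunk. It is illustrative only, not the verbatim file: SparkFunSuite is Spark's internal test base class, and the body of the "creation from pair RDD" test shown here is a hypothetical placeholder, not the real test from the suite.

package org.apache.spark.streaming.rdd

import org.scalatest.BeforeAndAfterAll

import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}

class TrackStateRDDSuite extends SparkFunSuite with BeforeAndAfterAll {

  // The SparkContext is no longer created in the constructor (which broke
  // the Maven build per SPARK-11290); it is created in beforeAll() instead.
  private var sc: SparkContext = null

  override def beforeAll(): Unit = {
    sc = new SparkContext(
      new SparkConf().setMaster("local").setAppName("TrackStateRDDSuite"))
  }

  override def afterAll(): Unit = {
    // Guard against beforeAll() never having run, so afterAll() does not NPE.
    if (sc != null) {
      sc.stop()
    }
  }

  test("creation from pair RDD") {
    // Hypothetical placeholder body; the real suite builds a TrackStateRDD here.
    assert(sc.parallelize(1 to 3).count() == 3L)
  }
}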