From 4d480ec59e8cf268054ed805abcd1e84eca17b41 Mon Sep 17 00:00:00 2001
From: Matei Zaharia <matei@eecs.berkeley.edu>
Date: Mon, 25 Feb 2013 15:53:43 -0800
Subject: [PATCH] Fixed something that was reported as a compile error in
 ScalaDoc.

For some reason, ScalaDoc complained about no such constructor for
StreamingContext; it doesn't seem like an actual Scala error, but it
prevented sbt publish from working because docs weren't built.
---
 .../scala/spark/streaming/api/java/JavaStreamingContext.scala | 4 ++--
 .../main/scala/spark/streaming/util/MasterFailureTest.scala   | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala b/streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala
index b528ebbc19..755407aecc 100644
--- a/streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala
+++ b/streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala
@@ -41,7 +41,7 @@ class JavaStreamingContext(val ssc: StreamingContext) {
    * @param batchDuration The time interval at which streaming data will be divided into batches
    */
   def this(master: String, appName: String, batchDuration: Duration) =
-    this(new StreamingContext(master, appName, batchDuration))
+    this(new StreamingContext(master, appName, batchDuration, null, Nil, Map()))
 
   /**
    * Creates a StreamingContext.
@@ -58,7 +58,7 @@ class JavaStreamingContext(val ssc: StreamingContext) {
       batchDuration: Duration,
       sparkHome: String,
       jars: Array[String]) =
-    this(new StreamingContext(master, appName, batchDuration, sparkHome, jars))
+    this(new StreamingContext(master, appName, batchDuration, sparkHome, jars, Map()))
 
   /**
    * Creates a StreamingContext.
diff --git a/streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala b/streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala
index bdd9f4d753..f673e5be15 100644
--- a/streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala
+++ b/streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala
@@ -159,7 +159,7 @@ object MasterFailureTest extends Logging {
 
     // Setup the streaming computation with the given operation
     System.clearProperty("spark.driver.port")
-    var ssc = new StreamingContext("local[4]", "MasterFailureTest", batchDuration)
+    var ssc = new StreamingContext("local[4]", "MasterFailureTest", batchDuration, null, Nil, Map())
     ssc.checkpoint(checkpointDir.toString)
     val inputStream = ssc.textFileStream(testDir.toString)
     val operatedStream = operation(inputStream)
--
GitLab
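
For context on the pattern the patch relies on: the new call sites show that this StreamingContext constructor takes sparkHome, jars, and an environment map after the first three arguments, presumably declared with default values, and the workaround is simply to spell those defaults out (null, Nil, Map()) at each call site instead of omitting them, since ScalaDoc was reportedly failing to resolve the shorter call. Below is a minimal, self-contained sketch of that pattern; Context, Wrapper, Demo, and this Duration are illustrative stand-ins, not the actual Spark classes.

// Hypothetical stand-in for a class whose constructor has trailing default arguments.
case class Duration(milliseconds: Long)

class Context(
    master: String,
    appName: String,
    batchDuration: Duration,
    sparkHome: String = null,
    jars: Seq[String] = Nil,
    environment: Map[String, String] = Map()) {
  override def toString = s"Context($master, $appName, ${batchDuration.milliseconds} ms)"
}

// Wrapper mirrors the shape of JavaStreamingContext: its auxiliary constructor
// passes every argument explicitly (null, Nil, Map()) rather than relying on
// the defaults, which is the workaround this patch applies.
class Wrapper(val ctx: Context) {
  def this(master: String, appName: String, batchDuration: Duration) =
    this(new Context(master, appName, batchDuration, null, Nil, Map()))
}

object Demo extends App {
  val w = new Wrapper("local[4]", "example", Duration(1000))
  println(w.ctx)  // prints: Context(local[4], example, 1000 ms)
}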