From e9bb12bea9fbef94332fbec88e3cd9197a27b7ad Mon Sep 17 00:00:00 2001
From: Tathagata Das <tathagata.das1565@gmail.com>
Date: Tue, 2 Sep 2014 19:02:48 -0700
Subject: [PATCH] [SPARK-1981][Streaming][Hotfix] Fixed docs related to kinesis

- Include kinesis in the unidoc build
- Hide non-public classes from docs

Author: Tathagata Das <tathagata.das1565@gmail.com>

Closes #2239 from tdas/kinesis-doc-fix and squashes the following commits:

156e20c [Tathagata Das] More fixes, based on PR comments.
e9a6c01 [Tathagata Das] Fixed docs related to kinesis
---
 docs/_plugins/copy_api_dirs.rb                         |  4 ++--
 .../examples/streaming/JavaKinesisWordCountASL.java    | 10 +++++-----
 .../spark/examples/streaming/KinesisWordCountASL.scala |  6 +++---
 .../apache/spark/streaming/kinesis/KinesisUtils.scala  |  7 ++++---
 project/SparkBuild.scala                               |  6 +++---
 5 files changed, 17 insertions(+), 16 deletions(-)
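
Note: with this change, the API docs are generated with the kinesis-asl profile enabled; the
command copy_api_dirs.rb now invokes (shown in the first hunk below) is:

    sbt/sbt -Pkinesis-asl compile unidoc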

diff --git a/docs/_plugins/copy_api_dirs.rb b/docs/_plugins/copy_api_dirs.rb
index 2dbbbf6feb..3b02e090ae 100644
--- a/docs/_plugins/copy_api_dirs.rb
+++ b/docs/_plugins/copy_api_dirs.rb
@@ -25,8 +25,8 @@ if not (ENV['SKIP_API'] == '1' or ENV['SKIP_SCALADOC'] == '1')
   curr_dir = pwd
   cd("..")
 
-  puts "Running 'sbt/sbt compile unidoc' from " + pwd + "; this may take a few minutes..."
-  puts `sbt/sbt compile unidoc`
+  puts "Running 'sbt/sbt -Pkinesis-asl compile unidoc' from " + pwd + "; this may take a few minutes..."
+  puts `sbt/sbt -Pkinesis-asl compile unidoc`
 
   puts "Moving back into docs dir."
   cd("docs")
diff --git a/extras/kinesis-asl/src/main/java/org/apache/spark/examples/streaming/JavaKinesisWordCountASL.java b/extras/kinesis-asl/src/main/java/org/apache/spark/examples/streaming/JavaKinesisWordCountASL.java
index 1a710d7b18..aa917d0575 100644
--- a/extras/kinesis-asl/src/main/java/org/apache/spark/examples/streaming/JavaKinesisWordCountASL.java
+++ b/extras/kinesis-asl/src/main/java/org/apache/spark/examples/streaming/JavaKinesisWordCountASL.java
@@ -75,7 +75,7 @@ import com.google.common.collect.Lists;
  *   onto the Kinesis stream. 
  * Usage instructions for KinesisWordCountProducerASL are provided in the class definition.
  */
-public final class JavaKinesisWordCountASL {
+public final class JavaKinesisWordCountASL { // needs to be public for access from run-example
     private static final Pattern WORD_SEPARATOR = Pattern.compile(" ");
     private static final Logger logger = Logger.getLogger(JavaKinesisWordCountASL.class);
 
@@ -87,10 +87,10 @@ public final class JavaKinesisWordCountASL {
         /* Check that all required args were passed in. */
         if (args.length < 2) {
           System.err.println(
-              "|Usage: KinesisWordCount <stream-name> <endpoint-url>\n" +
-              "|    <stream-name> is the name of the Kinesis stream\n" +
-              "|    <endpoint-url> is the endpoint of the Kinesis service\n" +
-              "|                   (e.g. https://kinesis.us-east-1.amazonaws.com)\n");
+              "Usage: JavaKinesisWordCountASL <stream-name> <endpoint-url>\n" +
+              "    <stream-name> is the name of the Kinesis stream\n" +
+              "    <endpoint-url> is the endpoint of the Kinesis service\n" +
+              "                   (e.g. https://kinesis.us-east-1.amazonaws.com)\n");
           System.exit(1);
         }
 
diff --git a/extras/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala b/extras/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
index d03edf8b30..fffd90de08 100644
--- a/extras/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
+++ b/extras/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
@@ -69,7 +69,7 @@ import org.apache.log4j.Level
  *   dummy data onto the Kinesis stream.
  * Usage instructions for KinesisWordCountProducerASL are provided in that class definition.
  */
-object KinesisWordCountASL extends Logging {
+private object KinesisWordCountASL extends Logging {
   def main(args: Array[String]) {
     /* Check that all required args were passed in. */
     if (args.length < 2) {
@@ -154,7 +154,7 @@ object KinesisWordCountASL extends Logging {
  *         org.apache.spark.examples.streaming.KinesisWordCountProducerASL mySparkStream \
  *         https://kinesis.us-east-1.amazonaws.com 10 5
  */
-object KinesisWordCountProducerASL {
+private object KinesisWordCountProducerASL {
   def main(args: Array[String]) {
     if (args.length < 4) {
       System.err.println("Usage: KinesisWordCountProducerASL <stream-name> <endpoint-url>" +
@@ -235,7 +235,7 @@ object KinesisWordCountProducerASL {
  *  Utility functions for Spark Streaming examples. 
  *  This has been lifted from the examples/ project to remove the circular dependency.
  */
-object StreamingExamples extends Logging {
+private[streaming] object StreamingExamples extends Logging {
 
   /** Set reasonable logging levels for streaming if the user has not configured log4j. */
   def setStreamingLogLevels() {
diff --git a/extras/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisUtils.scala b/extras/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisUtils.scala
index 713cac0e29..96f4399acc 100644
--- a/extras/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisUtils.scala
+++ b/extras/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisUtils.scala
@@ -35,7 +35,7 @@ import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionIn
 object KinesisUtils {
   /**
    * Create an InputDStream that pulls messages from a Kinesis stream.
-   *
+   * :: Experimental ::
    * @param ssc    StreamingContext object
    * @param streamName   Kinesis stream name
    * @param endpointUrl  Url of Kinesis service (e.g., https://kinesis.us-east-1.amazonaws.com)
@@ -52,6 +52,7 @@ object KinesisUtils {
    *
    * @return ReceiverInputDStream[Array[Byte]]
    */
+  @Experimental
   def createStream(
       ssc: StreamingContext,
       streamName: String,
@@ -65,9 +66,8 @@ object KinesisUtils {
 
   /**
    * Create a Java-friendly InputDStream that pulls messages from a Kinesis stream.
-   *
+   * :: Experimental ::
    * @param jssc Java StreamingContext object
-   * @param ssc    StreamingContext object
    * @param streamName   Kinesis stream name
    * @param endpointUrl  Url of Kinesis service (e.g., https://kinesis.us-east-1.amazonaws.com)
    * @param checkpointInterval  Checkpoint interval for Kinesis checkpointing.
@@ -83,6 +83,7 @@ object KinesisUtils {
    *
    * @return JavaReceiverInputDStream[Array[Byte]]
    */
+  @Experimental
   def createStream(
       jssc: JavaStreamingContext, 
       streamName: String, 
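
For reference, a minimal usage sketch of the Scala createStream documented above. The stream name
and endpoint mirror the example Scaladoc; the local master, initial position, and storage level are
illustrative assumptions about the 1.1 API, not part of this patch:

    import org.apache.spark.SparkConf
    import org.apache.spark.storage.StorageLevel
    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.kinesis.KinesisUtils
    import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream

    // Local master used only so the sketch runs standalone; real apps set this via spark-submit.
    val conf = new SparkConf().setAppName("KinesisSketch").setMaster("local[2]")
    val ssc = new StreamingContext(conf, Seconds(2))

    // Returns a ReceiverInputDStream[Array[Byte]], as described in the Scaladoc above.
    val kinesisStream = KinesisUtils.createStream(
      ssc, "mySparkStream", "https://kinesis.us-east-1.amazonaws.com",
      Seconds(2), InitialPositionInStream.LATEST, StorageLevel.MEMORY_AND_DISK_2)
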
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 4c696d3d38..a26c2c90cb 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -290,9 +290,9 @@ object Unidoc {
     publish := {},
 
     unidocProjectFilter in(ScalaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, catalyst, yarn, yarnAlpha),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, catalyst, streamingFlumeSink, yarn, yarnAlpha),
     unidocProjectFilter in(JavaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, bagel, graphx, examples, tools, catalyst, yarn, yarnAlpha),
+      inAnyProject -- inProjects(OldDeps.project, repl, bagel, graphx, examples, tools, catalyst, streamingFlumeSink, yarn, yarnAlpha),
 
     // Skip class names containing $ and some internal packages in Javadocs
     unidocAllSources in (JavaUnidoc, unidoc) := {
@@ -314,7 +314,7 @@ object Unidoc {
       "-group", "Core Java API", packageList("api.java", "api.java.function"),
       "-group", "Spark Streaming", packageList(
         "streaming.api.java", "streaming.flume", "streaming.kafka",
-        "streaming.mqtt", "streaming.twitter", "streaming.zeromq"
+        "streaming.mqtt", "streaming.twitter", "streaming.zeromq", "streaming.kinesis"
       ),
       "-group", "MLlib", packageList(
         "mllib.classification", "mllib.clustering", "mllib.evaluation.binary", "mllib.linalg",
-- 
GitLab