Commit a2346219 authored by Denny

Adjust Kafka code to work with new streaming changes.

parent 15df4b0e
@@ -365,8 +365,8 @@ extends Serializable with Logging {
         }
       }
     }
-    logInfo("Updated checkpoint data for time " + currentTime + ", " + checkpointData.size + " checkpoints, "
-      + "[" + checkpointData.mkString(",") + "]")
+    logInfo("Updated checkpoint data for time " + currentTime + ", " + checkpointData.rdds.size + " checkpoints, "
+      + "[" + checkpointData.rdds.mkString(",") + "]")
   }

   /**
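The updated log line reads the checkpoint entries through a rdds field instead of treating checkpointData itself as the collection. A minimal sketch of that access pattern follows; it is not part of the commit, and everything except the rdds name (the wrapper class, key and value types, and sample values) is an assumption for illustration.

    import scala.collection.mutable.HashMap

    // Hypothetical stand-in for the checkpoint-data wrapper implied by the diff;
    // only the `rdds` field name comes from the commit.
    case class DemoCheckpointData(rdds: HashMap[Long, String])

    object CheckpointLogSketch {
      def main(args: Array[String]): Unit = {
        val checkpointData = DemoCheckpointData(HashMap(1000L -> "file:/tmp/checkpoint/rdd-1000"))
        val currentTime = 2000L
        // Same shape as the updated logInfo call: the count and the listing now come from checkpointData.rdds.
        println("Updated checkpoint data for time " + currentTime + ", " + checkpointData.rdds.size + " checkpoints, "
          + "[" + checkpointData.rdds.mkString(",") + "]")
      }
    }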
@@ -4,6 +4,7 @@ import java.util.Properties
 import kafka.message.Message
 import kafka.producer.SyncProducerConfig
 import kafka.producer._
+import spark.SparkContext
 import spark.streaming._
 import spark.streaming.StreamingContext._
 import spark.storage.StorageLevel
@@ -19,9 +20,9 @@ object KafkaWordCount {
     val Array(master, hostname, port, group, topics, numThreads) = args
-    val ssc = new StreamingContext(master, "KafkaWordCount")
+    val sc = new SparkContext(master, "KafkaWordCount")
+    val ssc = new StreamingContext(sc, Seconds(2))
     ssc.checkpoint("checkpoint")
-    ssc.setBatchDuration(Seconds(2))
     val topicpMap = topics.split(",").map((_,numThreads.toInt)).toMap
     val lines = ssc.kafkaStream[String](hostname, port.toInt, group, topicpMap)
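The second and third hunks show the streaming change the commit message refers to: the batch interval is no longer applied with setBatchDuration, and the StreamingContext is now built on top of an explicit SparkContext. A minimal before/after sketch is below; it is assembled from the hunks above, assumes the spark and spark-streaming artifacts of this era are on the classpath, and uses "local[2]" only as a placeholder master URL.

    import spark.SparkContext
    import spark.streaming._

    object StreamingContextMigrationSketch {
      def main(args: Array[String]): Unit = {
        // "local[2]" is a placeholder master URL for illustration.
        val master = if (args.nonEmpty) args(0) else "local[2]"

        // Before: the context was built straight from the master URL and the
        // batch interval was applied afterwards.
        //   val ssc = new StreamingContext(master, "KafkaWordCount")
        //   ssc.setBatchDuration(Seconds(2))

        // After: create a SparkContext first and pass the batch interval to the constructor.
        val sc = new SparkContext(master, "KafkaWordCount")
        val ssc = new StreamingContext(sc, Seconds(2))
        ssc.checkpoint("checkpoint")
      }
    }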