diff --git a/extras/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala b/extras/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
index 640ca049e2ec49b00ee459acf4975ff80c98d3b6..df77f4be9db1d8257a2c926857794051d09e53d8 100644
--- a/extras/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
+++ b/extras/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
@@ -119,7 +119,8 @@ object KinesisWordCountASL extends Logging {
     val batchInterval = Milliseconds(2000)
 
     // Kinesis checkpoint interval is the interval at which the DynamoDB is updated with information
-    //on sequence number of records that have been received. Same as batchInterval for this example.
+    // on sequence number of records that have been received. Same as batchInterval for this
+    // example.
     val kinesisCheckpointInterval = batchInterval
 
     // Get the region name from the endpoint URL to save Kinesis Client Library metadata in
@@ -173,7 +174,8 @@ object KinesisWordProducerASL {
     if (args.length != 4) {
       System.err.println(
         """
-          |Usage: KinesisWordProducerASL <stream-name> <endpoint-url> <records-per-sec> <words-per-record>
+          |Usage: KinesisWordProducerASL <stream-name> <endpoint-url> <records-per-sec>
+            <words-per-record>
          |
          |  <stream-name> is the name of the Kinesis stream
          |  <endpoint-url> is the endpoint of the Kinesis service
diff --git a/extras/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisReceiver.scala b/extras/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisReceiver.scala
index 01608fbd3fd318768c55f8ad5871085af0e207b0..90164490efb2e9603d6da1060846ed450be61a46 100644
--- a/extras/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisReceiver.scala
+++ b/extras/kinesis-asl/src/main/scala/org/apache/spark/streaming/kinesis/KinesisReceiver.scala
@@ -82,8 +82,8 @@ private[kinesis] class KinesisReceiver(
    */
 
   /**
-   * workerId is used by the KCL should be based on the ip address of the actual Spark Worker where this code runs
-   * (not the driver's IP address.)
+   * workerId is used by the KCL should be based on the ip address of the actual Spark Worker
+   * where this code runs (not the driver's IP address.)
    */
   private var workerId: String = null