diff --git a/examples/src/main/scala/spark/streaming/examples/QueueStream.scala b/examples/src/main/scala/spark/streaming/examples/QueueStream.scala
index d83db7784d4ab81a3be1d92d0e4803f5762d8946..e9cb7b55ea1320f06b8a868088c3965899a09589 100644
--- a/examples/src/main/scala/spark/streaming/examples/QueueStream.scala
+++ b/examples/src/main/scala/spark/streaming/examples/QueueStream.scala
@@ -30,7 +30,7 @@ object QueueStream {
     
     // Create and push some RDDs into
     for (i <- 1 to 30) {
-      rddQueue += sc.sparkContext.makeRDD(1 to 1000, 10)
+      rddQueue += ssc.sparkContext.makeRDD(1 to 1000, 10)
       Thread.sleep(1000)
     }
     ssc.stop()
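
The root cause here is that no `sc` value exists in these examples; the underlying SparkContext has to be reached through the StreamingContext as `ssc.sparkContext`. Below is a minimal, self-contained sketch of the corrected QueueStream pattern, assuming the pre-1.0 `spark.*` package layout implied by the file paths; the `local[2]` master, batch interval, and object name are illustrative, not part of the patch.

import scala.collection.mutable.SynchronizedQueue

import spark.RDD
import spark.streaming.{Seconds, StreamingContext}
import spark.streaming.StreamingContext._   // pair DStream operations such as reduceByKey

object QueueStreamSketch {
  def main(args: Array[String]) {
    // The StreamingContext owns the underlying SparkContext; there is no separate `sc`.
    val ssc = new StreamingContext("local[2]", "QueueStreamSketch", Seconds(1))

    // Queue that queueStream polls once per batch interval for new RDDs.
    val rddQueue = new SynchronizedQueue[RDD[Int]]()
    val inputStream = ssc.queueStream(rddQueue)
    inputStream.map(x => (x % 10, 1)).reduceByKey(_ + _).print()
    ssc.start()

    // Build RDDs through ssc.sparkContext, as in the fix above.
    for (i <- 1 to 30) {
      rddQueue += ssc.sparkContext.makeRDD(1 to 1000, 10)
      Thread.sleep(1000)
    }
    ssc.stop()
  }
}
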
diff --git a/examples/src/main/scala/spark/streaming/examples/RawNetworkGrep.scala b/examples/src/main/scala/spark/streaming/examples/RawNetworkGrep.scala
index cce00014261ee1a002e8b339c620e61db8205b9f..49b3223eecd19bbe7ea1431b404759bca806a68a 100644
--- a/examples/src/main/scala/spark/streaming/examples/RawNetworkGrep.scala
+++ b/examples/src/main/scala/spark/streaming/examples/RawNetworkGrep.scala
@@ -34,7 +34,7 @@ object RawNetworkGrep {
     val ssc = new StreamingContext(master, "RawNetworkGrep", Milliseconds(batchMillis))
 
     // Warm up the JVMs on master and slave for JIT compilation to kick in
-    RawTextHelper.warmUp(sc.sparkContext)
+    RawTextHelper.warmUp(ssc.sparkContext)
 
     val rawStreams = (1 to numStreams).map(_ =>
       ssc.rawSocketStream[String](host, port, StorageLevel.MEMORY_ONLY_SER_2)).toArray
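
The same misnamed reference appears in RawNetworkGrep, where the warm-up pass needs the SparkContext behind `ssc`. `RawTextHelper.warmUp` is Spark's own helper; as an illustration of what such a warm-up amounts to, here is a hedged sketch using only the public RDD API. The object name and the exact jobs it runs are assumptions for illustration, not Spark's implementation.

import spark.SparkContext
import spark.SparkContext._   // pair RDD operations such as reduceByKey

// Illustrative stand-in for RawTextHelper.warmUp: run a few throwaway jobs so the
// JVMs on the master and the workers have JIT-compiled the hot code paths before
// the benchmark's first batch arrives.
object WarmUpSketch {
  def warmUp(sc: SparkContext) {
    for (i <- 1 to 4) {
      sc.parallelize(1 to 200000, 10)
        .map(x => (x % 1331, 1L))
        .reduceByKey(_ + _, 10)
        .count()
    }
  }
}

A caller would invoke it the same way the example does, e.g. WarmUpSketch.warmUp(ssc.sparkContext), before constructing the raw socket streams.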