diff --git a/core/src/main/scala/org/apache/spark/network/Connection.scala b/core/src/main/scala/org/apache/spark/network/Connection.scala
index 16bd00fd189ff33579fdfe1b3695ba0bfa4ed5a1..2f7576c53b482b8f97b31304802dce8e13de3e7a 100644
--- a/core/src/main/scala/org/apache/spark/network/Connection.scala
+++ b/core/src/main/scala/org/apache/spark/network/Connection.scala
@@ -211,7 +211,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
 
     def addMessage(message: Message) {
       messages.synchronized{
-        /* messages += message*/
+        /* messages += message */
         messages.enqueue(message)
         logDebug("Added [" + message + "] to outbox for sending to " +
           "[" + getRemoteConnectionManagerId() + "]")
@@ -222,7 +222,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
       messages.synchronized {
         while (!messages.isEmpty) {
           /* nextMessageToBeUsed = nextMessageToBeUsed % messages.size */
-          /* val message = messages(nextMessageToBeUsed)*/
+          /* val message = messages(nextMessageToBeUsed) */
           val message = messages.dequeue
           val chunk = message.getChunkForSending(defaultChunkSize)
           if (chunk.isDefined) {
@@ -262,7 +262,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
 
   val currentBuffers = new ArrayBuffer[ByteBuffer]()
 
-  /* channel.socket.setSendBufferSize(256 * 1024)*/
+  /* channel.socket.setSendBufferSize(256 * 1024) */
 
   override def getRemoteAddress() = address
 
@@ -355,7 +355,7 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
               }
               case None => {
                 // changeConnectionKeyInterest(0)
-                /* key.interestOps(0)*/
+                /* key.interestOps(0) */
                 return false
               }
             }
@@ -540,10 +540,10 @@ private[spark] class ReceivingConnection(
           return false
         }
 
-        /* logDebug("Read " + bytesRead + " bytes for the buffer")*/
+        /* logDebug("Read " + bytesRead + " bytes for the buffer") */
 
         if (currentChunk.buffer.remaining == 0) {
-          /* println("Filled buffer at " + System.currentTimeMillis)*/
+          /* println("Filled buffer at " + System.currentTimeMillis) */
           val bufferMessage = inbox.getMessageForChunk(currentChunk).get
           if (bufferMessage.isCompletelyReceived) {
             bufferMessage.flip
diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
index 2682f9d0ed7f0ed639f2a9afaf6cc6233853c991..6b0a972f0bbe0d492cede882519caf09c1cd7abb 100644
--- a/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
@@ -505,7 +505,7 @@ private[spark] class ConnectionManager(port: Int, conf: SparkConf,
       }
     }
     handleMessageExecutor.execute(runnable)
-    /* handleMessage(connection, message)*/
+    /* handleMessage(connection, message) */
   }
 
   private def handleClientAuthentication(
@@ -859,14 +859,14 @@ private[spark] object ConnectionManager {
       None
     })
 
-    /* testSequentialSending(manager)*/
-    /* System.gc()*/
+    /* testSequentialSending(manager) */
+    /* System.gc() */
 
-    /* testParallelSending(manager)*/
-    /* System.gc()*/
+    /* testParallelSending(manager) */
+    /* System.gc() */
 
-    /* testParallelDecreasingSending(manager)*/
-    /* System.gc()*/
+    /* testParallelDecreasingSending(manager) */
+    /* System.gc() */
 
     testContinuousSending(manager)
     System.gc()
diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
index e5745d7daa1538ce0e302c392c0ed8202eefa7d1..9d9b9dbdd5331f52e9e04de7e8f3c07b416487be 100644
--- a/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
@@ -47,8 +47,8 @@ private[spark] object ConnectionManagerTest extends Logging{
     val slaves = slavesFile.mkString.split("\n")
     slavesFile.close()
 
-    /* println("Slaves")*/
-    /* slaves.foreach(println)*/
+    /* println("Slaves") */
+    /* slaves.foreach(println) */
     val tasknum = if (args.length > 2) args(2).toInt else slaves.length
     val size = ( if (args.length > 3) (args(3).toInt) else 10 ) * 1024 * 1024 
     val count = if (args.length > 4) args(4).toInt else 3
diff --git a/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
index 17fd931c9f075638af89fa5cd0e414be0eff70ab..2b41c403b2e0a266c54e5cb8b5b2e031108b4895 100644
--- a/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
@@ -27,7 +27,7 @@ private[spark] object ReceiverTest {
     println("Started connection manager with id = " + manager.id)
 
     manager.onReceiveMessage((msg: Message, id: ConnectionManagerId) => {
-      /* println("Received [" + msg + "] from [" + id + "] at " + System.currentTimeMillis)*/
+      /* println("Received [" + msg + "] from [" + id + "] at " + System.currentTimeMillis) */
       val buffer = ByteBuffer.wrap("response".getBytes)
       Some(Message.createBufferMessage(buffer, msg.id))
     })
diff --git a/core/src/main/scala/org/apache/spark/network/SenderTest.scala b/core/src/main/scala/org/apache/spark/network/SenderTest.scala
index 905eddfbb9450422975d0fcb2d4934cfd11a9d92..14c094c6177d584c7cd30ca36c208e5d42a22306 100644
--- a/core/src/main/scala/org/apache/spark/network/SenderTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/SenderTest.scala
@@ -50,7 +50,7 @@ private[spark] object SenderTest {
     (0 until count).foreach(i => {
       val dataMessage = Message.createBufferMessage(buffer.duplicate)
       val startTime = System.currentTimeMillis
-      /* println("Started timer at " + startTime)*/
+      /* println("Started timer at " + startTime) */
       val responseStr = manager.sendMessageReliablySync(targetConnectionManagerId, dataMessage)
         .map { response =>
           val buffer = response.asInstanceOf[BufferMessage].buffers(0)
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
index f3c93d4214ad02352ae6868f816d1a88b45c8dd0..70d62b66a482980b29090cd5bc642350621ad5bf 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
@@ -25,7 +25,7 @@ import org.apache.spark.scheduler.Schedulable
 import org.apache.spark.ui.Page._
 import org.apache.spark.ui.UIUtils
 
-/** Page showing list of all ongoing and recently finished stages and pools*/
+/** Page showing list of all ongoing and recently finished stages and pools */
 private[ui] class IndexPage(parent: JobProgressUI) {
   private val appName = parent.appName
   private val basePath = parent.basePath
diff --git a/core/src/main/scala/org/apache/spark/util/MutablePair.scala b/core/src/main/scala/org/apache/spark/util/MutablePair.scala
index a898824cff0ca3327d294d6c8350b1c5557b1040..a6b39247a54ca34ede2abb268e328e9e12b5fc3c 100644
--- a/core/src/main/scala/org/apache/spark/util/MutablePair.scala
+++ b/core/src/main/scala/org/apache/spark/util/MutablePair.scala
@@ -24,8 +24,8 @@ package org.apache.spark.util
  * @param  _1   Element 1 of this MutablePair
  * @param  _2   Element 2 of this MutablePair
  */
-case class MutablePair[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef*/) T1,
-                       @specialized(Int, Long, Double, Char, Boolean/* , AnyRef*/) T2]
+case class MutablePair[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef */) T1,
+                       @specialized(Int, Long, Double, Char, Boolean/* , AnyRef */) T2]
   (var _1: T1, var _2: T2)
   extends Product2[T1, T2]
 {
diff --git a/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala
index 0ac46c31c24c862d058f99e684c1d0a9868bd099..251f65fe4df9c7706d397ce503dab9b8783c2a2e 100644
--- a/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala
@@ -21,7 +21,7 @@ import java.net.ServerSocket
 import java.io.PrintWriter
 import util.Random
 
-/** Represents a page view on a website with associated dimension data.*/
+/** Represents a page view on a website with associated dimension data. */
 class PageView(val url : String, val status : Int, val zipCode : Int, val userID : Int)
     extends Serializable {
   override def toString() : String = {
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala
index ce3ef47cfe4bcb5137c367c430b45f82d69542e5..34012b846e21e925ffc1158becfe39844df22b62 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumeInputDStream.scala
@@ -127,7 +127,7 @@ class FlumeEventServer(receiver : FlumeReceiver) extends AvroSourceProtocol {
 }
 
 /** A NetworkReceiver which listens for events using the
-  * Flume Avro interface.*/
+  * Flume Avro interface. */
 private[streaming]
 class FlumeReceiver(
     host: String,
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/impl/MessageToPartition.scala b/graphx/src/main/scala/org/apache/spark/graphx/impl/MessageToPartition.scala
index bebe3740bc6c043559497f08bbde82c9df851321..9d4f3750cb8e42c753fb571ddd472902de4dcaee 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/impl/MessageToPartition.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/impl/MessageToPartition.scala
@@ -45,7 +45,7 @@ class VertexBroadcastMsg[@specialized(Int, Long, Double, Boolean) T](
  * @param data value to send
  */
 private[graphx]
-class MessageToPartition[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef*/) T](
+class MessageToPartition[@specialized(Int, Long, Double, Char, Boolean/* , AnyRef */) T](
     @transient var partition: PartitionID,
     var data: T)
   extends Product2[PartitionID, T] with Serializable {
diff --git a/project/project/SparkPluginBuild.scala b/project/project/SparkPluginBuild.scala
index 43361aa2b4c412950b3f350705b7d93708fc77d9..5a307044ba123556a225c7d98536384f58c42bb2 100644
--- a/project/project/SparkPluginBuild.scala
+++ b/project/project/SparkPluginBuild.scala
@@ -34,8 +34,7 @@ object SparkPluginDef extends Build {
     version              :=  sparkVersion,
     scalaVersion         :=  "2.10.3",
     scalacOptions        :=  Seq("-unchecked", "-deprecation"),
-    libraryDependencies  ++= Dependencies.scalaStyle,
-    sbtPlugin            :=  true
+    libraryDependencies  ++= Dependencies.scalaStyle
   )
 
   object Dependencies {
diff --git a/project/spark-style/src/main/scala/org/apache/spark/scalastyle/SparkSpaceAfterCommentStyleCheck.scala b/project/spark-style/src/main/scala/org/apache/spark/scalastyle/SparkSpaceAfterCommentStartChecker.scala
similarity index 89%
rename from project/spark-style/src/main/scala/org/apache/spark/scalastyle/SparkSpaceAfterCommentStyleCheck.scala
rename to project/spark-style/src/main/scala/org/apache/spark/scalastyle/SparkSpaceAfterCommentStartChecker.scala
index 2f3c1a182814d5e43a8d6c5349cc16447b7b4f40..80d3faa3fe749cccaa14cbe1cfdbd46c46b7ba5a 100644
--- a/project/spark-style/src/main/scala/org/apache/spark/scalastyle/SparkSpaceAfterCommentStyleCheck.scala
+++ b/project/spark-style/src/main/scala/org/apache/spark/scalastyle/SparkSpaceAfterCommentStartChecker.scala
@@ -25,13 +25,15 @@ import scalariform.lexer.{MultiLineComment, ScalaDocComment, SingleLineComment,
 import scalariform.parser.CompilationUnit
 
 class SparkSpaceAfterCommentStartChecker extends ScalariformChecker {
-  val errorKey: String = "insert.a.single.space.after.comment.start"
+  val errorKey: String = "insert.a.single.space.after.comment.start.and.before.end"
 
   private def multiLineCommentRegex(comment: Token) =
-    Pattern.compile( """/\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches()
+    Pattern.compile( """/\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches() ||
+      Pattern.compile( """/\*.*\S\*/""", Pattern.DOTALL).matcher(comment.text.trim).matches()
 
   private def scalaDocPatternRegex(comment: Token) =
-    Pattern.compile( """/\*\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches()
+    Pattern.compile( """/\*\*\S+.*""", Pattern.DOTALL).matcher(comment.text.trim).matches() ||
+      Pattern.compile( """/\*\*.*\S\*/""", Pattern.DOTALL).matcher(comment.text.trim).matches()
 
   private def singleLineCommentRegex(comment: Token): Boolean =
     comment.text.trim.matches( """//\S+.*""") && !comment.text.trim.matches( """///+""")
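
Aside, not part of the patch: a minimal standalone sketch of what the tightened checker now flags. The two `Pattern` literals are copied verbatim from the hunk above; the object name, helper, and sample comments are illustrative assumptions. `Pattern.DOTALL` lets `.` span newlines, so multi-line comments are checked as a whole.

    import java.util.regex.Pattern

    // Standalone sketch: the two Pattern literals below are copied from
    // SparkSpaceAfterCommentStartChecker; everything else is hypothetical.
    object CommentSpacingDemo extends App {
      // Flags comments whose opening /* is immediately followed by a non-space.
      private val noSpaceAfterStart =
        Pattern.compile("""/\*\S+.*""", Pattern.DOTALL)
      // Flags comments whose closing */ is immediately preceded by a non-space.
      private val noSpaceBeforeEnd =
        Pattern.compile("""/\*.*\S\*/""", Pattern.DOTALL)

      private def violates(comment: String): Boolean = {
        val trimmed = comment.trim
        noSpaceAfterStart.matcher(trimmed).matches() ||
          noSpaceBeforeEnd.matcher(trimmed).matches()
      }

      println(violates("/* messages += message*/"))  // true: no space before */
      println(violates("/*messages += message */"))  // true: no space after /*
      println(violates("/* messages += message */")) // false: compliant
    }

The second pattern is the new check this commit introduces, which is why every `*/` touched in the hunks above gains a leading space.
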
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala
index 67a34e1f21cc72def7ef69a0f5642b5036c8df89..4ab755c096bd8fb92f30bd755f9d64128d0f8509 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala
@@ -57,19 +57,19 @@ import org.apache.spark.sql.catalyst.types._
 case class ParquetRelation(tableName: String, path: String)
   extends BaseRelation with MultiInstanceRelation {
 
-  /** Schema derived from ParquetFile **/
+  /** Schema derived from ParquetFile */
   def parquetSchema: MessageType =
     ParquetTypesConverter
       .readMetaData(new Path(path))
       .getFileMetaData
       .getSchema
 
-  /** Attributes **/
+  /** Attributes */
   val attributes =
     ParquetTypesConverter
     .convertToAttributes(parquetSchema)
 
-  /** Output **/
+  /** Output */
   override val output = attributes
 
   // Parquet files have no concepts of keys, therefore no Partitioner