diff --git a/core/src/main/java/org/apache/spark/api/java/function/package-info.java b/core/src/main/java/org/apache/spark/api/java/function/package-info.java
index 463a42f23342ce628a3c2e069a656fa3d22427dc..eefb29aca9d4624346cbf1f7ae984bc4711aab8e 100644
--- a/core/src/main/java/org/apache/spark/api/java/function/package-info.java
+++ b/core/src/main/java/org/apache/spark/api/java/function/package-info.java
@@ -20,4 +20,4 @@
  * these interfaces to pass functions to various Java API methods for Spark. Please visit Spark's
  * Java programming guide for more details.
  */
-package org.apache.spark.api.java.function;
\ No newline at end of file
+package org.apache.spark.api.java.function;
diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
index c4041a97e86ad265fbcb576382c54288987f95b8..2be5a16b2d1e4d52c34633600e331ca510a76055 100644
--- a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
+++ b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
@@ -256,8 +256,8 @@ final class ShuffleExternalSorter extends MemoryConsumer {
     final long spillSize = freeMemory();
     inMemSorter.reset();
     // Reset the in-memory sorter's pointer array only after freeing up the memory pages holding the
-    // records. Otherwise, if the task is over allocated memory, then without freeing the memory pages,
-    // we might not be able to get memory for the pointer array.
+    // records. Otherwise, if the task is over-allocated memory, then without freeing the memory
+    // pages, we might not be able to get memory for the pointer array.
     taskContext.taskMetrics().incMemoryBytesSpilled(spillSize);
     return spillSize;
   }
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
index 21f2fde79d73a625e499b44150ef8fd45520e803..c44630fbbc2f0a7622cb799420df39eaed80ca5a 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
@@ -22,7 +22,6 @@ import com.google.common.primitives.UnsignedLongs;
 import org.apache.spark.annotation.Private;
 import org.apache.spark.unsafe.types.ByteArray;
 import org.apache.spark.unsafe.types.UTF8String;
-import org.apache.spark.util.Utils;
 
 @Private
 public class PrefixComparators {
@@ -69,7 +68,7 @@ public class PrefixComparators {
    * Provides radix sort parameters. Comparators implementing this also are indicating that the
    * ordering they define is compatible with radix sort.
    */
-  public static abstract class RadixSortSupport extends PrefixComparator {
+  public abstract static class RadixSortSupport extends PrefixComparator {
     /** @return Whether the sort should be descending in binary sort order. */
     public abstract boolean sortDescending();
 
@@ -82,37 +81,37 @@ public class PrefixComparators {
   //
 
   public static final class UnsignedPrefixComparator extends RadixSortSupport {
-    @Override public final boolean sortDescending() { return false; }
-    @Override public final boolean sortSigned() { return false; }
+    @Override public boolean sortDescending() { return false; }
+    @Override public boolean sortSigned() { return false; }
     @Override
-    public final int compare(long aPrefix, long bPrefix) {
+    public int compare(long aPrefix, long bPrefix) {
       return UnsignedLongs.compare(aPrefix, bPrefix);
     }
   }
 
   public static final class UnsignedPrefixComparatorDesc extends RadixSortSupport {
-    @Override public final boolean sortDescending() { return true; }
-    @Override public final boolean sortSigned() { return false; }
+    @Override public boolean sortDescending() { return true; }
+    @Override public boolean sortSigned() { return false; }
     @Override
-    public final int compare(long bPrefix, long aPrefix) {
+    public int compare(long bPrefix, long aPrefix) {
       return UnsignedLongs.compare(aPrefix, bPrefix);
     }
   }
 
   public static final class SignedPrefixComparator extends RadixSortSupport {
-    @Override public final boolean sortDescending() { return false; }
-    @Override public final boolean sortSigned() { return true; }
+    @Override public boolean sortDescending() { return false; }
+    @Override public boolean sortSigned() { return true; }
     @Override
-    public final int compare(long a, long b) {
+    public int compare(long a, long b) {
       return (a < b) ? -1 : (a > b) ? 1 : 0;
     }
   }
 
   public static final class SignedPrefixComparatorDesc extends RadixSortSupport {
-    @Override public final boolean sortDescending() { return true; }
-    @Override public final boolean sortSigned() { return true; }
+    @Override public boolean sortDescending() { return true; }
+    @Override public boolean sortSigned() { return true; }
     @Override
-    public final int compare(long b, long a) {
+    public int compare(long b, long a) {
       return (a < b) ? -1 : (a > b) ? 1 : 0;
     }
   }
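
Note on the PrefixComparators hunk above: it reorders "static abstract" to "abstract static", matching the modifier order recommended by the Java Language Specification, and drops "final" from methods whose enclosing class is already declared final, where the keyword is redundant. The following is a minimal sketch of both conventions, not taken from this patch; the class names are hypothetical, and the assumption is that rules along the lines of Checkstyle's ModifierOrder and RedundantModifier checks are what the linter enforces (only NewlineAtEndOfFile is added to checkstyle.xml here).

    public class ModifierStyleExample {

      // The JLS-recommended order puts the access modifier first, then "abstract",
      // then "static" -- hence "public abstract static", not "public static abstract".
      public abstract static class Comparator {
        public abstract boolean descending();
      }

      // Because the class itself is final, repeating "final" on its methods is redundant.
      public static final class Ascending extends Comparator {
        @Override public boolean descending() { return false; }
      }

      public static void main(String[] args) {
        System.out.println(new Ascending().descending()); // prints "false"
      }
    }
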
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RadixSort.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RadixSort.java
index 3357b8e4749761366bb8e1f349d9e2c827d49c45..4f3f0de7b8d7e15dba79343636265e8d388f894e 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RadixSort.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/RadixSort.java
@@ -16,7 +16,7 @@
  */
 
 package org.apache.spark.util.collection.unsafe.sort;
- 
+
 import org.apache.spark.unsafe.Platform;
 import org.apache.spark.unsafe.array.LongArray;
 
@@ -227,7 +227,7 @@ public class RadixSort {
     }
     return counts;
   }
-  
+
   /**
    * Specialization of sortAtByte() for key-prefix arrays.
    */
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
index 3c1cd39dc22602091a29f9a644ca450c3bceb5b6..8b6c96a4c4e63b8ae86a56ec80b8e87e2d241160 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
@@ -212,8 +212,8 @@ public final class UnsafeExternalSorter extends MemoryConsumer {
     // written to disk. This also counts the space needed to store the sorter's pointer array.
     inMemSorter.reset();
     // Reset the in-memory sorter's pointer array only after freeing up the memory pages holding the
-    // records. Otherwise, if the task is over allocated memory, then without freeing the memory pages,
-    // we might not be able to get memory for the pointer array.
+    // records. Otherwise, if the task is over-allocated memory, then without freeing the memory
+    // pages, we might not be able to get memory for the pointer array.
 
     taskContext.taskMetrics().incMemoryBytesSpilled(spillSize);
     totalSpillBytes += spillSize;
diff --git a/core/src/main/scala/org/apache/spark/annotation/package-info.java b/core/src/main/scala/org/apache/spark/annotation/package-info.java
index 12c7afe6f108a007ec632b95780f7a29b602543d..9efdccf6b04086e2d4e4fd3a3c87e9e376290838 100644
--- a/core/src/main/scala/org/apache/spark/annotation/package-info.java
+++ b/core/src/main/scala/org/apache/spark/annotation/package-info.java
@@ -20,4 +20,4 @@
  * This package consist of these annotations, which are used project wide and are reflected in
  * Scala and Java docs.
  */
-package org.apache.spark.annotation;
\ No newline at end of file
+package org.apache.spark.annotation;
diff --git a/core/src/main/scala/org/apache/spark/api/java/package-info.java b/core/src/main/scala/org/apache/spark/api/java/package-info.java
index 10a480fc78e4427962ca1036cc855a5655bb78a5..699181cafae84c3874da9f6dc6b541e1bef3803b 100644
--- a/core/src/main/scala/org/apache/spark/api/java/package-info.java
+++ b/core/src/main/scala/org/apache/spark/api/java/package-info.java
@@ -18,4 +18,4 @@
 /**
  * Spark Java programming APIs.
  */
-package org.apache.spark.api.java;
\ No newline at end of file
+package org.apache.spark.api.java;
diff --git a/core/src/main/scala/org/apache/spark/broadcast/package-info.java b/core/src/main/scala/org/apache/spark/broadcast/package-info.java
index 1510e6e84c7a8abb76f99475f7c83db61f7fc9c0..bbf4a684a19e7bb6664029c385219c6cafa3dd17 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/package-info.java
+++ b/core/src/main/scala/org/apache/spark/broadcast/package-info.java
@@ -18,4 +18,4 @@
 /**
  * Spark's broadcast variables, used to broadcast immutable datasets to all nodes.
  */
-package org.apache.spark.broadcast;
\ No newline at end of file
+package org.apache.spark.broadcast;
diff --git a/core/src/main/scala/org/apache/spark/executor/package-info.java b/core/src/main/scala/org/apache/spark/executor/package-info.java
index dd3b6815fb45fd992197031115bac5da524f4928..fb280964c4900f371f7d818fae33d1dff3137329 100644
--- a/core/src/main/scala/org/apache/spark/executor/package-info.java
+++ b/core/src/main/scala/org/apache/spark/executor/package-info.java
@@ -18,4 +18,4 @@
 /**
  * Package for executor components used with various cluster managers.
  */
-package org.apache.spark.executor;
\ No newline at end of file
+package org.apache.spark.executor;
diff --git a/core/src/main/scala/org/apache/spark/io/package-info.java b/core/src/main/scala/org/apache/spark/io/package-info.java
index bea1bfdb637515d86b7d57b47fbe30ecd6299329..1a466602806e52b8e534d6acccb51383fd56dcf0 100644
--- a/core/src/main/scala/org/apache/spark/io/package-info.java
+++ b/core/src/main/scala/org/apache/spark/io/package-info.java
@@ -18,4 +18,4 @@
 /**
  * IO codecs used for compression.
  */
-package org.apache.spark.io;
\ No newline at end of file
+package org.apache.spark.io;
diff --git a/core/src/main/scala/org/apache/spark/rdd/package-info.java b/core/src/main/scala/org/apache/spark/rdd/package-info.java
index 176cc58179fb09cd69286f03c8963c8330cfc245..d9aa9bebe56da97d8efa5bcbfd8af21166be8af9 100644
--- a/core/src/main/scala/org/apache/spark/rdd/package-info.java
+++ b/core/src/main/scala/org/apache/spark/rdd/package-info.java
@@ -18,4 +18,4 @@
 /**
  * Provides implementation's of various RDDs.
  */
-package org.apache.spark.rdd;
\ No newline at end of file
+package org.apache.spark.rdd;
diff --git a/core/src/main/scala/org/apache/spark/scheduler/package-info.java b/core/src/main/scala/org/apache/spark/scheduler/package-info.java
index 5b4a628d3cee4650c5d389a0430ef079e952b0e3..90fc65251eae05eff196f38242fb18c47e3aa934 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/package-info.java
+++ b/core/src/main/scala/org/apache/spark/scheduler/package-info.java
@@ -18,4 +18,4 @@
 /**
  * Spark's DAG scheduler.
  */
-package org.apache.spark.scheduler;
\ No newline at end of file
+package org.apache.spark.scheduler;
diff --git a/core/src/main/scala/org/apache/spark/util/package-info.java b/core/src/main/scala/org/apache/spark/util/package-info.java
index 819f54ee41a7929cb18005b7988e6ef7e30a3708..4c5d33d88d2b32219fdb322a84294285c49c70dc 100644
--- a/core/src/main/scala/org/apache/spark/util/package-info.java
+++ b/core/src/main/scala/org/apache/spark/util/package-info.java
@@ -18,4 +18,4 @@
 /**
  * Spark utilities.
  */
-package org.apache.spark.util;
\ No newline at end of file
+package org.apache.spark.util;
diff --git a/core/src/main/scala/org/apache/spark/util/random/package-info.java b/core/src/main/scala/org/apache/spark/util/random/package-info.java
index 62c3762dd11b638123d34689ca085a47cc7bef3c..e4f0c0febbbb34c4e01a57e796ada8f8ed9f001f 100644
--- a/core/src/main/scala/org/apache/spark/util/random/package-info.java
+++ b/core/src/main/scala/org/apache/spark/util/random/package-info.java
@@ -18,4 +18,4 @@
 /**
  * Utilities for random number generation.
  */
-package org.apache.spark.util.random;
\ No newline at end of file
+package org.apache.spark.util.random;
diff --git a/dev/checkstyle.xml b/dev/checkstyle.xml
index b66dca9041f2f6ce01ad3ee72e9cc2a706711b1a..3de6aa91dcd5142284a287276931e3eb445ace1e 100644
--- a/dev/checkstyle.xml
+++ b/dev/checkstyle.xml
@@ -64,6 +64,8 @@
         <property name="message" value="No trailing whitespace allowed."/>
     </module>
 
+    <module name="NewlineAtEndOfFile"/>
+
     <module name="TreeWalker">
         <module name="OuterTypeFilename"/>
         <module name="IllegalTokenText">
diff --git a/external/flume/src/main/scala/org/apache/spark/streaming/flume/package-info.java b/external/flume/src/main/scala/org/apache/spark/streaming/flume/package-info.java
index d31aa5f5c096cd2934f324a2b8b8909c1b3bb4bc..4a5da226aded3c9769a72e05ef3a0b1b9b01d10f 100644
--- a/external/flume/src/main/scala/org/apache/spark/streaming/flume/package-info.java
+++ b/external/flume/src/main/scala/org/apache/spark/streaming/flume/package-info.java
@@ -18,4 +18,4 @@
 /**
  * Spark streaming receiver for Flume.
  */
-package org.apache.spark.streaming.flume;
\ No newline at end of file
+package org.apache.spark.streaming.flume;
diff --git a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/package-info.java b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/package-info.java
index 947bae115a62021a6da83c145afbb16d8f1aa06c..2e5ab0fb3bef9b7ef638bccaa81c9505883436ec 100644
--- a/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/package-info.java
+++ b/external/kafka/src/main/scala/org/apache/spark/streaming/kafka/package-info.java
@@ -18,4 +18,4 @@
 /**
  * Kafka receiver for spark streaming.
  */
-package org.apache.spark.streaming.kafka;
\ No newline at end of file
+package org.apache.spark.streaming.kafka;
diff --git a/external/kinesis-asl/src/main/resources/log4j.properties b/external/kinesis-asl/src/main/resources/log4j.properties
index 8118d12c5d474f1dc5787b495a2562902eb93e02..4f5ea7bafe48fc78d38d03a64d42dff9c2234f12 100644
--- a/external/kinesis-asl/src/main/resources/log4j.properties
+++ b/external/kinesis-asl/src/main/resources/log4j.properties
@@ -34,4 +34,4 @@ log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}:
 log4j.logger.org.spark_project.jetty=WARN
 log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
 log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
-log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
\ No newline at end of file
+log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/package-info.java b/graphx/src/main/scala/org/apache/spark/graphx/package-info.java
index f659cc518ebd38d37f1b5a46ebe4cde62a4b734f..7c63447070fc4808d0f6dd13ac372eedbc269534 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/package-info.java
+++ b/graphx/src/main/scala/org/apache/spark/graphx/package-info.java
@@ -19,4 +19,4 @@
  * ALPHA COMPONENT
  * GraphX is a graph processing framework built on top of Spark.
  */
-package org.apache.spark.graphx;
\ No newline at end of file
+package org.apache.spark.graphx;
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/util/package-info.java b/graphx/src/main/scala/org/apache/spark/graphx/util/package-info.java
index 90cd1d46db177dab91946b14671e1264f9343379..86b427e31d269e7953a4e101bf364bbf29afee17 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/util/package-info.java
+++ b/graphx/src/main/scala/org/apache/spark/graphx/util/package-info.java
@@ -18,4 +18,4 @@
 /**
  * Collections of utilities used by graphx.
  */
-package org.apache.spark.graphx.util;
\ No newline at end of file
+package org.apache.spark.graphx.util;
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/package-info.java b/mllib/src/main/scala/org/apache/spark/mllib/package-info.java
index 4991bc9e972c0f907a9e52ab4f4a9982d851efac..5962efa96baf3ec425578297c0d3f4ac127151fa 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/package-info.java
+++ b/mllib/src/main/scala/org/apache/spark/mllib/package-info.java
@@ -18,4 +18,4 @@
 /**
  * Spark's machine learning library.
  */
-package org.apache.spark.mllib;
\ No newline at end of file
+package org.apache.spark.mllib;
diff --git a/mllib/src/test/java/org/apache/spark/ml/classification/JavaRandomForestClassifierSuite.java b/mllib/src/test/java/org/apache/spark/ml/classification/JavaRandomForestClassifierSuite.java
index 5aec52ac72b180773a84cf66fc25a135852409d3..4f40fd65b9f11f3c8c8787ca25f5505cf24327cd 100644
--- a/mllib/src/test/java/org/apache/spark/ml/classification/JavaRandomForestClassifierSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/classification/JavaRandomForestClassifierSuite.java
@@ -81,15 +81,15 @@ public class JavaRandomForestClassifierSuite implements Serializable {
     for (String featureSubsetStrategy: RandomForestClassifier.supportedFeatureSubsetStrategies()) {
       rf.setFeatureSubsetStrategy(featureSubsetStrategy);
     }
-    String realStrategies[] = {".1", ".10", "0.10", "0.1", "0.9", "1.0"};
+    String[] realStrategies = {".1", ".10", "0.10", "0.1", "0.9", "1.0"};
     for (String strategy: realStrategies) {
       rf.setFeatureSubsetStrategy(strategy);
     }
-    String integerStrategies[] = {"1", "10", "100", "1000", "10000"};
+    String[] integerStrategies = {"1", "10", "100", "1000", "10000"};
     for (String strategy: integerStrategies) {
       rf.setFeatureSubsetStrategy(strategy);
     }
-    String invalidStrategies[] = {"-.1", "-.10", "-0.10", ".0", "0.0", "1.1", "0"};
+    String[] invalidStrategies = {"-.1", "-.10", "-0.10", ".0", "0.0", "1.1", "0"};
     for (String strategy: invalidStrategies) {
       try {
         rf.setFeatureSubsetStrategy(strategy);
diff --git a/mllib/src/test/java/org/apache/spark/ml/regression/JavaRandomForestRegressorSuite.java b/mllib/src/test/java/org/apache/spark/ml/regression/JavaRandomForestRegressorSuite.java
index a8736669f72e76632d50d5da7348a461831bdcb9..38b895f1fdd7d1bfe92cf6df372352c8cc40bbd9 100644
--- a/mllib/src/test/java/org/apache/spark/ml/regression/JavaRandomForestRegressorSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/regression/JavaRandomForestRegressorSuite.java
@@ -81,15 +81,15 @@ public class JavaRandomForestRegressorSuite implements Serializable {
     for (String featureSubsetStrategy: RandomForestRegressor.supportedFeatureSubsetStrategies()) {
       rf.setFeatureSubsetStrategy(featureSubsetStrategy);
     }
-    String realStrategies[] = {".1", ".10", "0.10", "0.1", "0.9", "1.0"};
+    String[] realStrategies = {".1", ".10", "0.10", "0.1", "0.9", "1.0"};
     for (String strategy: realStrategies) {
       rf.setFeatureSubsetStrategy(strategy);
     }
-    String integerStrategies[] = {"1", "10", "100", "1000", "10000"};
+    String[] integerStrategies = {"1", "10", "100", "1000", "10000"};
     for (String strategy: integerStrategies) {
       rf.setFeatureSubsetStrategy(strategy);
     }
-    String invalidStrategies[] = {"-.1", "-.10", "-0.10", ".0", "0.0", "1.1", "0"};
+    String[] invalidStrategies = {"-.1", "-.10", "-0.10", ".0", "0.0", "1.1", "0"};
     for (String strategy: invalidStrategies) {
       try {
         rf.setFeatureSubsetStrategy(strategy);
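
Note on the two test-suite hunks above: both switch from C-style array declarations (String realStrategies[]) to the Java-style form (String[] realStrategies), which places the brackets on the type rather than the variable. A minimal, self-contained sketch follows; the class name and values are hypothetical, and the assumption is that a bracket-placement rule such as Checkstyle's ArrayTypeStyle check is what flags the old form.

    public class ArrayStyleExample {
      public static void main(String[] args) {
        // C-style declaration, as the tests had it:   String realStrategies[] = {...};
        // Java-style declaration, with the brackets on the type:
        String[] realStrategies = {".1", "0.5", "1.0"};
        for (String strategy : realStrategies) {
          System.out.println(strategy);
        }
      }
    }
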
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
index b224a868454a5f579980ee99c6082ab652a3507e..cbe8f78164ae7a872dfe71ea190dda1ec5321bb6 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java
@@ -38,7 +38,6 @@ import static org.apache.parquet.hadoop.ParquetFileReader.readFooter;
 import static org.apache.parquet.hadoop.ParquetInputFormat.getFilter;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.RecordReader;
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/package-info.java b/streaming/src/main/scala/org/apache/spark/streaming/api/java/package-info.java
index d43d949d76bb3fba94452334445089ae762e8f53..348d21d49ac47df21ab48306295fd61c2e64627c 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/package-info.java
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/package-info.java
@@ -18,4 +18,4 @@
 /**
  * Java APIs for spark streaming.
  */
-package org.apache.spark.streaming.api.java;
\ No newline at end of file
+package org.apache.spark.streaming.api.java;
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/package-info.java b/streaming/src/main/scala/org/apache/spark/streaming/dstream/package-info.java
index 05ca2ddffd3c0b89b890e9a556c0c4f134d80106..4d08afcbfea312354937c8d37dc76fd74ea92da6 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/package-info.java
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/package-info.java
@@ -18,4 +18,4 @@
 /**
  * Various implementations of DStreams.
  */
-package org.apache.spark.streaming.dstream;
\ No newline at end of file
+package org.apache.spark.streaming.dstream;