diff --git a/common/network-common/pom.xml b/common/network-common/pom.xml
index bd507c2cb6c4b9c1a08895b2191bfc48fd07e57f..5444ae6d70a92649cdf4ebfea29c61f9c5cf0773 100644
--- a/common/network-common/pom.xml
+++ b/common/network-common/pom.xml
@@ -66,7 +66,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
diff --git a/common/network-shuffle/pom.xml b/common/network-shuffle/pom.xml
index 810ec10ca05b3d2455320721d28dad18774b3ce7..e736436aec4cf5d08005d3143ba58e317d6fb7d9 100644
--- a/common/network-shuffle/pom.xml
+++ b/common/network-shuffle/pom.xml
@@ -80,7 +80,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>log4j</groupId>
diff --git a/common/network-yarn/pom.xml b/common/network-yarn/pom.xml
index bc83ef24c30ece1c7698d8f88b6cc1b9521a46b6..1fd3af2e6e622bbda0bf2bf9f6a5857dfdcbd9fe 100644
--- a/common/network-yarn/pom.xml
+++ b/common/network-yarn/pom.xml
@@ -48,7 +48,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
 
     <!-- Provided dependencies -->
diff --git a/common/sketch/pom.xml b/common/sketch/pom.xml
index 8bc1f527989413f64e8f2e00dbdcbf51e8391540..bbbb0bd5aa050fe91b2f01a30b83186a247c4578 100644
--- a/common/sketch/pom.xml
+++ b/common/sketch/pom.xml
@@ -38,7 +38,7 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
 
diff --git a/common/tags/pom.xml b/common/tags/pom.xml
index 8e702b4fefe8c0d9f6c51f736991e7403a535a27..14e94eca93b2204b722b1d0474679074164284c4 100644
--- a/common/tags/pom.xml
+++ b/common/tags/pom.xml
@@ -27,12 +27,12 @@
   </parent>
 
   <groupId>org.apache.spark</groupId>
-  <artifactId>spark-test-tags_2.11</artifactId>
+  <artifactId>spark-tags_2.11</artifactId>
   <packaging>jar</packaging>
-  <name>Spark Project Test Tags</name>
+  <name>Spark Project Tags</name>
   <url>http://spark.apache.org/</url>
   <properties>
-    <sbt.project.name>test-tags</sbt.project.name>
+    <sbt.project.name>tags</sbt.project.name>
   </properties>
 
   <dependencies>
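
With main-scope annotations joining the test tags, the module drops its `test-` prefix. A minimal sbt-flavored sketch of what the downstream dependency amounts to; Spark's real sbt build derives module settings from these poms via PomBuild, and `sparkVersion` here is a placeholder, not something defined in this patch:

    // Hedged sbt-style sketch only; the actual build reads this from the pom.
    // `sparkVersion` is a placeholder version value.
    libraryDependencies += "org.apache.spark" %% "spark-tags" % sparkVersion
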
diff --git a/core/src/main/scala/org/apache/spark/annotation/Since.scala b/common/tags/src/main/scala/org/apache/spark/annotation/Since.scala
similarity index 100%
rename from core/src/main/scala/org/apache/spark/annotation/Since.scala
rename to common/tags/src/main/scala/org/apache/spark/annotation/Since.scala
diff --git a/core/src/main/scala/org/apache/spark/annotation/package-info.java b/common/tags/src/main/scala/org/apache/spark/annotation/package-info.java
similarity index 100%
rename from core/src/main/scala/org/apache/spark/annotation/package-info.java
rename to common/tags/src/main/scala/org/apache/spark/annotation/package-info.java
diff --git a/core/src/main/scala/org/apache/spark/annotation/package.scala b/common/tags/src/main/scala/org/apache/spark/annotation/package.scala
similarity index 100%
rename from core/src/main/scala/org/apache/spark/annotation/package.scala
rename to common/tags/src/main/scala/org/apache/spark/annotation/package.scala
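
Relocating `Since.scala` into common/tags lets modules such as mllib-local annotate their public API without pulling in spark-core. As a rough sketch, assuming the annotation is a plain `StaticAnnotation` carrying a version string (the moved file is the authoritative definition; any meta-annotations it carries are omitted here):

    // Minimal sketch of a @Since-style annotation; see the relocated
    // Since.scala for the real definition.
    package org.apache.spark.annotation

    import scala.annotation.StaticAnnotation

    private[spark] class Since(version: String) extends StaticAnnotation

It is then applied at declaration sites, e.g. `@Since("2.0.0") def numRows: Int`, exactly as in the Matrices.scala and Vectors.scala hunks below.
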
diff --git a/common/tags/src/main/java/org/apache/spark/tags/DockerTest.java b/common/tags/src/test/java/org/apache/spark/tags/DockerTest.java
similarity index 100%
rename from common/tags/src/main/java/org/apache/spark/tags/DockerTest.java
rename to common/tags/src/test/java/org/apache/spark/tags/DockerTest.java
diff --git a/common/tags/src/main/java/org/apache/spark/tags/ExtendedHiveTest.java b/common/tags/src/test/java/org/apache/spark/tags/ExtendedHiveTest.java
similarity index 100%
rename from common/tags/src/main/java/org/apache/spark/tags/ExtendedHiveTest.java
rename to common/tags/src/test/java/org/apache/spark/tags/ExtendedHiveTest.java
diff --git a/common/tags/src/main/java/org/apache/spark/tags/ExtendedYarnTest.java b/common/tags/src/test/java/org/apache/spark/tags/ExtendedYarnTest.java
similarity index 100%
rename from common/tags/src/main/java/org/apache/spark/tags/ExtendedYarnTest.java
rename to common/tags/src/test/java/org/apache/spark/tags/ExtendedYarnTest.java
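
Moving the tag annotations under src/test means they ship in the module's test-jar rather than its main jar, which is why several poms below now request `<classifier>tests</classifier>`. A hedged usage sketch, assuming ScalaTest honors the JUnit-style tag annotation on a suite (the suite name is hypothetical):

    // Hypothetical suite opting into an extended test tag.
    import org.apache.spark.tags.ExtendedHiveTest
    import org.scalatest.FunSuite

    @ExtendedHiveTest
    class SomeHiveIntegrationSuite extends FunSuite {
      test("only runs when extended Hive tests are selected") {
        assert(true) // placeholder body
      }
    }
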
diff --git a/common/unsafe/pom.xml b/common/unsafe/pom.xml
index 93b9580f26b865d2abdf92d28e9633f320d74f49..6f3bd39491b3c7ee417b4d35f0e9860f2dee2f19 100644
--- a/common/unsafe/pom.xml
+++ b/common/unsafe/pom.xml
@@ -61,7 +61,7 @@
     <!-- Test dependencies -->
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
diff --git a/core/pom.xml b/core/pom.xml
index 7349ad35b9595754acbaca61441c5aaec6c366d1..65a8091c69062781fe0701ca5dc8fdc29392137f 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -317,7 +317,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <build>
diff --git a/dev/sparktestsupport/modules.py b/dev/sparktestsupport/modules.py
index 6d47733ec19450ca23d32cb84872c6393cda8fc0..8b6ce1436d9a0714bb512e3033223ce8f1f951cd 100644
--- a/dev/sparktestsupport/modules.py
+++ b/dev/sparktestsupport/modules.py
@@ -93,9 +93,18 @@ class Module(object):
         return hash(self.name)
 
 
+tags = Module(
+    name="tags",
+    dependencies=[],
+    source_file_regexes=[
+        "common/tags/",
+    ]
+)
+
+
 catalyst = Module(
     name="catalyst",
-    dependencies=[],
+    dependencies=[tags],
     source_file_regexes=[
         "sql/catalyst/",
     ],
@@ -165,7 +174,7 @@ hivecontext_compatibility = Module(
 
 sketch = Module(
     name="sketch",
-    dependencies=[],
+    dependencies=[tags],
     source_file_regexes=[
         "common/sketch/",
     ],
@@ -177,7 +186,7 @@ sketch = Module(
 
 graphx = Module(
     name="graphx",
-    dependencies=[],
+    dependencies=[tags],
     source_file_regexes=[
         "graphx/",
     ],
@@ -189,7 +198,7 @@ graphx = Module(
 
 streaming = Module(
     name="streaming",
-    dependencies=[],
+    dependencies=[tags],
     source_file_regexes=[
         "streaming",
     ],
@@ -205,7 +214,7 @@ streaming = Module(
 # fail other PRs.
 streaming_kinesis_asl = Module(
     name="streaming-kinesis-asl",
-    dependencies=[],
+    dependencies=[tags],
     source_file_regexes=[
         "external/kinesis-asl/",
         "external/kinesis-asl-assembly/",
@@ -270,7 +279,7 @@ streaming_flume_assembly = Module(
 
 mllib_local = Module(
     name="mllib-local",
-    dependencies=[],
+    dependencies=[tags],
     source_file_regexes=[
         "mllib-local",
     ],
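
Registering a `tags` module and listing it as a dependency means a change under common/tags/ now re-triggers the tests of every module that depends on it, transitively. An illustrative Scala sketch of that selection rule (the real logic lives in the Python test-support scripts; module names here are only examples):

    // A module is re-tested when it is, or transitively depends on,
    // a changed module. Illustrative only.
    object ModuleClosureSketch {
      case class Module(name: String, dependencies: Seq[Module])

      val tags     = Module("tags", Nil)
      val catalyst = Module("catalyst", Seq(tags))
      val sql      = Module("sql", Seq(catalyst))

      def dependsOn(m: Module, changed: Module): Boolean =
        m == changed || m.dependencies.exists(dependsOn(_, changed))

      def main(args: Array[String]): Unit = {
        val affected = Seq(tags, catalyst, sql).filter(dependsOn(_, tags))
        // Touching common/tags/ selects catalyst and sql as well.
        assert(affected.map(_.name) == Seq("tags", "catalyst", "sql"))
      }
    }
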
diff --git a/external/docker-integration-tests/pom.xml b/external/docker-integration-tests/pom.xml
index 53a24f3e06e086f20e25e29dddd59064f68c00c4..f67e2a993f6ba0c147accc5d122be6a02646f3f4 100644
--- a/external/docker-integration-tests/pom.xml
+++ b/external/docker-integration-tests/pom.xml
@@ -128,9 +128,10 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>
+      <classifier>tests</classifier>
     </dependency>
     <dependency>
       <groupId>mysql</groupId>
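
The `tests` classifier resolves the module's test-jar, where the relocated tag annotations now live. The sbt equivalent would look roughly like this; `sparkVersion` is again a placeholder:

    // Hedged sbt-style equivalent of the pom change above.
    libraryDependencies +=
      "org.apache.spark" %% "spark-tags" % sparkVersion % Test classifier "tests"
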
diff --git a/external/flume-sink/pom.xml b/external/flume-sink/pom.xml
index e4effe158c826e8ecbe236bc9e7dbac77abd3e7a..016af934bb897d80303c319eea42501e84d3ad31 100644
--- a/external/flume-sink/pom.xml
+++ b/external/flume-sink/pom.xml
@@ -92,7 +92,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <build>
diff --git a/external/flume/pom.xml b/external/flume/pom.xml
index d650dd034d63620eb29a45cade0895aeb0a6aed3..f51d334de0876c37739ed22c36836f5c22b36804 100644
--- a/external/flume/pom.xml
+++ b/external/flume/pom.xml
@@ -68,7 +68,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <build>
diff --git a/external/java8-tests/pom.xml b/external/java8-tests/pom.xml
index 1ea9196e9dfe36011d9d5212d5b53d35b3bb7bc6..60e3ff60df06559eebcc5ea5573befa1309bc0fc 100644
--- a/external/java8-tests/pom.xml
+++ b/external/java8-tests/pom.xml
@@ -72,7 +72,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
 
diff --git a/external/kafka/pom.xml b/external/kafka/pom.xml
index 68d52e9339b3d859fcea5d209541369bfe8c58e5..7f1cfa4e7db5973d4bf9ca4b0e481597b2090bd3 100644
--- a/external/kafka/pom.xml
+++ b/external/kafka/pom.xml
@@ -88,7 +88,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <build>
diff --git a/external/kinesis-asl/pom.xml b/external/kinesis-asl/pom.xml
index 935155eb5d362015fcb1110df0934a2ed85ab742..b5f5ff2854cfb60236dc3a5d22993f41cc4c5fe7 100644
--- a/external/kinesis-asl/pom.xml
+++ b/external/kinesis-asl/pom.xml
@@ -77,7 +77,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <build>
diff --git a/graphx/pom.xml b/graphx/pom.xml
index 1813f383cdcba4f371da691ef3cad25ccbbdf11a..fc6c700dd1ec808e7e237e49868194dd50d76a5c 100644
--- a/graphx/pom.xml
+++ b/graphx/pom.xml
@@ -72,7 +72,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <build>
diff --git a/launcher/pom.xml b/launcher/pom.xml
index ef731948826eff5c25396d738777db5509581a30..e7303853e65653aa1f36bf363cdcbaff78ef0f12 100644
--- a/launcher/pom.xml
+++ b/launcher/pom.xml
@@ -65,7 +65,7 @@
 
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
 
     <!-- Not needed by the test code, but referenced by SparkSubmit which is used by the tests. -->
diff --git a/mllib-local/pom.xml b/mllib-local/pom.xml
index 60b615a07f742a2c91510a8a7683ac3d8ba230d0..078ff3033afbf7b3e48bb96dba55d7d2f9d196f4 100644
--- a/mllib-local/pom.xml
+++ b/mllib-local/pom.xml
@@ -57,6 +57,10 @@
       <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+    </dependency>
   </dependencies>
   <profiles>
     <profile>
diff --git a/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Matrices.scala b/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Matrices.scala
index 8204b5af02cff205379e9be75b6f669fc2e73c0a..a47526d36f1a177578e0af82836c9d4699870ee8 100644
--- a/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Matrices.scala
+++ b/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Matrices.scala
@@ -24,21 +24,28 @@ import scala.collection.mutable.{ArrayBuffer, ArrayBuilder => MArrayBuilder, Has
 import breeze.linalg.{CSCMatrix => BSM, DenseMatrix => BDM, Matrix => BM}
 import com.github.fommil.netlib.BLAS.{getInstance => blas}
 
+import org.apache.spark.annotation.Since
+
 /**
  * Trait for a local matrix.
  */
+@Since("2.0.0")
 sealed trait Matrix extends Serializable {
 
   /** Number of rows. */
+  @Since("2.0.0")
   def numRows: Int
 
   /** Number of columns. */
+  @Since("2.0.0")
   def numCols: Int
 
   /** Flag that keeps track whether the matrix is transposed or not. False by default. */
+  @Since("2.0.0")
   val isTransposed: Boolean = false
 
   /** Converts to a dense array in column major. */
+  @Since("2.0.0")
   def toArray: Array[Double] = {
     val newArray = new Array[Double](numRows * numCols)
     foreachActive { (i, j, v) =>
@@ -51,18 +58,21 @@ sealed trait Matrix extends Serializable {
    * Returns an iterator of column vectors.
    * This operation could be expensive, depending on the underlying storage.
    */
+  @Since("2.0.0")
   def colIter: Iterator[Vector]
 
   /**
    * Returns an iterator of row vectors.
    * This operation could be expensive, depending on the underlying storage.
    */
+  @Since("2.0.0")
   def rowIter: Iterator[Vector] = this.transpose.colIter
 
   /** Converts to a breeze matrix. */
   private[ml] def toBreeze: BM[Double]
 
   /** Gets the (i, j)-th element. */
+  @Since("2.0.0")
   def apply(i: Int, j: Int): Double
 
   /** Return the index for the (i, j)-th element in the backing array. */
@@ -72,12 +82,15 @@ sealed trait Matrix extends Serializable {
   private[ml] def update(i: Int, j: Int, v: Double): Unit
 
   /** Get a deep copy of the matrix. */
+  @Since("2.0.0")
   def copy: Matrix
 
   /** Transpose the Matrix. Returns a new `Matrix` instance sharing the same underlying data. */
+  @Since("2.0.0")
   def transpose: Matrix
 
   /** Convenience method for `Matrix`-`DenseMatrix` multiplication. */
+  @Since("2.0.0")
   def multiply(y: DenseMatrix): DenseMatrix = {
     val C: DenseMatrix = DenseMatrix.zeros(numRows, y.numCols)
     BLAS.gemm(1.0, this, y, 0.0, C)
@@ -85,11 +98,13 @@ sealed trait Matrix extends Serializable {
   }
 
   /** Convenience method for `Matrix`-`DenseVector` multiplication. For binary compatibility. */
+  @Since("2.0.0")
   def multiply(y: DenseVector): DenseVector = {
     multiply(y.asInstanceOf[Vector])
   }
 
   /** Convenience method for `Matrix`-`Vector` multiplication. */
+  @Since("2.0.0")
   def multiply(y: Vector): DenseVector = {
     val output = new DenseVector(new Array[Double](numRows))
     BLAS.gemv(1.0, this, y, 0.0, output)
@@ -100,6 +115,7 @@ sealed trait Matrix extends Serializable {
   override def toString: String = toBreeze.toString()
 
   /** A human readable representation of the matrix with maximum lines and width */
+  @Since("2.0.0")
   def toString(maxLines: Int, maxLineWidth: Int): String = toBreeze.toString(maxLines, maxLineWidth)
 
   /**
@@ -129,11 +145,13 @@ sealed trait Matrix extends Serializable {
   /**
    * Find the number of non-zero active values.
    */
+  @Since("2.0.0")
   def numNonzeros: Int
 
   /**
    * Find the number of values stored explicitly. These values can be zero as well.
    */
+  @Since("2.0.0")
   def numActives: Int
 }
 
@@ -154,10 +172,11 @@ sealed trait Matrix extends Serializable {
  * @param isTransposed whether the matrix is transposed. If true, `values` stores the matrix in
  *                     row major.
  */
-class DenseMatrix (
-    val numRows: Int,
-    val numCols: Int,
-    val values: Array[Double],
+@Since("2.0.0")
+class DenseMatrix @Since("2.0.0") (
+    @Since("2.0.0") val numRows: Int,
+    @Since("2.0.0") val numCols: Int,
+    @Since("2.0.0") val values: Array[Double],
     override val isTransposed: Boolean) extends Matrix {
 
   require(values.length == numRows * numCols, "The number of values supplied doesn't match the " +
@@ -178,6 +197,7 @@ class DenseMatrix (
    * @param numCols number of columns
    * @param values matrix entries in column major
    */
+  @Since("2.0.0")
   def this(numRows: Int, numCols: Int, values: Array[Double]) =
     this(numRows, numCols, values, false)
 
@@ -266,6 +286,7 @@ class DenseMatrix (
    * Generate a `SparseMatrix` from the given `DenseMatrix`. The new matrix will have isTransposed
    * set to false.
    */
+  @Since("2.0.0")
   def toSparse: SparseMatrix = {
     val spVals: MArrayBuilder[Double] = new MArrayBuilder.ofDouble
     val colPtrs: Array[Int] = new Array[Int](numCols + 1)
@@ -307,6 +328,7 @@ class DenseMatrix (
 /**
  * Factory methods for [[org.apache.spark.ml.linalg.DenseMatrix]].
  */
+@Since("2.0.0")
 object DenseMatrix {
 
   /**
@@ -315,6 +337,7 @@ object DenseMatrix {
    * @param numCols number of columns of the matrix
    * @return `DenseMatrix` with size `numRows` x `numCols` and values of zeros
    */
+  @Since("2.0.0")
   def zeros(numRows: Int, numCols: Int): DenseMatrix = {
     require(numRows.toLong * numCols <= Int.MaxValue,
             s"$numRows x $numCols dense matrix is too large to allocate")
@@ -327,6 +350,7 @@ object DenseMatrix {
    * @param numCols number of columns of the matrix
    * @return `DenseMatrix` with size `numRows` x `numCols` and values of ones
    */
+  @Since("2.0.0")
   def ones(numRows: Int, numCols: Int): DenseMatrix = {
     require(numRows.toLong * numCols <= Int.MaxValue,
             s"$numRows x $numCols dense matrix is too large to allocate")
@@ -338,6 +362,7 @@ object DenseMatrix {
    * @param n number of rows and columns of the matrix
    * @return `DenseMatrix` with size `n` x `n` and values of ones on the diagonal
    */
+  @Since("2.0.0")
   def eye(n: Int): DenseMatrix = {
     val identity = DenseMatrix.zeros(n, n)
     var i = 0
@@ -355,6 +380,7 @@ object DenseMatrix {
    * @param rng a random number generator
    * @return `DenseMatrix` with size `numRows` x `numCols` and values in U(0, 1)
    */
+  @Since("2.0.0")
   def rand(numRows: Int, numCols: Int, rng: Random): DenseMatrix = {
     require(numRows.toLong * numCols <= Int.MaxValue,
             s"$numRows x $numCols dense matrix is too large to allocate")
@@ -368,6 +394,7 @@ object DenseMatrix {
    * @param rng a random number generator
    * @return `DenseMatrix` with size `numRows` x `numCols` and values in N(0, 1)
    */
+  @Since("2.0.0")
   def randn(numRows: Int, numCols: Int, rng: Random): DenseMatrix = {
     require(numRows.toLong * numCols <= Int.MaxValue,
             s"$numRows x $numCols dense matrix is too large to allocate")
@@ -380,6 +407,7 @@ object DenseMatrix {
    * @return Square `DenseMatrix` with size `values.length` x `values.length` and `values`
    *         on the diagonal
    */
+  @Since("2.0.0")
   def diag(vector: Vector): DenseMatrix = {
     val n = vector.size
     val matrix = DenseMatrix.zeros(n, n)
@@ -415,12 +443,13 @@ object DenseMatrix {
  *                     Compressed Sparse Row (CSR) format, where `colPtrs` behaves as rowPtrs,
  *                     and `rowIndices` behave as colIndices, and `values` are stored in row major.
  */
-class SparseMatrix (
-    val numRows: Int,
-    val numCols: Int,
-    val colPtrs: Array[Int],
-    val rowIndices: Array[Int],
-    val values: Array[Double],
+@Since("2.0.0")
+class SparseMatrix @Since("2.0.0") (
+    @Since("2.0.0") val numRows: Int,
+    @Since("2.0.0") val numCols: Int,
+    @Since("2.0.0") val colPtrs: Array[Int],
+    @Since("2.0.0") val rowIndices: Array[Int],
+    @Since("2.0.0") val values: Array[Double],
     override val isTransposed: Boolean) extends Matrix {
 
   require(values.length == rowIndices.length, "The number of row indices and values don't match! " +
@@ -451,6 +480,7 @@ class SparseMatrix (
    *                   order for each column
    * @param values non-zero matrix entries in column major
    */
+  @Since("2.0.0")
   def this(
       numRows: Int,
       numCols: Int,
@@ -550,6 +580,7 @@ class SparseMatrix (
    * Generate a `DenseMatrix` from the given `SparseMatrix`. The new matrix will have isTransposed
    * set to false.
    */
+  @Since("2.0.0")
   def toDense: DenseMatrix = {
     new DenseMatrix(numRows, numCols, toArray)
   }
@@ -594,6 +625,7 @@ class SparseMatrix (
 /**
  * Factory methods for [[org.apache.spark.ml.linalg.SparseMatrix]].
  */
+@Since("2.0.0")
 object SparseMatrix {
 
   /**
@@ -605,6 +637,7 @@ object SparseMatrix {
    * @param entries Array of (i, j, value) tuples
    * @return The corresponding `SparseMatrix`
    */
+  @Since("2.0.0")
   def fromCOO(numRows: Int, numCols: Int, entries: Iterable[(Int, Int, Double)]): SparseMatrix = {
     val sortedEntries = entries.toSeq.sortBy(v => (v._2, v._1))
     val numEntries = sortedEntries.size
@@ -653,6 +686,7 @@ object SparseMatrix {
    * @param n number of rows and columns of the matrix
    * @return `SparseMatrix` with size `n` x `n` and values of ones on the diagonal
    */
+  @Since("2.0.0")
   def speye(n: Int): SparseMatrix = {
     new SparseMatrix(n, n, (0 to n).toArray, (0 until n).toArray, Array.fill(n)(1.0))
   }
@@ -722,6 +756,7 @@ object SparseMatrix {
    * @param rng a random number generator
    * @return `SparseMatrix` with size `numRows` x `numCols` and values in U(0, 1)
    */
+  @Since("2.0.0")
   def sprand(numRows: Int, numCols: Int, density: Double, rng: Random): SparseMatrix = {
     val mat = genRandMatrix(numRows, numCols, density, rng)
     mat.update(i => rng.nextDouble())
@@ -735,6 +770,7 @@ object SparseMatrix {
    * @param rng a random number generator
    * @return `SparseMatrix` with size `numRows` x `numCols` and values in N(0, 1)
    */
+  @Since("2.0.0")
   def sprandn(numRows: Int, numCols: Int, density: Double, rng: Random): SparseMatrix = {
     val mat = genRandMatrix(numRows, numCols, density, rng)
     mat.update(i => rng.nextGaussian())
@@ -746,6 +782,7 @@ object SparseMatrix {
    * @return Square `SparseMatrix` with size `values.length` x `values.length` and non-zero
    *         `values` on the diagonal
    */
+  @Since("2.0.0")
   def spdiag(vector: Vector): SparseMatrix = {
     val n = vector.size
     vector match {
@@ -762,6 +799,7 @@ object SparseMatrix {
 /**
  * Factory methods for [[org.apache.spark.ml.linalg.Matrix]].
  */
+@Since("2.0.0")
 object Matrices {
 
   /**
@@ -771,6 +809,7 @@ object Matrices {
    * @param numCols number of columns
    * @param values matrix entries in column major
    */
+  @Since("2.0.0")
   def dense(numRows: Int, numCols: Int, values: Array[Double]): Matrix = {
     new DenseMatrix(numRows, numCols, values)
   }
@@ -784,6 +823,7 @@ object Matrices {
    * @param rowIndices the row index of the entry
    * @param values non-zero matrix entries in column major
    */
+  @Since("2.0.0")
   def sparse(
      numRows: Int,
      numCols: Int,
@@ -825,6 +865,7 @@ object Matrices {
    * @param numCols number of columns of the matrix
    * @return `Matrix` with size `numRows` x `numCols` and values of zeros
    */
+  @Since("2.0.0")
   def zeros(numRows: Int, numCols: Int): Matrix = DenseMatrix.zeros(numRows, numCols)
 
   /**
@@ -833,6 +874,7 @@ object Matrices {
    * @param numCols number of columns of the matrix
    * @return `Matrix` with size `numRows` x `numCols` and values of ones
    */
+  @Since("2.0.0")
   def ones(numRows: Int, numCols: Int): Matrix = DenseMatrix.ones(numRows, numCols)
 
   /**
@@ -840,6 +882,7 @@ object Matrices {
    * @param n number of rows and columns of the matrix
    * @return `Matrix` with size `n` x `n` and values of ones on the diagonal
    */
+  @Since("2.0.0")
   def eye(n: Int): Matrix = DenseMatrix.eye(n)
 
   /**
@@ -847,6 +890,7 @@ object Matrices {
    * @param n number of rows and columns of the matrix
    * @return `Matrix` with size `n` x `n` and values of ones on the diagonal
    */
+  @Since("2.0.0")
   def speye(n: Int): Matrix = SparseMatrix.speye(n)
 
   /**
@@ -856,6 +900,7 @@ object Matrices {
    * @param rng a random number generator
    * @return `Matrix` with size `numRows` x `numCols` and values in U(0, 1)
    */
+  @Since("2.0.0")
   def rand(numRows: Int, numCols: Int, rng: Random): Matrix =
     DenseMatrix.rand(numRows, numCols, rng)
 
@@ -867,6 +912,7 @@ object Matrices {
    * @param rng a random number generator
    * @return `Matrix` with size `numRows` x `numCols` and values in U(0, 1)
    */
+  @Since("2.0.0")
   def sprand(numRows: Int, numCols: Int, density: Double, rng: Random): Matrix =
     SparseMatrix.sprand(numRows, numCols, density, rng)
 
@@ -877,6 +923,7 @@ object Matrices {
    * @param rng a random number generator
    * @return `Matrix` with size `numRows` x `numCols` and values in N(0, 1)
    */
+  @Since("2.0.0")
   def randn(numRows: Int, numCols: Int, rng: Random): Matrix =
     DenseMatrix.randn(numRows, numCols, rng)
 
@@ -888,6 +935,7 @@ object Matrices {
    * @param rng a random number generator
    * @return `Matrix` with size `numRows` x `numCols` and values in N(0, 1)
    */
+  @Since("2.0.0")
   def sprandn(numRows: Int, numCols: Int, density: Double, rng: Random): Matrix =
     SparseMatrix.sprandn(numRows, numCols, density, rng)
 
@@ -897,6 +945,7 @@ object Matrices {
    * @return Square `Matrix` with size `values.length` x `values.length` and `values`
    *         on the diagonal
    */
+  @Since("2.0.0")
   def diag(vector: Vector): Matrix = DenseMatrix.diag(vector)
 
   /**
@@ -906,6 +955,7 @@ object Matrices {
    * @param matrices array of matrices
    * @return a single `Matrix` composed of the matrices that were horizontally concatenated
    */
+  @Since("2.0.0")
   def horzcat(matrices: Array[Matrix]): Matrix = {
     if (matrices.isEmpty) {
       return new DenseMatrix(0, 0, Array[Double]())
@@ -964,6 +1014,7 @@ object Matrices {
    * @param matrices array of matrices
    * @return a single `Matrix` composed of the matrices that were vertically concatenated
    */
+  @Since("2.0.0")
   def vertcat(matrices: Array[Matrix]): Matrix = {
     if (matrices.isEmpty) {
       return new DenseMatrix(0, 0, Array[Double]())
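
The matrix API itself is unchanged by the annotations; a quick, self-contained usage sketch of the factory methods that now carry `@Since("2.0.0")`:

    // Usage sketch for the newly annotated mllib-local matrix API.
    import org.apache.spark.ml.linalg.{DenseMatrix, Matrices, Matrix}

    object MatrixSketch {
      def main(args: Array[String]): Unit = {
        // 2 x 2 identity built column-major from the values array.
        val m: Matrix = Matrices.dense(2, 2, Array(1.0, 0.0, 0.0, 1.0))
        val eye: DenseMatrix = DenseMatrix.eye(2)
        // multiply is among the members annotated above.
        val product: DenseMatrix = m.multiply(eye)
        assert(product.toArray.sameElements(m.toArray))
      }
    }
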
diff --git a/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Vectors.scala b/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Vectors.scala
index 4275a22ae000a54cacf4db9d90455338371fa0c3..5b3b3b6887edda2c0995b6d15f917077e6176238 100644
--- a/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Vectors.scala
+++ b/mllib-local/src/main/scala/org/apache/spark/ml/linalg/Vectors.scala
@@ -28,21 +28,26 @@ import org.json4s.DefaultFormats
 import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods.{compact, parse => parseJson, render}
 
+import org.apache.spark.annotation.Since
+
 /**
  * Represents a numeric vector, whose index type is Int and value type is Double.
  *
  * Note: Users should not implement this interface.
  */
+@Since("2.0.0")
 sealed trait Vector extends Serializable {
 
   /**
    * Size of the vector.
    */
+  @Since("2.0.0")
   def size: Int
 
   /**
    * Converts the instance to a double array.
    */
+  @Since("2.0.0")
   def toArray: Array[Double]
 
   override def equals(other: Any): Boolean = {
@@ -96,11 +101,13 @@ sealed trait Vector extends Serializable {
    * Gets the value of the ith element.
    * @param i index
    */
+  @Since("2.0.0")
   def apply(i: Int): Double = toBreeze(i)
 
   /**
    * Makes a deep copy of this vector.
    */
+  @Since("2.0.0")
   def copy: Vector = {
     throw new NotImplementedError(s"copy is not implemented for ${this.getClass}.")
   }
@@ -112,32 +119,38 @@ sealed trait Vector extends Serializable {
    *          the vector with type `Int`, and the second parameter is the corresponding value
    *          with type `Double`.
    */
+  @Since("2.0.0")
   def foreachActive(f: (Int, Double) => Unit): Unit
 
   /**
    * Number of active entries.  An "active entry" is an element which is explicitly stored,
    * regardless of its value.  Note that inactive entries have value 0.
    */
+  @Since("2.0.0")
   def numActives: Int
 
   /**
    * Number of nonzero elements. This scans all active values and count nonzeros.
    */
+  @Since("2.0.0")
   def numNonzeros: Int
 
   /**
    * Converts this vector to a sparse vector with all explicit zeros removed.
    */
+  @Since("2.0.0")
   def toSparse: SparseVector
 
   /**
    * Converts this vector to a dense vector.
    */
+  @Since("2.0.0")
   def toDense: DenseVector = new DenseVector(this.toArray)
 
   /**
    * Returns a vector in either dense or sparse format, whichever uses less storage.
    */
+  @Since("2.0.0")
   def compressed: Vector = {
     val nnz = numNonzeros
     // A dense vector needs 8 * size + 8 bytes, while a sparse vector needs 12 * nnz + 20 bytes.
@@ -152,11 +165,13 @@ sealed trait Vector extends Serializable {
    * Find the index of a maximal element.  Returns the first maximal element in case of a tie.
    * Returns -1 if vector has length 0.
    */
+  @Since("2.0.0")
   def argmax: Int
 
   /**
    * Converts the vector to a JSON string.
    */
+  @Since("2.0.0")
   def toJson: String
 }
 
@@ -165,12 +180,14 @@ sealed trait Vector extends Serializable {
  * We don't use the name `Vector` because Scala imports
  * [[scala.collection.immutable.Vector]] by default.
  */
+@Since("2.0.0")
 object Vectors {
 
   /**
    * Creates a dense vector from its values.
    */
   @varargs
+  @Since("2.0.0")
   def dense(firstValue: Double, otherValues: Double*): Vector =
     new DenseVector((firstValue +: otherValues).toArray)
 
@@ -178,6 +195,7 @@ object Vectors {
   /**
    * Creates a dense vector from a double array.
    */
+  @Since("2.0.0")
   def dense(values: Array[Double]): Vector = new DenseVector(values)
 
   /**
@@ -187,6 +205,7 @@ object Vectors {
    * @param indices index array, must be strictly increasing.
    * @param values value array, must have the same length as indices.
    */
+  @Since("2.0.0")
   def sparse(size: Int, indices: Array[Int], values: Array[Double]): Vector =
     new SparseVector(size, indices, values)
 
@@ -196,6 +215,7 @@ object Vectors {
    * @param size vector size.
    * @param elements vector elements in (index, value) pairs.
    */
+  @Since("2.0.0")
   def sparse(size: Int, elements: Seq[(Int, Double)]): Vector = {
     require(size > 0, "The size of the requested sparse vector must be greater than 0.")
 
@@ -217,6 +237,7 @@ object Vectors {
    * @param size vector size.
    * @param elements vector elements in (index, value) pairs.
    */
+  @Since("2.0.0")
   def sparse(size: Int, elements: JavaIterable[(JavaInteger, JavaDouble)]): Vector = {
     sparse(size, elements.asScala.map { case (i, x) =>
       (i.intValue(), x.doubleValue())
@@ -229,6 +250,7 @@ object Vectors {
    * @param size vector size
    * @return a zero vector
    */
+  @Since("2.0.0")
   def zeros(size: Int): Vector = {
     new DenseVector(new Array[Double](size))
   }
@@ -236,6 +258,7 @@ object Vectors {
   /**
    * Parses the JSON representation of a vector into a [[Vector]].
    */
+  @Since("2.0.0")
   def fromJson(json: String): Vector = {
     implicit val formats = DefaultFormats
     val jValue = parseJson(json)
@@ -281,6 +304,7 @@ object Vectors {
    * @param p norm.
    * @return norm in L^p^ space.
    */
+  @Since("2.0.0")
   def norm(vector: Vector, p: Double): Double = {
     require(p >= 1.0, "To compute the p-norm of the vector, we require that you specify a p>=1. " +
       s"You specified p=$p.")
@@ -333,6 +357,7 @@ object Vectors {
    * @param v2 second Vector.
    * @return squared distance between two Vectors.
    */
+  @Since("2.0.0")
   def sqdist(v1: Vector, v2: Vector): Double = {
     require(v1.size == v2.size, s"Vector dimensions do not match: Dim(v1)=${v1.size} and Dim(v2)" +
       s"=${v2.size}.")
@@ -449,7 +474,8 @@ object Vectors {
 /**
  * A dense vector represented by a value array.
  */
-class DenseVector (val values: Array[Double]) extends Vector {
+@Since("2.0.0")
+class DenseVector @Since("2.0.0") (@Since("2.0.0") val values: Array[Double]) extends Vector {
 
   override def size: Int = values.length
 
@@ -548,9 +574,11 @@ class DenseVector (val values: Array[Double]) extends Vector {
   }
 }
 
+@Since("2.0.0")
 object DenseVector {
 
   /** Extracts the value array from a dense vector. */
+  @Since("2.0.0")
   def unapply(dv: DenseVector): Option[Array[Double]] = Some(dv.values)
 }
 
@@ -561,10 +589,11 @@ object DenseVector {
  * @param indices index array, assume to be strictly increasing.
  * @param values value array, must have the same length as the index array.
  */
-class SparseVector (
+@Since("2.0.0")
+class SparseVector @Since("2.0.0") (
     override val size: Int,
-    val indices: Array[Int],
-    val values: Array[Double]) extends Vector {
+    @Since("2.0.0") val indices: Array[Int],
+    @Since("2.0.0") val values: Array[Double]) extends Vector {
 
   require(indices.length == values.length, "Sparse vectors require that the dimension of the" +
     s" indices match the dimension of the values. You provided ${indices.length} indices and " +
@@ -734,7 +763,9 @@ class SparseVector (
   }
 }
 
+@Since("2.0.0")
 object SparseVector {
+  @Since("2.0.0")
   def unapply(sv: SparseVector): Option[(Int, Array[Int], Array[Double])] =
     Some((sv.size, sv.indices, sv.values))
 }
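
And the vector side, exercising a few of the members annotated above:

    // Usage sketch for the annotated mllib-local vector API.
    import org.apache.spark.ml.linalg.{Vector, Vectors}

    object VectorSketch {
      def main(args: Array[String]): Unit = {
        val dense: Vector  = Vectors.dense(1.0, 0.0, 3.0)
        val sparse: Vector = Vectors.sparse(3, Array(0, 2), Array(1.0, 3.0))
        // numNonzeros and norm are among the annotated members.
        assert(dense.numNonzeros == 2)
        assert(Vectors.norm(dense, 2.0) == Vectors.norm(sparse, 2.0))
      }
    }
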
diff --git a/mllib/pom.xml b/mllib/pom.xml
index 24d8274e2222ff1457d96c022cd3164a85f67d76..c324afb2a2d1b03f419c7ef10fd250e872ba7c11 100644
--- a/mllib/pom.xml
+++ b/mllib/pom.xml
@@ -116,7 +116,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <profiles>
diff --git a/pom.xml b/pom.xml
index 34c374d669624083100679922746c86e1b23564d..20615d599a679589b365462db55db1d98ffdbaeb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -284,8 +284,14 @@
     <dependencies>
       <dependency>
         <groupId>org.apache.spark</groupId>
-        <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+        <artifactId>spark-tags_${scala.binary.version}</artifactId>
         <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.spark</groupId>
+        <artifactId>spark-tags_${scala.binary.version}</artifactId>
+        <version>${project.version}</version>
+        <classifier>tests</classifier>
         <scope>test</scope>
       </dependency>
       <dependency>
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index 6fc49a08fe31662ce9119833ffef6d0f9c9bea9b..26a3760bc32f170597cf6adf79bec1cd58ea5a57 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -700,6 +700,10 @@ object MimaExcludes {
         ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.executor.ShuffleReadMetrics.localBlocksFetched"),
         ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.status.api.v1.ShuffleReadMetrics.remoteBlocksFetched"),
         ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.status.api.v1.ShuffleReadMetrics.localBlocksFetched")
+      ) ++ Seq(
+        // [SPARK-14613] Add @Since into the matrix and vector classes in spark-mllib-local
+        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.package$"),
+        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.annotation.package")
       )
     case v if v.startsWith("1.6") =>
       Seq(
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index ffbca25e46f415f77a6fab57d3307fb488fd9765..ecd08defd2c3fa17cef1f11a6e5cbd8f59afa575 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -50,10 +50,10 @@ object BuildCommons {
   ).map(ProjectRef(buildLocation, _))
 
   val allProjects@Seq(
-    core, graphx, mllib, mllibLocal, repl, networkCommon, networkShuffle, launcher, unsafe, testTags, sketch, _*
+    core, graphx, mllib, mllibLocal, repl, networkCommon, networkShuffle, launcher, unsafe, tags, sketch, _*
   ) = Seq(
     "core", "graphx", "mllib", "mllib-local", "repl", "network-common", "network-shuffle", "launcher", "unsafe",
-    "test-tags", "sketch"
+    "tags", "sketch"
   ).map(ProjectRef(buildLocation, _)) ++ sqlProjects ++ streamingProjects
 
   val optionallyEnabledProjects@Seq(yarn, java8Tests, sparkGangliaLgpl,
@@ -336,7 +336,7 @@ object SparkBuild extends PomBuild {
   val mimaProjects = allProjects.filterNot { x =>
     Seq(
       spark, hive, hiveThriftServer, hiveCompatibility, catalyst, repl, networkCommon, networkShuffle, networkYarn,
-      unsafe, testTags, sketch, mllibLocal
+      unsafe, tags, sketch, mllibLocal
     ).contains(x)
   }
 
@@ -680,9 +680,9 @@ object Unidoc {
     publish := {},
 
     unidocProjectFilter in(ScalaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, testTags),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, tags),
     unidocProjectFilter in(JavaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, testTags),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, streamingFlumeSink, yarn, tags),
 
     // Skip actual catalyst, but include the subproject.
     // Catalyst is not public API and contains quasiquotes which break scaladoc.
diff --git a/repl/pom.xml b/repl/pom.xml
index 0f396c9b809bd69616ac5e551b8ceaeaa11a0581..c12d121c611568691846265da90915115265e617 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -87,7 +87,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.xbean</groupId>
diff --git a/sql/catalyst/pom.xml b/sql/catalyst/pom.xml
index 1748fa2778d6a6f84a6d79e712706b5c57182805..1923199f4b86129e9d193f0066d8802283b93cae 100644
--- a/sql/catalyst/pom.xml
+++ b/sql/catalyst/pom.xml
@@ -55,7 +55,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
diff --git a/sql/core/pom.xml b/sql/core/pom.xml
index e1071ebfb5a613354d2e7521828fff9bd67eb03c..d745e0fb434185ed4fbd2c75842ec9f41d328878 100644
--- a/sql/core/pom.xml
+++ b/sql/core/pom.xml
@@ -73,7 +73,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.parquet</groupId>
diff --git a/sql/hive-thriftserver/pom.xml b/sql/hive-thriftserver/pom.xml
index c8d17bd468582f5a03472c84654676f7a0c1d5c0..eb71507855c0823a42980fcfcca76395fd368871 100644
--- a/sql/hive-thriftserver/pom.xml
+++ b/sql/hive-thriftserver/pom.xml
@@ -95,7 +95,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
   </dependencies>
   <build>
diff --git a/sql/hive/pom.xml b/sql/hive/pom.xml
index 177b6884fa13b407f77e21b89037162ebd85ba2c..efffb56799715dc7362e835601b3c8f521b55c6e 100644
--- a/sql/hive/pom.xml
+++ b/sql/hive/pom.xml
@@ -60,7 +60,9 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <classifier>tests</classifier>
+      <scope>test</scope>
     </dependency>
 <!--
     <dependency>
diff --git a/streaming/pom.xml b/streaming/pom.xml
index 7d409c5d3b076fd1ca25eb9c5ec2f066ad78c962..4fa05b419d1af7ebcd04eae5fc3a21c6a97fa1a0 100644
--- a/streaming/pom.xml
+++ b/streaming/pom.xml
@@ -49,7 +49,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
     </dependency>
 
     <!-- Explicit listing of transitive deps that are shaded. Otherwise, odd compiler crashes. -->
diff --git a/yarn/pom.xml b/yarn/pom.xml
index 328bb6678db99feff2234276163993d5af516275..d0b60775602aa5c4bee3b13ccd1bcbaf6abfa79a 100644
--- a/yarn/pom.xml
+++ b/yarn/pom.xml
@@ -53,7 +53,9 @@
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
+      <artifactId>spark-tags_${scala.binary.version}</artifactId>
+      <scope>test</scope>
+      <classifier>tests</classifier>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>