diff --git a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
index 4f7aeac1ec54c660ffa7364e15065e148a9b679e..c39538014be816ea3b45109905e45816daa28528 100644
--- a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Ignore;
+import org.junit.Test;
 
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SQLContext;
@@ -50,7 +50,7 @@ public class JavaDefaultReadWriteSuite {
     Utils.deleteRecursively(tempDir);
   }
 
-  @Ignore // SPARK-11672
+  @Test
   public void testDefaultReadWrite() throws IOException {
     String uid = "my_params";
     MyParams instance = new MyParams(uid);
diff --git a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
index e4c2f1baa4fa1dcdbfc8612fff31883bb3c8276e..51b06b7eb6d534ef0c624eedcdfe3af92eabaefb 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
@@ -872,7 +872,7 @@ class LogisticRegressionSuite
     assert(model1a0.intercept ~== model1b.intercept absTol 1E-3)
   }
 
-  ignore("read/write") { // SPARK-11672
+  test("read/write") {
     // Set some Params to make sure set Params are serialized.
     val lr = new LogisticRegression()
       .setElasticNetParam(0.1)
diff --git a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
index a66fe032819355d7abc0d326af5d21db3cd66803..9dfa1439cc303c0eefaf28540e116edcd7c82ad0 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
@@ -68,7 +68,7 @@ class BinarizerSuite extends SparkFunSuite with MLlibTestSparkContext with Defau
     }
   }
 
-  ignore("read/write") { // SPARK-11672
+  test("read/write") {
     val binarizer = new Binarizer()
       .setInputCol("feature")
       .setOutputCol("binarized_feature")
diff --git a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
index 44e09c38f9375412c38ae592bfb0173c9ea6936e..cac4bd9aa3ab8b09a5f6ba9f484606c1b8680bb7 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
@@ -105,7 +105,7 @@ object MyParams extends Readable[MyParams] {
 class DefaultReadWriteSuite extends SparkFunSuite with MLlibTestSparkContext
   with DefaultReadWriteTest {
 
-  ignore("default read/write") { // SPARK-11672
+  test("default read/write") {
     val myParams = new MyParams("my_params")
     testDefaultReadWrite(myParams)
   }
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala b/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
index 5d1796ef65722f33cd3a005f25c1adf2e9444dc2..998ee481865588aac1ead249d433634291e37d85 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
@@ -32,11 +32,15 @@ trait MLlibTestSparkContext extends BeforeAndAfterAll { self: Suite =>
       .setMaster("local[2]")
       .setAppName("MLlibUnitTest")
     sc = new SparkContext(conf)
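+    // Clear any active SQLContext left behind by other test suites before creating a fresh one.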
+    SQLContext.clearActive()
     sqlContext = new SQLContext(sc)
   }
 
   override def afterAll() {
     sqlContext = null
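+    // Clear the active SQLContext again on teardown so later suites do not pick up a stale reference.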
+    SQLContext.clearActive()
     if (sc != null) {
       sc.stop()
     }