diff --git a/docs/building-spark.md b/docs/building-spark.md
index fec442af95e1be8c71b3942b1f2f7189cb14978c..13c95e4fcb395e64f6cbaf43ac9da832346a95b0 100644
--- a/docs/building-spark.md
+++ b/docs/building-spark.md
@@ -190,6 +190,18 @@ or
 Java 8 tests are automatically enabled when a Java 8 JDK is detected.
 If you have JDK 8 installed but it is not the system default, you can set JAVA_HOME to point to JDK 8 before running the tests.
 
+# Running Docker-based Integration Test Suites
+
+The following commands run only the Docker-based integration tests and nothing else.
+
+    mvn install -DskipTests
+    mvn -Pdocker-integration-tests -pl :spark-docker-integration-tests_2.11
+
+or
+
+    sbt docker-integration-tests/test
+
+
 # Packaging without Hadoop Dependencies for YARN
 
 The assembly directory produced by `mvn package` will, by default, include all of Spark's dependencies, including Hadoop and some of its ecosystem projects. On YARN deployments, this causes multiple versions of these to appear on executor classpaths: the version packaged in the Spark assembly and the version on each node, included with `yarn.application.classpath`.  The `hadoop-provided` profile builds the assembly without including Hadoop-ecosystem projects, like ZooKeeper and Hadoop itself.
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
index aa47228eff3a2aeb0e01e4bdf026f5be8a69bb2a..a70ed98b52d5d5c8127ef3ff00747bbc1e2e3ad7 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
@@ -21,12 +21,9 @@ import java.math.BigDecimal
 import java.sql.{Connection, Date, Timestamp}
 import java.util.Properties
 
-import org.scalatest.Ignore
-
 import org.apache.spark.tags.DockerTest
 
 @DockerTest
-@Ignore
 class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
   override val db = new DatabaseOnDocker {
     override val imageName = "mysql:5.7.9"
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
index 357866b87ca2acc7a0ba684daa7fa045134595d9..c5e1f8607b3336e26daae04793446918ca8d29ce 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
@@ -20,8 +20,6 @@ package org.apache.spark.sql.jdbc
 import java.sql.Connection
 import java.util.Properties
 
-import org.scalatest.Ignore
-
 import org.apache.spark.sql.test.SharedSQLContext
 import org.apache.spark.tags.DockerTest
 
@@ -46,12 +44,11 @@ import org.apache.spark.tags.DockerTest
  * repository.
  */
 @DockerTest
-@Ignore
 class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLContext {
   import testImplicits._
 
   override val db = new DatabaseOnDocker {
-    override val imageName = "wnameless/oracle-xe-11g:latest"
+    override val imageName = "wnameless/oracle-xe-11g:14.04.4"
     override val env = Map(
       "ORACLE_ROOT_PASSWORD" -> "oracle"
     )
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
index 6546d4cfd7ce8d4af3206988431254331f6da810..79dd70116ecb85f67a11f7d6919861242a527a5d 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
@@ -20,15 +20,12 @@ package org.apache.spark.sql.jdbc
 import java.sql.Connection
 import java.util.Properties
 
-import org.scalatest.Ignore
-
 import org.apache.spark.sql.Column
 import org.apache.spark.sql.catalyst.expressions.Literal
 import org.apache.spark.sql.types.{ArrayType, DecimalType}
 import org.apache.spark.tags.DockerTest
 
 @DockerTest
-@Ignore
 class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
   override val db = new DatabaseOnDocker {
     override val imageName = "postgres:9.4.5"
diff --git a/pom.xml b/pom.xml
index 852136a998abad969453fbabb3692ca2b44a70c0..3e783fa56e769e9ff2983a6fc062dff0bd4a0ff4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -101,7 +101,6 @@
     <module>sql/core</module>
     <module>sql/hive</module>
     <module>sql/hivecontext-compatibility</module>
-    <module>external/docker-integration-tests</module>
     <module>assembly</module>
     <module>external/flume</module>
     <module>external/flume-sink</module>
@@ -2469,6 +2468,13 @@
       </build>
     </profile>
 
+    <profile>
+      <id>docker-integration-tests</id>
+      <modules>
+        <module>external/docker-integration-tests</module>
+      </modules>
+    </profile>
+
     <!-- A series of build profiles where customizations for particular Hadoop releases can be made -->
 
     <!-- Hadoop-a.b.c dependencies can be found at
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 9249248c71ec0fdf76472eccb3202080c40c1cd2..f50f41a88dc149349fcb912c600c753febc92986 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -382,7 +382,8 @@ object SparkBuild extends PomBuild {
 
   enable(Java8TestSettings.settings)(java8Tests)
 
-  enable(DockerIntegrationTests.settings)(dockerIntegrationTests)
+  // SPARK-14738 - Remove docker tests from main Spark build
+  // enable(DockerIntegrationTests.settings)(dockerIntegrationTests)
 
   /**
    * Adds the ability to run the spark shell directly from SBT without building an assembly