From 45b7557e61d440612d4ce49c31b5ef242fdefa54 Mon Sep 17 00:00:00 2001
From: Reynold Xin <rxin@databricks.com>
Date: Fri, 20 May 2016 22:01:55 -0700
Subject: [PATCH] [SPARK-15424][SPARK-15437][SPARK-14807][SQL] Revert Create a
 hivecontext-compatibility module

## What changes were proposed in this pull request?
I initially asked for a separate hivecontext-compatibility module to hold HiveContext. However, we are very close to the Spark 2.0 release and that module contains only a single class; a whole module for one class is overkill and only makes it more inconvenient to use. This patch reverts the module and moves HiveContext back into sql/hive.
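
For illustration only (not part of this patch): because HiveContext stays in the same package, `org.apache.spark.sql.hive`, user code that constructs it directly is unaffected by the move. A minimal usage sketch, assuming a local Spark 2.0 build (HiveContext itself is deprecated in favor of `SparkSession.builder().enableHiveSupport()`):

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext

// Hypothetical example: HiveContext now resolves from the sql/hive module,
// but its package and constructor are unchanged, so this compiles as before.
val sc = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("hive-compat-example"))
val hc = new HiveContext(sc)
hc.sql("SHOW TABLES").show()
```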

## How was this patch tested?
Existing tests were moved; `HiveContextCompatibilitySuite` now lives under `sql/hive`.
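
As a side note (illustrative, not prescriptive): the moved suite now obtains its SparkContext via `SparkContext.getOrCreate` and no longer stops it in `afterAll`, presumably so it can coexist with other Hive suites sharing a SparkContext in the same JVM. A minimal sketch of that pattern:

```scala
import org.apache.spark.{SparkConf, SparkContext}

// Illustrative sketch of the getOrCreate pattern used by the moved suite:
// reuse any SparkContext already running in this JVM, otherwise create one.
val conf = new SparkConf().setMaster("local").setAppName("test")
val sc = SparkContext.getOrCreate(conf)
// The shared context is intentionally left running for other suites.
```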

Author: Reynold Xin <rxin@databricks.com>

Closes #13207 from rxin/SPARK-15424.
---
 dev/run-tests.py                              |  2 +-
 dev/sparktestsupport/modules.py               | 12 ----
 pom.xml                                       |  1 -
 project/SparkBuild.scala                      |  6 +-
 .../apache/spark/sql/hive/HiveContext.scala   |  0
 .../hive/HiveContextCompatibilitySuite.scala  |  5 +-
 sql/hivecontext-compatibility/pom.xml         | 57 -------------------
 7 files changed, 6 insertions(+), 77 deletions(-)
 rename sql/{hivecontext-compatibility => hive}/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala (100%)
 rename sql/{hivecontext-compatibility => hive}/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala (95%)
 delete mode 100644 sql/hivecontext-compatibility/pom.xml

diff --git a/dev/run-tests.py b/dev/run-tests.py
index 7b3269752b..2030c4ab23 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -110,7 +110,7 @@ def determine_modules_to_test(changed_modules):
     ['graphx', 'examples']
     >>> x = [x.name for x in determine_modules_to_test([modules.sql])]
     >>> x # doctest: +NORMALIZE_WHITESPACE
-    ['sql', 'hive', 'mllib', 'examples', 'hive-thriftserver', 'hivecontext-compatibility',
+    ['sql', 'hive', 'mllib', 'examples', 'hive-thriftserver',
      'pyspark-sql', 'sparkr', 'pyspark-mllib', 'pyspark-ml']
     """
     modules_to_test = set()
diff --git a/dev/sparktestsupport/modules.py b/dev/sparktestsupport/modules.py
index 0d6aa7422a..8e2364d2f7 100644
--- a/dev/sparktestsupport/modules.py
+++ b/dev/sparktestsupport/modules.py
@@ -158,18 +158,6 @@ hive_thriftserver = Module(
 )
 
 
-hivecontext_compatibility = Module(
-    name="hivecontext-compatibility",
-    dependencies=[hive],
-    source_file_regexes=[
-        "sql/hivecontext-compatibility/",
-    ],
-    sbt_test_goals=[
-        "hivecontext-compatibility/test"
-    ]
-)
-
-
 sketch = Module(
     name="sketch",
     dependencies=[tags],
diff --git a/pom.xml b/pom.xml
index 9c13af17e4..e778f77fa3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -100,7 +100,6 @@
     <module>sql/catalyst</module>
     <module>sql/core</module>
     <module>sql/hive</module>
-    <module>sql/hivecontext-compatibility</module>
     <module>assembly</module>
     <module>external/flume</module>
     <module>external/flume-sink</module>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 3ad9873f43..f08ca7001f 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -39,8 +39,8 @@ object BuildCommons {
 
   private val buildLocation = file(".").getAbsoluteFile.getParentFile
 
-  val sqlProjects@Seq(catalyst, sql, hive, hiveThriftServer, hiveCompatibility) = Seq(
-    "catalyst", "sql", "hive", "hive-thriftserver", "hivecontext-compatibility"
+  val sqlProjects@Seq(catalyst, sql, hive, hiveThriftServer) = Seq(
+    "catalyst", "sql", "hive", "hive-thriftserver"
   ).map(ProjectRef(buildLocation, _))
 
   val streamingProjects@Seq(
@@ -339,7 +339,7 @@ object SparkBuild extends PomBuild {
 
   val mimaProjects = allProjects.filterNot { x =>
     Seq(
-      spark, hive, hiveThriftServer, hiveCompatibility, catalyst, repl, networkCommon, networkShuffle, networkYarn,
+      spark, hive, hiveThriftServer, catalyst, repl, networkCommon, networkShuffle, networkYarn,
       unsafe, tags, sketch, mllibLocal
     ).contains(x)
   }
diff --git a/sql/hivecontext-compatibility/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
similarity index 100%
rename from sql/hivecontext-compatibility/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
rename to sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
diff --git a/sql/hivecontext-compatibility/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
similarity index 95%
rename from sql/hivecontext-compatibility/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
rename to sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
index 1c1db72e27..3aa8174702 100644
--- a/sql/hivecontext-compatibility/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.hive
 
 import org.scalatest.BeforeAndAfterEach
 
-import org.apache.spark.{SparkContext, SparkFunSuite}
+import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
 
 
 class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEach {
@@ -29,7 +29,7 @@ class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEac
 
   override def beforeAll(): Unit = {
     super.beforeAll()
-    sc = new SparkContext("local[4]", "test")
+    sc = SparkContext.getOrCreate(new SparkConf().setMaster("local").setAppName("test"))
     HiveUtils.newTemporaryConfiguration(useInMemoryDerby = true).foreach { case (k, v) =>
       sc.hadoopConfiguration.set(k, v)
     }
@@ -47,7 +47,6 @@ class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEac
 
   override def afterAll(): Unit = {
     try {
-      sc.stop()
       sc = null
       hc = null
     } finally {
diff --git a/sql/hivecontext-compatibility/pom.xml b/sql/hivecontext-compatibility/pom.xml
deleted file mode 100644
index ed9ef8e279..0000000000
--- a/sql/hivecontext-compatibility/pom.xml
+++ /dev/null
@@ -1,57 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~    http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.spark</groupId>
-        <artifactId>spark-parent_2.11</artifactId>
-        <version>2.0.0-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-
-    <groupId>org.apache.spark</groupId>
-    <artifactId>spark-hivecontext-compatibility_2.11</artifactId>
-    <packaging>jar</packaging>
-    <name>Spark Project HiveContext Compatibility</name>
-    <url>http://spark.apache.org/</url>
-    <properties>
-        <sbt.project.name>hivecontext-compatibility</sbt.project.name>
-    </properties>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-hive_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-core_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-        <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-    </build>
-</project>
-- 
GitLab