Skip to content
Snippets Groups Projects
Commit 45b7557e authored by Reynold Xin's avatar Reynold Xin
Browse files

[SPARK-15424][SPARK-15437][SPARK-14807][SQL] Revert Create a hivecontext-compatibility module

## What changes were proposed in this pull request?
I initially asked to create a hivecontext-compatibility module to put the HiveContext there. But we are so close to the Spark 2.0 release, and there is only a single class in it. It seems overkill to have an entire module for a single class, and it makes usage more inconvenient.

## How was this patch tested?
Tests were moved.

Author: Reynold Xin <rxin@databricks.com>

Closes #13207 from rxin/SPARK-15424.
parent 021c1970
No related branches found
No related tags found
No related merge requests found
...@@ -110,7 +110,7 @@ def determine_modules_to_test(changed_modules): ...@@ -110,7 +110,7 @@ def determine_modules_to_test(changed_modules):
['graphx', 'examples'] ['graphx', 'examples']
>>> x = [x.name for x in determine_modules_to_test([modules.sql])] >>> x = [x.name for x in determine_modules_to_test([modules.sql])]
>>> x # doctest: +NORMALIZE_WHITESPACE >>> x # doctest: +NORMALIZE_WHITESPACE
['sql', 'hive', 'mllib', 'examples', 'hive-thriftserver', 'hivecontext-compatibility', ['sql', 'hive', 'mllib', 'examples', 'hive-thriftserver',
'pyspark-sql', 'sparkr', 'pyspark-mllib', 'pyspark-ml'] 'pyspark-sql', 'sparkr', 'pyspark-mllib', 'pyspark-ml']
""" """
modules_to_test = set() modules_to_test = set()
......
...@@ -158,18 +158,6 @@ hive_thriftserver = Module( ...@@ -158,18 +158,6 @@ hive_thriftserver = Module(
) )
hivecontext_compatibility = Module(
name="hivecontext-compatibility",
dependencies=[hive],
source_file_regexes=[
"sql/hivecontext-compatibility/",
],
sbt_test_goals=[
"hivecontext-compatibility/test"
]
)
sketch = Module( sketch = Module(
name="sketch", name="sketch",
dependencies=[tags], dependencies=[tags],
......
...@@ -100,7 +100,6 @@ ...@@ -100,7 +100,6 @@
<module>sql/catalyst</module> <module>sql/catalyst</module>
<module>sql/core</module> <module>sql/core</module>
<module>sql/hive</module> <module>sql/hive</module>
<module>sql/hivecontext-compatibility</module>
<module>assembly</module> <module>assembly</module>
<module>external/flume</module> <module>external/flume</module>
<module>external/flume-sink</module> <module>external/flume-sink</module>
......
...@@ -39,8 +39,8 @@ object BuildCommons { ...@@ -39,8 +39,8 @@ object BuildCommons {
private val buildLocation = file(".").getAbsoluteFile.getParentFile private val buildLocation = file(".").getAbsoluteFile.getParentFile
val sqlProjects@Seq(catalyst, sql, hive, hiveThriftServer, hiveCompatibility) = Seq( val sqlProjects@Seq(catalyst, sql, hive, hiveThriftServer) = Seq(
"catalyst", "sql", "hive", "hive-thriftserver", "hivecontext-compatibility" "catalyst", "sql", "hive", "hive-thriftserver"
).map(ProjectRef(buildLocation, _)) ).map(ProjectRef(buildLocation, _))
val streamingProjects@Seq( val streamingProjects@Seq(
...@@ -339,7 +339,7 @@ object SparkBuild extends PomBuild { ...@@ -339,7 +339,7 @@ object SparkBuild extends PomBuild {
val mimaProjects = allProjects.filterNot { x => val mimaProjects = allProjects.filterNot { x =>
Seq( Seq(
spark, hive, hiveThriftServer, hiveCompatibility, catalyst, repl, networkCommon, networkShuffle, networkYarn, spark, hive, hiveThriftServer, catalyst, repl, networkCommon, networkShuffle, networkYarn,
unsafe, tags, sketch, mllibLocal unsafe, tags, sketch, mllibLocal
).contains(x) ).contains(x)
} }
......
...@@ -19,7 +19,7 @@ package org.apache.spark.sql.hive ...@@ -19,7 +19,7 @@ package org.apache.spark.sql.hive
import org.scalatest.BeforeAndAfterEach import org.scalatest.BeforeAndAfterEach
import org.apache.spark.{SparkContext, SparkFunSuite} import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEach { class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEach {
...@@ -29,7 +29,7 @@ class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEac ...@@ -29,7 +29,7 @@ class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEac
override def beforeAll(): Unit = { override def beforeAll(): Unit = {
super.beforeAll() super.beforeAll()
sc = new SparkContext("local[4]", "test") sc = SparkContext.getOrCreate(new SparkConf().setMaster("local").setAppName("test"))
HiveUtils.newTemporaryConfiguration(useInMemoryDerby = true).foreach { case (k, v) => HiveUtils.newTemporaryConfiguration(useInMemoryDerby = true).foreach { case (k, v) =>
sc.hadoopConfiguration.set(k, v) sc.hadoopConfiguration.set(k, v)
} }
...@@ -47,7 +47,6 @@ class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEac ...@@ -47,7 +47,6 @@ class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEac
override def afterAll(): Unit = { override def afterAll(): Unit = {
try { try {
sc.stop()
sc = null sc = null
hc = null hc = null
} finally { } finally {
......
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!--
Maven POM for the sql/hivecontext-compatibility module (removed by this
commit, SPARK-15424): a jar-packaged child of spark-parent_2.11 that
compiles against spark-hive and uses the spark-core test-jar for tests.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.11</artifactId>
<version>2.0.0-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hivecontext-compatibility_2.11</artifactId>
<packaging>jar</packaging>
<name>Spark Project HiveContext Compatibility</name>
<url>http://spark.apache.org/</url>
<properties>
<!-- sbt project name must match the module id used in dev/sparktestsupport and SparkBuild.scala -->
<sbt.project.name>hivecontext-compatibility</sbt.project.name>
</properties>
<dependencies>
<!-- HiveContext wraps Hive support, so the module depends on spark-hive at compile scope -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
<version>${project.version}</version>
</dependency>
<!-- test-jar brings in SparkFunSuite and other shared test utilities from core -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
</build>
</project>
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment