Commit 89d6822f authored by Xiao Li, committed by Wenchen Fan

[SPARK-19148][SQL][FOLLOW-UP] do not expose the external table concept in Catalog

### What changes were proposed in this pull request?
After renaming `Catalog.createExternalTable` to `createTable` in https://github.com/apache/spark/pull/16528, we also need to deprecate the corresponding functions in `SQLContext`.

### How was this patch tested?
N/A

Author: Xiao Li <gatorsmile@gmail.com>

Closes #17502 from gatorsmile/deprecateCreateExternalTable.
parent cf5963c9
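For context, the migration this deprecation steers callers toward looks roughly like the following. This is a minimal sketch, assuming Spark 2.2+ and a local `SparkSession`; the table name and path are illustrative:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .appName("createTable-migration")
  .master("local[*]")
  .getOrCreate()

// Before (deprecated in 2.2.0): create an external table over existing files.
// spark.sqlContext.createExternalTable("people", "/tmp/people.parquet")

// After: same behavior through the unified Catalog API. Because a path is
// supplied, the table is still external (unmanaged); only the API name
// stops exposing the "external" concept.
val df = spark.catalog.createTable("people", "/tmp/people.parquet")
df.printSchema()
```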
sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

@@ -17,7 +17,6 @@
 package org.apache.spark.sql
 
-import java.beans.BeanInfo
 import java.util.Properties
 
 import scala.collection.immutable
@@ -527,8 +526,9 @@ class SQLContext private[sql](val sparkSession: SparkSession)
    * @group ddl_ops
    * @since 1.3.0
    */
+  @deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
   def createExternalTable(tableName: String, path: String): DataFrame = {
-    sparkSession.catalog.createExternalTable(tableName, path)
+    sparkSession.catalog.createTable(tableName, path)
   }
 
   /**
@@ -538,11 +538,12 @@ class SQLContext private[sql](val sparkSession: SparkSession)
    * @group ddl_ops
    * @since 1.3.0
    */
+  @deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
   def createExternalTable(
       tableName: String,
       path: String,
       source: String): DataFrame = {
-    sparkSession.catalog.createExternalTable(tableName, path, source)
+    sparkSession.catalog.createTable(tableName, path, source)
   }
 
   /**
@@ -552,11 +553,12 @@ class SQLContext private[sql](val sparkSession: SparkSession)
    * @group ddl_ops
    * @since 1.3.0
    */
+  @deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
   def createExternalTable(
       tableName: String,
       source: String,
       options: java.util.Map[String, String]): DataFrame = {
-    sparkSession.catalog.createExternalTable(tableName, source, options)
+    sparkSession.catalog.createTable(tableName, source, options)
   }
 
   /**
@@ -567,11 +569,12 @@ class SQLContext private[sql](val sparkSession: SparkSession)
    * @group ddl_ops
    * @since 1.3.0
    */
+  @deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
   def createExternalTable(
       tableName: String,
       source: String,
       options: Map[String, String]): DataFrame = {
-    sparkSession.catalog.createExternalTable(tableName, source, options)
+    sparkSession.catalog.createTable(tableName, source, options)
   }
 
   /**
@@ -581,12 +584,13 @@ class SQLContext private[sql](val sparkSession: SparkSession)
    * @group ddl_ops
    * @since 1.3.0
    */
+  @deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
   def createExternalTable(
       tableName: String,
       source: String,
       schema: StructType,
       options: java.util.Map[String, String]): DataFrame = {
-    sparkSession.catalog.createExternalTable(tableName, source, schema, options)
+    sparkSession.catalog.createTable(tableName, source, schema, options)
   }
 
   /**
@@ -597,12 +601,13 @@ class SQLContext private[sql](val sparkSession: SparkSession)
    * @group ddl_ops
    * @since 1.3.0
    */
+  @deprecated("use sparkSession.catalog.createTable instead.", "2.2.0")
   def createExternalTable(
       tableName: String,
       source: String,
       schema: StructType,
       options: Map[String, String]): DataFrame = {
-    sparkSession.catalog.createExternalTable(tableName, source, schema, options)
+    sparkSession.catalog.createTable(tableName, source, schema, options)
  }
 
   /**
@@ -1089,9 +1094,9 @@ object SQLContext {
    * method for internal use.
    */
   private[sql] def beansToRows(
-    data: Iterator[_],
-    beanClass: Class[_],
-    attrs: Seq[AttributeReference]): Iterator[InternalRow] = {
+      data: Iterator[_],
+      beanClass: Class[_],
+      attrs: Seq[AttributeReference]): Iterator[InternalRow] = {
     val extractors =
       JavaTypeInference.getJavaBeanReadableProperties(beanClass).map(_.getReadMethod)
     val methodsToConverts = extractors.zip(attrs).map { case (e, attr) =>
...
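As a usage note, the `@deprecated` annotations added above are compile-time only: the old overloads keep working and still delegate to the Catalog. A sketch of what a caller now sees, reusing the `spark` session from the earlier example (the table name, source, and options are illustrative, and the warning text is paraphrased):

```scala
// Compiling this now produces a warning along the lines of:
//   method createExternalTable in class SQLContext is deprecated (since 2.2.0):
//   use sparkSession.catalog.createTable instead.
spark.sqlContext.createExternalTable("logs", "json", Map("path" -> "/tmp/logs"))

// Warning-free equivalent via the Catalog API:
spark.catalog.createTable("logs", "json", Map("path" -> "/tmp/logs"))
```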