diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R
index 22eb3ec984673d9fd591ecdf0e612c634398bf05..d747d4f83f24b7e6d6d8e1c18f6fb1e935357f51 100644
--- a/R/pkg/inst/tests/testthat/test_sparkSQL.R
+++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R
@@ -1853,7 +1853,7 @@ test_that("approxQuantile() on a DataFrame", {

 test_that("SQL error message is returned from JVM", {
   retError <- tryCatch(sql(sqlContext, "select * from blah"), error = function(e) e)
-  expect_equal(grepl("Table not found", retError), TRUE)
+  expect_equal(grepl("Table or View not found", retError), TRUE)
   expect_equal(grepl("blah", retError), TRUE)
 })

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 3555a6d7faefac4dcb1711739137fc0812ffb17a..de40ddde1bdd970b17a165ceb741720db4a3abed 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -409,7 +409,7 @@ class Analyzer(
         catalog.lookupRelation(u.tableIdentifier, u.alias)
       } catch {
         case _: NoSuchTableException =>
-          u.failAnalysis(s"Table not found: ${u.tableName}")
+          u.failAnalysis(s"Table or View not found: ${u.tableName}")
       }
     }

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index 48805023980660f0e558d22eddecde80c96613dc..d6a8c3eec81aab9975f4a458fbc859d7dcde3cfd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -52,7 +52,7 @@ trait CheckAnalysis {
       case p if p.analyzed => // Skip already analyzed sub-plans

       case u: UnresolvedRelation =>
-        u.failAnalysis(s"Table not found: ${u.tableIdentifier}")
+        u.failAnalysis(s"Table or View not found: ${u.tableIdentifier}")

       case operator: LogicalPlan =>
         operator transformExpressionsUp {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
index e9f04eecf8d706dc30ee59505346db068ae25a51..96fd1a027e9c4cf96595b28b71ac008c9cd5393c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
@@ -33,7 +33,7 @@ class NoSuchDatabaseException(db: String) extends NoSuchItemException {
 }

 class NoSuchTableException(db: String, table: String) extends NoSuchItemException {
-  override def getMessage: String = s"Table $table not found in database $db"
+  override def getMessage: String = s"Table or View $table not found in database $db"
 }

 class NoSuchPartitionException(
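
A minimal sketch of how the reworded analysis error surfaces to callers (assumes the Spark 2.0-era SQLContext API used elsewhere in this patch; the app name, master, and table name are illustrative):

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{AnalysisException, SQLContext}

object MessageSmokeTest {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("msg-check").setMaster("local[1]"))
    val sqlContext = new SQLContext(sc)
    try {
      // Resolution fails in the Analyzer; with this patch the message
      // reads "Table or View not found" instead of "Table not found".
      sqlContext.sql("select * from no_such_table")
      sys.error("expected AnalysisException")
    } catch {
      case e: AnalysisException =>
        assert(e.getMessage.contains("Table or View not found: no_such_table"))
    } finally {
      sc.stop()
    }
  }
}
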
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
index 1994acd1add0a0da6df419c2c0c5993b1cefa035..f8a6fb74cc87d557d57e76d130e28d40d7229f8f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
@@ -62,7 +62,7 @@ class InMemoryCatalog extends ExternalCatalog {
   private def requireTableExists(db: String, table: String): Unit = {
     if (!tableExists(db, table)) {
       throw new AnalysisException(
-        s"Table not found: '$table' does not exist in database '$db'")
+        s"Table or View not found: '$table' does not exist in database '$db'")
     }
   }

@@ -164,7 +164,7 @@ class InMemoryCatalog extends ExternalCatalog {
       catalog(db).tables.remove(table)
     } else {
       if (!ignoreIfNotExists) {
-        throw new AnalysisException(s"Table '$table' does not exist in database '$db'")
+        throw new AnalysisException(s"Table or View '$table' does not exist in database '$db'")
       }
     }
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index c1e5a485e789711c26c4d8c62d90b8411ce07874..34e1cb7315a9c031dbb14df336d43dee0dc67859 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -242,11 +242,11 @@ class SessionCatalog(
     val table = formatTableName(name.table)
     if (name.database.isDefined || !tempTables.contains(table)) {
       // When ignoreIfNotExists is false, no exception is issued when the table does not exist.
-      // Instead, log it as an error message. This is consistent with Hive.
+      // Instead, log it as an error message.
       if (externalCatalog.tableExists(db, table)) {
         externalCatalog.dropTable(db, table, ignoreIfNotExists = true)
       } else if (!ignoreIfNotExists) {
-        logError(s"Table '${name.quotedString}' does not exist")
+        logError(s"Table or View '${name.quotedString}' does not exist")
       }
     } else {
       tempTables.remove(table)
@@ -304,11 +304,6 @@ class SessionCatalog(
     name.database.isEmpty && tempTables.contains(formatTableName(name.table))
  }

-  /**
-   * Return whether View is supported
-   */
-  def isViewSupported: Boolean = false
-
   /**
    * List all tables in the specified database, including temporary tables.
    */
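
For readers skimming the SessionCatalog hunk above: the drop path now logs rather than throws for missing non-temporary tables. A simplified, self-contained model of that branching (MiniSessionCatalog, the set-backed external catalog, and Ident are illustrative stand-ins, not Spark's real classes):

import scala.collection.mutable

// Illustrative stand-in for TableIdentifier, with the quotedString shape used in the log message.
case class Ident(table: String, database: Option[String] = None) {
  def quotedString: String =
    database.map(db => s"`$db`.`$table`").getOrElse(s"`$table`")
}

class MiniSessionCatalog(currentDb: String = "default") {
  private val external = mutable.Set.empty[(String, String)] // (db, table)
  private val tempTables = mutable.Set.empty[String]

  def dropTable(name: Ident, ignoreIfNotExists: Boolean): Unit = {
    val db = name.database.getOrElse(currentDb)
    val table = name.table.toLowerCase
    if (name.database.isDefined || !tempTables.contains(table)) {
      if (external.contains((db, table))) {
        external.remove((db, table))
      } else if (!ignoreIfNotExists) {
        // Mirrors the patched branch: report the problem, but do not throw.
        println(s"ERROR: Table or View '${name.quotedString}' does not exist")
      }
    } else {
      tempTables.remove(table) // temp tables shadow persistent ones
    }
  }
}
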
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index e941736f9abc3d82b34be85357888bfc14e393ba..8a37cf8f4cb047e49f39b653c6f3c0f035f9098b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -191,9 +191,6 @@ case class DropTable(

   override def run(sqlContext: SQLContext): Seq[Row] = {
     val catalog = sqlContext.sessionState.catalog
-    if (isView && !catalog.isViewSupported) {
-      throw new AnalysisException(s"Not supported object: views")
-    }
     // If the command DROP VIEW is to drop a table or DROP TABLE is to drop a view
     // issue an exception.
     catalog.getTableMetadataOption(tableName).map(_.tableType match {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 695dda269aab175ad5a9606c217d96be0c98a8c6..cdd404d699a711c0311f281abd01f222f6bd2eba 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -1827,12 +1827,12 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
     val e1 = intercept[AnalysisException] {
       sql("select * from in_valid_table")
     }
-    assert(e1.message.contains("Table not found"))
+    assert(e1.message.contains("Table or View not found"))

     val e2 = intercept[AnalysisException] {
       sql("select * from no_db.no_table").show()
     }
-    assert(e2.message.contains("Table not found"))
+    assert(e2.message.contains("Table or View not found"))

     val e3 = intercept[AnalysisException] {
       sql("select * from json.invalid_file")
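
The ddl.scala hunk above removes the blanket isViewSupported guard, so the DROP TABLE / DROP VIEW mismatch is decided from catalog metadata instead. A self-contained sketch of that dispatch (the enum values and the Option-based lookup mirror the patched code's shape, but the names here are illustrative; Spark throws AnalysisException, and the DROP TABLE-on-view message is assumed by symmetry with the one asserted in DDLSuite below):

// Illustrative sketch of the table-type dispatch in DropTable.run.
object TableType extends Enumeration {
  val MANAGED, EXTERNAL, VIRTUAL_VIEW = Value
}

final case class MiniMeta(tableType: TableType.Value)

// Throws when the statement kind and the object's catalog type disagree.
def checkDropMismatch(meta: Option[MiniMeta], isView: Boolean): Unit =
  meta.foreach { m =>
    (m.tableType, isView) match {
      case (TableType.VIRTUAL_VIEW, false) =>
        // Assumed wording; a plain exception keeps this sketch dependency-free.
        throw new IllegalArgumentException(
          "Cannot drop a view with DROP TABLE. Please use DROP VIEW instead")
      case (t, true) if t != TableType.VIRTUAL_VIEW =>
        throw new IllegalArgumentException(
          "Cannot drop a table with DROP VIEW. Please use DROP TABLE instead")
      case _ => // kinds agree; the drop proceeds
    }
  }
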
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index e75e5f5cb228eca6e7f8119834c02a816965205b..c6479bf33e4d8453d604e0352d167a421d8bcf26 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -432,11 +432,21 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
     sql("DROP TABLE dbx.tab1")
   }

-  test("drop view") {
+  test("drop view in SQLContext") {
+    // SQLContext does not support create view. Log an error message if tab1 does not exist.
+    sql("DROP VIEW tab1")
+
+    val catalog = sqlContext.sessionState.catalog
+    val tableIdent = TableIdentifier("tab1", Some("dbx"))
+    createDatabase(catalog, "dbx")
+    createTable(catalog, tableIdent)
+    assert(catalog.listTables("dbx") == Seq(tableIdent))
+
     val e = intercept[AnalysisException] {
       sql("DROP VIEW dbx.tab1")
     }
-    assert(e.getMessage.contains("Not supported object: views"))
+    assert(
+      e.getMessage.contains("Cannot drop a table with DROP VIEW. Please use DROP TABLE instead"))
   }

   private def convertToDatasourceTable(
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
index 9ec8b9a9a6ba96a1a4e51384d4b941690aa39e05..bfc3d195ff2ab5357e670c288b3b4b1ca75fdae4 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
@@ -230,7 +230,7 @@ class CliSuite extends SparkFunSuite with BeforeAndAfterAll with Logging {
     runCliWithin(timeout = 2.minute,
       errorResponses = Seq("AnalysisException"))(
       "select * from nonexistent_table;"
-        -> "Error in query: Table not found: nonexistent_table;"
+        -> "Error in query: Table or View not found: nonexistent_table;"
     )
   }

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
index 875652c226048f340a6915ed2a244513247571c3..0cccc22e5a624b914996468668cbe3c69b4cfba2 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
@@ -70,8 +70,6 @@ private[sql] class HiveSessionCatalog(
     }
   }

-  override def isViewSupported: Boolean = true
-
   // ----------------------------------------------------------------
   // | Methods and fields for interacting with HiveMetastoreCatalog |
   // ----------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index 8de2bdcfc020259f877ea322f2bf6e9a9c9a2ed2..061d1512a5250bb61142f7376431a64aac22a45a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -96,7 +96,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
     val message1 = intercept[AnalysisException] {
       sql("SHOW TBLPROPERTIES badtable")
     }.getMessage
-    assert(message1.contains("Table badtable not found in database default"))
+    assert(message1.contains("Table or View badtable not found in database default"))

     // When key is not found, a row containing the error is returned.
     checkAnswer(
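
A small helper in the style of the suites above can exercise both reworded messages end to end (sketch only; assumes a SQLContext like those used in the tests, and plain assertions rather than ScalaTest's intercept):

import org.apache.spark.sql.{AnalysisException, SQLContext}

def expectAnalysisError(sqlContext: SQLContext, sqlText: String, fragment: String): Unit = {
  val e =
    try {
      sqlContext.sql(sqlText).collect()
      sys.error(s"expected AnalysisException for: $sqlText")
    } catch {
      case e: AnalysisException => e
    }
  assert(e.getMessage.contains(fragment), s"message did not mention '$fragment': ${e.getMessage}")
}

// Usage mirroring the updated assertions (table names illustrative):
//   expectAnalysisError(sqlContext, "select * from missing_tbl", "Table or View not found")
//   expectAnalysisError(sqlContext, "DROP VIEW some_real_table",
//     "Cannot drop a table with DROP VIEW. Please use DROP TABLE instead")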