Skip to content
Snippets Groups Projects
Commit bcd20762 authored by bomeng's avatar bomeng Committed by Reynold Xin
Browse files

[SPARK-14414][SQL] improve the error message class hierarchy

## What changes were proposed in this pull request?

Previously we were using `AnalysisException`, `ParseException`, `NoSuchFunctionException`, etc. whenever a parsing error was encountered. I am trying to make this consistent — with **minimum** impact on the current implementation — by changing the class hierarchy.
1. `NoSuchItemException` is removed, since it is an abstract class that simply carries a message string.
2. `NoSuchDatabaseException`, `NoSuchTableException`, `NoSuchPartitionException` and `NoSuchFunctionException` now extends `AnalysisException`, as well as `ParseException`, they are all under `AnalysisException` umbrella, but you can also determine how to use them in a granular way.

## How was this patch tested?
The existing test cases should cover this patch.

Author: bomeng <bmeng@us.ibm.com>

Closes #12314 from bomeng/SPARK-14414.
parent 85e68b4b
No related branches found
No related tags found
No related merge requests found
...@@ -17,6 +17,7 @@ ...@@ -17,6 +17,7 @@
package org.apache.spark.sql.catalyst.analysis

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.catalog.ExternalCatalog.TablePartitionSpec
...@@ -24,29 +25,13 @@ import org.apache.spark.sql.catalyst.catalog.ExternalCatalog.TablePartitionSpec ...@@ -24,29 +25,13 @@ import org.apache.spark.sql.catalyst.catalog.ExternalCatalog.TablePartitionSpec
/**
 * Thrown by a catalog when an item cannot be found. The analyzer will rethrow the exception
 * as an [[org.apache.spark.sql.AnalysisException]] with the correct position information.
 */
/**
 * Thrown by a catalog when a database cannot be found.
 *
 * Extends [[org.apache.spark.sql.AnalysisException]] so that all catalog lookup failures
 * fall under the `AnalysisException` umbrella, while callers can still match on the
 * specific subtype for granular handling.
 *
 * @param db the name of the database that was not found
 */
class NoSuchDatabaseException(db: String)
  extends AnalysisException(s"Database $db not found")

/**
 * Thrown by a catalog when a table or view cannot be found in the given database.
 *
 * @param db    the database that was searched
 * @param table the table or view name that was not found
 */
class NoSuchTableException(db: String, table: String)
  extends AnalysisException(s"Table or View $table not found in database $db")

/**
 * Thrown by a catalog when no partition matching `spec` exists in the given table.
 *
 * @param db    the database containing the table
 * @param table the table that was searched
 * @param spec  the partition specification that could not be matched
 */
class NoSuchPartitionException(db: String, table: String, spec: TablePartitionSpec)
  extends AnalysisException(
    s"Partition not found in table $table database $db:\n" + spec.mkString("\n"))

/**
 * Thrown by a catalog when a function cannot be found in the given database.
 *
 * @param db   the database that was searched
 * @param func the function name that was not found
 */
class NoSuchFunctionException(db: String, func: String)
  extends AnalysisException(s"Function $func not found in database $db")
...@@ -29,6 +29,7 @@ import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference} ...@@ -29,6 +29,7 @@ import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.types._ import org.apache.spark.sql.types._
// Note: The definition of these commands are based on the ones described in // Note: The definition of these commands are based on the ones described in
// https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL // https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL
......
...@@ -25,7 +25,6 @@ import org.apache.thrift.TException ...@@ -25,7 +25,6 @@ import org.apache.thrift.TException
import org.apache.spark.internal.Logging import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.NoSuchItemException
import org.apache.spark.sql.catalyst.catalog._ import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.hive.client.HiveClient import org.apache.spark.sql.hive.client.HiveClient
...@@ -66,8 +65,6 @@ private[spark] class HiveExternalCatalog(client: HiveClient) extends ExternalCat ...@@ -66,8 +65,6 @@ private[spark] class HiveExternalCatalog(client: HiveClient) extends ExternalCat
try { try {
body body
} catch { } catch {
case e: NoSuchItemException =>
throw new AnalysisException(e.getMessage)
case NonFatal(e) if isClientException(e) => case NonFatal(e) if isClientException(e) =>
throw new AnalysisException(e.getClass.getCanonicalName + ": " + e.getMessage) throw new AnalysisException(e.getClass.getCanonicalName + ": " + e.getMessage)
} }
......
...@@ -28,7 +28,6 @@ import org.scalatest.BeforeAndAfter ...@@ -28,7 +28,6 @@ import org.scalatest.BeforeAndAfter
import org.apache.spark.{SparkException, SparkFiles} import org.apache.spark.{SparkException, SparkFiles}
import org.apache.spark.sql.{AnalysisException, DataFrame, Row} import org.apache.spark.sql.{AnalysisException, DataFrame, Row}
import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
import org.apache.spark.sql.catalyst.expressions.Cast import org.apache.spark.sql.catalyst.expressions.Cast
import org.apache.spark.sql.catalyst.plans.logical.Project import org.apache.spark.sql.catalyst.plans.logical.Project
import org.apache.spark.sql.execution.joins.BroadcastNestedLoopJoin import org.apache.spark.sql.execution.joins.BroadcastNestedLoopJoin
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment