From bcd2076274b1a95f74616d0ceacb0696e38b5f4c Mon Sep 17 00:00:00 2001
From: bomeng <bmeng@us.ibm.com>
Date: Tue, 12 Apr 2016 13:43:39 -0700
Subject: [PATCH] [SPARK-14414][SQL] improve the error message class hierarchy

## What changes were proposed in this pull request?

Previously we used `AnalysisException`, `ParseException`, `NoSuchFunctionException`, etc. when a parsing error was encountered. I am trying to make them consistent, with **minimal** code impact on the current implementation, by changing the class hierarchy:

1. `NoSuchItemException` is removed, since it was just an abstract class that carried a message string.
2. `NoSuchDatabaseException`, `NoSuchTableException`, `NoSuchPartitionException` and `NoSuchFunctionException` now extend `AnalysisException`, as does `ParseException`. They all fall under the `AnalysisException` umbrella, but callers can still handle each of them in a granular way (see the sketch below).
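To illustrate the new hierarchy, here is a minimal sketch (not part of this patch; `ErrorHierarchyDemo` and `lookup` are hypothetical names, and it assumes `spark-catalyst` is on the classpath). The same exception can be caught either by its specific type or as a plain `AnalysisException`:

```scala
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException

object ErrorHierarchyDemo extends App {
  // Hypothetical helper that fails the way a catalog lookup would.
  def lookup(): Unit = throw new NoSuchTableException("default", "missing")

  // Granular handling: catch the specific catalog exception.
  try lookup() catch {
    case e: NoSuchTableException => println(s"granular: ${e.getMessage}")
  }

  // Umbrella handling: the same exception is now an AnalysisException,
  // so one catch-all clause works without any manual rethrow/wrapping.
  try lookup() catch {
    case e: AnalysisException => println(s"umbrella: ${e.getMessage}")
  }
}
```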
## How was this patch tested?

The existing test cases should cover this patch.

Author: bomeng <bmeng@us.ibm.com>

Closes #12314 from bomeng/SPARK-14414.
---
 .../analysis/NoSuchItemException.scala             | 31 +++++--------
 .../spark/sql/execution/command/ddl.scala          |  1 +
 .../spark/sql/hive/HiveExternalCatalog.scala       |  3 ---
 .../sql/hive/execution/HiveQuerySuite.scala        |  1 -
 4 files changed, 9 insertions(+), 27 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
index 96fd1a027e..5e18316c94 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.catalyst.analysis
 
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.catalog.ExternalCatalog.TablePartitionSpec
 
 
@@ -24,29 +25,13 @@ import org.apache.spark.sql.catalyst.catalog.ExternalCatalog.TablePartitionSpec
  * Thrown by a catalog when an item cannot be found. The analyzer will rethrow the exception
  * as an [[org.apache.spark.sql.AnalysisException]] with the correct position information.
  */
-abstract class NoSuchItemException extends Exception {
-  override def getMessage: String
-}
+class NoSuchDatabaseException(db: String) extends AnalysisException(s"Database $db not found")
 
-class NoSuchDatabaseException(db: String) extends NoSuchItemException {
-  override def getMessage: String = s"Database $db not found"
-}
+class NoSuchTableException(db: String, table: String)
+  extends AnalysisException(s"Table or View $table not found in database $db")
 
-class NoSuchTableException(db: String, table: String) extends NoSuchItemException {
-  override def getMessage: String = s"Table or View $table not found in database $db"
-}
+class NoSuchPartitionException(db: String, table: String, spec: TablePartitionSpec) extends
+  AnalysisException(s"Partition not found in table $table database $db:\n" + spec.mkString("\n"))
 
-class NoSuchPartitionException(
-    db: String,
-    table: String,
-    spec: TablePartitionSpec)
-  extends NoSuchItemException {
-
-  override def getMessage: String = {
-    s"Partition not found in table $table database $db:\n" + spec.mkString("\n")
-  }
-}
-
-class NoSuchFunctionException(db: String, func: String) extends NoSuchItemException {
-  override def getMessage: String = s"Function $func not found in database $db"
-}
+class NoSuchFunctionException(db: String, func: String)
+  extends AnalysisException(s"Function $func not found in database $db")
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index 758a7e45d2..5137bd11d8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -29,6 +29,7 @@ import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
 import org.apache.spark.sql.types._
 
 
+
 // Note: The definition of these commands are based on the ones described in
 // https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
index 482f47428d..f627384253 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
@@ -25,7 +25,6 @@ import org.apache.thrift.TException
 
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.NoSuchItemException
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.hive.client.HiveClient
 
@@ -66,8 +65,6 @@ private[spark] class HiveExternalCatalog(client: HiveClient) extends ExternalCat
     try {
       body
     } catch {
-      case e: NoSuchItemException =>
-        throw new AnalysisException(e.getMessage)
       case NonFatal(e) if isClientException(e) =>
         throw new AnalysisException(e.getClass.getCanonicalName + ": " + e.getMessage)
     }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 0c57ede9ed..af73baa1f3 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -28,7 +28,6 @@ import org.scalatest.BeforeAndAfter
 
 import org.apache.spark.{SparkException, SparkFiles}
 import org.apache.spark.sql.{AnalysisException, DataFrame, Row}
-import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
 import org.apache.spark.sql.catalyst.expressions.Cast
 import org.apache.spark.sql.catalyst.plans.logical.Project
 import org.apache.spark.sql.execution.joins.BroadcastNestedLoopJoin
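A side note on the `HiveExternalCatalog` hunk above: the explicit rethrow clause could be deleted because the catalog exceptions are now `AnalysisException`s themselves and propagate with their type intact. A minimal sketch of that behavior (not part of the patch; `WithClientDemo` and the trivial `withClient` stand-in are hypothetical):

```scala
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException

object WithClientDemo extends App {
  // Hypothetical stand-in for HiveExternalCatalog.withClient. A
  // NoSuchDatabaseException thrown by `body` is already an AnalysisException,
  // so the old `case e: NoSuchItemException => throw new AnalysisException(...)`
  // clause is no longer needed.
  def withClient[T](body: => T): T = body

  try withClient(throw new NoSuchDatabaseException("missing_db")) catch {
    case e: AnalysisException => println(e.getMessage) // Database missing_db not found
  }
}
```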