diff --git a/bin/spark-sql b/bin/spark-sql
index 564f1f419060fd8b4b9f1e6385bf26407fabeb4d..2a3cb31f58e8d5fd0c3e3c1a7f0c02825efcf1c5 100755
--- a/bin/spark-sql
+++ b/bin/spark-sql
@@ -24,6 +24,7 @@
 set -o posix
 
 CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
+CLASS_NOT_FOUND_EXIT_STATUS=1
 
 # Figure out where Spark is installed
 FWDIR="$(cd `dirname $0`/..; pwd)"
@@ -43,52 +44,22 @@ function usage {
   $FWDIR/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
 }
 
-function ensure_arg_number {
-  arg_number=$1
-  at_least=$2
-
-  if [[ $arg_number -lt $at_least ]]; then
-    usage
-    exit 1
-  fi
-}
-
-if [[ "$@" = --help ]] || [[ "$@" = -h ]]; then
+if [[ " $* " == *" --help "* ]] || [[ $# -eq 1 && "$1" == "-h" ]]; then
   usage
   exit 0
 fi
 
-CLI_ARGS=()
-SUBMISSION_ARGS=()
-
-while (($#)); do
-  case $1 in
-    -d | --define | --database | -f | -h | --hiveconf | --hivevar | -i | -p)
-      ensure_arg_number $# 2
-      CLI_ARGS+=("$1"); shift
-      CLI_ARGS+=("$1"); shift
-      ;;
+source "$FWDIR/bin/utils.sh"
+SUBMIT_USAGE_FUNCTION=usage
+gatherSparkSubmitOpts "$@"
 
-    -e)
-      ensure_arg_number $# 2
-      CLI_ARGS+=("$1"); shift
-      CLI_ARGS+=("$1"); shift
-      ;;
+"$FWDIR"/bin/spark-submit --class "$CLASS" "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}"
+exit_status=$?
 
-    -s | --silent)
-      CLI_ARGS+=("$1"); shift
-      ;;
-
-    -v | --verbose)
-      # Both SparkSubmit and SparkSQLCLIDriver recognizes -v | --verbose
-      CLI_ARGS+=("$1")
-      SUBMISSION_ARGS+=("$1"); shift
-      ;;
-
-    *)
-      SUBMISSION_ARGS+=("$1"); shift
-      ;;
-  esac
-done
+if [[ $exit_status -eq $CLASS_NOT_FOUND_EXIT_STATUS ]]; then
+  echo 1>&2
+  echo "Failed to load Spark SQL CLI main class $CLASS." 1>&2
+  echo "You need to build Spark with -Phive." 1>&2
+fi
 
-exec "$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_ARGS[@]}" spark-internal "${CLI_ARGS[@]}"
+exit $exit_status
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index f8cdbc3c392b551382026cf9b4b77cbc29484ad3..550ee72538900c7d99ef290de921cbf8f7bda3d1 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -54,6 +54,8 @@ object SparkSubmit {
   private val SPARK_SHELL = "spark-shell"
   private val PYSPARK_SHELL = "pyspark-shell"
 
+  private val CLASS_NOT_FOUND_EXIT_STATUS = 1
+
   // Exposed for testing
   private[spark] var exitFn: () => Unit = () => System.exit(-1)
   private[spark] var printStream: PrintStream = System.err
@@ -311,8 +313,18 @@ object SparkSubmit {
       System.setProperty(key, value)
     }
 
-    val mainClass = Class.forName(childMainClass, true, loader)
+    var mainClass: Class[_] = null
+
+    try {
+      mainClass = Class.forName(childMainClass, true, loader)
+    } catch {
+      case e: ClassNotFoundException =>
+        e.printStackTrace(printStream)
+        System.exit(CLASS_NOT_FOUND_EXIT_STATUS)
+    }
+
     val mainMethod = mainClass.getMethod("main", new Array[String](0).getClass)
+
     try {
       mainMethod.invoke(null, childArgs.toArray)
     } catch {
diff --git a/sbin/start-thriftserver.sh b/sbin/start-thriftserver.sh
index 2c4452473ccbc3a316850ebe39a83478d9b0d1e5..c519a77df4a141c930151fbd62f0f80ea5889a9c 100755
--- a/sbin/start-thriftserver.sh
+++ b/sbin/start-thriftserver.sh
@@ -27,6 +27,7 @@ set -o posix
 FWDIR="$(cd `dirname $0`/..; pwd)"
 
 CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
+CLASS_NOT_FOUND_EXIT_STATUS=1
 
 function usage {
   echo "Usage: ./sbin/start-thriftserver [options] [thrift server options]"
@@ -43,36 +44,22 @@ function usage {
   $FWDIR/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
 }
 
-function ensure_arg_number {
-  arg_number=$1
-  at_least=$2
-
-  if [[ $arg_number -lt $at_least ]]; then
-    usage
-    exit 1
-  fi
-}
-
-if [[ "$@" = --help ]] || [[ "$@" = -h ]]; then
+if [[ " $* " == *" --help "* ]] || [[ $# -eq 1 && "$1" == "-h" ]]; then
   usage
   exit 0
 fi
 
-THRIFT_SERVER_ARGS=()
-SUBMISSION_ARGS=()
+source "$FWDIR/bin/utils.sh"
+SUBMIT_USAGE_FUNCTION=usage
+gatherSparkSubmitOpts "$@"
 
-while (($#)); do
-  case $1 in
-    --hiveconf)
-      ensure_arg_number $# 2
-      THRIFT_SERVER_ARGS+=("$1"); shift
-      THRIFT_SERVER_ARGS+=("$1"); shift
-      ;;
+"$FWDIR"/bin/spark-submit --class "$CLASS" "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}"
+exit_status=$?
 
-    *)
-      SUBMISSION_ARGS+=("$1"); shift
-      ;;
-  esac
-done
+if [[ $exit_status -eq $CLASS_NOT_FOUND_EXIT_STATUS ]]; then
+  echo 1>&2
+  echo "Failed to load Hive Thrift server main class $CLASS." 1>&2
+  echo "You need to build Spark with -Phive." 1>&2
+fi
 
-exec "$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_ARGS[@]}" spark-internal "${THRIFT_SERVER_ARGS[@]}"
+exit $exit_status