Skip to content
Snippets Groups Projects
Commit faeb9c0e authored by Cheng Lian's avatar Cheng Lian Committed by Michael Armbrust
Browse files

[SPARK-2964] [SQL] Remove duplicated code from spark-sql and start-thriftserver.sh

Author: Cheng Lian <lian.cs.zju@gmail.com>
Author: Kousuke Saruta <sarutak@oss.nttdata.co.jp>

Closes #1886 from sarutak/SPARK-2964 and squashes the following commits:

8ef8751 [Kousuke Saruta] Merge branch 'master' of git://git.apache.org/spark into SPARK-2964
26e7c95 [Kousuke Saruta] Revert "Shorten timeout to more reasonable value"
ffb68fa [Kousuke Saruta] Modified spark-sql and start-thriftserver.sh to use bin/utils.sh
8c6f658 [Kousuke Saruta] Merge branch 'spark-3026' of https://github.com/liancheng/spark into SPARK-2964
81b43a8 [Cheng Lian] Shorten timeout to more reasonable value
a89e66d [Cheng Lian] Fixed command line options quotation in scripts
9c894d3 [Cheng Lian] Fixed bin/spark-sql -S option typo
be4736b [Cheng Lian] Report better error message when running JDBC/CLI without hive-thriftserver profile enabled
parent 2ffd3290
No related branches found
No related tags found
No related merge requests found
......@@ -24,6 +24,7 @@
set -o posix
CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
CLASS_NOT_FOUND_EXIT_STATUS=1
# Figure out where Spark is installed
FWDIR="$(cd `dirname $0`/..; pwd)"
......@@ -43,52 +44,22 @@ function usage {
$FWDIR/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
}
# Ensure that enough positional arguments remain for the option being parsed.
#   $1 - number of arguments currently available (caller passes $#)
#   $2 - minimum number of arguments required
# On shortfall: prints usage information and aborts the whole script (exit 1).
# Note: arg_number/at_least are intentionally left global, matching the
# script's existing convention.
ensure_arg_number() {
  arg_number=$1
  at_least=$2
  (( arg_number >= at_least )) && return 0
  usage
  exit 1
}
if [[ "$@" = --help ]] || [[ "$@" = -h ]]; then
if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
usage
exit 0
fi
CLI_ARGS=()
SUBMISSION_ARGS=()
while (($#)); do
case $1 in
-d | --define | --database | -f | -h | --hiveconf | --hivevar | -i | -p)
ensure_arg_number $# 2
CLI_ARGS+=("$1"); shift
CLI_ARGS+=("$1"); shift
;;
source $FWDIR/bin/utils.sh
SUBMIT_USAGE_FUNCTION=usage
gatherSparkSubmitOpts "$@"
-e)
ensure_arg_number $# 2
CLI_ARGS+=("$1"); shift
CLI_ARGS+=("$1"); shift
;;
"$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}"
exit_status=$?
-s | --silent)
CLI_ARGS+=("$1"); shift
;;
-v | --verbose)
# Both SparkSubmit and SparkSQLCLIDriver recognizes -v | --verbose
CLI_ARGS+=("$1")
SUBMISSION_ARGS+=("$1"); shift
;;
*)
SUBMISSION_ARGS+=("$1"); shift
;;
esac
done
if [[ exit_status -eq CLASS_NOT_FOUND_EXIT_STATUS ]]; then
echo
echo "Failed to load Spark SQL CLI main class $CLASS."
echo "You need to build Spark with -Phive."
fi
exec "$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_ARGS[@]}" spark-internal "${CLI_ARGS[@]}"
exit $exit_status
......@@ -54,6 +54,8 @@ object SparkSubmit {
private val SPARK_SHELL = "spark-shell"
private val PYSPARK_SHELL = "pyspark-shell"
private val CLASS_NOT_FOUND_EXIT_STATUS = 1
// Exposed for testing
private[spark] var exitFn: () => Unit = () => System.exit(-1)
private[spark] var printStream: PrintStream = System.err
......@@ -311,8 +313,18 @@ object SparkSubmit {
System.setProperty(key, value)
}
val mainClass = Class.forName(childMainClass, true, loader)
var mainClass: Class[_] = null
try {
mainClass = Class.forName(childMainClass, true, loader)
} catch {
case e: ClassNotFoundException =>
e.printStackTrace(printStream)
System.exit(CLASS_NOT_FOUND_EXIT_STATUS)
}
val mainMethod = mainClass.getMethod("main", new Array[String](0).getClass)
try {
mainMethod.invoke(null, childArgs.toArray)
} catch {
......
......@@ -27,6 +27,7 @@ set -o posix
FWDIR="$(cd `dirname $0`/..; pwd)"
CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
CLASS_NOT_FOUND_EXIT_STATUS=1
function usage {
echo "Usage: ./sbin/start-thriftserver [options] [thrift server options]"
......@@ -43,36 +44,22 @@ function usage {
$FWDIR/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
}
# Check that at least $2 positional arguments are still available; used while
# parsing options that take a value (e.g. --hiveconf KEY=VALUE).
#   $1 - current argument count (caller passes $#)
#   $2 - minimum count required
# If too few remain, prints usage and exits the script with status 1.
function ensure_arg_number {
arg_number=$1
at_least=$2
if [[ $arg_number -lt $at_least ]]; then
usage
exit 1
fi
}
if [[ "$@" = --help ]] || [[ "$@" = -h ]]; then
if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
usage
exit 0
fi
THRIFT_SERVER_ARGS=()
SUBMISSION_ARGS=()
source $FWDIR/bin/utils.sh
SUBMIT_USAGE_FUNCTION=usage
gatherSparkSubmitOpts "$@"
while (($#)); do
case $1 in
--hiveconf)
ensure_arg_number $# 2
THRIFT_SERVER_ARGS+=("$1"); shift
THRIFT_SERVER_ARGS+=("$1"); shift
;;
"$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}"
exit_status=$?
*)
SUBMISSION_ARGS+=("$1"); shift
;;
esac
done
if [[ exit_status -eq CLASS_NOT_FOUND_EXIT_STATUS ]]; then
echo
echo "Failed to load Hive Thrift server main class $CLASS."
echo "You need to build Spark with -Phive."
fi
exec "$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_ARGS[@]}" spark-internal "${THRIFT_SERVER_ARGS[@]}"
exit $exit_status
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment