diff --git a/run b/run
index 15db23bbe0c6b9b135b3208ffb000232038b4cff..83175e84de73833f0c1d18d8db4b8eb40f774afd 100755
--- a/run
+++ b/run
@@ -13,10 +13,33 @@ if [ -e $FWDIR/conf/spark-env.sh ] ; then
   . $FWDIR/conf/spark-env.sh
 fi
 
-# Check that SCALA_HOME has been specified
-if [ -z "$SCALA_HOME" ]; then
-  echo "SCALA_HOME is not set" >&2
-  exit 1
+if [ "$SPARK_LAUNCH_WITH_SCALA" == "1" ]; then
+  if [ `command -v scala` ]; then
+    RUNNER="scala"
+  else
+    if [ -z "$SCALA_HOME" ]; then
+      echo "SCALA_HOME is not set" >&2
+      exit 1
+    fi
+    RUNNER="${SCALA_HOME}/bin/scala"
+  fi
+else
+  if [ `command -v java` ]; then
+    RUNNER="java"
+  else
+    if [ -z "$JAVA_HOME" ]; then
+      echo "JAVA_HOME is not set" >&2
+      exit 1
+    fi
+    RUNNER="${JAVA_HOME}/bin/java"
+  fi
+  if [ -z "$SCALA_LIBRARY_PATH" ]; then
+    if [ -z "$SCALA_HOME" ]; then
+      echo "SCALA_HOME is not set" >&2
+      exit 1
+    fi
+    SCALA_LIBRARY_PATH="$SCALA_HOME/lib"
+  fi
 fi
 
 # Figure out how much memory to use per executor and set it as an environment
@@ -70,17 +93,11 @@ export CLASSPATH # Needed for spark-shell
 # the Spark shell, the wrapper is necessary to properly reset the terminal
 # when we exit, so we allow it to set a variable to launch with scala.
 if [ "$SPARK_LAUNCH_WITH_SCALA" == "1" ]; then
-  RUNNER="${SCALA_HOME}/bin/scala"
   EXTRA_ARGS="" # Java options will be passed to scala as JAVA_OPTS
 else
-  CLASSPATH+=":$SCALA_HOME/lib/scala-library.jar"
-  CLASSPATH+=":$SCALA_HOME/lib/scala-compiler.jar"
-  CLASSPATH+=":$SCALA_HOME/lib/jline.jar"
-  if [ -n "$JAVA_HOME" ]; then
-    RUNNER="${JAVA_HOME}/bin/java"
-  else
-    RUNNER=java
-  fi
+  CLASSPATH+=":$SCALA_LIBRARY_PATH/scala-library.jar"
+  CLASSPATH+=":$SCALA_LIBRARY_PATH/scala-compiler.jar"
+  CLASSPATH+=":$SCALA_LIBRARY_PATH/jline.jar"
   # The JVM doesn't read JAVA_OPTS by default so we need to pass it in
   EXTRA_ARGS="$JAVA_OPTS"
 fi