Skip to content
Snippets Groups Projects
Commit 731f683b authored by Prashant Sharma's avatar Prashant Sharma Committed by Patrick Wendell
Browse files

[SPARK-2109] Setting SPARK_MEM for bin/pyspark does not work.

Trivial fix.

Author: Prashant Sharma <prashant.s@imaginea.com>

Closes #1050 from ScrapCodes/SPARK-2109/pyspark-script-bug and squashes the following commits:

77072b9 [Prashant Sharma] Changed echos to redirect to STDERR.
13f48a0 [Prashant Sharma] [SPARK-2109] Setting SPARK_MEM for bin/pyspark does not work.
parent a9b52e56
No related branches found
No related tags found
No related merge requests found
@@ -81,10 +81,10 @@ ASSEMBLY_JAR=$(ls "$assembly_folder"/spark-assembly*hadoop*.jar 2>/dev/null)

 # Verify that versions of java used to build the jars and run Spark are compatible
 jar_error_check=$("$JAR_CMD" -tf "$ASSEMBLY_JAR" nonexistent/class/path 2>&1)
 if [[ "$jar_error_check" =~ "invalid CEN header" ]]; then
-  echo "Loading Spark jar with '$JAR_CMD' failed. "
-  echo "This is likely because Spark was compiled with Java 7 and run "
-  echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark "
-  echo "or build Spark with Java 6."
+  echo "Loading Spark jar with '$JAR_CMD' failed. " 1>&2
+  echo "This is likely because Spark was compiled with Java 7 and run " 1>&2
+  echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark " 1>&2
+  echo "or build Spark with Java 6." 1>&2
   exit 1
 fi
@@ -26,7 +26,7 @@ export SPARK_HOME="$FWDIR"

 SCALA_VERSION=2.10

 if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
-  echo "Usage: ./bin/pyspark [options]"
+  echo "Usage: ./bin/pyspark [options]" 1>&2
   $FWDIR/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
   exit 0
 fi
@@ -36,8 +36,8 @@ if [ ! -f "$FWDIR/RELEASE" ]; then
   # Exit if the user hasn't compiled Spark
   ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/spark-assembly*hadoop*.jar >& /dev/null
   if [[ $? != 0 ]]; then
-    echo "Failed to find Spark assembly in $FWDIR/assembly/target" >&2
-    echo "You need to build Spark before running this program" >&2
+    echo "Failed to find Spark assembly in $FWDIR/assembly/target" 1>&2
+    echo "You need to build Spark before running this program" 1>&2
     exit 1
   fi
 fi
@@ -27,9 +27,9 @@ if [ -n "$1" ]; then
   EXAMPLE_CLASS="$1"
   shift
 else
-  echo "Usage: ./bin/run-example <example-class> [example-args]"
-  echo "  - set MASTER=XX to use a specific master"
-  echo "  - can use abbreviated example class name (e.g. SparkPi, mllib.LinearRegression)"
+  echo "Usage: ./bin/run-example <example-class> [example-args]" 1>&2
+  echo "  - set MASTER=XX to use a specific master" 1>&2
+  echo "  - can use abbreviated example class name (e.g. SparkPi, mllib.LinearRegression)" 1>&2
   exit 1
 fi
@@ -40,8 +40,8 @@ elif [ -e "$EXAMPLES_DIR"/target/scala-$SCALA_VERSION/spark-examples-*hadoop*.jar ]; then
 fi

 if [[ -z $SPARK_EXAMPLES_JAR ]]; then
-  echo "Failed to find Spark examples assembly in $FWDIR/lib or $FWDIR/examples/target" >&2
-  echo "You need to build Spark before running this program" >&2
+  echo "Failed to find Spark examples assembly in $FWDIR/lib or $FWDIR/examples/target" 1>&2
+  echo "You need to build Spark before running this program" 1>&2
   exit 1
 fi
@@ -33,13 +33,13 @@ export SPARK_HOME="$FWDIR"

 . $FWDIR/bin/load-spark-env.sh

 if [ -z "$1" ]; then
-  echo "Usage: spark-class <class> [<args>]" >&2
+  echo "Usage: spark-class <class> [<args>]" 1>&2
   exit 1
 fi

 if [ -n "$SPARK_MEM" ]; then
-  echo "Warning: SPARK_MEM is deprecated, please use a more specific config option"
-  echo "(e.g., spark.executor.memory or SPARK_DRIVER_MEMORY)."
+  echo -e "Warning: SPARK_MEM is deprecated, please use a more specific config option" 1>&2
+  echo -e "(e.g., spark.executor.memory or SPARK_DRIVER_MEMORY)." 1>&2
 fi
@@ -147,10 +147,9 @@ fi

 export CLASSPATH

 if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
-  echo -n "Spark Command: "
-  echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
-  echo "========================================"
-  echo
+  echo -n "Spark Command: " 1>&2
+  echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@" 1>&2
+  echo -e "========================================\n" 1>&2
 fi

 exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment