Commit 2ee6a7e3 authored by Matei Zaharia

Print output from spark-daemon only when it fails to launch

parent 25ac5066

@@ -42,7 +42,7 @@ bin=`cd "$bin"; pwd`
 . "$bin/spark-config.sh"

 # If the slaves file is specified in the command line,
 # then it takes precedence over the definition in
 # spark-env.sh. Save it here.
 HOSTLIST=$SPARK_SLAVES
@@ -58,8 +58,6 @@ if [ "$HOSTLIST" = "" ]; then
   fi
 fi

-echo $"${@// /\\ }"
-
 # By default disable strict host key checking
 if [ "$SPARK_SSH_OPTS" = "" ]; then
   SPARK_SSH_OPTS="-o StrictHostKeyChecking=no"
...
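
The echo removed in the hunk above was slaves.sh printing the space-escaped
command on every run before dispatching it; with spark-daemon.sh now reporting
launch failures itself (see below), that unconditional output is just noise.
For context, the fan-out this script performs looks roughly like the sketch
below. It is a paraphrase built from the variables visible in this hunk
(HOSTLIST, SPARK_SSH_OPTS and the script's "$@" arguments), not a verbatim
excerpt of slaves.sh.

    # Rough sketch of a slaves.sh-style fan-out: run the command given on the
    # command line on every host listed in $HOSTLIST over ssh, in the background,
    # prefix each host's output with its name, then wait for all of them.
    for slave in `cat "$HOSTLIST" | sed "s/#.*$//;s/[ ]*$//"`; do
      ssh $SPARK_SSH_OPTS "$slave" $"${@// /\\ }" 2>&1 | sed "s/^/$slave: /" &
    done
    wait
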
@@ -75,6 +75,9 @@ if [ "$SPARK_IDENT_STRING" = "" ]; then
   export SPARK_IDENT_STRING="$USER"
 fi

+export SPARK_PRINT_LAUNCH_COMMAND="1"
+
 # get log directory
 if [ "$SPARK_LOG_DIR" = "" ]; then
   export SPARK_LOG_DIR="$SPARK_HOME/logs"
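
Exporting SPARK_PRINT_LAUNCH_COMMAND tells spark-class to echo the command it
is about to run before exec'ing the JVM. Since spark-daemon.sh redirects the
child's stdout and stderr into the log file (next hunk), that command line ends
up at the top of the log, which is exactly what gets tailed back to the user
when a launch fails. Below is a sketch of the pattern the launcher follows when
the flag is set; the variable names (RUNNER, CLASSPATH, JAVA_OPTS) are
illustrative stand-ins, not spark-class's actual code.

    # Illustrative sketch: print the command being launched when
    # SPARK_PRINT_LAUNCH_COMMAND is set, then exec it.
    if [ -n "$SPARK_PRINT_LAUNCH_COMMAND" ]; then
      echo -n "Spark Command: "
      echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
      echo "========================================"
    fi
    exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
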
@@ -122,12 +125,19 @@ case $startStop in
       rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $SPARK_MASTER/ "$SPARK_HOME"
     fi

-    spark_rotate_log $log
+    spark_rotate_log "$log"
     echo starting $command, logging to $log
     cd "$SPARK_PREFIX"
     nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
-    echo $! > $pid
-    sleep 1; head "$log"
+    newpid=$!
+    echo $newpid > $pid
+    sleep 2
+    # Check if the process has died; in that case we'll tail the log so the user can see
+    if ! kill -0 $newpid >/dev/null 2>&1; then
+      echo "failed to launch $command:"
+      tail -2 "$log" | sed 's/^/ /'
+      echo "full log in $log"
+    fi
     ;;

   (stop)
...
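
This hunk is the heart of the commit. Instead of unconditionally running head
on the log after one second, spark-daemon.sh now remembers the child's pid,
waits two seconds, and probes it with kill -0 (which sends no signal, it only
tests whether the pid still exists); only if the process is already gone does
it print the last lines of the log and point at the full file. The same
launch-and-verify pattern can be lifted into a standalone helper; the function
below is a hypothetical sketch in that spirit, not part of the commit.

    # Hypothetical helper using the same launch-and-verify pattern: start a
    # command in the background with its output in a log file, record its pid,
    # give it a moment to start, and surface the log tail only if it died.
    launch_and_check() {
      local log="$1" pidfile="$2"; shift 2
      nohup "$@" >> "$log" 2>&1 < /dev/null &
      local newpid=$!
      echo "$newpid" > "$pidfile"
      sleep 2
      if ! kill -0 "$newpid" >/dev/null 2>&1; then
        echo "failed to launch $*:"
        tail -2 "$log" | sed 's/^/  /'
        echo "full log in $log"
        return 1
      fi
    }

    # Example use (paths and command are illustrative):
    #   launch_and_check /tmp/demo.log /tmp/demo.pid sleep 300
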
@@ -35,8 +35,6 @@ if [ "$SPARK_MASTER_IP" = "" ]; then
   SPARK_MASTER_IP=`hostname`
 fi

-echo "Master IP: $SPARK_MASTER_IP"
-
 # Launch the slaves
 if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
   exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
...
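
As with slaves.sh above, the echo of the master IP goes away because it printed
on every successful start. One detail worth noting in the exec line: the
backslash in \; stops the local shell from treating the semicolon as a command
separator, so slaves.sh receives it as an ordinary argument and each remote
host ends up running "cd $SPARK_HOME ; start-slave.sh ..." as a single compound
command. A tiny illustration (the path and master URL are made up):

    # Illustrative only: how the escaped semicolon survives to the remote side.
    set -- cd /opt/spark \; ./bin/start-slave.sh 1 spark://master:7077
    echo "arguments received: $*"
    # prints: arguments received: cd /opt/spark ; ./bin/start-slave.sh 1 spark://master:7077
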
@@ -20,6 +20,7 @@
 # Start all spark daemons.
 # Run this on the master nde
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
...

@@ -143,7 +143,6 @@ class WorkerWebUI(val worker: Worker, val workDir: File, requestedPort: Option[I
     <html>
       <body>
         {linkToMaster}
-        <hr />
         <div>
           <div style="float:left;width:40%">{backButton}</div>
           <div style="float:left;">{range}</div>
...