From f8ba89da217a1f1fd5c856a95a27a3e535017643 Mon Sep 17 00:00:00 2001
From: Josh Rosen <joshrosen@apache.org>
Date: Sun, 15 Dec 2013 18:39:30 -0800
Subject: [PATCH] Fix Cygwin support in several scripts.

This allows the spark-shell, spark-class, run-example, make-distribution.sh,
and ./bin/start-* scripts to work under Cygwin.  Note that this doesn't
support PySpark under Cygwin, since that requires many additional `cygpath`
calls from within Python and will be non-trivial to implement.
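
For context, the scripts changed here rely on `cygpath` to translate
Cygwin (Unix-style) paths into Windows paths before handing them to the
JVM: `cygpath -w` converts a single path and `cygpath -wp` converts a
colon-separated path list such as a classpath. A rough illustration, with
hypothetical paths (the actual output depends on where Cygwin is installed):

    $ cygpath -w "$SPARK_HOME"
    C:\cygwin\home\user\spark
    $ cygpath -wp "/usr/lib:$SPARK_HOME/examples.jar"
    C:\cygwin\usr\lib;C:\cygwin\home\user\spark\examples.jar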

This PR was inspired by, and subsumes, #253 (so close #253 after this is merged).
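
On the terminal side, Cygwin also needs a small JLine workaround: the
spark-shell and sbt launchers force JLine's Unix terminal
(-Djline.terminal=unix for spark-shell, jline.UnixTerminal for sbt) and
toggle raw mode around the JVM invocation so that line editing keeps
working. Sketched with a placeholder java command (the real invocation
differs per script):

    # put the Cygwin terminal into raw, no-echo mode for JLine
    stty -icanon min 1 -echo > /dev/null 2>&1
    java -Djline.terminal=jline.UnixTerminal ...
    # restore canonical mode and echo afterwards
    stty icanon echo > /dev/null 2>&1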
---
 run-example | 10 ++++++++++
 sbt/sbt     | 21 ++++++++++++++++++---
 spark-class | 10 ++++++++++
 spark-shell | 19 +++++++++++++++++--
 4 files changed, 55 insertions(+), 5 deletions(-)

diff --git a/run-example b/run-example
index feade6589a..a78192d31d 100755
--- a/run-example
+++ b/run-example
@@ -17,6 +17,11 @@
 # limitations under the License.
 #
 
+cygwin=false
+case "`uname`" in
+    CYGWIN*) cygwin=true;;
+esac
+
 SCALA_VERSION=2.10
 
 # Figure out where the Scala framework is installed
@@ -59,6 +64,11 @@ fi
 CLASSPATH=`$FWDIR/bin/compute-classpath.sh`
 CLASSPATH="$SPARK_EXAMPLES_JAR:$CLASSPATH"
 
+if $cygwin; then
+    CLASSPATH=`cygpath -wp $CLASSPATH`
+    export SPARK_EXAMPLES_JAR=`cygpath -w $SPARK_EXAMPLES_JAR`
+fi
+
 # Find java binary
 if [ -n "${JAVA_HOME}" ]; then
   RUNNER="${JAVA_HOME}/bin/java"
diff --git a/sbt/sbt b/sbt/sbt
index c31a0280ff..5942280585 100755
--- a/sbt/sbt
+++ b/sbt/sbt
@@ -17,12 +17,27 @@
 # limitations under the License.
 #
 
-EXTRA_ARGS=""
+cygwin=false
+case "`uname`" in
+    CYGWIN*) cygwin=true;;
+esac
+
+EXTRA_ARGS="-Xmx1200m -XX:MaxPermSize=350m -XX:ReservedCodeCacheSize=256m"
 if [ "$MESOS_HOME" != "" ]; then
-  EXTRA_ARGS="-Djava.library.path=$MESOS_HOME/lib/java"
+  EXTRA_ARGS="$EXTRA_ARGS -Djava.library.path=$MESOS_HOME/lib/java"
 fi
 
 export SPARK_HOME=$(cd "$(dirname $0)/.." 2>&1 >/dev/null ; pwd)
 export SPARK_TESTING=1  # To put test classes on classpath
 
-java -Xmx1200m -XX:MaxPermSize=350m -XX:ReservedCodeCacheSize=256m $EXTRA_ARGS $SBT_OPTS -jar "$SPARK_HOME"/sbt/sbt-launch-*.jar "$@"
+SBT_JAR="$SPARK_HOME"/sbt/sbt-launch-*.jar
+if $cygwin; then
+    SBT_JAR=`cygpath -w $SBT_JAR`
+    export SPARK_HOME=`cygpath -w $SPARK_HOME`
+    EXTRA_ARGS="$EXTRA_ARGS -Djline.terminal=jline.UnixTerminal -Dsbt.cygwin=true"
+    stty -icanon min 1 -echo > /dev/null 2>&1
+    java $EXTRA_ARGS $SBT_OPTS -jar $SBT_JAR "$@"
+    stty icanon echo > /dev/null 2>&1
+else
+    java $EXTRA_ARGS $SBT_OPTS -jar $SBT_JAR "$@"
+fi
\ No newline at end of file
diff --git a/spark-class b/spark-class
index 4fa6fb864e..4eb95a9ba2 100755
--- a/spark-class
+++ b/spark-class
@@ -17,6 +17,11 @@
 # limitations under the License.
 #
 
+cygwin=false
+case "`uname`" in
+    CYGWIN*) cygwin=true;;
+esac
+
 SCALA_VERSION=2.10
 
 # Figure out where the Scala framework is installed
@@ -125,6 +130,11 @@ fi
 # Compute classpath using external script
 CLASSPATH=`$FWDIR/bin/compute-classpath.sh`
 CLASSPATH="$SPARK_TOOLS_JAR:$CLASSPATH"
+
+if $cygwin; then
+    CLASSPATH=`cygpath -wp $CLASSPATH`
+    export SPARK_TOOLS_JAR=`cygpath -w $SPARK_TOOLS_JAR`
+fi
 export CLASSPATH
 
 if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
diff --git a/spark-shell b/spark-shell
index 9608bd3f30..d20af0fb39 100755
--- a/spark-shell
+++ b/spark-shell
@@ -23,7 +23,11 @@
 # if those two env vars are set in spark-env.sh but MASTER is not.
 # Options:
 #    -c <cores>    Set the number of cores for REPL to use
-#
+
+cygwin=false
+case "`uname`" in
+    CYGWIN*) cygwin=true;;
+esac
 
 # Enter posix mode for bash
 set -o posix
@@ -79,7 +83,18 @@ if [[ ! $? ]]; then
   saved_stty=""
 fi
 
-$FWDIR/spark-class $OPTIONS org.apache.spark.repl.Main "$@"
+if $cygwin; then
+    # Workaround for issue involving JLine and Cygwin
+    # (see http://sourceforge.net/p/jline/bugs/40/).
+    # If you're using the Mintty terminal emulator in Cygwin, you may need to set
+    # the "Backspace sends ^H" setting in the "Keys" section of the Mintty options
+    # (see https://github.com/sbt/sbt/issues/562).
+    stty -icanon min 1 -echo > /dev/null 2>&1
+    $FWDIR/spark-class -Djline.terminal=unix $OPTIONS org.apache.spark.repl.Main "$@"
+    stty icanon echo > /dev/null 2>&1
+else
+    $FWDIR/spark-class $OPTIONS org.apache.spark.repl.Main "$@"
+fi
 
 # record the exit status lest it be overwritten:
 # then reenable echo and propagate the code.
-- 
GitLab