From b4edafa99bd3858c166adeefdafd93dcd4bc9734 Mon Sep 17 00:00:00 2001
From: Jakub Nowacki <j.s.nowacki@gmail.com>
Date: Thu, 23 Nov 2017 12:47:38 +0900
Subject: [PATCH] [SPARK-22495] Fix setup of SPARK_HOME variable on Windows

## What changes were proposed in this pull request?

This fixes the way `SPARK_HOME` is resolved on Windows. While the previous version worked with the built release download, the directory layout changed slightly for the PySpark `pip` or `conda` install. This had been reflected in the Linux scripts in `bin` but not in the Windows `cmd` files.
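
For illustration, the two layouts differ roughly as follows (the paths are representative examples, not taken from the patch):

```
Built release download (bin\ and jars\ are siblings, so %~dp0.. works):
  C:\spark-2.2.0-bin-hadoop2.7\bin\pyspark.cmd
  C:\spark-2.2.0-bin-hadoop2.7\jars\

pip/conda install (the launchers and the JARs live in different trees):
  C:\Python36\Scripts\pyspark.cmd
  C:\Python36\Lib\site-packages\pyspark\jars\
```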

The first fix improves the way the `jars` directory is found, as the old lookup was stopping the Windows `pip/conda` install from working; the JARs were not found on Session/Context setup.
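
For context, `spark-class2.cmd` derives the JARs directory from `SPARK_HOME`, so a wrongly resolved `SPARK_HOME` means no JARs can be found at startup. A simplified sketch of that lookup (approximate, for illustration only):

```
rem The JARs directory is resolved relative to SPARK_HOME: first the release
rem layout (jars\), then the build layout (assembly\target\...\jars).
if exist "%SPARK_HOME%\jars" (
  set SPARK_JARS_DIR="%SPARK_HOME%\jars"
) else (
  set SPARK_JARS_DIR="%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%\jars"
)
```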

The second fix adds a `find-spark-home.cmd` script which, like the Linux version, uses the `find_spark_home.py` script to resolve `SPARK_HOME`. It is based on the `find-spark-home` bash script, though some operations are done in a different order due to limitations of the `cmd` script language. If the `SPARK_HOME` environment variable is already set, the Python script `find_spark_home.py` will not be run. The process can fail if Python is not installed, but this code path is taken mostly when PySpark is installed via `pip/conda`, in which case Python is present on the system.
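
As the hunks below show, each launcher consumes the helper the same way: it `call`s the script and then relies on the `SPARK_HOME` it leaves set (the helper deliberately avoids `setlocal`, so the assignment remains visible to the caller):

```
rem Figure out where the Spark framework is installed
call "%~dp0find-spark-home.cmd"

call "%SPARK_HOME%\bin\load-spark-env.cmd"
```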

## How was this patch tested?

Tested on a local installation.
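
For reference, a minimal manual check looks roughly like this (an illustrative session, assuming a `pip`-installed PySpark; run from the directory containing `find-spark-home.cmd` or use its full path, and note that `spark-core` is just an example JAR name to look for):

```
C:\> call find-spark-home.cmd
C:\> echo %SPARK_HOME%
C:\> dir "%SPARK_HOME%\jars" | findstr spark-core
```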

Author: Jakub Nowacki <j.s.nowacki@gmail.com>

Closes #19370 from jsnowacki/fix_spark_cmds.
---
 appveyor.yml            |  1 +
 bin/find-spark-home.cmd | 60 +++++++++++++++++++++++++++++++++++++++++
 bin/pyspark2.cmd        |  2 +-
 bin/run-example.cmd     |  4 ++-
 bin/spark-class2.cmd    |  2 +-
 bin/spark-shell2.cmd    |  4 ++-
 bin/sparkR2.cmd         |  2 +-
 7 files changed, 70 insertions(+), 5 deletions(-)
 create mode 100644 bin/find-spark-home.cmd

diff --git a/appveyor.yml b/appveyor.yml
index dc2d81fcdc..48740920cd 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -33,6 +33,7 @@ only_commits:
     - core/src/main/scala/org/apache/spark/api/r/
     - mllib/src/main/scala/org/apache/spark/ml/r/
     - core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+    - bin/*.cmd
 
 cache:
   - C:\Users\appveyor\.m2
diff --git a/bin/find-spark-home.cmd b/bin/find-spark-home.cmd
new file mode 100644
index 0000000000..c75e7eedb9
--- /dev/null
+++ b/bin/find-spark-home.cmd
@@ -0,0 +1,60 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements.  See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License.  You may obtain a copy of the License at
+rem
+rem    http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem Path to the Python script that finds SPARK_HOME
+set FIND_SPARK_HOME_PYTHON_SCRIPT=%~dp0find_spark_home.py
+
+rem Default to standard python interpreter unless told otherwise
+set PYTHON_RUNNER=python
+rem If PYSPARK_DRIVER_PYTHON is set, it overrides the default Python runner
+if not "x%PYSPARK_DRIVER_PYTHON%"=="x" (
+  set PYTHON_RUNNER=%PYSPARK_DRIVER_PYTHON%
+)
+rem If PYSPARK_PYTHON is set, it overrides the Python runner (checked last, so it takes precedence)
+if not "x%PYSPARK_PYTHON%"=="x" (
+  set PYTHON_RUNNER=%PYSPARK_PYTHON%
+)
+
+rem If no Python executable can be found, fall back to this script's parent directory as SPARK_HOME
+where %PYTHON_RUNNER% > nul 2>&1
+if %ERRORLEVEL% neq 0 (
+  if not exist "%PYTHON_RUNNER%" (
+    if "x%SPARK_HOME%"=="x" (
+      echo Missing Python executable '%PYTHON_RUNNER%', defaulting to '%~dp0..' for the ^
+SPARK_HOME environment variable. Please install Python or specify the correct Python executable in ^
+the PYSPARK_DRIVER_PYTHON or PYSPARK_PYTHON environment variable to detect SPARK_HOME safely.
+      set SPARK_HOME=%~dp0..
+    )
+  )
+)
+
+rem Only attempt to find SPARK_HOME if it is not set.
+if "x%SPARK_HOME%"=="x" (
+  if not exist "%FIND_SPARK_HOME_PYTHON_SCRIPT%" (
+    rem If we are not in the same directory as find_spark_home.py, we are not pip-installed, so we
+    rem don't need to search the Python directories for a Spark installation.
+    rem Note that if the user has pip-installed PySpark but is directly calling pyspark-shell or
+    rem spark-submit from another directory, we want to use that version of PySpark rather than
+    rem the pip-installed one.
+    set SPARK_HOME=%~dp0..
+  ) else (
+    rem We are pip installed, use the Python script to resolve a reasonable SPARK_HOME
+    for /f "delims=" %%i in ('%PYTHON_RUNNER% %FIND_SPARK_HOME_PYTHON_SCRIPT%') do set SPARK_HOME=%%i
+  )
+)
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
index 46d4d5c883..663670f2fd 100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -18,7 +18,7 @@ rem limitations under the License.
 rem
 
 rem Figure out where the Spark framework is installed
-set SPARK_HOME=%~dp0..
+call "%~dp0find-spark-home.cmd"
 
 call "%SPARK_HOME%\bin\load-spark-env.cmd"
 set _SPARK_CMD_USAGE=Usage: bin\pyspark.cmd [options]
diff --git a/bin/run-example.cmd b/bin/run-example.cmd
index efa5f81d08..cc6b234406 100644
--- a/bin/run-example.cmd
+++ b/bin/run-example.cmd
@@ -17,7 +17,9 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem
 
-set SPARK_HOME=%~dp0..
+rem Figure out where the Spark framework is installed
+call "%~dp0find-spark-home.cmd"
+
 set _SPARK_CMD_USAGE=Usage: ./bin/run-example [options] example-class [example args]
 
 rem The outermost quotes are used to prevent Windows command line parse error
diff --git a/bin/spark-class2.cmd b/bin/spark-class2.cmd
index a93fd2f0e5..5da7d7a430 100644
--- a/bin/spark-class2.cmd
+++ b/bin/spark-class2.cmd
@@ -18,7 +18,7 @@ rem limitations under the License.
 rem
 
 rem Figure out where the Spark framework is installed
-set SPARK_HOME=%~dp0..
+call "%~dp0find-spark-home.cmd"
 
 call "%SPARK_HOME%\bin\load-spark-env.cmd"
 
diff --git a/bin/spark-shell2.cmd b/bin/spark-shell2.cmd
index 7b5d396be8..aaf71906c6 100644
--- a/bin/spark-shell2.cmd
+++ b/bin/spark-shell2.cmd
@@ -17,7 +17,9 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem
 
-set SPARK_HOME=%~dp0..
+rem Figure out where the Spark framework is installed
+call "%~dp0find-spark-home.cmd"
+
 set _SPARK_CMD_USAGE=Usage: .\bin\spark-shell.cmd [options]
 
 rem SPARK-4161: scala does not assume use of the java classpath,
diff --git a/bin/sparkR2.cmd b/bin/sparkR2.cmd
index 459b780e2a..b48bea345c 100644
--- a/bin/sparkR2.cmd
+++ b/bin/sparkR2.cmd
@@ -18,7 +18,7 @@ rem limitations under the License.
 rem
 
 rem Figure out where the Spark framework is installed
-set SPARK_HOME=%~dp0..
+call "%~dp0find-spark-home.cmd"
 
 call "%SPARK_HOME%\bin\load-spark-env.cmd"
 
-- 
GitLab