diff --git a/appveyor.yml b/appveyor.yml
index dc2d81fcdc0911455a8a3cfdd8a96e97571451db..48740920cd09b9078464aacf091944bac00593a6 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -33,6 +33,7 @@ only_commits:
     - core/src/main/scala/org/apache/spark/api/r/
     - mllib/src/main/scala/org/apache/spark/ml/r/
     - core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+    - bin/*.cmd
 
 cache:
   - C:\Users\appveyor\.m2
diff --git a/bin/find-spark-home.cmd b/bin/find-spark-home.cmd
new file mode 100644
index 0000000000000000000000000000000000000000..c75e7eedb9418319a9f5bfceaccdd96c7f3c7248
--- /dev/null
+++ b/bin/find-spark-home.cmd
@@ -0,0 +1,65 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements.  See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License.  You may obtain a copy of the License at
+rem
+rem    http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem Path to the Python script that finds SPARK_HOME
+set FIND_SPARK_HOME_PYTHON_SCRIPT=%~dp0find_spark_home.py
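+rem (present next to this script only when PySpark is pip installed; prints SPARK_HOME on stdout)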
+
+rem Default to standard python interpreter unless told otherwise
+set PYTHON_RUNNER=python
+rem If PYSPARK_DRIVER_PYTHON is set, it overrides the default Python interpreter
+if not "x%PYSPARK_DRIVER_PYTHON%"=="x" (
+  set PYTHON_RUNNER=%PYSPARK_DRIVER_PYTHON%
+)
+rem If PYSPARK_PYTHON is set, it takes precedence over PYSPARK_DRIVER_PYTHON
+if not "x%PYSPARK_PYTHON%"=="x" (
+  set PYTHON_RUNNER=%PYSPARK_PYTHON%
+)
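+rem For example, to pick a specific interpreter (the path below is illustrative):
+rem   set PYSPARK_PYTHON=C:\Python37\python.exe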
+
+rem If the Python runner cannot be found, fall back to this script's parent directory as SPARK_HOME
+where %PYTHON_RUNNER% > nul 2>&1
+if %ERRORLEVEL% neq 0 (
+  if not exist "%PYTHON_RUNNER%" (
+    if "x%SPARK_HOME%"=="x" (
+      echo Missing Python executable '%PYTHON_RUNNER%', defaulting to '%~dp0..' for the ^
+SPARK_HOME environment variable. Please install Python or set PYSPARK_DRIVER_PYTHON or ^
+PYSPARK_PYTHON to a valid Python executable so that SPARK_HOME can be detected reliably.
+      set SPARK_HOME=%~dp0..
+    )
+  )
+)
+
+rem Only attempt to find SPARK_HOME if it is not set.
+if "x%SPARK_HOME%"=="x" (
+  if not exist "%FIND_SPARK_HOME_PYTHON_SCRIPT%" (
+    rem If find_spark_home.py is not in the same directory as this script, PySpark was not
+    rem pip installed, so there is no need to search the Python directories for a Spark installation.
+    rem Note that even if the user has pip installed PySpark, invoking pyspark-shell or spark-submit
+    rem directly from another Spark directory should use that Spark installation rather than the
+    rem pip installed one.
+    set SPARK_HOME=%~dp0..
+  ) else (
+    rem We are pip installed, use the Python script to resolve a reasonable SPARK_HOME
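+    rem "delims=" keeps the whole output line, spaces included, so the printed path is
+    rem assigned to SPARK_HOME verbatim.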
+    for /f "delims=" %%i in ('%PYTHON_RUNNER% "%FIND_SPARK_HOME_PYTHON_SCRIPT%"') do set SPARK_HOME=%%i
+  )
+)
diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd
index 46d4d5c883cfb71eb0b238d84a0c78945c340127..663670f2fddaf58aee3e4d6ad43134460689c4f1 100644
--- a/bin/pyspark2.cmd
+++ b/bin/pyspark2.cmd
@@ -18,7 +18,7 @@ rem limitations under the License.
 rem
 
 rem Figure out where the Spark framework is installed
-set SPARK_HOME=%~dp0..
+call "%~dp0find-spark-home.cmd"
 
 call "%SPARK_HOME%\bin\load-spark-env.cmd"
 set _SPARK_CMD_USAGE=Usage: bin\pyspark.cmd [options]
diff --git a/bin/run-example.cmd b/bin/run-example.cmd
index efa5f81d08f7f308d3c6c8f611b1452dc7eeb456..cc6b234406e4a0e1804cf21a467645711ce78963 100644
--- a/bin/run-example.cmd
+++ b/bin/run-example.cmd
@@ -17,7 +17,9 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem
 
-set SPARK_HOME=%~dp0..
+rem Figure out where the Spark framework is installed
+call "%~dp0find-spark-home.cmd"
+
 set _SPARK_CMD_USAGE=Usage: ./bin/run-example [options] example-class [example args]
 
 rem The outermost quotes are used to prevent Windows command line parse error
diff --git a/bin/spark-class2.cmd b/bin/spark-class2.cmd
index a93fd2f0e54bc4a53d4bc4170aef91d0e7871de9..5da7d7a430d79e5d196bccc60ddc5029ad3e71c6 100644
--- a/bin/spark-class2.cmd
+++ b/bin/spark-class2.cmd
@@ -18,7 +18,7 @@ rem limitations under the License.
 rem
 
 rem Figure out where the Spark framework is installed
-set SPARK_HOME=%~dp0..
+call "%~dp0find-spark-home.cmd"
 
 call "%SPARK_HOME%\bin\load-spark-env.cmd"
 
diff --git a/bin/spark-shell2.cmd b/bin/spark-shell2.cmd
index 7b5d396be888c11ea37e74f5001d67c94cb4c17a..aaf71906c6526e33d2e43fff8013914f23a985fc 100644
--- a/bin/spark-shell2.cmd
+++ b/bin/spark-shell2.cmd
@@ -17,7 +17,9 @@ rem See the License for the specific language governing permissions and
 rem limitations under the License.
 rem
 
-set SPARK_HOME=%~dp0..
+rem Figure out where the Spark framework is installed
+call "%~dp0find-spark-home.cmd"
+
 set _SPARK_CMD_USAGE=Usage: .\bin\spark-shell.cmd [options]
 
 rem SPARK-4161: scala does not assume use of the java classpath,
diff --git a/bin/sparkR2.cmd b/bin/sparkR2.cmd
index 459b780e2ae3361b8f74dc27673d43884ceeed73..b48bea345c0b9e397efe42678ca202fdcfa3deca 100644
--- a/bin/sparkR2.cmd
+++ b/bin/sparkR2.cmd
@@ -18,7 +18,7 @@ rem limitations under the License.
 rem
 
 rem Figure out where the Spark framework is installed
-set SPARK_HOME=%~dp0..
+call "%~dp0find-spark-home.cmd"
 
 call "%SPARK_HOME%\bin\load-spark-env.cmd"