From a1877f45c3451d18879083ed9b71dd9d5f583f1c Mon Sep 17 00:00:00 2001
From: hyukjinkwon <gurwls223@gmail.com>
Date: Fri, 24 Nov 2017 19:55:26 +0100
Subject: [PATCH] [SPARK-22597][SQL] Add spark-sql cmd script for Windows users

## What changes were proposed in this pull request?

This PR proposes to add cmd scripts so that Windows users can also run the `spark-sql` script.
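
The new scripts mirror the two-step pattern already used by the other Windows launchers (for example `sparkR.cmd`/`sparkR2.cmd`): `spark-sql.cmd` only spawns a fresh `cmd` so the caller's environment is not polluted, and `spark-sql2.cmd` does the actual work by delegating to `spark-submit2.cmd`. In outline (see the full files in the diff below):

```cmd
rem spark-sql.cmd: launch a new cmd so the caller's environment stays clean
cmd /V /E /C ""%~dp0spark-sql2.cmd" %*"

rem spark-sql2.cmd: resolve SPARK_HOME, then hand off to spark-submit2.cmd
call "%~dp0find-spark-home.cmd"
set _SPARK_CMD_USAGE=Usage: .\bin\spark-sql [options] [cli option]
call "%SPARK_HOME%\bin\spark-submit2.cmd" --class org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver %*
```

While here, the patch also fixes a stderr-redirection typo in `find-spark-home.cmd` (`2>$1` -> `2>&1`), switches the `run-example.cmd` usage string to a Windows-style path, and adds a matching `_SPARK_CMD_USAGE` line to `sparkR2.cmd`.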

## How was this patch tested?

Manually tested on Windows.

**Before**

```cmd
C:\...\spark>.\bin\spark-sql
'.\bin\spark-sql' is not recognized as an internal or external command,
operable program or batch file.

C:\...\spark>.\bin\spark-sql.cmd
'.\bin\spark-sql.cmd' is not recognized as an internal or external command,
operable program or batch file.
```

**After**

```cmd
C:\...\spark>.\bin\spark-sql
...
spark-sql> SELECT 'Hello World !!';
...
Hello World !!
```
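
A non-interactive run should behave the same way, assuming the usual Hive CLI options (such as `-e`) are forwarded to `SparkSQLCLIDriver` as on other platforms; this was not part of the manual test above:

```cmd
C:\...\spark>.\bin\spark-sql -e "SELECT 'Hello World !!'"
...
Hello World !!
```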

Author: hyukjinkwon <gurwls223@gmail.com>

Closes #19808 from HyukjinKwon/spark-sql-cmd.
---
 bin/find-spark-home.cmd |  2 +-
 bin/run-example.cmd     |  2 +-
 bin/spark-sql.cmd       | 25 +++++++++++++++++++++++++
 bin/spark-sql2.cmd      | 25 +++++++++++++++++++++++++
 bin/sparkR2.cmd         |  3 +--
 5 files changed, 53 insertions(+), 4 deletions(-)
 create mode 100644 bin/spark-sql.cmd
 create mode 100644 bin/spark-sql2.cmd

diff --git a/bin/find-spark-home.cmd b/bin/find-spark-home.cmd
index c75e7eedb9..6025f67c38 100644
--- a/bin/find-spark-home.cmd
+++ b/bin/find-spark-home.cmd
@@ -32,7 +32,7 @@ if not "x%PYSPARK_PYTHON%"=="x" (
 )
 
 rem If there is python installed, trying to use the root dir as SPARK_HOME
-where %PYTHON_RUNNER% > nul 2>$1
+where %PYTHON_RUNNER% > nul 2>&1
 if %ERRORLEVEL% neq 0 (
   if not exist %PYTHON_RUNNER% (
     if "x%SPARK_HOME%"=="x" (
diff --git a/bin/run-example.cmd b/bin/run-example.cmd
index cc6b234406..2dd396e785 100644
--- a/bin/run-example.cmd
+++ b/bin/run-example.cmd
@@ -20,7 +20,7 @@ rem
 rem Figure out where the Spark framework is installed
 call "%~dp0find-spark-home.cmd"
 
-set _SPARK_CMD_USAGE=Usage: ./bin/run-example [options] example-class [example args]
+set _SPARK_CMD_USAGE=Usage: .\bin\run-example [options] example-class [example args]
 
 rem The outermost quotes are used to prevent Windows command line parse error
 rem when there are some quotes in parameters, see SPARK-21877.
diff --git a/bin/spark-sql.cmd b/bin/spark-sql.cmd
new file mode 100644
index 0000000000..919e3214b5
--- /dev/null
+++ b/bin/spark-sql.cmd
@@ -0,0 +1,25 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements.  See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License.  You may obtain a copy of the License at
+rem
+rem    http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem This is the entry point for running SparkSQL. To avoid polluting the
+rem environment, it just launches a new cmd to do the real work.
+
+rem The outermost quotes are used to prevent Windows command line parse error
+rem when there are some quotes in parameters, see SPARK-21877.
+cmd /V /E /C ""%~dp0spark-sql2.cmd" %*"
diff --git a/bin/spark-sql2.cmd b/bin/spark-sql2.cmd
new file mode 100644
index 0000000000..c34a3c5aa0
--- /dev/null
+++ b/bin/spark-sql2.cmd
@@ -0,0 +1,25 @@
+@echo off
+
+rem
+rem Licensed to the Apache Software Foundation (ASF) under one or more
+rem contributor license agreements.  See the NOTICE file distributed with
+rem this work for additional information regarding copyright ownership.
+rem The ASF licenses this file to You under the Apache License, Version 2.0
+rem (the "License"); you may not use this file except in compliance with
+rem the License.  You may obtain a copy of the License at
+rem
+rem    http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing, software
+rem distributed under the License is distributed on an "AS IS" BASIS,
+rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+rem See the License for the specific language governing permissions and
+rem limitations under the License.
+rem
+
+rem Figure out where the Spark framework is installed
+call "%~dp0find-spark-home.cmd"
+
+set _SPARK_CMD_USAGE=Usage: .\bin\spark-sql [options] [cli option]
+
+call "%SPARK_HOME%\bin\spark-submit2.cmd" --class org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver %*
diff --git a/bin/sparkR2.cmd b/bin/sparkR2.cmd
index b48bea345c..446f0c30bf 100644
--- a/bin/sparkR2.cmd
+++ b/bin/sparkR2.cmd
@@ -21,6 +21,5 @@ rem Figure out where the Spark framework is installed
 call "%~dp0find-spark-home.cmd"
 
 call "%SPARK_HOME%\bin\load-spark-env.cmd"
-
-
+set _SPARK_CMD_USAGE=Usage: .\bin\sparkR [options]
 call "%SPARK_HOME%\bin\spark-submit2.cmd" sparkr-shell-main %*
-- 
GitLab