From 0b6db8c186183704feafd26b454fff58a0e31861 Mon Sep 17 00:00:00 2001
From: Luca Rosellini <lrosellini@paradigmatecnologico.com>
Date: Fri, 3 Jan 2014 12:57:06 +0100
Subject: [PATCH] Added '-i' command line option to spark REPL. We had to
 create a new implementation of both scala.tools.nsc.CompilerCommand and
 scala.tools.nsc.Settings, because using scala.tools.nsc.GenericRunnerSettings
 would bring in other options (-howtorun, -save and -execute) which don't
 make sense in Spark. Any new Spark-specific command line option can now be
 added to the org.apache.spark.repl.SparkRunnerSettings class.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Since loading a script from the command line should behave the same as
loading it with the ":load" command inside the shell, the script must be
loaded once the SparkContext is available. That is why the call to
'loadFiles(settings)' was moved _after_ the call to postInitialization().
This still does not work if 'isAsync = true'.
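
For example (assuming the spark-shell launcher forwards its arguments to
the REPL main class, and using a hypothetical script name):

    ./bin/spark-shell -i startup.scala

This behaves like typing ':load startup.scala' at the prompt once the
SparkContext is available.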
---
 .../apache/spark/repl/SparkCommandLine.scala  | 22 +++++++++++++++++++
 .../org/apache/spark/repl/SparkILoop.scala    |  7 +++---
 .../spark/repl/SparkRunnerSettings.scala      | 17 ++++++++++++++
 3 files changed, 43 insertions(+), 3 deletions(-)
 create mode 100644 repl/src/main/scala/org/apache/spark/repl/SparkCommandLine.scala
 create mode 100644 repl/src/main/scala/org/apache/spark/repl/SparkRunnerSettings.scala
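
Note for reviewers: any further Spark-specific flag can be declared in
SparkRunnerSettings the same way '-i' is. A minimal, hypothetical sketch
(the '-banner' option below is illustrative only and not part of this
patch):

    import scala.tools.nsc.Settings

    class SparkRunnerSettings(error: String => Unit) extends Settings(error) {
      // '-i <file>' may be given several times; each file is replayed
      // via ':load' once the SparkContext is up.
      val loadfiles = MultiStringSetting(
          "-i",
          "file",
          "load a file (assumes the code is given interactively)")

      // Hypothetical additional Spark-specific option, declared the same way:
      val banner = BooleanSetting(
          "-banner",
          "print the Spark welcome banner on startup")
    }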

diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkCommandLine.scala b/repl/src/main/scala/org/apache/spark/repl/SparkCommandLine.scala
new file mode 100644
index 0000000000..acb1e4ceda
--- /dev/null
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkCommandLine.scala
@@ -0,0 +1,22 @@
+package org.apache.spark.repl
+
+import scala.tools.nsc.{Settings, CompilerCommand}
+import scala.Predef._
+
+/**
+ * Command class enabling Spark-specific command line options (provided by
+ * [[org.apache.spark.repl.SparkRunnerSettings]]).
+ *
+ * @author Luca Rosellini <luca@stratio.com>
+ */
+class SparkCommandLine(args: List[String], override val settings: Settings)
+    extends CompilerCommand(args, settings) {
+
+  def this(args: List[String], error: String => Unit) {
+    this(args, new SparkRunnerSettings(error))
+  }
+
+  def this(args: List[String]) {
+    this(args, str => Console.println("Error: " + str))
+  }
+}
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index f108c70f21..14c3feb55c 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -835,7 +835,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
 
   // runs :load `file` on any files passed via -i
   def loadFiles(settings: Settings) = settings match {
-    case settings: GenericRunnerSettings =>
+    case settings: SparkRunnerSettings =>
       for (filename <- settings.loadfiles.value) {
         val cmd = ":load " + filename
         command(cmd)
@@ -902,7 +902,6 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
     addThunk(printWelcome())
     addThunk(initializeSpark())
 
-    loadFiles(settings)
     // it is broken on startup; go ahead and exit
     if (intp.reporter.hasErrors)
       return false
@@ -922,6 +921,8 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
     }
     // printWelcome()
 
+    loadFiles(settings)
+
     try loop()
     catch AbstractOrMissingHandler()
     finally closeInterpreter()
@@ -955,7 +956,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
 
   /** process command-line arguments and do as they request */
   def process(args: Array[String]): Boolean = {
-    val command = new CommandLine(args.toList, echo)
+    val command = new SparkCommandLine(args.toList, msg => echo(msg))
     def neededHelp(): String =
       (if (command.settings.help.value) command.usageMsg + "\n" else "") +
       (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "")
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkRunnerSettings.scala b/repl/src/main/scala/org/apache/spark/repl/SparkRunnerSettings.scala
new file mode 100644
index 0000000000..235a688334
--- /dev/null
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkRunnerSettings.scala
@@ -0,0 +1,17 @@
+package org.apache.spark.repl
+
+import scala.tools.nsc.Settings
+
+/**
+ * [[scala.tools.nsc.Settings]] implementation adding Spark-specific REPL
+ * command line options.
+ *
+ * @author Luca Rosellini <luca@stratio.com>
+ */
+class SparkRunnerSettings(error: String => Unit) extends Settings(error) {
+
+  val loadfiles = MultiStringSetting(
+      "-i",
+      "file",
+      "load a file (assumes the code is given interactively)")
+}
-- 
GitLab