diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index bd132c29bf6c5f6bcef736f267d4f51dccf479c1..988b624febc0e27ab53e28878c7fca6f471e4694 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -915,6 +915,10 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
   }
 
   def createSparkContext(): SparkContext = {
+   val uri = System.getenv("SPARK_EXECUTOR_URI")
+   if (uri != null) {
+         System.setProperty("spark.executor.uri", uri)
+   }
     val master = this.master match {
       case Some(m) => m
       case None => {
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala b/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
index a33f07a83e9ff12bfd04254bfdd686ef4b1f568a..64084209e80d202c15f6948d066f3cee69178e8e 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
@@ -185,8 +185,13 @@ trait SparkImports {
             if (currentImps contains imv) addWrapper()
             val objName = req.lineRep.readPath
             val valName = "$VAL" + newValId();
-            code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
-            code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
+            
+            if(!code.toString.endsWith(".`" + imv + "`;\n")) { // skip when this import was already emitted
+               code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
+               code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
+            }
+            // code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
+            // code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
             // code append ("import " + (req fullPath imv) + "\n")
             currentImps += imv
           }