    SPARK-2632, SPARK-2576. Fixed by only importing what is necessary during class definition.
    Prashant Sharma authored
    Without this patch, the REPL imports everything available in scope when it wraps a new definition.
    
    ```scala
    
    scala> val a = 10l
    val a = 10l
    a: Long = 10
    
    scala> import a._
    import a._
    import a._
    
    scala> case class A(a: Int) // show
    case class A(a: Int) // show
    class $read extends Serializable {
      def <init>() = {
        super.<init>;
        ()
      };
      class $iwC extends Serializable {
        def <init>() = {
          super.<init>;
          ()
        };
        class $iwC extends Serializable {
          def <init>() = {
            super.<init>;
            ()
          };
          import org.apache.spark.SparkContext._;
          class $iwC extends Serializable {
            def <init>() = {
              super.<init>;
              ()
            };
            val $VAL5 = $line5.$read.INSTANCE;
            import $VAL5.$iw.$iw.$iw.$iw.a;
            class $iwC extends Serializable {
              def <init>() = {
                super.<init>;
                ()
              };
              import a._;
              class $iwC extends Serializable {
                def <init>() = {
                  super.<init>;
                  ()
                };
                class $iwC extends Serializable {
                  def <init>() = {
                    super.<init>;
                    ()
                  };
                  case class A extends scala.Product with scala.Serializable {
                    <caseaccessor> <paramaccessor> val a: Int = _;
                    def <init>(a: Int) = {
                      super.<init>;
                      ()
                    }
                  }
                };
                val $iw = new $iwC.<init>
              };
              val $iw = new $iwC.<init>
            };
            val $iw = new $iwC.<init>
          };
          val $iw = new $iwC.<init>
        };
        val $iw = new $iwC.<init>
      };
      val $iw = new $iwC.<init>
    }
    object $read extends scala.AnyRef {
      def <init>() = {
        super.<init>;
        ()
      };
      val INSTANCE = new $read.<init>
    }
    defined class A
    ```
    
    With this patch, the REPL imports only what is actually needed.
    
    ```scala
    
    scala> val a = 10l
    val a = 10l
    a: Long = 10
    
    scala> import a._
    import a._
    import a._
    
    scala> case class A(a: Int) // show
    case class A(a: Int) // show
    class $read extends Serializable {
      def <init>() = {
        super.<init>;
        ()
      };
      class $iwC extends Serializable {
        def <init>() = {
          super.<init>;
          ()
        };
        class $iwC extends Serializable {
          def <init>() = {
            super.<init>;
            ()
          };
          case class A extends scala.Product with scala.Serializable {
            <caseaccessor> <paramaccessor> val a: Int = _;
            def <init>(a: Int) = {
              super.<init>;
              ()
            }
          }
        };
        val $iw = new $iwC.<init>
      };
      val $iw = new $iwC.<init>
    }
    object $read extends scala.AnyRef {
      def <init>() = {
        super.<init>;
        ()
      };
      val INSTANCE = new $read.<init>
    }
    defined class A
    
    scala>
    
    ```
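    
    The gist of the fix: when the REPL generates the wrapper class for a new snippet, it re-emits an earlier import only if the snippet actually references one of the names that import brings into scope. A minimal sketch of that idea (hypothetical helper, not the actual Spark REPL code):
    
    ```scala
    // Hypothetical illustration of the filtering idea: keep an earlier import
    // only if the new snippet references one of the names it brings into scope.
    def neededImports(priorImports: Seq[(String, Set[String])], snippet: String): Seq[String] =
      priorImports.collect {
        case (importStmt, importedNames) if importedNames.exists(n => snippet.contains(n)) => importStmt
      }
    
    // `case class A(a: Int)` references no member brought in by `a._` or `SparkContext._`,
    // so both wildcard imports are dropped from the generated wrapper.
    val prior = Seq(
      "import a._" -> Set("toLong", "toInt"),
      "import org.apache.spark.SparkContext._" -> Set("rddToPairRDDFunctions")
    )
    neededImports(prior, "case class A(a: Int)")  // returns Seq()
    ```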
    
    This patch also adds a `:fallback` mode; when enabled, it restores the spark-shell's 1.0.0 behaviour.
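    
    A rough sketch of how such a session might look (assuming `:fallback` is entered as a shell command; the confirmation line is illustrative, not captured from a real session):
    
    ```scala
    scala> :fallback
    // 1.0.0-style wrapping is now in effect for subsequent definitions
    
    scala> case class A(a: Int)
    defined class A
    ```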
    
    Author: Prashant Sharma <scrapcodes@gmail.com>
    Author: Yin Huai <huai@cse.ohio-state.edu>
    Author: Prashant Sharma <prashant.s@imaginea.com>
    
    Closes #1635 from ScrapCodes/repl-fix-necessary-imports and squashes the following commits:
    
    b1968d2 [Prashant Sharma] Added toschemaRDD to test case.
    0b712bb [Yin Huai] Add a REPL test to test importing a method.
    02ad8ff [Yin Huai] Add a REPL test for importing SQLContext.createSchemaRDD.
    ed6d0c7 [Prashant Sharma] Added a fallback mode, in case users run into issues while using the repl.
    b63d3b2 [Prashant Sharma] SPARK-2632, SPARK-2576. Fixed by only importing what is necessary during class definition.
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<!--
  ~ Licensed to the Apache Software Foundation (ASF) under one or more
  ~ contributor license agreements.  See the NOTICE file distributed with
  ~ this work for additional information regarding copyright ownership.
  ~ The ASF licenses this file to You under the Apache License, Version 2.0
  ~ (the "License"); you may not use this file except in compliance with
  ~ the License.  You may obtain a copy of the License at
  ~
  ~    http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS,
  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  ~ See the License for the specific language governing permissions and
  ~ limitations under the License.
  -->

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-parent</artifactId>
    <version>1.1.0-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

  <groupId>org.apache.spark</groupId>
  <artifactId>spark-repl_2.10</artifactId>
  <packaging>jar</packaging>
  <name>Spark Project REPL</name>
  <url>http://spark.apache.org/</url>

  <properties>
    <sbt.project.name>repl</sbt.project.name>
    <deb.install.path>/usr/share/spark</deb.install.path>
    <deb.user>root</deb.user>
  </properties>

  <dependencies>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-bagel_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
      <scope>runtime</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-mllib_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
      <scope>runtime</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_${scala.binary.version}</artifactId>
      <version>${project.version}</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.eclipse.jetty</groupId>
      <artifactId>jetty-server</artifactId>
    </dependency>
    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scala-compiler</artifactId>
      <version>${scala.version}</version>
    </dependency>
    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scala-reflect</artifactId>
      <version>${scala.version}</version>
    </dependency>
    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>jline</artifactId>
      <version>${scala.version}</version>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>jul-to-slf4j</artifactId>
    </dependency>
    <dependency>
      <groupId>org.scalatest</groupId>
      <artifactId>scalatest_${scala.binary.version}</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.scalacheck</groupId>
      <artifactId>scalacheck_${scala.binary.version}</artifactId>
      <scope>test</scope>
    </dependency>
  </dependencies>
  <build>
    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
    <plugins>
      <plugin>
        <groupId>org.scalatest</groupId>
        <artifactId>scalatest-maven-plugin</artifactId>
        <configuration>
          <environmentVariables>
            <SPARK_HOME>${basedir}/..</SPARK_HOME>
          </environmentVariables>
        </configuration>
      </plugin>
    </plugins>
  </build>
</project>