SPARK-2632, SPARK-2576. Fixed by only importing what is necessary during class definition.
Without this patch, the REPL imports everything available in scope into the generated wrapper classes.

```scala

scala> val a = 10l
val a = 10l
a: Long = 10

scala> import a._
import a._
import a._

scala> case class A(a: Int) // show
case class A(a: Int) // show
class $read extends Serializable {
  def <init>() = {
    super.<init>;
    ()
  };
  class $iwC extends Serializable {
    def <init>() = {
      super.<init>;
      ()
    };
    class $iwC extends Serializable {
      def <init>() = {
        super.<init>;
        ()
      };
      import org.apache.spark.SparkContext._;
      class $iwC extends Serializable {
        def <init>() = {
          super.<init>;
          ()
        };
        val $VAL5 = $line5.$read.INSTANCE;
        import $VAL5.$iw.$iw.$iw.$iw.a;
        class $iwC extends Serializable {
          def <init>() = {
            super.<init>;
            ()
          };
          import a._;
          class $iwC extends Serializable {
            def <init>() = {
              super.<init>;
              ()
            };
            class $iwC extends Serializable {
              def <init>() = {
                super.<init>;
                ()
              };
              case class A extends scala.Product with scala.Serializable {
                <caseaccessor> <paramaccessor> val a: Int = _;
                def <init>(a: Int) = {
                  super.<init>;
                  ()
                }
              }
            };
            val $iw = new $iwC.<init>
          };
          val $iw = new $iwC.<init>
        };
        val $iw = new $iwC.<init>
      };
      val $iw = new $iwC.<init>
    };
    val $iw = new $iwC.<init>
  };
  val $iw = new $iwC.<init>
}
object $read extends scala.AnyRef {
  def <init>() = {
    super.<init>;
    ()
  };
  val INSTANCE = new $read.<init>
}
defined class A
```

With this patch, it imports only what is necessary.

```scala

scala> val a = 10l
val a = 10l
a: Long = 10

scala> import a._
import a._
import a._

scala> case class A(a: Int) // show
case class A(a: Int) // show
class $read extends Serializable {
  def <init>() = {
    super.<init>;
    ()
  };
  class $iwC extends Serializable {
    def <init>() = {
      super.<init>;
      ()
    };
    class $iwC extends Serializable {
      def <init>() = {
        super.<init>;
        ()
      };
      case class A extends scala.Product with scala.Serializable {
        <caseaccessor> <paramaccessor> val a: Int = _;
        def <init>(a: Int) = {
          super.<init>;
          ()
        }
      }
    };
    val $iw = new $iwC.<init>
  };
  val $iw = new $iwC.<init>
}
object $read extends scala.AnyRef {
  def <init>() = {
    super.<init>;
    ()
  };
  val INSTANCE = new $read.<init>
}
defined class A

scala>

```
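To make the idea concrete, here is a simplified, hypothetical sketch of the pruning logic (the object `ImportPruning`, its `candidateImports` list, and the string-based `prune` check are illustrative only; the actual patch works on the compiler's import handlers and parsed trees, not on raw source text):

```scala
// Hypothetical sketch, not the patch's code: keep only the earlier imports
// whose imported name is actually referenced by the next REPL snippet.
object ImportPruning {
  // (importedName, importStatement) pairs accumulated from earlier REPL lines.
  val candidateImports: Seq[(String, String)] = Seq(
    "Random"  -> "import scala.util.Random",
    "mutable" -> "import scala.collection.mutable"
  )

  /** Return only the import statements whose imported name occurs in `snippet`. */
  def prune(snippet: String): Seq[String] = {
    // Rough textual check; the real REPL inspects the typed trees instead.
    val identifiers = snippet.split("[^A-Za-z0-9_$]+").toSet
    candidateImports.collect { case (name, stmt) if identifiers(name) => stmt }
  }

  def main(args: Array[String]): Unit = {
    // Only `Random` is referenced, so only its import survives.
    println(prune("val r = new Random(42)")) // List(import scala.util.Random)
  }
}
```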

This patch also adds a `:fallback` mode; when enabled, it restores the spark-shell's 1.0.0 behaviour.
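
A hypothetical usage sketch (only the `:fallback` command name is taken from this patch; the comments describe the expected effect, not captured output):

```scala
scala> :fallback
// Hypothetical: subsequent lines are now wrapped the way spark-shell 1.0.0
// wrapped them, i.e. with every in-scope import, as in the first listing above.

scala> case class A(a: Int) // show
// With fallback enabled, the generated $read wrapper again imports everything.
```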

Author: Prashant Sharma <scrapcodes@gmail.com>
Author: Yin Huai <huai@cse.ohio-state.edu>
Author: Prashant Sharma <prashant.s@imaginea.com>

Closes #1635 from ScrapCodes/repl-fix-necessary-imports and squashes the following commits:

b1968d2 [Prashant Sharma] Added toschemaRDD to test case.
0b712bb [Yin Huai] Add a REPL test to test importing a method.
02ad8ff [Yin Huai] Add a REPL test for importing SQLContext.createSchemaRDD.
ed6d0c7 [Prashant Sharma] Added a fallback mode, incase users run into issues while using repl.
b63d3b2 [Prashant Sharma] SPARK-2632, SPARK-2576. Fixed by only importing what is necessary during class definition.