spark-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From sro...@apache.org
Subject spark git commit: [SPARK-20706][SPARK-SHELL] Spark-shell not overriding method/variable definition
Date Wed, 06 Dec 2017 00:08:40 GMT
Repository: spark
Updated Branches:
  refs/heads/master 1e17ab83d -> 59aa3d56a


[SPARK-20706][SPARK-SHELL] Spark-shell not overriding method/variable definition

## What changes were proposed in this pull request?

[SPARK-20706](https://issues.apache.org/jira/browse/SPARK-20706): Spark-shell not overriding
method/variable definition
This is a Scala repl bug ( [SI-9740](https://github.com/scala/bug/issues/9740) ), which was fixed
in version 2.11.9 ( [see the original PR](https://github.com/scala/scala/pull/5090) )

## How was this patch tested?

Added a new test case in `ReplSuite`.

Author: Mark Petruska <petruska.mark@gmail.com>

Closes #19879 from mpetruska/SPARK-20706.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/59aa3d56
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/59aa3d56
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/59aa3d56

Branch: refs/heads/master
Commit: 59aa3d56af956267261b135936651bbbd41b1bfc
Parents: 1e17ab8
Author: Mark Petruska <petruska.mark@gmail.com>
Authored: Tue Dec 5 18:08:36 2017 -0600
Committer: Sean Owen <sowen@cloudera.com>
Committed: Tue Dec 5 18:08:36 2017 -0600

----------------------------------------------------------------------
 .../spark/repl/SparkILoopInterpreter.scala      | 138 ++++++++++++++++++-
 .../scala/org/apache/spark/repl/ReplSuite.scala |  39 ++++--
 2 files changed, 167 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/59aa3d56/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala
----------------------------------------------------------------------
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala
index 0803426..e736607 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.repl
 
+import scala.collection.mutable
 import scala.tools.nsc.Settings
 import scala.tools.nsc.interpreter._
 
@@ -30,7 +31,7 @@ class SparkILoopInterpreter(settings: Settings, out: JPrintWriter) extends IMain
 
     override def chooseHandler(member: intp.global.Tree): MemberHandler = member match {
       case member: Import => new SparkImportHandler(member)
-      case _ => super.chooseHandler (member)
+      case _ => super.chooseHandler(member)
     }
 
     class SparkImportHandler(imp: Import) extends ImportHandler(imp: Import) {
@@ -100,4 +101,139 @@ class SparkILoopInterpreter(settings: Settings, out: JPrintWriter) extends IMain
   override def typeOfExpression(expr: String, silent: Boolean): global.Type =
     expressionTyper.typeOfExpression(expr, silent)
 
+
+  import global.Name
+  override def importsCode(wanted: Set[Name], wrapper: Request#Wrapper,
+                           definesClass: Boolean, generousImports: Boolean): ComputedImports = {
+
+    import global._
+    import definitions.{ ObjectClass, ScalaPackage, JavaLangPackage, PredefModule }
+    import memberHandlers._
+
+    val header, code, trailingBraces, accessPath = new StringBuilder
+    val currentImps = mutable.HashSet[Name]()
+    // only emit predef import header if name not resolved in history, loosely
+    var predefEscapes = false
+
+    /**
+     * Narrow down the list of requests from which imports
+     * should be taken.  Removes requests which cannot contribute
+     * useful imports for the specified set of wanted names.
+     */
+    case class ReqAndHandler(req: Request, handler: MemberHandler)
+
+    def reqsToUse: List[ReqAndHandler] = {
+      /**
+       * Loop through a list of MemberHandlers and select which ones to keep.
+       * 'wanted' is the set of names that need to be imported.
+       */
+      def select(reqs: List[ReqAndHandler], wanted: Set[Name]): List[ReqAndHandler] = {
+        // Single symbol imports might be implicits! See bug #1752.  Rather than
+        // try to finesse this, we will mimic all imports for now.
+        def keepHandler(handler: MemberHandler) = handler match {
+          // While defining classes in class based mode - implicits are not needed.
+          case h: ImportHandler if isClassBased && definesClass =>
+            h.importedNames.exists(x => wanted.contains(x))
+          case _: ImportHandler => true
+          case x if generousImports => x.definesImplicit ||
+            (x.definedNames exists (d => wanted.exists(w => d.startsWith(w))))
+          case x => x.definesImplicit ||
+            (x.definedNames exists wanted)
+        }
+
+        reqs match {
+          case Nil =>
+            predefEscapes = wanted contains PredefModule.name ; Nil
+          case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted)
+          case rh :: rest =>
+            import rh.handler._
+            val augment = rh match {
+              case ReqAndHandler(_, _: ImportHandler) => referencedNames
+              case _ => Nil
+            }
+            val newWanted = wanted ++ augment -- definedNames -- importedNames
+            rh :: select(rest, newWanted)
+        }
+      }
+
+      /** Flatten the handlers out and pair each with the original request */
+      select(allReqAndHandlers reverseMap { case (r, h) => ReqAndHandler(r, h) }, wanted).reverse
+    }
+
+    // add code for a new object to hold some imports
+    def addWrapper() {
+      import nme.{ INTERPRETER_IMPORT_WRAPPER => iw }
+      code append (wrapper.prewrap format iw)
+      trailingBraces append wrapper.postwrap
+      accessPath append s".$iw"
+      currentImps.clear()
+    }
+
+    def maybeWrap(names: Name*) = if (names exists currentImps) addWrapper()
+
+    def wrapBeforeAndAfter[T](op: => T): T = {
+      addWrapper()
+      try op finally addWrapper()
+    }
+
+    // imports from Predef are relocated to the template header to allow hiding.
+    def checkHeader(h: ImportHandler) = h.referencedNames contains PredefModule.name
+
+    // loop through previous requests, adding imports for each one
+    wrapBeforeAndAfter {
+      // Reusing a single temporary value when import from a line with multiple definitions.
+      val tempValLines = mutable.Set[Int]()
+      for (ReqAndHandler(req, handler) <- reqsToUse) {
+        val objName = req.lineRep.readPathInstance
+        handler match {
+          case h: ImportHandler if checkHeader(h) =>
+            header.clear()
+            header append f"${h.member}%n"
+          // If the user entered an import, then just use it; add an import wrapping
+          // level if the import might conflict with some other import
+          case x: ImportHandler if x.importsWildcard =>
+            wrapBeforeAndAfter(code append (x.member + "\n"))
+          case x: ImportHandler =>
+            maybeWrap(x.importedNames: _*)
+            code append (x.member + "\n")
+            currentImps ++= x.importedNames
+
+          case x if isClassBased =>
+            for (sym <- x.definedSymbols) {
+              maybeWrap(sym.name)
+              x match {
+                case _: ClassHandler =>
+                  code.append(s"import ${objName}${req.accessPath}.`${sym.name}`\n")
+                case _ =>
+                  val valName = s"${req.lineRep.packageName}${req.lineRep.readName}"
+                  if (!tempValLines.contains(req.lineRep.lineId)) {
+                    code.append(s"val $valName: ${objName}.type = $objName\n")
+                    tempValLines += req.lineRep.lineId
+                  }
+                  code.append(s"import ${valName}${req.accessPath}.`${sym.name}`\n")
+              }
+              currentImps += sym.name
+            }
+          // For other requests, import each defined name.
+          // import them explicitly instead of with _, so that
+          // ambiguity errors will not be generated. Also, quote
+          // the name of the variable, so that we don't need to
+          // handle quoting keywords separately.
+          case x =>
+            for (sym <- x.definedSymbols) {
+              maybeWrap(sym.name)
+              code append s"import ${x.path}\n"
+              currentImps += sym.name
+            }
+        }
+      }
+    }
+
+    val computedHeader = if (predefEscapes) header.toString else ""
+    ComputedImports(computedHeader, code.toString, trailingBraces.toString, accessPath.toString)
+  }
+
+  private def allReqAndHandlers =
+    prevRequestList flatMap (req => req.handlers map (req -> _))
+
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/59aa3d56/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
----------------------------------------------------------------------
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index a505352..cdd5cdd 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -227,14 +227,35 @@ class ReplSuite extends SparkFunSuite {
     assertDoesNotContain("error: not found: value sc", output)
   }
 
-   test("spark-shell should find imported types in class constructors and extends clause") {
-     val output = runInterpreter("local",
-       """
-         |import org.apache.spark.Partition
-         |class P(p: Partition)
-         |class P(val index: Int) extends Partition
-       """.stripMargin)
-     assertDoesNotContain("error: not found: type Partition", output)
-   }
+  test("spark-shell should find imported types in class constructors and extends clause") {
+    val output = runInterpreter("local",
+      """
+        |import org.apache.spark.Partition
+        |class P(p: Partition)
+        |class P(val index: Int) extends Partition
+      """.stripMargin)
+    assertDoesNotContain("error: not found: type Partition", output)
+  }
+
+  test("spark-shell should shadow val/def definitions correctly") {
+    val output1 = runInterpreter("local",
+      """
+        |def myMethod() = "first definition"
+        |val tmp = myMethod(); val out = tmp
+        |def myMethod() = "second definition"
+        |val tmp = myMethod(); val out = s"$tmp aabbcc"
+      """.stripMargin)
+    assertContains("second definition aabbcc", output1)
+
+    val output2 = runInterpreter("local",
+      """
+        |val a = 1
+        |val b = a; val c = b;
+        |val a = 2
+        |val b = a; val c = b;
+        |s"!!$b!!"
+      """.stripMargin)
+    assertContains("!!2!!", output2)
+  }
 
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org


Mime
View raw message