spark-commits mailing list archives

From sro...@apache.org
Subject spark git commit: [SPARK-25029][TESTS] Scala 2.12 issues: TaskNotSerializable and Janino "Two non-abstract methods ..." errors
Date Tue, 07 Aug 2018 22:30:41 GMT
Repository: spark
Updated Branches:
  refs/heads/master f6356f9bc -> 66699c5c3


[SPARK-25029][TESTS] Scala 2.12 issues: TaskNotSerializable and Janino "Two non-abstract methods ..." errors

## What changes were proposed in this pull request?

Fixes for test issues that arose after Scala 2.12 support was added -- ones that only affect the 2.12 build.
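
For context, the TaskNotSerializable failures come from helper objects and classes declared inside test methods: under Scala 2.12 such definitions can hold a reference to the enclosing (non-serializable) suite, which then gets pulled into serialized closures. Moving them to the top level removes that reference, which is what the diffs below do. A minimal sketch of the fixed layout, with hypothetical names and no Spark dependencies:

    import java.io.{ByteArrayOutputStream, ObjectOutputStream}
    import java.util.concurrent.atomic.AtomicBoolean

    // Top-level object: it is accessed statically, so a closure that uses it
    // captures nothing from whatever suite happens to reference it.
    object FailFlag {
      val _fail = new AtomicBoolean(true)
    }

    object TopLevelSketch {
      def main(args: Array[String]): Unit = {
        val attempt: () => Boolean = () => FailFlag._fail.getAndSet(false)
        // A capture-free Scala 2.12 lambda serializes cleanly:
        new ObjectOutputStream(new ByteArrayOutputStream()).writeObject(attempt)
        println(attempt()) // true on the first call, false afterwards
      }
    }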

## How was this patch tested?

Existing tests.

Closes #22004 from srowen/SPARK-25029.

Authored-by: Sean Owen <srowen@gmail.com>
Signed-off-by: Sean Owen <srowen@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/66699c5c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/66699c5c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/66699c5c

Branch: refs/heads/master
Commit: 66699c5c3061f54463bd1d0f7a8f8e168c2882c9
Parents: f6356f9
Author: Sean Owen <srowen@gmail.com>
Authored: Tue Aug 7 17:30:37 2018 -0500
Committer: Sean Owen <srowen@gmail.com>
Committed: Tue Aug 7 17:30:37 2018 -0500

----------------------------------------------------------------------
 .../scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala  | 7 ++++---
 .../test/scala/org/apache/spark/util/AccumulatorV2Suite.scala | 3 ++-
 .../org/apache/spark/graphx/util/BytecodeUtilsSuite.scala     | 7 +++++++
 .../scala/org/apache/spark/ml/tree/impl/RandomForest.scala    | 2 +-
 repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala     | 2 ++
 5 files changed, 16 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/66699c5c/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index dad339e..8b2b6b6 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -2386,9 +2386,6 @@ class DAGSchedulerSuite extends SparkFunSuite with LocalSparkContext with TimeLi
 
     // Runs a job that encounters a single fetch failure but succeeds on the second attempt
     def runJobWithTemporaryFetchFailure: Unit = {
-      object FailThisAttempt {
-        val _fail = new AtomicBoolean(true)
-      }
       val rdd1 = sc.makeRDD(Array(1, 2, 3, 4), 2).map(x => (x, 1)).groupByKey()
       val shuffleHandle =
         rdd1.dependencies.head.asInstanceOf[ShuffleDependency[_, _, _]].shuffleHandle
@@ -2584,3 +2581,7 @@ object DAGSchedulerSuite {
   def makeBlockManagerId(host: String): BlockManagerId =
     BlockManagerId("exec-" + host, host, 12345)
 }
+
+object FailThisAttempt {
+  val _fail = new AtomicBoolean(true)
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/66699c5c/core/src/test/scala/org/apache/spark/util/AccumulatorV2Suite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/util/AccumulatorV2Suite.scala b/core/src/test/scala/org/apache/spark/util/AccumulatorV2Suite.scala
index fe0a9a4..94c7938 100644
--- a/core/src/test/scala/org/apache/spark/util/AccumulatorV2Suite.scala
+++ b/core/src/test/scala/org/apache/spark/util/AccumulatorV2Suite.scala
@@ -165,7 +165,6 @@ class AccumulatorV2Suite extends SparkFunSuite {
   }
 
   test("LegacyAccumulatorWrapper with AccumulatorParam that has no equals/hashCode") {
-    class MyData(val i: Int) extends Serializable
     val param = new AccumulatorParam[MyData] {
       override def zero(initialValue: MyData): MyData = new MyData(0)
       override def addInPlace(r1: MyData, r2: MyData): MyData = new MyData(r1.i + r2.i)
@@ -182,3 +181,5 @@ class AccumulatorV2Suite extends SparkFunSuite {
     ser.serialize(acc)
   }
 }
+
+class MyData(val i: Int) extends Serializable

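The MyData move follows the same reasoning: a class declared inside a test method can be tied to the suite instance, and the suite is not serializable. A short sketch of the general failure mode, with hypothetical names (the 2.11-vs-2.12 difference lies in how anonymous classes and closures are compiled and cleaned):

    import java.io.{ByteArrayOutputStream, NotSerializableException, ObjectOutputStream}

    class Outer { // stands in for a test suite; not Serializable
      def make(): AnyRef = {
        // Local class: referencing Outer.this ties every instance to the suite.
        class Local(val i: Int) extends Serializable {
          def label: String = Outer.this.toString + "#" + i
        }
        new Local(1)
      }
    }

    object LocalCaptureDemo {
      def main(args: Array[String]): Unit = {
        val out = new ObjectOutputStream(new ByteArrayOutputStream())
        try out.writeObject(new Outer().make())
        catch { case e: NotSerializableException => println("fails: " + e) }
      }
    }
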
http://git-wip-us.apache.org/repos/asf/spark/blob/66699c5c/graphx/src/test/scala/org/apache/spark/graphx/util/BytecodeUtilsSuite.scala
----------------------------------------------------------------------
diff --git a/graphx/src/test/scala/org/apache/spark/graphx/util/BytecodeUtilsSuite.scala b/graphx/src/test/scala/org/apache/spark/graphx/util/BytecodeUtilsSuite.scala
index 61e44dc..5325978 100644
--- a/graphx/src/test/scala/org/apache/spark/graphx/util/BytecodeUtilsSuite.scala
+++ b/graphx/src/test/scala/org/apache/spark/graphx/util/BytecodeUtilsSuite.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.graphx.util
 
 import org.apache.spark.SparkFunSuite
+import org.apache.spark.util.ClosureCleanerSuite2
 
 
 // scalastyle:off println
@@ -26,6 +27,7 @@ class BytecodeUtilsSuite extends SparkFunSuite {
   import BytecodeUtilsSuite.TestClass
 
   test("closure invokes a method") {
+    assume(!ClosureCleanerSuite2.supportsLMFs)
     val c1 = {e: TestClass => println(e.foo); println(e.bar); println(e.baz); }
     assert(BytecodeUtils.invokedMethod(c1, classOf[TestClass], "foo"))
     assert(BytecodeUtils.invokedMethod(c1, classOf[TestClass], "bar"))
@@ -43,6 +45,7 @@ class BytecodeUtilsSuite extends SparkFunSuite {
   }
 
   test("closure inside a closure invokes a method") {
+    assume(!ClosureCleanerSuite2.supportsLMFs)
     val c1 = {e: TestClass => println(e.foo); println(e.bar); println(e.baz); }
     val c2 = {e: TestClass => c1(e); println(e.foo); }
     assert(BytecodeUtils.invokedMethod(c2, classOf[TestClass], "foo"))
@@ -51,6 +54,7 @@ class BytecodeUtilsSuite extends SparkFunSuite {
   }
 
   test("closure inside a closure inside a closure invokes a method") {
+    assume(!ClosureCleanerSuite2.supportsLMFs)
     val c1 = {e: TestClass => println(e.baz); }
     val c2 = {e: TestClass => c1(e); println(e.foo); }
     val c3 = {e: TestClass => c2(e) }
@@ -60,6 +64,7 @@ class BytecodeUtilsSuite extends SparkFunSuite {
   }
 
   test("closure calling a function that invokes a method") {
+    assume(!ClosureCleanerSuite2.supportsLMFs)
     def zoo(e: TestClass) {
       println(e.baz)
     }
@@ -70,6 +75,7 @@ class BytecodeUtilsSuite extends SparkFunSuite {
   }
 
   test("closure calling a function that invokes a method which uses another closure") {
+    assume(!ClosureCleanerSuite2.supportsLMFs)
     val c2 = {e: TestClass => println(e.baz)}
     def zoo(e: TestClass) {
       c2(e)
@@ -81,6 +87,7 @@ class BytecodeUtilsSuite extends SparkFunSuite {
   }
 
   test("nested closure") {
+    assume(!ClosureCleanerSuite2.supportsLMFs)
     val c2 = {e: TestClass => println(e.baz)}
     def zoo(e: TestClass, c: TestClass => Unit) {
       c(e)

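BytecodeUtils works by walking the bytecode of the dedicated anonymous class that each closure compiles to; Scala 2.12 lambdas go through LambdaMetafactory and produce no such class, so assume() cancels (rather than fails) these tests there. A minimal sketch of the gating pattern, with a stand-in for the real supportsLMFs flag:

    import org.scalatest.FunSuite

    class GatedSuite extends FunSuite {
      // Stand-in for ClosureCleanerSuite2.supportsLMFs: lambdas use
      // LambdaMetafactory on Scala 2.12+, but not on 2.11.
      val supportsLMFs: Boolean =
        !scala.util.Properties.versionNumberString.startsWith("2.11")

      test("inspects anonymous closure classes") {
        assume(!supportsLMFs) // cancels on 2.12+, runs on 2.11
        // ... bytecode assertions that need one anonymous class per closure
      }
    }
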
http://git-wip-us.apache.org/repos/asf/spark/blob/66699c5c/mllib/src/main/scala/org/apache/spark/ml/tree/impl/RandomForest.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/ml/tree/impl/RandomForest.scala b/mllib/src/main/scala/org/apache/spark/ml/tree/impl/RandomForest.scala
index bb3f3a0..918560a 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/tree/impl/RandomForest.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/tree/impl/RandomForest.scala
@@ -77,7 +77,7 @@ import org.apache.spark.util.random.{SamplingUtils, XORShiftRandom}
  * the heaviest part of the computation.  In general, this implementation is bound by either
  * the cost of statistics computation on workers or by communicating the sufficient statistics.
  */
-private[spark] object RandomForest extends Logging {
+private[spark] object RandomForest extends Logging with Serializable {
 
   /**
    * Train a random forest.

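The "with Serializable" addition matters because closures in this file call methods on the RandomForest object itself; a 2.12 lambda whose body calls an instance method captures the receiving object, so that object must be serializable for the task to ship. A simplified sketch of the mechanism, with a hypothetical helper:

    import java.io.{ByteArrayOutputStream, ObjectOutputStream}

    // Without "extends Serializable", the writeObject call below would throw
    // NotSerializableException, because the lambda captures Helper itself.
    object Helper extends Serializable {
      private def double(x: Int): Int = x * 2
      def makeClosure(): Int => Int = n => double(n) // body calls this.double
    }

    object CaptureDemo {
      def main(args: Array[String]): Unit = {
        new ObjectOutputStream(new ByteArrayOutputStream())
          .writeObject(Helper.makeClosure()) // succeeds: Helper is Serializable
        println(Helper.makeClosure()(21)) // 42
      }
    }
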
http://git-wip-us.apache.org/repos/asf/spark/blob/66699c5c/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
----------------------------------------------------------------------
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index cdd5cdd..4f3df72 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -21,6 +21,7 @@ import java.io._
 import java.net.URLClassLoader
 
 import scala.collection.mutable.ArrayBuffer
+import scala.tools.nsc.interpreter.SimpleReader
 
 import org.apache.log4j.{Level, LogManager}
 
@@ -84,6 +85,7 @@ class ReplSuite extends SparkFunSuite {
       settings = new scala.tools.nsc.Settings
       settings.usejavacp.value = true
       org.apache.spark.repl.Main.interp = this
+      in = SimpleReader()
     }
 
     val out = new StringWriter()

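The `in = SimpleReader()` line appears to work around the interpreter's line reader being left null on this overridden initialization path under 2.12; installing a plain non-interactive reader up front avoids the resulting NPE. A hedged sketch of the pattern outside Spark, assuming a bare ILoop:

    import scala.tools.nsc.Settings
    import scala.tools.nsc.interpreter.{ILoop, SimpleReader}

    // Sketch only: loosely mirrors how ReplSuite configures its loop.
    class TestILoop extends ILoop {
      override def createInterpreter(): Unit = {
        settings = new Settings
        settings.usejavacp.value = true
        super.createInterpreter()
        in = SimpleReader() // otherwise `in` stays null until loop() runs
      }
    }
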

