spark-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From yli...@apache.org
Subject spark git commit: [MINOR][YARN] Move YarnSchedulerBackendSuite to resource-managers/yarn directory.
Date Tue, 17 Jan 2017 08:28:24 GMT
Repository: spark
Updated Branches:
  refs/heads/master 18ee55dd5 -> 84f0b645b


[MINOR][YARN] Move YarnSchedulerBackendSuite to resource-managers/yarn directory.

## What changes were proposed in this pull request?
#16092 moved YARN resource manager related code to the resource-managers/yarn directory. The test
case ```YarnSchedulerBackendSuite``` was added after that, but in the wrong place. This PR moves
it to the correct directory.

## How was this patch tested?
Existing test.

Author: Yanbo Liang <ybliang8@gmail.com>

Closes #16595 from yanboliang/yarn.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/84f0b645
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/84f0b645
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/84f0b645

Branch: refs/heads/master
Commit: 84f0b645b424eabb429c9eb38092841f44be1310
Parents: 18ee55d
Author: Yanbo Liang <ybliang8@gmail.com>
Authored: Tue Jan 17 00:28:12 2017 -0800
Committer: Yanbo Liang <ybliang8@gmail.com>
Committed: Tue Jan 17 00:28:12 2017 -0800

----------------------------------------------------------------------
 .../cluster/YarnSchedulerBackendSuite.scala     | 58 ++++++++++++++++++++
 .../cluster/YarnSchedulerBackendSuite.scala     | 58 --------------------
 2 files changed, 58 insertions(+), 58 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/84f0b645/resource-managers/yarn/src/test/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackendSuite.scala
----------------------------------------------------------------------
diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackendSuite.scala
b/resource-managers/yarn/src/test/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackendSuite.scala
new file mode 100644
index 0000000..4079d9e
--- /dev/null
+++ b/resource-managers/yarn/src/test/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackendSuite.scala
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.scheduler.cluster
+
+import org.mockito.Mockito.when
+import org.scalatest.mock.MockitoSugar
+
+import org.apache.spark.{LocalSparkContext, SparkContext, SparkFunSuite}
+import org.apache.spark.scheduler.TaskSchedulerImpl
+import org.apache.spark.serializer.JavaSerializer
+
+class YarnSchedulerBackendSuite extends SparkFunSuite with MockitoSugar with LocalSparkContext
{
+
+  test("RequestExecutors reflects node blacklist and is serializable") {
+    sc = new SparkContext("local", "YarnSchedulerBackendSuite")
+    val sched = mock[TaskSchedulerImpl]
+    when(sched.sc).thenReturn(sc)
+    val yarnSchedulerBackend = new YarnSchedulerBackend(sched, sc) {
+      def setHostToLocalTaskCount(hostToLocalTaskCount: Map[String, Int]): Unit = {
+        this.hostToLocalTaskCount = hostToLocalTaskCount
+      }
+    }
+    val ser = new JavaSerializer(sc.conf).newInstance()
+    for {
+      blacklist <- IndexedSeq(Set[String](), Set("a", "b", "c"))
+      numRequested <- 0 until 10
+      hostToLocalCount <- IndexedSeq(
+        Map[String, Int](),
+        Map("a" -> 1, "b" -> 2)
+      )
+    } {
+      yarnSchedulerBackend.setHostToLocalTaskCount(hostToLocalCount)
+      when(sched.nodeBlacklist()).thenReturn(blacklist)
+      val req = yarnSchedulerBackend.prepareRequestExecutors(numRequested)
+      assert(req.requestedTotal === numRequested)
+      assert(req.nodeBlacklist === blacklist)
+      assert(req.hostToLocalTaskCount.keySet.intersect(blacklist).isEmpty)
+      // Serialize to make sure serialization doesn't throw an error
+      ser.serialize(req)
+    }
+    sc.stop()
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/84f0b645/yarn/src/test/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackendSuite.scala
----------------------------------------------------------------------
diff --git a/yarn/src/test/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackendSuite.scala
b/yarn/src/test/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackendSuite.scala
deleted file mode 100644
index 4079d9e..0000000
--- a/yarn/src/test/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackendSuite.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.spark.scheduler.cluster
-
-import org.mockito.Mockito.when
-import org.scalatest.mock.MockitoSugar
-
-import org.apache.spark.{LocalSparkContext, SparkContext, SparkFunSuite}
-import org.apache.spark.scheduler.TaskSchedulerImpl
-import org.apache.spark.serializer.JavaSerializer
-
-class YarnSchedulerBackendSuite extends SparkFunSuite with MockitoSugar with LocalSparkContext
{
-
-  test("RequestExecutors reflects node blacklist and is serializable") {
-    sc = new SparkContext("local", "YarnSchedulerBackendSuite")
-    val sched = mock[TaskSchedulerImpl]
-    when(sched.sc).thenReturn(sc)
-    val yarnSchedulerBackend = new YarnSchedulerBackend(sched, sc) {
-      def setHostToLocalTaskCount(hostToLocalTaskCount: Map[String, Int]): Unit = {
-        this.hostToLocalTaskCount = hostToLocalTaskCount
-      }
-    }
-    val ser = new JavaSerializer(sc.conf).newInstance()
-    for {
-      blacklist <- IndexedSeq(Set[String](), Set("a", "b", "c"))
-      numRequested <- 0 until 10
-      hostToLocalCount <- IndexedSeq(
-        Map[String, Int](),
-        Map("a" -> 1, "b" -> 2)
-      )
-    } {
-      yarnSchedulerBackend.setHostToLocalTaskCount(hostToLocalCount)
-      when(sched.nodeBlacklist()).thenReturn(blacklist)
-      val req = yarnSchedulerBackend.prepareRequestExecutors(numRequested)
-      assert(req.requestedTotal === numRequested)
-      assert(req.nodeBlacklist === blacklist)
-      assert(req.hostToLocalTaskCount.keySet.intersect(blacklist).isEmpty)
-      // Serialize to make sure serialization doesn't throw an error
-      ser.serialize(req)
-    }
-    sc.stop()
-  }
-
-}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org


Mime
View raw message