spark-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From hvanhov...@apache.org
Subject spark git commit: [SPARK-25352][SQL][FOLLOWUP] Add helper method and address style issue
Date Thu, 13 Sep 2018 12:21:23 GMT
Repository: spark
Updated Branches:
  refs/heads/branch-2.4 abb5196c7 -> e7f511ad0


[SPARK-25352][SQL][FOLLOWUP] Add helper method and address style issue

## What changes were proposed in this pull request?

This follow-up patch addresses [the review comment](https://github.com/apache/spark/pull/22344/files#r217070658)
by adding a helper method to simplify code and fixing style issue.

## How was this patch tested?

Existing unit tests.

Author: Liang-Chi Hsieh <viirya@gmail.com>

Closes #22409 from viirya/SPARK-25352-followup.

(cherry picked from commit 5b761c537a600115450b53817bee0679d5c2bb97)
Signed-off-by: Herman van Hovell <hvanhovell@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e7f511ad
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e7f511ad
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e7f511ad

Branch: refs/heads/branch-2.4
Commit: e7f511ad0803f4a25c657ea25a63a70c6f33367a
Parents: abb5196
Author: Liang-Chi Hsieh <viirya@gmail.com>
Authored: Thu Sep 13 14:21:00 2018 +0200
Committer: Herman van Hovell <hvanhovell@databricks.com>
Committed: Thu Sep 13 14:21:17 2018 +0200

----------------------------------------------------------------------
 .../spark/sql/execution/SparkStrategies.scala   | 55 ++++++++------------
 1 file changed, 23 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/e7f511ad/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
index 7c8ce31..89442a7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
@@ -66,44 +66,35 @@ abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
    * Plans special cases of limit operators.
    */
   object SpecialLimits extends Strategy {
-    override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
-      case ReturnAnswer(rootPlan) => rootPlan match {
-        case Limit(IntegerLiteral(limit), s@Sort(order, true, child)) =>
-          if (limit < conf.topKSortFallbackThreshold) {
+    private def decideTopRankNode(limit: Int, child: LogicalPlan): Seq[SparkPlan] = {
+      if (limit < conf.topKSortFallbackThreshold) {
+        child match {
+          case Sort(order, true, child) =>
             TakeOrderedAndProjectExec(limit, order, child.output, planLater(child)) :: Nil
-          } else {
-            GlobalLimitExec(limit,
-              LocalLimitExec(limit, planLater(s)),
-              orderedLimit = true) :: Nil
-          }
-        case Limit(IntegerLiteral(limit), p@Project(projectList, Sort(order, true, child))) =>
-          if (limit < conf.topKSortFallbackThreshold) {
+          case Project(projectList, Sort(order, true, child)) =>
             TakeOrderedAndProjectExec(limit, order, projectList, planLater(child)) :: Nil
-          } else {
-            GlobalLimitExec(limit,
-              LocalLimitExec(limit, planLater(p)),
-              orderedLimit = true) :: Nil
-          }
+        }
+      } else {
+        GlobalLimitExec(limit,
+          LocalLimitExec(limit, planLater(child)),
+          orderedLimit = true) :: Nil
+      }
+    }
+
+    override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
+      case ReturnAnswer(rootPlan) => rootPlan match {
+        case Limit(IntegerLiteral(limit), s @ Sort(order, true, child)) =>
+          decideTopRankNode(limit, s)
+        case Limit(IntegerLiteral(limit), p @ Project(projectList, Sort(order, true, child))) =>
+          decideTopRankNode(limit, p)
         case Limit(IntegerLiteral(limit), child) =>
           CollectLimitExec(limit, planLater(child)) :: Nil
         case other => planLater(other) :: Nil
       }
-      case Limit(IntegerLiteral(limit), s@Sort(order, true, child)) =>
-        if (limit < conf.topKSortFallbackThreshold) {
-          TakeOrderedAndProjectExec(limit, order, child.output, planLater(child)) :: Nil
-        } else {
-          GlobalLimitExec(limit,
-            LocalLimitExec(limit, planLater(s)),
-            orderedLimit = true) :: Nil
-        }
-      case Limit(IntegerLiteral(limit), p@Project(projectList, Sort(order, true, child))) =>
-        if (limit < conf.topKSortFallbackThreshold) {
-          TakeOrderedAndProjectExec(limit, order, projectList, planLater(child)) :: Nil
-        } else {
-          GlobalLimitExec(limit,
-            LocalLimitExec(limit, planLater(p)),
-            orderedLimit = true) :: Nil
-        }
+      case Limit(IntegerLiteral(limit), s @ Sort(order, true, child)) =>
+        decideTopRankNode(limit, s)
+      case Limit(IntegerLiteral(limit), p @ Project(projectList, Sort(order, true, child))) =>
+        decideTopRankNode(limit, p)
       case _ => Nil
     }
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org


Mime
View raw message