spark-reviews mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From marmbrus <...@git.apache.org>
Subject [GitHub] spark pull request: [SPARK-4226][SQL] SparkSQL - Add support for s...
Date Tue, 30 Dec 2014 21:19:15 GMT
Github user marmbrus commented on a diff in the pull request:

    https://github.com/apache/spark/pull/3249#discussion_r22365235
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
---
    @@ -414,6 +418,123 @@ class Analyzer(catalog: Catalog,
             Generate(g, join = false, outer = false, None, child)
         }
       }
    +
    +  /**
    +   * Transforms the query which has subquery expressions in where clause to join queries.
    +   * Case 1 Uncorrelated queries
    +   * -- original query
    +   * select C from R1 where R1.A in (Select B from R2)
    +   * -- rewritten query
    +   * Select C from R1 left semi join (select B as sqc0 from R2) subquery on R1.A = subquery.sqc0
    +   *
    +   * Case 2 Correlated queries
    +   * -- original query
    +   * select C from R1 where R1.A in (Select B from R2 where R1.X = R2.Y)
    +   * -- rewritten query
    +   * select C from R1 left semi join (select B as sqc0, R2.Y as sqc1 from R2) subquery
    +   *   on R1.X = subquery.sqc1 and R1.A = subquery.sqc0
    +   * 
    +   * Refer: https://issues.apache.org/jira/secure/attachment/12614003/SubQuerySpec.pdf
    +   */
    +  object SubQueryExpressions extends Rule[LogicalPlan] {
    +
    +    def apply(plan: LogicalPlan): LogicalPlan = plan transform {
    +      case p: LogicalPlan if !p.childrenResolved => p
    +      case filter @ Filter(conditions, child) =>
    +        val subqueryExprs = new scala.collection.mutable.ArrayBuffer[In]()
    +        val nonSubQueryConds = new scala.collection.mutable.ArrayBuffer[Expression]()
    +        conditions.collect {
    +          case s @ In(exp, Seq(SubqueryExpression(subquery))) =>
    +            subqueryExprs += s
    +        }
    +        val transformedConds = conditions.transform {
    +          // Replace with dummy
    +          case s @ In(exp,Seq(SubqueryExpression(subquery))) =>
    +            Literal(true)
    +        }
    +        if (subqueryExprs.size == 1) {
    +          val subqueryExpr = subqueryExprs.remove(0)
    +          createLeftSemiJoin(
    +            child,
    +            subqueryExpr.value,
    +            subqueryExpr.list(0).asInstanceOf[SubqueryExpression].subquery,
    +            transformedConds)
    +        } else if (subqueryExprs.size > 1) {
    +          // Only one subquery expression is supported.
    +          throw new TreeNodeException(filter, "Only one SubQuery expression is supported.")
    +        } else {
    +          filter
    +        }
    +    }
    +
    +    /**
    +     * Create LeftSemi join with parent query to the subquery which is mentioned in 'IN'
predicate
    +     * And combine the subquery conditions and parent query conditions.
    +     */ 
    +    def createLeftSemiJoin(left: LogicalPlan,
    +        value: Expression,
    +        subquery: LogicalPlan,
    +        parentConds: Expression) : LogicalPlan = {
    +      val (transformedPlan, subqueryConds) = transformAndGetConditions(value, subquery)
    +      // Unify the parent query conditions and subquery conditions and add these as join
conditions
    +      val unifyConds = And(parentConds, subqueryConds)
    +      Join(left, transformedPlan, LeftSemi, Some(unifyConds))
    +    }
    +
    +    /**
    +     * Transform the subquery LogicalPlan and add the expressions which are used as filters
to the
    +     * projection. And also return filter conditions used in subquery
    +     */
    +    def transformAndGetConditions(value: Expression,
    +          subquery: LogicalPlan): (LogicalPlan, Expression) = {
    +      val expr = new scala.collection.mutable.ArrayBuffer[Expression]()
    +      val transformedPlan = subquery transform {
    +        case project @ Project(projectList, f @ Filter(condition, child)) =>
    --- End diff --
    
    This works for the two queries you specified but is going to fail as soon as things get
even a little more complicated.  For example
    
    ```scala
    SELECT a.key FROM src a
    WHERE a.key in
    (SELECT b FROM (SELECT b.key FROM src b WHERE b.key in (230) and a.value=b.value) a)
    ```


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org


Mime
View raw message