spark-reviews mailing list archives

From chenghao-intel <...@git.apache.org>
Subject [GitHub] spark pull request: [SPARK-7956] [SQL] Use Janino to compile SQL e...
Date Tue, 02 Jun 2015 04:47:00 GMT
Github user chenghao-intel commented on a diff in the pull request:

    https://github.com/apache/spark/pull/6479#discussion_r31492599
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala
---
    @@ -38,201 +42,191 @@ object GenerateProjection extends CodeGenerator[Seq[Expression], Projection] {
     
       // Make Mutability optional...
       protected def create(expressions: Seq[Expression]): Projection = {
    -    val tupleLength = ru.Literal(Constant(expressions.length))
    -    val lengthDef = q"final val length = $tupleLength"
    -
         /* TODO: Configurable...
         val nullFunctions =
    -      q"""
    +      s"""
             private final val nullSet = new org.apache.spark.util.collection.BitSet(length)
             final def setNullAt(i: Int) = nullSet.set(i)
             final def isNullAt(i: Int) = nullSet.get(i)
           """
          */
     
    -    val nullFunctions =
    -      q"""
    -        private[this] var nullBits = new Array[Boolean](${expressions.size})
    -        override def setNullAt(i: Int) = { nullBits(i) = true }
    -        override def isNullAt(i: Int) = nullBits(i)
    -      """.children
    -
    -    val tupleElements = expressions.zipWithIndex.flatMap {
    +    val ctx = newCodeGenContext()
    +    val columns = expressions.zipWithIndex.map {
           case (e, i) =>
    -        val elementName = newTermName(s"c$i")
    -        val evaluatedExpression = expressionEvaluator(e)
    -        val iLit = ru.Literal(Constant(i))
    +        s"private ${primitiveForType(e.dataType)} c$i = ${defaultPrimitive(e.dataType)};\n"
    +    }.mkString("\n      ")
     
    -        q"""
    -        var ${newTermName(s"c$i")}: ${termForType(e.dataType)} = _
    +    val initColumns = expressions.zipWithIndex.map {
    +      case (e, i) =>
    +        val eval = expressionEvaluator(e, ctx)
    +        s"""
             {
    -          ..${evaluatedExpression.code}
    -          if(${evaluatedExpression.nullTerm})
    -            setNullAt($iLit)
    -          else {
    -            nullBits($iLit) = false
    -            $elementName = ${evaluatedExpression.primitiveTerm}
    +          // column$i
    +          ${eval.code}
    +          nullBits[$i] = ${eval.nullTerm};
    +          if(!${eval.nullTerm}) {
    +            c$i = ${eval.primitiveTerm};
               }
             }
    -        """.children : Seq[Tree]
    -    }
    +        """
    +    }.mkString("\n")
     
    -    val accessorFailure = q"""scala.sys.error("Invalid ordinal:" + i)"""
    -    val applyFunction = {
    -      val cases = (0 until expressions.size).map { i =>
    -        val ordinal = ru.Literal(Constant(i))
    -        val elementName = newTermName(s"c$i")
    -        val iLit = ru.Literal(Constant(i))
    +    val getCases = (0 until expressions.size).map { i =>
    +      s"case $i: return c$i;"
    +    }.mkString("\n        ")
     
    -        q"if(i == $ordinal) { if(isNullAt($i)) return null else return $elementName }"
    -      }
    -      q"override def apply(i: Int): Any = { ..$cases; $accessorFailure }"
    -    }
    -
    -    val updateFunction = {
    -      val cases = expressions.zipWithIndex.map {case (e, i) =>
    -        val ordinal = ru.Literal(Constant(i))
    -        val elementName = newTermName(s"c$i")
    -        val iLit = ru.Literal(Constant(i))
    -
    -        q"""
    -          if(i == $ordinal) {
    -            if(value == null) {
    -              setNullAt(i)
    -            } else {
    -              nullBits(i) = false
    -              $elementName = value.asInstanceOf[${termForType(e.dataType)}]
    -            }
    -            return
    -          }"""
    -      }
    -      q"override def update(i: Int, value: Any): Unit = { ..$cases; $accessorFailure }"
    -    }
    +    val updateCases = expressions.zipWithIndex.map { case (e, i) =>
    +      s"case $i: { c$i = (${termForType(e.dataType)})value; return;}"
    +    }.mkString("\n        ")
     
         val specificAccessorFunctions = nativeTypes.map { dataType =>
    -      val ifStatements = expressions.zipWithIndex.flatMap {
    -        // getString() is not used by expressions
    -        case (e, i) if e.dataType == dataType && dataType != StringType =>
    -          val elementName = newTermName(s"c$i")
    -          // TODO: The string of ifs gets pretty inefficient as the row grows in size.
    -          // TODO: Optional null checks?
    -          q"if(i == $i) return $elementName" :: Nil
    -        case _ => Nil
    -      }
    -      dataType match {
    -        // Row() need this interface to compile
    -        case StringType =>
    -          q"""
    -          override def getString(i: Int): String = {
    -            $accessorFailure
    -          }"""
    -        case other =>
    -          q"""
    -          override def ${accessorForType(dataType)}(i: Int): ${termForType(dataType)} = {
    -            ..$ifStatements;
    -            $accessorFailure
    -          }"""
    +      val cases = expressions.zipWithIndex.map {
    +        case (e, i) if e.dataType == dataType =>
    +          s"case $i: return c$i;"
    +        case _ => ""
    +      }.mkString("\n        ")
    +      if (cases.count(_ != '\n') > 0) {
    +        s"""
    +      @Override
    +      public ${primitiveForType(dataType)} ${accessorForType(dataType)}(int i) {
    +        if (isNullAt(i)) {
    +          return ${defaultPrimitive(dataType)};
    +        }
    +        switch (i) {
    +        $cases
    +        }
    +        return ${defaultPrimitive(dataType)};
    +      }"""
    +      } else {
    +        ""
           }
    -    }
    +    }.mkString("\n")
     
         val specificMutatorFunctions = nativeTypes.map { dataType =>
    -      val ifStatements = expressions.zipWithIndex.flatMap {
    -        // setString() is not used by expressions
    -        case (e, i) if e.dataType == dataType && dataType != StringType =>
    -          val elementName = newTermName(s"c$i")
    -          // TODO: The string of ifs gets pretty inefficient as the row grows in size.
    -          // TODO: Optional null checks?
    -          q"if(i == $i) { nullBits($i) = false; $elementName = value; return }" :: Nil
    -        case _ => Nil
    -      }
    -      dataType match {
    -        case StringType =>
    -          // MutableRow() need this interface to compile
    -          q"""
    -          override def setString(i: Int, value: String) {
    -            $accessorFailure
    -          }"""
    -        case other =>
    -          q"""
    -          override def ${mutatorForType(dataType)}(i: Int, value: ${termForType(dataType)}) {
    -            ..$ifStatements;
    -            $accessorFailure
    -          }"""
    +      val cases = expressions.zipWithIndex.map {
    +        case (e, i) if e.dataType == dataType =>
    +          s"case $i: { c$i = value; return; }"
    +        case _ => ""
    +      }.mkString("\n")
    +      if (cases.count(_ != '\n') > 0) {
    +        s"""
    +      @Override
    +      public void ${mutatorForType(dataType)}(int i, ${primitiveForType(dataType)} value) {
    +        nullBits[i] = false;
    +        switch (i) {
    +        $cases
    +        }
    +      }"""
    +      } else {
    +        ""
           }
    -    }
    +    }.mkString("\n")
     
         val hashValues = expressions.zipWithIndex.map { case (e, i) =>
    -      val elementName = newTermName(s"c$i")
    +      val col = newTermName(s"c$i")
           val nonNull = e.dataType match {
    -        case BooleanType => q"if ($elementName) 0 else 1"
    -        case ByteType | ShortType | IntegerType => q"$elementName.toInt"
    -        case LongType => q"($elementName ^ ($elementName >>> 32)).toInt"
    -        case FloatType => q"java.lang.Float.floatToIntBits($elementName)"
    +        case BooleanType => s"$col ? 0 : 1"
    +        case ByteType | ShortType | IntegerType | DateType => s"$col"
    +        case LongType => s"$col ^ ($col >>> 32)"
    +        case FloatType => s"Float.floatToIntBits($col)"
             case DoubleType =>
    -          q"{ val b = java.lang.Double.doubleToLongBits($elementName); (b ^ (b >>>32)).toInt }"
    -        case _ => q"$elementName.hashCode"
    +          s"Double.doubleToLongBits($col) ^ (Double.doubleToLongBits($col) >>> 32)"
    +        case _ => s"$col.hashCode()"
           }
    -      q"if (isNullAt($i)) 0 else $nonNull"
    +      s"isNullAt($i) ? 0 : ($nonNull)"
         }
     
    -    val hashUpdates: Seq[Tree] = hashValues.map(v => q"""result = 37 * result + $v""": Tree)
    +    val hashUpdates: String = hashValues.map( v =>
    +      s"""
    +        result *= 37; result += $v;"""
    +    ).mkString("\n")
     
    -    val hashCodeFunction =
    -      q"""
    -        override def hashCode(): Int = {
    -          var result: Int = 37
    -          ..$hashUpdates
    -          result
    -        }
    +    val columnChecks = expressions.zipWithIndex.map { case (e, i) =>
    +      s"""
    +          if (isNullAt($i) != row.isNullAt($i) || !isNullAt($i) && !get($i).equals(row.get($i))) {
    +            return false;
    +          }
           """
    +    }.mkString("\n")
     
    -    val columnChecks = (0 until expressions.size).map { i =>
    -      val elementName = newTermName(s"c$i")
    -      q"if (this.$elementName != specificType.$elementName) return false"
    +    val code = s"""
    +    import org.apache.spark.sql.Row;
    +
    +    public SpecificProjection generate($exprType[] expr) {
    +      return new SpecificProjection(expr);
         }
     
    -    val equalsFunction =
    -      q"""
    -        override def equals(other: Any): Boolean = other match {
    -          case specificType: SpecificRow =>
    -            ..$columnChecks
    -            return true
    -          case other => super.equals(other)
    -        }
    -      """
    +    class SpecificProjection extends ${typeOf[BaseProject]} {
    +      private $exprType[] expressions = null;
    +
    +      public SpecificProjection($exprType[] expr) {
    +        expressions = expr;
    +      }
     
    -    val allColumns = (0 until expressions.size).map { i =>
    -      val iLit = ru.Literal(Constant(i))
    -      q"if(isNullAt($iLit)) { null } else { ${newTermName(s"c$i")} }"
    +      @Override
    +      public Object apply(Object r) {
    +        return new SpecificRow(expressions, (Row)r);
    +      }
         }
     
    -    val copyFunction =
    -      q"override def copy() = new $genericRowType(Array[Any](..$allColumns))"
    -
    -    val toSeqFunction =
    -      q"override def toSeq: Seq[Any] = Seq(..$allColumns)"
    -
    -    val classBody =
    -      nullFunctions ++ (
    -        lengthDef +:
    -        applyFunction +:
    -        updateFunction +:
    -        equalsFunction +:
    -        hashCodeFunction +:
    -        copyFunction +:
    -        toSeqFunction +:
    -        (tupleElements ++ specificAccessorFunctions ++ specificMutatorFunctions))
    -
    -    val code = q"""
    -      final class SpecificRow(i: $rowType) extends $mutableRowType {
    -        ..$classBody
    +    final class SpecificRow extends ${typeOf[BaseMutableRow]} {
    --- End diff --
    
    Should we give the generated row class a random name and load it with the same classloader, or should it always be loaded by a different classloader?
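
    For reference, a minimal sketch (my own illustration, assuming the ClassBodyEvaluator entry point and not necessarily the exact compile path in this PR) of how Janino isolates generated classes: each cook() compiles the class body into a fresh child classloader on top of the parent loader you pass in, so a fixed nested class name like SpecificRow cannot clash across separate compilations. A random name would only matter if every generated class were defined in one shared classloader.

        import org.codehaus.janino.ClassBodyEvaluator

        object JaninoLoaderSketch {
          // Compiles a generated Java class body and returns the synthesized wrapper
          // class; nested classes such as SpecificRow are defined by the evaluator's
          // own child classloader, not by the parent loader.
          def compile(javaClassBody: String): Class[_] = {
            val evaluator = new ClassBodyEvaluator()
            // Parent loader resolves Spark/Catalyst types referenced by the generated code.
            evaluator.setParentClassLoader(getClass.getClassLoader)
            evaluator.setDefaultImports(Array("org.apache.spark.sql.Row"))
            evaluator.cook(javaClassBody)
            evaluator.getClazz
          }
        }

        // Compiling the same source twice yields two distinct Class objects, each
        // owned by a different classloader, so the fixed class name does not clash:
        //   val c1 = JaninoLoaderSketch.compile(code)
        //   val c2 = JaninoLoaderSketch.compile(code)
        //   assert(c1 ne c2)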

