spark-reviews mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From viirya <...@git.apache.org>
Subject [GitHub] spark pull request #20756: [SPARK-23593][SQL] Add interpreted execution for ...
Date Thu, 08 Mar 2018 08:03:11 GMT
Github user viirya commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20756#discussion_r173086568
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
---
    @@ -1261,8 +1261,39 @@ case class InitializeJavaBean(beanInstance: Expression, setters: Map[String, Exp
       override def children: Seq[Expression] = beanInstance +: setters.values.toSeq
       override def dataType: DataType = beanInstance.dataType
     
    -  override def eval(input: InternalRow): Any =
    -    throw new UnsupportedOperationException("Only code-generated evaluation is supported.")
    +  private lazy val resolvedSetters = {
    +    val ObjectType(beanClass) = beanInstance.dataType
    +    setters.map {
    +      case (name, expr) =>
    +        // Looking for known type mapping first, then using Class attached in `ObjectType`.
    +        // Finally also looking for general `Object`-type parameter for generic methods.
    +        val paramTypes = CallMethodViaReflection.typeMapping.getOrElse(expr.dataType,
    +            Seq(expr.dataType.asInstanceOf[ObjectType].cls)) ++ Seq(classOf[Object])
    +        val methods = paramTypes.flatMap { fieldClass =>
    +          try {
    +            Some(beanClass.getDeclaredMethod(name, fieldClass))
    +          } catch {
    +            case e: NoSuchMethodException => None
    +          }
    +        }
    +        if (methods.isEmpty) {
    +          throw new NoSuchMethodException(s"""A method named "$name" is not declared """ +
    +            "in any enclosing class nor any supertype, nor through a static import")
    +        }
    +        methods.head -> expr
    +    }
    +  }
    +
    +  override def eval(input: InternalRow): Any = {
    +    val instance = beanInstance.eval(input).asInstanceOf[Object]
    --- End diff --
    
    Ok.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org


Mime
View raw message