flink-issues mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From fhueske <...@git.apache.org>
Subject [GitHub] flink pull request #3277: [FLINK-5662] [table] Rework internal type handling...
Date Sun, 12 Feb 2017 23:59:30 GMT
Github user fhueske commented on a diff in the pull request:

    https://github.com/apache/flink/pull/3277#discussion_r100708516
  
    --- Diff: flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/TableEnvironment.scala
---
    @@ -428,6 +431,113 @@ abstract class TableEnvironment(val config: TableConfig) {
         (fieldNames.toArray, fieldIndexes.toArray)
       }
     
    +  /**
    +    * Creates a final converter that maps the internal row type to external type.
    +    */
    +  protected def sinkConversion[T](
    +      physicalRowTypeInfo: TypeInformation[Row],
    +      logicalRowType: RelDataType,
    +      expectedTypeInfo: TypeInformation[T],
    +      functionName: String)
    +    : Option[MapFunction[Row, T]] = {
    +
    +    // validate that at least the field types of physical and logical type match
    +    // we do that here to make sure that plan translation was correct
    +    val logicalRowTypeInfo = FlinkTypeFactory.toInternalRowTypeInfo(logicalRowType)
    +    if (physicalRowTypeInfo != logicalRowTypeInfo) {
    +      throw TableException("The field types of physical and logical row types do not match." +
    +        "This is a bug and should not happen. Please file an issue.")
    +    }
    +
    +    // expected type is a row, no conversion needed
    +    // TODO this logic will change with FLINK-5429
    +    if (expectedTypeInfo.getTypeClass == classOf[Row]) {
    +      return None
    +    }
    +
    +    // convert to type information
    +    val logicalFieldTypes = logicalRowType.getFieldList.asScala map { relDataType =>
    +      FlinkTypeFactory.toTypeInfo(relDataType.getType)
    +    }
    +    // field names
    +    val logicalFieldNames = logicalRowType.getFieldNames.asScala
    +
    +    // validate expected type
    +    if (expectedTypeInfo.getArity != logicalFieldTypes.length) {
    +      throw new TableException("Arity of result does not match expected type.")
    +    }
    +    expectedTypeInfo match {
    +
    +      // POJO type expected
    +      case pt: PojoTypeInfo[_] =>
    +        logicalFieldNames.zip(logicalFieldTypes) foreach {
    +          case (fName, fType) =>
    +            val pojoIdx = pt.getFieldIndex(fName)
    +            if (pojoIdx < 0) {
    +              throw new TableException(s"POJO does not define field name: $fName")
    +            }
    +            val expectedTypeInfo = pt.getTypeAt(pojoIdx)
    +            if (fType != expectedTypeInfo) {
    +              throw new TableException(s"Result field does not match expected type. " +
    +                s"Expected: $expectedTypeInfo; Actual: $fType")
    +            }
    +        }
    +
    +      // Tuple/Case class type expected
    +      case ct: CompositeType[_] =>
    +        logicalFieldTypes.zipWithIndex foreach {
    +          case (fieldTypeInfo, i) =>
    +            val expectedTypeInfo = ct.getTypeAt(i)
    +            if (fieldTypeInfo != expectedTypeInfo) {
    +              throw new TableException(s"Result field does not match expected type. " +
    +                s"Expected: $expectedTypeInfo; Actual: $fieldTypeInfo")
    +            }
    +        }
    +
    +      // Atomic type expected
    +      case at: AtomicType[_] =>
    +        val fieldTypeInfo = logicalFieldTypes.head
    --- End diff --
    
    Add a check that `logicalFieldTypes.size() == 1`


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

Mime
View raw message