flink-issues mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From "ASF GitHub Bot (JIRA)" <j...@apache.org>
Subject [jira] [Commented] (FLINK-7062) Support the basic functionality of MATCH_RECOGNIZE
Date Fri, 19 Oct 2018 09:31:02 GMT

    [ https://issues.apache.org/jira/browse/FLINK-7062?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16656511#comment-16656511
] 

ASF GitHub Bot commented on FLINK-7062:
---------------------------------------

twalthr commented on a change in pull request #6815:  [FLINK-7062][cep][table] Added basic
support for MATCH_RECOGNIZE
URL: https://github.com/apache/flink/pull/6815#discussion_r226555963
 
 

 ##########
 File path: flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/MatchRecognize.scala
 ##########
 @@ -18,134 +18,136 @@
 
 package org.apache.flink.table.plan.nodes
 
+import java.util.{List => JList, SortedSet => JSortedSet}
+
 import com.google.common.collect.ImmutableMap
 import org.apache.calcite.rel.{RelCollation, RelWriter}
 import org.apache.calcite.rex.{RexCall, RexLiteral, RexNode}
 import org.apache.calcite.sql.SqlKind
 import org.apache.calcite.sql.SqlMatchRecognize.AfterOption
-import org.apache.flink.cep.nfa.aftermatch.AfterMatchSkipStrategy
-import org.apache.flink.table.plan.nodes.logical.FlinkLogicalMatch
-import org.apache.flink.table.plan.schema.RowSchema
+import org.apache.flink.table.plan.logical.MatchRecognizeDescriptor
+import org.apache.flink.table.runtime.aggregate.SortUtil.directionToOrder
 
 import scala.collection.JavaConverters._
 
 trait MatchRecognize {
 
-  private type JList[T] = java.util.List[T]
-  private type JSortedSet[T] = java.util.SortedSet[T]
-
-  private[flink] def partitionKeysToString(
-    keys: JList[RexNode],
-    schema: RowSchema,
-    expression: (RexNode, List[String], Option[List[RexNode]]) => String
-  ) = keys.asScala.map(k => expression(k, schema.fieldNames.toList, None)).mkString(",
")
+  private def partitionKeysToString(
+      keys: JList[RexNode],
+      fieldNames: Seq[String],
+      expression: (RexNode, Seq[String], Option[Seq[RexNode]]) => String)
+    : String =
+    keys.asScala.map(k => expression(k, fieldNames, None)).mkString(", ")
 
-  private[flink] def orderingToString(orders: RelCollation, schema: RowSchema) =
+  private def orderingToString(orders: RelCollation, fieldNames: Seq[String]): String =
     orders.getFieldCollations.asScala.map {
-    x => schema.relDataType.getFieldList.get(x.getFieldIndex).getName
-  }.mkString(", ")
+      x => s"${fieldNames(x.getFieldIndex)} ${directionToOrder(x.direction).getShortName}"
+    }.mkString(", ")
 
-  private[flink] def measuresDefineToString(
+  private def measuresDefineToString(
       measures: ImmutableMap[String, RexNode],
-      schema: RowSchema,
-      expression: (RexNode, List[String], Option[List[RexNode]]) => String) =
+      fieldNames: Seq[String],
+      expression: (RexNode, Seq[String], Option[Seq[RexNode]]) => String)
+    : String =
     measures.asScala.map {
-    case (k, v) => s"${expression(v, schema.fieldNames.toList, None)} AS $k"
-  }.mkString(", ")
+      case (k, v) => s"${expression(v, fieldNames, None)} AS $k"
+    }.mkString(", ")
 
-  private[flink] def rowsPerMatchToString(isAll: Boolean) =
+  private def rowsPerMatchToString(isAll: Boolean): String =
     if (isAll) "ALL ROWS PER MATCH" else "ONE ROW PER MATCH"
 
-  private[flink] def subsetToString(subset: ImmutableMap[String, JSortedSet[String]]) =
+  private def subsetToString(subset: ImmutableMap[String, JSortedSet[String]]): String =
     subset.asScala.map {
-    case (k, v) => s"$k = (${v.toArray.mkString(", ")})"
-  }.mkString(", ")
-
-  private[flink] def afterMatchToString(
-    after: RexNode,
-    schema: RowSchema,
-    expression: (RexNode, List[String], Option[List[RexNode]]) => String) = after.getKind
match {
-    case SqlKind.SKIP_TO_FIRST => s"SKIP TO FIRST ${
-      expression(after.asInstanceOf[RexCall].operands
-        .get(0), schema.fieldNames.toList, None)
-    }"
-    case SqlKind.SKIP_TO_LAST => s"SKIP TO LAST ${
-      expression(after.asInstanceOf[RexCall].operands
-        .get(0), schema.fieldNames.toList, None)
-    }"
-    case SqlKind.LITERAL => after.asInstanceOf[RexLiteral].getValueAs(classOf[AfterOption])
match {
-      case AfterOption.SKIP_PAST_LAST_ROW => "SKIP PAST LAST ROW"
-      case AfterOption.SKIP_TO_NEXT_ROW => "SKIP TO NEXT ROW"
+      case (k, v) => s"$k = (${v.asScala.mkString(", ")})"
+    }.mkString(", ")
+
+  private def afterMatchToString(
+      after: RexNode,
+      fieldNames: Seq[String])
+    : String =
+    after.getKind match {
+      case SqlKind.SKIP_TO_FIRST => s"SKIP TO FIRST ${
+        after.asInstanceOf[RexCall].operands.get(0).toString
+      }"
+      case SqlKind.SKIP_TO_LAST => s"SKIP TO LAST ${
+        after.asInstanceOf[RexCall].operands.get(0).toString
+      }"
+      case SqlKind.LITERAL => after.asInstanceOf[RexLiteral]
+        .getValueAs(classOf[AfterOption]) match {
+        case AfterOption.SKIP_PAST_LAST_ROW => "SKIP PAST LAST ROW"
+        case AfterOption.SKIP_TO_NEXT_ROW => "SKIP TO NEXT ROW"
+      }
     }
-  }
 
   private[flink] def matchToString(
-    logicalMatch: FlinkLogicalMatch,
-    inputSchema: RowSchema,
-    expression: (RexNode, List[String], Option[List[RexNode]]) => String) =
-    s"Match(${
-      if (!logicalMatch.getPartitionKeys.isEmpty) {
-        s"PARTITION BY:  ${
-          partitionKeysToString(logicalMatch.getPartitionKeys,
-            inputSchema,
-            expression)
-        }"
-      } else {
-        ""
-      }
-    }${
-      if (!logicalMatch.getOrderKeys.getFieldCollations.isEmpty) {
-        s"ORDER BY: ${orderingToString(logicalMatch.getOrderKeys, inputSchema)}"
-      } else {
-        ""
-      }
-    }${
-      if (!logicalMatch.getMeasures.isEmpty) {
-        s"MEASURES: ${measuresDefineToString(logicalMatch.getMeasures, inputSchema, expression)}"
-      } else {
-        ""
-      }
-    }${
-      s"${rowsPerMatchToString(logicalMatch.isAllRows)}"
-    }${
-      s"${afterMatchToString(logicalMatch.getAfter, inputSchema, expression)}"
-    }${
-      s"PATTERN: (${logicalMatch.getPattern.toString})"
-    }${
-      if (!logicalMatch.getSubsets.isEmpty) {
-        s"SUBSET: ${subsetToString(logicalMatch.getSubsets)} "
-      } else {
-        ""
-      }
-    }${
-      s"DEFINE: ${
-        measuresDefineToString(logicalMatch.getPatternDefinitions,
-          inputSchema,
-          expression)
+      logicalMatch: MatchRecognizeDescriptor,
+      fieldNames: Seq[String],
+      expression: (RexNode, Seq[String], Option[Seq[RexNode]]) => String)
+    : String = {
+    val partitionBy = if (!logicalMatch.partitionKeys.isEmpty) {
+      s"PARTITION BY:  ${
 
 Review comment:
   nit: additional space

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


> Support the basic functionality of MATCH_RECOGNIZE
> --------------------------------------------------
>
>                 Key: FLINK-7062
>                 URL: https://issues.apache.org/jira/browse/FLINK-7062
>             Project: Flink
>          Issue Type: Sub-task
>          Components: CEP, Table API & SQL
>            Reporter: Dian Fu
>            Assignee: Dian Fu
>            Priority: Major
>              Labels: pull-request-available
>
> In this JIRA, we will support the basic functionality of {{MATCH_RECOGNIZE}} in the Flink
SQL API, which includes support for the {{MEASURES}}, {{PATTERN}} and {{DEFINE}} syntax. This
would allow users to write basic CEP use cases with SQL, like the following example:
> {code}
> SELECT T.aid, T.bid, T.cid
> FROM MyTable
> MATCH_RECOGNIZE (
>   MEASURES
>     A.id AS aid,
>     B.id AS bid,
>     C.id AS cid
>   PATTERN (A B C)
>   DEFINE
>     A AS A.name = 'a',
>     B AS B.name = 'b',
>     C AS C.name = 'c'
> ) AS T
> {code}



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

Mime
View raw message