calcite-commits mailing list archives

From jcama...@apache.org
Subject calcite git commit: [CALCITE-1392] Druid default time column not properly recognized (addendum)
Date Thu, 06 Oct 2016 15:15:07 GMT
Repository: calcite
Updated Branches:
  refs/heads/master a1e50b618 -> 66274481f


[CALCITE-1392] Druid default time column not properly recognized (addendum)

* Handle the time column when it is referenced in the query result
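
For context, a minimal self-contained sketch of the idea (illustrative only, not the committed code: it assumes Jackson on the classpath and a "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" pattern for UTC_TIMESTAMP_FORMAT; the class and variable names are made up). A Druid timeseries response carries the time value alongside, not inside, the per-row "result" object, so the parser extracts it separately and writes it into the row at the position of the projected timestamp column (posTimestampField in the patch, -1 when the time column is not projected):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.TimeZone;

public class TimestampFieldSketch {
  // Stand-in for DruidConnectionImpl.UTC_TIMESTAMP_FORMAT; the exact
  // pattern here is an assumption, not taken from the patch.
  private static final SimpleDateFormat UTC_TIMESTAMP_FORMAT =
      new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
  static {
    UTC_TIMESTAMP_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC"));
  }

  public static void main(String[] args) throws Exception {
    // Shape of a Druid timeseries response row: the time value sits
    // next to the "result" object that holds the aggregates.
    final String response = "[{\"timestamp\":\"2015-09-12T00:46:58.000Z\","
        + "\"result\":{\"added\":9385573}}]";
    // Suppose the query projects [__time, added]: the timestamp column
    // is at position 0 (posTimestampField in the patch).
    final int posTimestampField = 0;
    final Object[] row = new Object[2];
    for (JsonNode node : new ObjectMapper().readTree(response)) {
      final Long timeValue = parseTimestamp(node.get("timestamp").asText());
      if (posTimestampField != -1) {
        row[posTimestampField] = timeValue; // inject the extracted time value
      }
      row[1] = node.get("result").get("added").asLong();
      System.out.println(Arrays.toString(row)); // [1442018818000, 9385573]
    }
  }

  private static Long parseTimestamp(String text) {
    try {
      // SimpleDateFormat is not thread-safe, hence the synchronized
      // block, mirroring the patch.
      synchronized (UTC_TIMESTAMP_FORMAT) {
        return UTC_TIMESTAMP_FORMAT.parse(text).getTime();
      }
    } catch (ParseException e) {
      return null; // ignore bad value, as the patch does
    }
  }
}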


Project: http://git-wip-us.apache.org/repos/asf/calcite/repo
Commit: http://git-wip-us.apache.org/repos/asf/calcite/commit/66274481
Tree: http://git-wip-us.apache.org/repos/asf/calcite/tree/66274481
Diff: http://git-wip-us.apache.org/repos/asf/calcite/diff/66274481

Branch: refs/heads/master
Commit: 66274481fbca9c3ed9cd1f6f690bb2163ff07ba9
Parents: a1e50b6
Author: Jesus Camacho Rodriguez <jcamacho@apache.org>
Authored: Thu Oct 6 16:14:34 2016 +0100
Committer: Jesus Camacho Rodriguez <jcamacho@apache.org>
Committed: Thu Oct 6 16:14:34 2016 +0100

----------------------------------------------------------------------
 .../adapter/druid/DruidConnectionImpl.java      | 107 ++++++++++----
 .../calcite/adapter/druid/DruidSchema.java      |   5 +-
 .../calcite/adapter/druid/DruidTable.java       |  13 +-
 .../adapter/druid/DruidTableFactory.java        |   8 +-
 .../org/apache/calcite/test/DruidAdapterIT.java | 141 ++++++++++++++-----
 5 files changed, 198 insertions(+), 76 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/calcite/blob/66274481/druid/src/main/java/org/apache/calcite/adapter/druid/DruidConnectionImpl.java
----------------------------------------------------------------------
diff --git a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidConnectionImpl.java b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidConnectionImpl.java
index 21067db..69ded34 100644
--- a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidConnectionImpl.java
+++ b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidConnectionImpl.java
@@ -66,6 +66,7 @@ class DruidConnectionImpl implements DruidConnection {
   private final String url;
   private final String coordinatorUrl;
 
+  private static final String DEFAULT_RESPONSE_TIMESTAMP_COLUMN = "timestamp";
   private static final SimpleDateFormat UTC_TIMESTAMP_FORMAT;
 
   static {
@@ -109,7 +110,7 @@ class DruidConnectionImpl implements DruidConnection {
     }
   }
 
-  /** Parses the output of a {@code topN} query, sending the results to a
+  /** Parses the output of a query, sending the results to a
    * {@link Sink}. */
   private void parse(QueryType queryType, InputStream in, Sink sink,
      List<String> fieldNames, List<ColumnMetaData.Rep> fieldTypes, Page page) {
@@ -126,16 +127,27 @@ class DruidConnectionImpl implements DruidConnection {
       }
     }
 
+    int posTimestampField = -1;
+    for (int i = 0; i < fieldTypes.size(); i++) {
+      if (fieldTypes.get(i) == ColumnMetaData.Rep.JAVA_SQL_TIMESTAMP) {
+        posTimestampField = i;
+        break;
+      }
+    }
+
     try (final JsonParser parser = factory.createParser(in)) {
       switch (queryType) {
       case TIMESERIES:
         if (parser.nextToken() == JsonToken.START_ARRAY) {
           while (parser.nextToken() == JsonToken.START_OBJECT) {
-            // loop until token equal to "}"
-            expectScalarField(parser, "timestamp");
+           // loop until token equal to "}"
+            final Long timeValue = extractTimestampField(parser);
             if (parser.nextToken() == JsonToken.FIELD_NAME
                     && parser.getCurrentName().equals("result")
                     && parser.nextToken() == JsonToken.START_OBJECT) {
+              if (posTimestampField != -1) {
+                rowBuilder.set(posTimestampField, timeValue);
+              }
               parseFields(fieldNames, fieldTypes, rowBuilder, parser);
               sink.send(rowBuilder.build());
               rowBuilder.reset();
@@ -148,12 +160,15 @@ class DruidConnectionImpl implements DruidConnection {
       case TOP_N:
         if (parser.nextToken() == JsonToken.START_ARRAY
             && parser.nextToken() == JsonToken.START_OBJECT) {
-          expectScalarField(parser, "timestamp");
+          final Long timeValue = extractTimestampField(parser);
           if (parser.nextToken() == JsonToken.FIELD_NAME
               && parser.getCurrentName().equals("result")
               && parser.nextToken() == JsonToken.START_ARRAY) {
             while (parser.nextToken() == JsonToken.START_OBJECT) {
               // loop until token equal to "}"
+              if (posTimestampField != -1) {
+                rowBuilder.set(posTimestampField, timeValue);
+              }
               parseFields(fieldNames, fieldTypes, rowBuilder, parser);
               sink.send(rowBuilder.build());
               rowBuilder.reset();
@@ -167,7 +182,7 @@ class DruidConnectionImpl implements DruidConnection {
             && parser.nextToken() == JsonToken.START_OBJECT) {
           page.pagingIdentifier = null;
           page.offset = -1;
-          expectScalarField(parser, "timestamp");
+          expectScalarField(parser, DEFAULT_RESPONSE_TIMESTAMP_COLUMN);
           if (parser.nextToken() == JsonToken.FIELD_NAME
               && parser.getCurrentName().equals("result")
               && parser.nextToken() == JsonToken.START_OBJECT) {
@@ -194,7 +209,7 @@ class DruidConnectionImpl implements DruidConnection {
                 if (parser.nextToken() == JsonToken.FIELD_NAME
                     && parser.getCurrentName().equals("event")
                     && parser.nextToken() == JsonToken.START_OBJECT) {
-                  parseFields(fieldNames, fieldTypes, rowBuilder, parser);
+                  parseFields(fieldNames, fieldTypes, posTimestampField, rowBuilder, parser);
                   sink.send(rowBuilder.build());
                   rowBuilder.reset();
                 }
@@ -210,10 +225,13 @@ class DruidConnectionImpl implements DruidConnection {
         if (parser.nextToken() == JsonToken.START_ARRAY) {
           while (parser.nextToken() == JsonToken.START_OBJECT) {
             expectScalarField(parser, "version");
-            expectScalarField(parser, "timestamp");
+            final Long timeValue = extractTimestampField(parser);
             if (parser.nextToken() == JsonToken.FIELD_NAME
                 && parser.getCurrentName().equals("event")
                 && parser.nextToken() == JsonToken.START_OBJECT) {
+              if (posTimestampField != -1) {
+                rowBuilder.set(posTimestampField, timeValue);
+              }
               parseFields(fieldNames, fieldTypes, rowBuilder, parser);
               sink.send(rowBuilder.build());
               rowBuilder.reset();
@@ -227,21 +245,39 @@ class DruidConnectionImpl implements DruidConnection {
     }
   }
 
-  private void parseFields(List<String> fieldNames,
-      List<ColumnMetaData.Rep> fieldTypes, Row.RowBuilder rowBuilder,
-      JsonParser parser) throws IOException {
+  private void parseFields(List<String> fieldNames, List<ColumnMetaData.Rep> fieldTypes,
+      Row.RowBuilder rowBuilder, JsonParser parser) throws IOException {
+    parseFields(fieldNames, fieldTypes, -1, rowBuilder, parser);
+  }
+
+  private void parseFields(List<String> fieldNames, List<ColumnMetaData.Rep> fieldTypes,
+      int posTimestampField, Row.RowBuilder rowBuilder, JsonParser parser) throws IOException {
     while (parser.nextToken() == JsonToken.FIELD_NAME) {
-      parseField(fieldNames, fieldTypes, rowBuilder, parser);
+      parseField(fieldNames, fieldTypes, posTimestampField, rowBuilder, parser);
     }
   }
 
-  private void parseField(List<String> fieldNames,
-      List<ColumnMetaData.Rep> fieldTypes, Row.RowBuilder rowBuilder,
-      JsonParser parser) throws IOException {
+  private void parseField(List<String> fieldNames, List<ColumnMetaData.Rep> fieldTypes,
+      int posTimestampField, Row.RowBuilder rowBuilder, JsonParser parser) throws IOException {
     final String fieldName = parser.getCurrentName();
 
     // Move to next token, which is name's value
     JsonToken token = parser.nextToken();
+    if (fieldName.equals(DEFAULT_RESPONSE_TIMESTAMP_COLUMN)) {
+      try {
+        final Date parse;
+        // synchronized block to avoid race condition
+        synchronized (UTC_TIMESTAMP_FORMAT) {
+          parse = UTC_TIMESTAMP_FORMAT.parse(parser.getText());
+        }
+        if (posTimestampField != -1) {
+          rowBuilder.set(posTimestampField, parse.getTime());
+        }
+      } catch (ParseException e) {
+        // ignore bad value
+      }
+      return;
+    }
     int i = fieldNames.indexOf(fieldName);
     if (i < 0) {
       return;
@@ -288,20 +324,7 @@ class DruidConnectionImpl implements DruidConnection {
       break;
     case VALUE_STRING:
     default:
-      if (type == ColumnMetaData.Rep.JAVA_SQL_TIMESTAMP) {
-        try {
-          final Date parse;
-          // synchronized block to avoid race condition
-          synchronized (UTC_TIMESTAMP_FORMAT) {
-            parse = UTC_TIMESTAMP_FORMAT.parse(parser.getText());
-          }
-          rowBuilder.set(i, parse.getTime());
-        } catch (ParseException e) {
-          // ignore bad value
-        }
-      } else {
-        rowBuilder.set(i, parser.getText());
-      }
+      rowBuilder.set(i, parser.getText());
       break;
     }
   }
@@ -347,6 +370,27 @@ class DruidConnectionImpl implements DruidConnection {
     }
   }
 
+  private Long extractTimestampField(JsonParser parser)
+      throws IOException {
+    expect(parser, JsonToken.FIELD_NAME);
+    if (!parser.getCurrentName().equals(DEFAULT_RESPONSE_TIMESTAMP_COLUMN)) {
+      throw new RuntimeException("expected field " + DEFAULT_RESPONSE_TIMESTAMP_COLUMN + ", got "
+          + parser.getCurrentName());
+    }
+    parser.nextToken();
+    try {
+      final Date parse;
+      // synchronized block to avoid race condition
+      synchronized (UTC_TIMESTAMP_FORMAT) {
+        parse = UTC_TIMESTAMP_FORMAT.parse(parser.getText());
+      }
+      return parse.getTime();
+    } catch (ParseException e) {
+      // ignore bad value
+    }
+    return null;
+  }
+
   /** Executes a request and returns the resulting rows as an
    * {@link Enumerable}, running the parser in a thread provided by
    * {@code service}. */
@@ -399,7 +443,7 @@ class DruidConnectionImpl implements DruidConnection {
   }
 
   /** Reads segment metadata, and populates a list of columns and metrics. */
-  void metadata(String dataSourceName, List<Interval> intervals,
+  void metadata(String dataSourceName, String timestampColumnName, List<Interval> intervals,
       Map<String, SqlTypeName> fieldBuilder, Set<String> metricNameBuilder) {
     final String url = this.url + "/druid/v2/?pretty";
     final Map<String, String> requestHeaders =
@@ -417,8 +461,13 @@ class DruidConnectionImpl implements DruidConnection {
               JsonSegmentMetadata.class);
       final List<JsonSegmentMetadata> list = mapper.readValue(in, listType);
       in.close();
+      fieldBuilder.put(timestampColumnName, SqlTypeName.TIMESTAMP);
       for (JsonSegmentMetadata o : list) {
         for (Map.Entry<String, JsonColumn> entry : o.columns.entrySet()) {
+          if (entry.getKey().equals(DruidTable.DEFAULT_TIMESTAMP_COLUMN)) {
+            // timestamp column
+            continue;
+          }
           if (!isSupportedType(entry.getValue().type)) {
             continue;
           }

http://git-wip-us.apache.org/repos/asf/calcite/blob/66274481/druid/src/main/java/org/apache/calcite/adapter/druid/DruidSchema.java
----------------------------------------------------------------------
diff --git a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidSchema.java b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidSchema.java
index b55dbb0..99d733a 100644
--- a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidSchema.java
+++ b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidSchema.java
@@ -70,9 +70,10 @@ public class DruidSchema extends AbstractSchema {
               public Table load(@Nonnull String tableName) throws Exception {
                 final Map<String, SqlTypeName> fieldMap = new LinkedHashMap<>();
                 final Set<String> metricNameSet = new LinkedHashSet<>();
-                connection.metadata(tableName, null, fieldMap, metricNameSet);
+                connection.metadata(tableName, DruidTable.DEFAULT_TIMESTAMP_COLUMN,
+                    null, fieldMap, metricNameSet);
                 return DruidTable.create(DruidSchema.this, tableName, null,
-                    fieldMap, metricNameSet, null, connection);
+                    fieldMap, metricNameSet, DruidTable.DEFAULT_TIMESTAMP_COLUMN, connection);
               }
             }));
   }

http://git-wip-us.apache.org/repos/asf/calcite/blob/66274481/druid/src/main/java/org/apache/calcite/adapter/druid/DruidTable.java
----------------------------------------------------------------------
diff --git a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidTable.java b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidTable.java
index 8b4355f..816bec4 100644
--- a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidTable.java
+++ b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidTable.java
@@ -105,18 +105,7 @@ public class DruidTable extends AbstractTable implements TranslatableTable {
       Set<String> metricNameSet, String timestampColumnName,
       DruidConnectionImpl connection) {
     if (connection != null) {
-      connection.metadata(dataSourceName, intervals, fieldMap, metricNameSet);
-    }
-    if (timestampColumnName != null) {
-      // The default column has been added automatically if we retrieved
-      // the data source information with a segment metadata query. However,
-      // we can remove it since we declared explicitly a name for the
-      // timestamp column.
-      fieldMap.put(timestampColumnName, SqlTypeName.TIMESTAMP);
-      fieldMap.remove(DruidTable.DEFAULT_TIMESTAMP_COLUMN);
-    } else {
-      fieldMap.put(DruidTable.DEFAULT_TIMESTAMP_COLUMN, SqlTypeName.TIMESTAMP);
-      timestampColumnName = DruidTable.DEFAULT_TIMESTAMP_COLUMN;
+      connection.metadata(dataSourceName, timestampColumnName, intervals, fieldMap, metricNameSet);
     }
     final ImmutableMap<String, SqlTypeName> fields =
         ImmutableMap.copyOf(fieldMap);

http://git-wip-us.apache.org/repos/asf/calcite/blob/66274481/druid/src/main/java/org/apache/calcite/adapter/druid/DruidTableFactory.java
----------------------------------------------------------------------
diff --git a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidTableFactory.java b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidTableFactory.java
index f168f57..beb800c 100644
--- a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidTableFactory.java
+++ b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidTableFactory.java
@@ -51,8 +51,14 @@ public class DruidTableFactory implements TableFactory {
     // If "dataSource" operand is present it overrides the table name.
     final String dataSource = (String) operand.get("dataSource");
     final Set<String> metricNameBuilder = new LinkedHashSet<>();
-    final String timestampColumnName = (String) operand.get("timestampColumn");
     final Map<String, SqlTypeName> fieldBuilder = new LinkedHashMap<>();
+    final String timestampColumnName;
+    if (operand.get("timestampColumn") != null) {
+      timestampColumnName = (String) operand.get("timestampColumn");
+    } else {
+      timestampColumnName = DruidTable.DEFAULT_TIMESTAMP_COLUMN;
+    }
+    fieldBuilder.put(timestampColumnName, SqlTypeName.TIMESTAMP);
     final Object dimensionsRaw = operand.get("dimensions");
     if (dimensionsRaw instanceof List) {
       //noinspection unchecked

http://git-wip-us.apache.org/repos/asf/calcite/blob/66274481/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
----------------------------------------------------------------------
diff --git a/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java b/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
index 45defdc..6173211 100644
--- a/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
+++ b/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
@@ -124,7 +124,7 @@ public class DruidAdapterIT {
   @Test public void testSelectDistinctWiki() {
     final String explain = "PLAN="
         + "EnumerableInterpreter\n"
-        + "  DruidQuery(table=[[wiki, wiki]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
filter=[=(CAST($12):VARCHAR(13) CHARACTER SET \"ISO-8859-1\" COLLATE \"ISO-8859-1$en_US$primary\",
'Jeremy Corbyn')], groups=[{4}], aggs=[[]])\n";
+        + "  DruidQuery(table=[[wiki, wiki]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
filter=[=(CAST($13):VARCHAR(13) CHARACTER SET \"ISO-8859-1\" COLLATE \"ISO-8859-1$en_US$primary\",
'Jeremy Corbyn')], groups=[{5}], aggs=[[]])\n";
     checkSelectDistinctWiki(WIKI, "wiki")
         .explainContains(explain);
   }
@@ -132,7 +132,7 @@ public class DruidAdapterIT {
   @Test public void testSelectDistinctWikiNoColumns() {
     final String explain = "PLAN="
         + "EnumerableInterpreter\n"
-        + "  DruidQuery(table=[[wiki, wiki]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
filter=[=(CAST($16):VARCHAR(13) CHARACTER SET \"ISO-8859-1\" COLLATE \"ISO-8859-1$en_US$primary\",
'Jeremy Corbyn')], groups=[{6}], aggs=[[]])\n";
+        + "  DruidQuery(table=[[wiki, wiki]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
filter=[=(CAST($17):VARCHAR(13) CHARACTER SET \"ISO-8859-1\" COLLATE \"ISO-8859-1$en_US$primary\",
'Jeremy Corbyn')], groups=[{7}], aggs=[[]])\n";
     checkSelectDistinctWiki(WIKI_AUTO, "wiki")
         .explainContains(explain);
   }
@@ -164,16 +164,16 @@ public class DruidAdapterIT {
         .returnsUnordered("C=86829");
   }
 
-  @Test public void testSelectTimestampColumnNoTables() {
+  @Test public void testSelectTimestampColumnNoTables1() {
     // Since columns are not explicitly declared, we use the default time
     // column in the query.
     final String sql = "select sum(\"added\")\n"
         + "from \"wikiticker\"\n"
         + "group by floor(\"__time\" to DAY)";
     final String explain = "PLAN="
-            + "EnumerableInterpreter\n"
-            + "  BindableProject(EXPR$0=[$1])\n"
-            + "    DruidQuery(table=[[wiki, wikiticker]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]],
projects=[[FLOOR($0, FLAG(DAY)), $1]], groups=[{0}], aggs=[[SUM($1)]])\n";
+        + "EnumerableInterpreter\n"
+        + "  BindableProject(EXPR$0=[$1])\n"
+        + "    DruidQuery(table=[[wiki, wikiticker]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]],
projects=[[FLOOR($0, FLAG(DAY)), $1]], groups=[{0}], aggs=[[SUM($1)]])\n";
     final String druidQuery = "{'queryType':'timeseries',"
         + "'dataSource':'wikiticker','descending':false,'granularity':'DAY',"
         + "'aggregations':[{'type':'longSum','name':'EXPR$0','fieldName':'added'}],"
@@ -183,6 +183,71 @@ public class DruidAdapterIT {
         .queryContains(druidChecker(druidQuery));
   }
 
+  @Test public void testSelectTimestampColumnNoTables2() {
+    // Since columns are not explicitly declared, we use the default time
+    // column in the query.
+    final String sql = "select \"__time\"\n"
+        + "from \"wikiticker\"\n"
+        + "limit 1\n";
+    final String explain = "PLAN="
+        + "EnumerableInterpreter\n"
+        + "  DruidQuery(table=[[wiki, wikiticker]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]],
projects=[[$0]], fetch=[1])\n";
+    final String druidQuery = "{'queryType':'select',"
+        + "'dataSource':'wikiticker','descending':false,"
+        + "'intervals':['1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z'],"
+        + "'dimensions':[],'metrics':[],'granularity':'all','pagingSpec':{'threshold':1},"
+        + "'context':{'druid.query.fetch':true}}";
+    sql(sql, WIKI_AUTO2)
+        .returnsUnordered("__time=2015-09-12 00:46:58")
+        .explainContains(explain)
+        .queryContains(druidChecker(druidQuery));
+  }
+
+  @Test public void testSelectTimestampColumnNoTables3() {
+    // Since columns are not explicitly declared, we use the default time
+    // column in the query.
+    final String sql = "select floor(\"__time\" to DAY) as \"day\", sum(\"added\")\n"
+        + "from \"wikiticker\"\n"
+        + "group by floor(\"__time\" to DAY)";
+    final String explain = "PLAN="
+        + "EnumerableInterpreter\n"
+        + "  DruidQuery(table=[[wiki, wikiticker]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]],
projects=[[FLOOR($0, FLAG(DAY)), $1]], groups=[{0}], aggs=[[SUM($1)]])\n";
+    final String druidQuery = "{'queryType':'timeseries',"
+        + "'dataSource':'wikiticker','descending':false,'granularity':'DAY',"
+        + "'aggregations':[{'type':'longSum','name':'EXPR$1','fieldName':'added'}],"
+        + "'intervals':['1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z']}";
+    sql(sql, WIKI_AUTO2)
+        .returnsUnordered("day=2015-09-12 00:00:00; EXPR$1=9385573")
+        .explainContains(explain)
+        .queryContains(druidChecker(druidQuery));
+  }
+
+  @Test public void testSelectTimestampColumnNoTables4() {
+    // Since columns are not explicitly declared, we use the default time
+    // column in the query.
+    final String sql = "select sum(\"added\") as \"s\", \"page\", "
+        + "floor(\"__time\" to DAY) as \"day\"\n"
+        + "from \"wikiticker\"\n"
+        + "group by \"page\", floor(\"__time\" to DAY)\n"
+        + "order by \"s\" desc";
+    final String explain = "PLAN="
+        + "EnumerableInterpreter\n"
+        + "  BindableSort(sort0=[$0], dir0=[DESC])\n"
+        + "    BindableProject(s=[$2], page=[$0], day=[$1])\n"
+        + "      DruidQuery(table=[[wiki, wikiticker]], intervals=[[1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z]],
projects=[[$17, FLOOR($0, FLAG(DAY)), $1]], groups=[{0, 1}], aggs=[[SUM($2)]])\n";
+    final String druidQuery = "{'queryType':'groupBy',"
+        + "'dataSource':'wikiticker','granularity':'DAY','dimensions':['page'],"
+        + "'limitSpec':{'type':'default'},"
+        + "'aggregations':[{'type':'longSum','name':'s','fieldName':'added'}],"
+        + "'intervals':['1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z']}";
+    sql(sql, WIKI_AUTO2)
+        .limit(1)
+        .returnsUnordered("s=199818; page=User:QuackGuru/Electronic cigarettes 1; "
+            + "day=2015-09-12 00:00:00")
+        .explainContains(explain)
+        .queryContains(druidChecker(druidQuery));
+  }
+
   private CalciteAssert.AssertQuery checkSelectDistinctWiki(URL url, String tableName) {
     final String sql = "select distinct \"countryName\"\n"
         + "from \"" + tableName + "\"\n"
@@ -202,7 +267,7 @@ public class DruidAdapterIT {
   @Test public void testSelectDistinct() {
     final String explain = "PLAN="
         + "EnumerableInterpreter\n"
-        + "  DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{29}], aggs=[[]])";
+        + "  DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{30}], aggs=[[]])";
     final String sql = "select distinct \"state_province\" from \"foodmart\"";
     final String druidQuery = "{'queryType':'groupBy','dataSource':'foodmart','granularity':'all',"
         + "'dimensions':['state_province'],'limitSpec':{'type':'default'},"
@@ -259,7 +324,7 @@ public class DruidAdapterIT {
     final String explain = "PLAN="
         + "EnumerableInterpreter\n"
         + "  BindableSort(sort0=[$1], sort1=[$0], dir0=[ASC], dir1=[DESC])\n"
-        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
projects=[[$38, $29]], groups=[{0, 1}], aggs=[[]])";
+        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
projects=[[$39, $30]], groups=[{0, 1}], aggs=[[]])";
     final String sql = "select distinct \"gender\", \"state_province\"\n"
         + "from \"foodmart\" order by 2, 1 desc";
     sql(sql)
@@ -277,7 +342,7 @@ public class DruidAdapterIT {
     final String explain = "PLAN="
         + "EnumerableInterpreter\n"
         + "  BindableSort(sort0=[$1], sort1=[$0], dir0=[ASC], dir1=[DESC], offset=[2], fetch=[3])\n"
-        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
projects=[[$38, $29]], groups=[{0, 1}], aggs=[[]])";
+        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
projects=[[$39, $30]], groups=[{0, 1}], aggs=[[]])";
     final String sql = "select distinct \"gender\", \"state_province\"\n"
         + "from \"foodmart\"\n"
         + "order by 2, 1 desc offset 2 rows fetch next 3 rows only";
@@ -326,7 +391,7 @@ public class DruidAdapterIT {
     final String explain = "PLAN="
         + "EnumerableLimit(fetch=[3])\n"
         + "  EnumerableInterpreter\n"
-        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
projects=[[$38, $29]], groups=[{0, 1}], aggs=[[]])";
+        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
projects=[[$39, $30]], groups=[{0, 1}], aggs=[[]])";
     sql(sql)
         .runs()
         .explainContains(explain)
@@ -465,8 +530,8 @@ public class DruidAdapterIT {
     final String explain = "PLAN="
         + "EnumerableInterpreter\n"
         + "  BindableUnion(all=[true])\n"
-        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{38}], aggs=[[]])\n"
-        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{36}], aggs=[[]])";
+        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{39}], aggs=[[]])\n"
+        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{37}], aggs=[[]])";
     sql(sql)
         .explainContains(explain)
         .returnsUnordered(
@@ -486,8 +551,8 @@ public class DruidAdapterIT {
         + "EnumerableInterpreter\n"
         + "  BindableFilter(condition=[=($0, 'M')])\n"
         + "    BindableUnion(all=[true])\n"
-        + "      DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{38}], aggs=[[]])\n"
-        + "      DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{36}], aggs=[[]])";
+        + "      DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{39}], aggs=[[]])\n"
+        + "      DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{37}], aggs=[[]])";
     sql(sql)
         .explainContains(explain)
         .returnsUnordered("gender=M",
@@ -517,6 +582,18 @@ public class DruidAdapterIT {
             "C=40778");
   }
 
+  @Test public void testGroupByTimeAndOneColumnNotProjected() {
+    final String sql = "select count(*) as \"c\", floor(\"timestamp\" to MONTH) as \"month\"\n"
+        + "from \"foodmart\"\n"
+        + "group by floor(\"timestamp\" to MONTH), \"state_province\"\n"
+        + "order by \"c\" desc limit 3";
+    sql(sql)
+        .returnsOrdered("c=3072; month=1997-01-01 00:00:00",
+            "c=2231; month=1997-01-01 00:00:00",
+            "c=1730; month=1997-01-01 00:00:00")
+        .queryContains(druidChecker("'queryType':'topN'"));
+  }
+
   @Test public void testOrderByOneColumnNotProjected() {
     // Result including state: CA=24441, OR=21610, WA=40778
     final String sql = "select count(*) as c from \"foodmart\"\n"
@@ -534,7 +611,7 @@ public class DruidAdapterIT {
         + "order by \"state_province\"";
     String explain = "PLAN=EnumerableInterpreter\n"
         + "  BindableSort(sort0=[$0], dir0=[ASC])\n"
-        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{29}], aggs=[[COUNT()]])";
+        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{30}], aggs=[[COUNT()]])";
     sql(sql)
         .limit(2)
         .returnsOrdered("state_province=CA; C=24441",
@@ -635,7 +712,7 @@ public class DruidAdapterIT {
     final String explain = "PLAN="
         + "EnumerableInterpreter\n"
         + "  BindableProject(S=[$2], M=[$3], P=[$0])\n"
-        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
projects=[[$29, FLOOR($91, FLAG(MONTH)), $88]], groups=[{0, 1}], aggs=[[SUM($2), MAX($2)]],
sort0=[2], dir0=[DESC], fetch=[3])";
+        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
projects=[[$30, FLOOR($0, FLAG(MONTH)), $89]], groups=[{0, 1}], aggs=[[SUM($2), MAX($2)]],
sort0=[2], dir0=[DESC], fetch=[3])";
     final String druidQuery = "{'queryType':'topN','dataSource':'foodmart',"
         + "'granularity':'MONTH','dimension':'state_province','metric':'S',"
         + "'aggregations':[{'type':'longSum','name':'S','fieldName':'unit_sales'},"
@@ -659,7 +736,7 @@ public class DruidAdapterIT {
     final String explain = "PLAN="
         + "EnumerableInterpreter\n"
         + "  BindableProject(S=[$2], M=[$3], P=[$0])\n"
-        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1997-01-01T00:00:00.000Z/1997-09-01T00:00:00.000Z]],
projects=[[$29, FLOOR($91, FLAG(DAY)), $88]], groups=[{0, 1}], aggs=[[SUM($2), MAX($2)]],
sort0=[2], dir0=[DESC], fetch=[3])";
+        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1997-01-01T00:00:00.000Z/1997-09-01T00:00:00.000Z]],
projects=[[$30, FLOOR($0, FLAG(DAY)), $89]], groups=[{0, 1}], aggs=[[SUM($2), MAX($2)]], sort0=[2],
dir0=[DESC], fetch=[3])";
     final String druidQuery = "{'queryType':'topN','dataSource':'foodmart',"
         + "'granularity':'DAY','dimension':'state_province','metric':'S',"
         + "'aggregations':[{'type':'longSum','name':'S','fieldName':'unit_sales'},"
@@ -680,7 +757,7 @@ public class DruidAdapterIT {
         + "EnumerableInterpreter\n"
         + "  BindableSort(sort0=[$0], dir0=[ASC])\n"
         + "    BindableFilter(condition=[>($1, 23000)])\n"
-        + "      DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{29}], aggs=[[COUNT()]])";
+        + "      DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{30}], aggs=[[COUNT()]])";
     sql(sql)
         .returnsOrdered("S=CA; C=24441",
             "S=WA; C=40778")
@@ -696,7 +773,7 @@ public class DruidAdapterIT {
     final String explain = "PLAN="
         + "EnumerableInterpreter\n"
         + "  BindableProject(C=[$2], state_province=[$1], city=[$0])\n"
-        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{28, 29}], aggs=[[COUNT()]], sort0=[2], dir0=[DESC], fetch=[2])";
+        + "    DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{29, 30}], aggs=[[COUNT()]], sort0=[2], dir0=[DESC], fetch=[2])";
     sql(sql)
         .returnsOrdered("C=7394; state_province=WA; city=Spokane",
             "C=3958; state_province=WA; city=Olympia")
@@ -717,7 +794,7 @@ public class DruidAdapterIT {
         + "  BindableSort(sort0=[$1], dir0=[DESC], fetch=[2])\n"
         + "    BindableProject(state_province=[$0], CDC=[FLOOR($1)])\n"
         + "      BindableAggregate(group=[{1}], agg#0=[COUNT($0)])\n"
-        + "        DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{28, 29}], aggs=[[]])";
+        + "        DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
groups=[{29, 30}], aggs=[[]])";
     sql(sql)
         .explainContains(explain)
         .returnsUnordered("state_province=CA; CDC=45",
@@ -735,7 +812,7 @@ public class DruidAdapterIT {
         + "EnumerableInterpreter\n"
         + "  BindableProject(product_name=[$0], ZERO=[0])\n"
         + "    BindableSort(sort0=[$0], dir0=[ASC])\n"
-        + "      DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
projects=[[$2]])";
+        + "      DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],
projects=[[$3]])";
     sql(sql)
         .limit(2)
         .explainContains(explain)
@@ -760,11 +837,11 @@ public class DruidAdapterIT {
         + "'intervals':['1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z']}";
     final String explain = "PLAN=EnumerableInterpreter\n"
         + "  DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],"
-        + " filter=[AND(=(CAST($2):VARCHAR(24) CHARACTER SET \"ISO-8859-1\" COLLATE \"ISO-8859-1$en_US$primary\",
'High Top Dried Mushrooms'),"
-        + " OR(=($86, 'Q2'),"
-        + " =($86, 'Q3')),"
-        + " =(CAST($29):VARCHAR(2) CHARACTER SET \"ISO-8859-1\" COLLATE \"ISO-8859-1$en_US$primary\",
'WA'))],"
-        + " projects=[[$29, $28, $2]], groups=[{0, 1, 2}], aggs=[[]])\n";
+        + " filter=[AND(=(CAST($3):VARCHAR(24) CHARACTER SET \"ISO-8859-1\" COLLATE \"ISO-8859-1$en_US$primary\",
'High Top Dried Mushrooms'),"
+        + " OR(=($87, 'Q2'),"
+        + " =($87, 'Q3')),"
+        + " =(CAST($30):VARCHAR(2) CHARACTER SET \"ISO-8859-1\" COLLATE \"ISO-8859-1$en_US$primary\",
'WA'))],"
+        + " projects=[[$30, $29, $3]], groups=[{0, 1, 2}], aggs=[[]])\n";
     sql(sql)
         .queryContains(druidChecker(druidQuery))
         .explainContains(explain)
@@ -804,11 +881,11 @@ public class DruidAdapterIT {
         + "'pagingSpec':{'threshold':16384},'context':{'druid.query.fetch':false}}";
     final String explain = "PLAN=EnumerableInterpreter\n"
         + "  DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]],"
-        + " filter=[AND(=(CAST($2):VARCHAR(24) CHARACTER SET \"ISO-8859-1\" COLLATE \"ISO-8859-1$en_US$primary\",
'High Top Dried Mushrooms'),"
-        + " OR(=($86, 'Q2'),"
-        + " =($86, 'Q3')),"
-        + " =(CAST($29):VARCHAR(2) CHARACTER SET \"ISO-8859-1\" COLLATE \"ISO-8859-1$en_US$primary\",
'WA'))],"
-        + " projects=[[$29, $28, $2]])\n";
+        + " filter=[AND(=(CAST($3):VARCHAR(24) CHARACTER SET \"ISO-8859-1\" COLLATE \"ISO-8859-1$en_US$primary\",
'High Top Dried Mushrooms'),"
+        + " OR(=($87, 'Q2'),"
+        + " =($87, 'Q3')),"
+        + " =(CAST($30):VARCHAR(2) CHARACTER SET \"ISO-8859-1\" COLLATE \"ISO-8859-1$en_US$primary\",
'WA'))],"
+        + " projects=[[$30, $29, $3]])\n";
     sql(sql)
         .queryContains(druidChecker(druidQuery))
         .explainContains(explain)
@@ -844,7 +921,7 @@ public class DruidAdapterIT {
         + "and extract(month from \"timestamp\") in (4, 6)\n";
     final String explain = "EnumerableInterpreter\n"
         + "  BindableAggregate(group=[{}], C=[COUNT()])\n"
-        + "    BindableFilter(condition=[AND(>=(/INT(Reinterpret($91), 86400000), 1997-01-01),
<(/INT(Reinterpret($91), 86400000), 1998-01-01), >=(/INT(Reinterpret($91), 86400000),
1997-04-01), <(/INT(Reinterpret($91), 86400000), 1997-05-01))])\n"
+        + "    BindableFilter(condition=[AND(>=(/INT(Reinterpret($0), 86400000), 1997-01-01),
<(/INT(Reinterpret($0), 86400000), 1998-01-01), >=(/INT(Reinterpret($0), 86400000),
1997-04-01), <(/INT(Reinterpret($0), 86400000), 1997-05-01))])\n"
         + "      DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]])";
     sql(sql)
         .explainContains(explain)

