drill-commits mailing list archives

From s..@apache.org
Subject [1/2] drill git commit: DRILL-4163 Schema changes support in MergeJoin Operator.
Date Mon, 14 Dec 2015 07:27:57 GMT
Repository: drill
Updated Branches:
  refs/heads/master bb3fc1521 -> e529df460


DRILL-4163 Schema changes support in MergeJoin Operator.


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/cc9175c1
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/cc9175c1
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/cc9175c1

Branch: refs/heads/master
Commit: cc9175c13270660ffd9ec2ddcbc70780dd72dada
Parents: bb3fc15
Author: Amit Hadke <amit.hadke@gmail.com>
Authored: Fri Dec 4 16:38:36 2015 -0800
Committer: Steven Phillips <smp@apache.org>
Committed: Sun Dec 13 23:22:55 2015 -0800

----------------------------------------------------------------------
 .../exec/physical/impl/join/JoinTemplate.java   |   2 +-
 .../exec/physical/impl/join/JoinUtils.java      |   3 +
 .../exec/physical/impl/join/MergeJoinBatch.java |  11 +-
 .../drill/exec/record/VectorContainer.java      |   5 +-
 .../join/TestMergeJoinWithSchemaChanges.java    | 348 +++++++++++++++++++
 5 files changed, 360 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/cc9175c1/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java
index 40c47b3..43cbf71 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java
@@ -60,7 +60,7 @@ public abstract class JoinTemplate implements JoinWorker {
       if (status.left.finished()) {
         return true;
       }
-      final int comparison = doCompare(status.left.getCurrentPosition(), status.right.getCurrentPosition());
+      final int comparison = Integer.signum(doCompare(status.left.getCurrentPosition(), status.right.getCurrentPosition()));
       switch (comparison) {
         case -1:
           // left key < right key
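
The switch that follows this change matches only the literal values -1, 0, and 1, while a compare routine in general only promises a negative, zero, or positive result, so wrapping doCompare() in Integer.signum() keeps every outcome inside one of the three cases. A standalone sketch of the failure mode this guards against (illustrative only, not Drill code):

    // Why the comparator result is normalized before the switch: a raw compare value such
    // as 4183 would match none of the -1/0/1 cases, while Integer.signum(4183) matches case 1.
    public class SignumSwitchDemo {
      public static void main(String[] args) {
        final int raw = 4200 - 17;  // subtraction-style compare result; any positive value is legal
        switch (raw) {
          case 1:  System.out.println("matched without signum"); break;
          default: System.out.println("raw result " + raw + " matches no case");
        }
        switch (Integer.signum(raw)) {
          case -1: System.out.println("left key < right key"); break;
          case 0:  System.out.println("keys are equal");       break;
          case 1:  System.out.println("left key > right key"); break;
        }
      }
    }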

http://git-wip-us.apache.org/repos/asf/drill/blob/cc9175c1/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinUtils.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinUtils.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinUtils.java
index 2476a83..61640bc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinUtils.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinUtils.java
@@ -174,6 +174,9 @@ public class JoinUtils {
       TypeProtos.MinorType rightType = rightExpression.getMajorType().getMinorType();
       TypeProtos.MinorType leftType = leftExpression.getMajorType().getMinorType();
 
+      if (rightType == TypeProtos.MinorType.UNION || leftType == TypeProtos.MinorType.UNION) {
+        continue;
+      }
       if (rightType != leftType) {
 
         // currently we only support implicit casts if the input types are numeric or varchar/varbinary
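
With the exec.enable_union_type option set, a column whose type differs across files reaches the join as a UNION vector, so there is no single implicit cast to insert between the left and right keys; the added guard skips the cast check for such columns and leaves type resolution to run time. A compilable stand-in for that guard (MinorKind below is a hypothetical substitute for Drill's TypeProtos.MinorType so the snippet is self-contained):

    public class UnionCastGuardSketch {
      // Hypothetical stand-in for org.apache.drill.common.types.TypeProtos.MinorType.
      enum MinorKind { INT, FLOAT8, VARCHAR, UNION }

      // Skip the implicit-cast check when either join key is already a union:
      // the concrete type of each value is only known at run time.
      static boolean skipImplicitCastCheck(MinorKind leftType, MinorKind rightType) {
        return leftType == MinorKind.UNION || rightType == MinorKind.UNION;
      }

      public static void main(String[] args) {
        System.out.println(skipImplicitCastCheck(MinorKind.UNION, MinorKind.INT));   // true
        System.out.println(skipImplicitCastCheck(MinorKind.INT, MinorKind.FLOAT8));  // false
      }
    }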

http://git-wip-us.apache.org/repos/asf/drill/blob/cc9175c1/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
index edafbfc..9ef5cde 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
@@ -102,6 +102,7 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
   private JoinWorker worker;
   private boolean areNullsEqual = false; // whether nulls compare equal
 
+
   private static final String LEFT_INPUT = "LEFT INPUT";
   private static final String RIGHT_INPUT = "RIGHT INPUT";
 
@@ -381,13 +382,11 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
   }
 
   private void allocateBatch(boolean newSchema) {
-    // allocate new batch space.
-    container.zeroVectors();
-
     boolean leftAllowed = status.getLeftStatus() != IterOutcome.NONE;
     boolean rightAllowed = status.getRightStatus() != IterOutcome.NONE;
 
     if (newSchema) {
+      container.clear();
       // add fields from both batches
       if (leftAllowed) {
         for (VectorWrapper<?> w : leftIterator) {
@@ -423,6 +422,8 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
           }
         }
       }
+    } else {
+      container.zeroVectors();
     }
     for (VectorWrapper w : container) {
       AllocationHelper.allocateNew(w.getValueVector(), Character.MAX_VALUE);
@@ -477,9 +478,9 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
 
   private LogicalExpression materializeExpression(LogicalExpression expression, IterOutcome lastStatus,
                                                   VectorAccessible input, ErrorCollector collector) throws ClassTransformationException {
-    LogicalExpression materializedExpr = null;
+    LogicalExpression materializedExpr;
     if (lastStatus != IterOutcome.NONE) {
-      materializedExpr = ExpressionTreeMaterializer.materialize(expression, input, collector, context.getFunctionRegistry());
+      materializedExpr = ExpressionTreeMaterializer.materialize(expression, input, collector, context.getFunctionRegistry(), unionTypeEnabled);
     } else {
       materializedExpr = new TypedNullConstant(Types.optional(MinorType.INT));
     }
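
The allocateBatch() change above rebuilds the outgoing container only when a schema change actually arrived: container.clear() drops the old vectors so fields from the new upstream schema can be added, while on an unchanged schema the existing vectors are kept and only their buffers are reset via zeroVectors() before the usual pre-allocation; materializeExpression() additionally forwards the unionTypeEnabled flag to ExpressionTreeMaterializer so join-key expressions honor the same union-type setting. A self-contained toy of the rebuild-vs-reuse pattern (every name below is a hypothetical stand-in, not Drill's API):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class RebuildOrReuseSketch {
      static class ToyVector {
        final String field;
        int allocatedValues;
        ToyVector(String field) { this.field = field; }
        void zero() { allocatedValues = 0; }              // stand-in for zeroVectors()
        void allocateNew(int n) { allocatedValues = n; }  // stand-in for AllocationHelper.allocateNew()
      }

      static void allocateBatch(List<ToyVector> container, List<String> incomingFields, boolean newSchema) {
        if (newSchema) {
          container.clear();                              // schema changed: drop old vectors, rebuild
          for (String f : incomingFields) {
            container.add(new ToyVector(f));
          }
        } else {
          container.forEach(ToyVector::zero);             // same schema: reuse vectors, reset data
        }
        container.forEach(v -> v.allocateNew(Character.MAX_VALUE));
      }

      public static void main(String[] args) {
        List<ToyVector> out = new ArrayList<>();
        allocateBatch(out, Arrays.asList("kl", "vl"), true);          // first batch builds the schema
        allocateBatch(out, Arrays.asList("kl", "vl"), false);         // same schema: vectors reused
        allocateBatch(out, Arrays.asList("kl", "vl", "vl2"), true);   // schema change: rebuilt with 3 fields
        System.out.println(out.size() + " vectors, last field: " + out.get(out.size() - 1).field);
      }
    }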

http://git-wip-us.apache.org/repos/asf/drill/blob/cc9175c1/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
index 815e2d8..ccc05ff 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
@@ -29,7 +29,6 @@ import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos.MajorType;
 import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.ops.OperatorContext;
-import org.apache.drill.exec.physical.impl.sort.RecordBatchData;
 import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
 import org.apache.drill.exec.vector.SchemaChangeCallBack;
 import org.apache.drill.exec.vector.ValueVector;
@@ -282,8 +281,8 @@ public class VectorContainer implements Iterable<VectorWrapper<?>>, VectorAccess
 
     if (fieldIds.length == 1 && clazz != null && !clazz.isAssignableFrom(va.getVectorClass())) {
       throw new IllegalStateException(String.format(
-          "Failure while reading vector.  Expected vector class of %s but was holding vector
class %s.",
-          clazz.getCanonicalName(), va.getVectorClass().getCanonicalName()));
+          "Failure while reading vector.  Expected vector class of %s but was holding vector
class %s, field= %s ",
+          clazz.getCanonicalName(), va.getVectorClass().getCanonicalName(), va.getField()));
     }
 
     return va.getChildWrapper(fieldIds);
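
The only functional change in VectorContainer is the richer failure message: when a schema change leaves a wrapper holding a different vector class than the caller asked for, the exception now also names the offending field. A standalone illustration of the resulting text (the class and field strings below are invented for the example):

    public class VectorClassMismatchMessageDemo {
      public static void main(String[] args) {
        final String expectedClass = "org.apache.drill.exec.vector.NullableBigIntVector";  // example value
        final String actualClass = "org.apache.drill.exec.vector.complex.UnionVector";     // example value
        final String field = "kl";                                                         // example value
        System.out.println(String.format(
            "Failure while reading vector.  Expected vector class of %s but was holding vector class %s, field= %s ",
            expectedClass, actualClass, field));
      }
    }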

http://git-wip-us.apache.org/repos/asf/drill/blob/cc9175c1/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinWithSchemaChanges.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinWithSchemaChanges.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinWithSchemaChanges.java
new file mode 100644
index 0000000..08aae60
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoinWithSchemaChanges.java
@@ -0,0 +1,348 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.physical.impl.join;
+
+import org.apache.drill.BaseTestQuery;
+import org.apache.drill.TestBuilder;
+import org.apache.drill.common.exceptions.DrillRuntimeException;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+
+public class TestMergeJoinWithSchemaChanges extends BaseTestQuery {
+
+  @Test
+  //@Ignore
+  public void testNumericTypes() throws Exception {
+    final File left_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-left"));
+    final File right_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-right"));
+    left_dir.mkdirs();
+    right_dir.mkdirs();
+
+    // First create data for numeric types.
+    // left side int and float vs right side float
+    BufferedWriter writer = new BufferedWriter(new FileWriter(new File(left_dir, "l1.json")));
+    for (int i = 0; i < 5000; ++i) {
+      writer.write(String.format("{ \"kl\" : %d , \"vl\": %d }\n", i, i));
+    }
+    writer.close();
+    writer = new BufferedWriter(new FileWriter(new File(left_dir, "l2.json")));
+    for (int i = 1000; i < 6000; ++i) {
+      writer.write(String.format("{ \"kl\" : %f , \"vl\": %f }\n", (float) i, (float) i));
+    }
+    writer.close();
+
+    // right side is int and float
+    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r1.json")));
+    for (int i = 2000; i < 7000; ++i) {
+      writer.write(String.format("{ \"kr\" : %d , \"vr\": %d }\n", i, i));
+    }
+    writer.close();
+    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r2.json")));
+    for (int i = 3000; i < 8000; ++i) {
+      writer.write(String.format("{ \"kr\" : %f, \"vr\": %f }\n", (float) i, (float) i));
+    }
+    writer.close();
+
+    // INNER JOIN
+    String query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kr",
+      left_dir.toPath().toString(), "inner", right_dir.toPath().toString());
+
+    TestBuilder builder = testBuilder()
+      .sqlQuery(query)
+      .optionSettingQueriesForTestQuery("alter session set `planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = true")
+      .unOrdered()
+      .baselineColumns("kl", "vl", "kr", "vr");
+
+
+    for (long i = 2000; i < 3000; ++i) {
+      builder.baselineValues(i, i, i, i);
+      builder.baselineValues((double)i, (double)i, i, i);
+    }
+    for (long i = 3000; i < 5000; ++i) {
+      builder.baselineValues(i, i, i, i);
+      builder.baselineValues(i, i, (double)i, (double)i);
+      builder.baselineValues((double)i, (double)i, i, i);
+      builder.baselineValues((double)i, (double)i, (double)i, (double)i);
+    }
+    for (long i = 5000; i < 6000; ++i) {
+      builder.baselineValues((double)i, (double)i, i, i);
+      builder.baselineValues((double) i, (double) i, (double) i, (double) i);
+    }
+    builder.go();
+
+    // LEFT JOIN
+    query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kr",
+      left_dir.toPath().toString(), "left", right_dir.toPath().toString());
+
+    builder = testBuilder()
+      .sqlQuery(query)
+      .optionSettingQueriesForTestQuery("alter session set `planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = true")
+      .unOrdered()
+      .baselineColumns("kl", "vl", "kr", "vr");
+
+    for (long i = 0; i < 2000; ++i)   {
+      builder.baselineValues(i, i, null, null);
+    }
+    for (long i = 1000; i < 2000; ++i) {
+      builder.baselineValues((double)i, (double)i, null, null);
+    }
+    for (long i = 2000; i < 3000; ++i) {
+      builder.baselineValues(i, i, i, i);
+      builder.baselineValues((double)i, (double)i, i, i);
+    }
+    for (long i = 3000; i < 5000; ++i) {
+      builder.baselineValues(i, i, i, i);
+      builder.baselineValues(i, i, (double)i, (double)i);
+      builder.baselineValues((double)i, (double)i, i, i);
+      builder.baselineValues((double)i, (double)i, (double)i, (double)i);
+    }
+    for (long i = 5000; i < 6000; ++i) {
+      builder.baselineValues((double) i, (double)i, i, i);
+      builder.baselineValues((double)i, (double)i, (double)i, (double)i);
+    }
+    builder.go();
+  }
+
+  @Test
+  //@Ignore
+  public void testNumericStringTypes() throws Exception {
+    final File left_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-left"));
+    final File right_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-right"));
+    left_dir.mkdirs();
+    right_dir.mkdirs();
+
+    // left side int and strings
+    BufferedWriter writer = new BufferedWriter(new FileWriter(new File(left_dir, "l1.json")));
+    for (int i = 0; i < 5000; ++i) {
+      writer.write(String.format("{ \"kl\" : %d , \"vl\": %d }\n", i, i));
+    }
+    writer.close();
+    writer = new BufferedWriter(new FileWriter(new File(left_dir, "l2.json")));
+    for (int i = 1000; i < 6000; ++i) {
+      writer.write(String.format("{ \"kl\" : \"%s\" , \"vl\": \"%s\" }\n", i, i));
+    }
+    writer.close();
+
+    // right side is float and strings
+    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r1.json")));
+    for (int i = 2000; i < 7000; ++i) {
+      writer.write(String.format("{ \"kr\" : %f , \"vr\": %f }\n", (float)i, (float)i));
+    }
+    writer.close();
+    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r2.json")));
+    for (int i = 3000; i < 8000; ++i) {
+      writer.write(String.format("{ \"kr\" : \"%s\", \"vr\": \"%s\" }\n", i, i));
+    }
+    writer.close();
+
+    // INNER JOIN
+    String query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kr",
+      left_dir.toPath().toString(), "inner", right_dir.toPath().toString());
+
+    TestBuilder builder = testBuilder()
+      .sqlQuery(query)
+      .optionSettingQueriesForTestQuery("alter session set `planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = true")
+      .unOrdered()
+      .baselineColumns("kl", "vl", "kr", "vr");
+
+    for (long i = 2000; i < 5000; ++i) {
+      builder.baselineValues(i, i, (double)i, (double)i);
+    }
+    for (long i = 3000; i < 6000; ++i) {
+      final String d = Long.toString(i);
+      builder.baselineValues(d, d, d, d);
+    }
+    builder.go();
+
+    // RIGHT JOIN
+    query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kr",
+      left_dir.toPath().toString(), "right", right_dir.toPath().toString());
+
+    builder = testBuilder()
+      .sqlQuery(query)
+      .optionSettingQueriesForTestQuery("alter session set `planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = true")
+      .unOrdered()
+      .baselineColumns("kl", "vl", "kr", "vr");
+
+    for (long i = 2000; i < 5000; ++i) {
+      builder.baselineValues(i, i, (double)i, (double)i);
+    }
+    for (long i = 3000; i < 6000; ++i) {
+      final String d = Long.toString(i);
+      builder.baselineValues(d, d, d, d);
+    }
+    for (long i = 5000; i < 7000; ++i) {
+      builder.baselineValues(null, null, (double)i, (double)i);
+    }
+    for (long i = 6000; i < 8000; ++i) {
+      final String d = Long.toString(i);
+      builder.baselineValues(null, null, d, d);
+    }
+    builder.go();
+  }
+
+  @Test
+  //@Ignore
+  public void testMissingAndNewColumns() throws Exception {
+    final File left_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-left"));
+    final File right_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-right"));
+    left_dir.mkdirs();
+    right_dir.mkdirs();
+    System.out.println(left_dir);
+    System.out.println(right_dir);
+
+    // missing column kl
+    BufferedWriter writer = new BufferedWriter(new FileWriter(new File(left_dir, "l1.json")));
+    for (int i = 0; i < 50; ++i) {
+      writer.write(String.format("{ \"kl1\" : %d , \"vl1\": %d }\n", i, i));
+    }
+    writer.close();
+
+    writer = new BufferedWriter(new FileWriter(new File(left_dir, "l2.json")));
+    for (int i = 50; i < 100; ++i) {
+      writer.write(String.format("{ \"kl\" : %d , \"vl\": %d }\n", i, i));
+    }
+    writer.close();
+
+    writer = new BufferedWriter(new FileWriter(new File(left_dir, "l3.json")));
+    for (int i = 100; i < 150; ++i) {
+      writer.write(String.format("{ \"kl2\" : %d , \"vl2\": %d }\n", i, i));
+    }
+    writer.close();
+
+    // right missing column kr
+    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r1.json")));
+    for (int i = 0; i < 50; ++i) {
+      writer.write(String.format("{ \"kr1\" : %f , \"vr1\": %f }\n", (float)i, (float)i));
+    }
+    writer.close();
+
+    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r2.json")));
+    for (int i = 50; i < 100; ++i) {
+      writer.write(String.format("{ \"kr\" : %f , \"vr\": %f }\n", (float)i, (float)i));
+    }
+    writer.close();
+
+    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r3.json")));
+    for (int i = 100; i < 150; ++i) {
+      writer.write(String.format("{ \"kr2\" : %f , \"vr2\": %f }\n", (float)i, (float)i));
+    }
+    writer.close();
+
+    // INNER JOIN
+    String query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kr",
+      left_dir.toPath().toString(), "inner", right_dir.toPath().toString());
+
+    TestBuilder builder = testBuilder()
+      .sqlQuery(query)
+      .optionSettingQueriesForTestQuery("alter session set `planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = true")
+      .unOrdered()
+      .baselineColumns("kl", "vl", "kr", "vr", "kl1", "vl1", "kl2", "vl2", "kr1", "vr1", "kr2", "vr2");
+
+    for (long i = 50; i < 100; ++i) {
+      builder.baselineValues(i, i, (double)i, (double)i, null, null, null, null, null, null, null, null);
+    }
+    builder.go();
+
+    // LEFT JOIN
+    query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kr",
+      left_dir.toPath().toString(), "left", right_dir.toPath().toString());
+
+    builder = testBuilder()
+      .sqlQuery(query)
+      .optionSettingQueriesForTestQuery("alter session set `planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = true")
+      .unOrdered()
+      .baselineColumns("kl", "vl", "kr", "vr", "kl1", "vl1", "kl2", "vl2", "kr1", "vr1", "kr2", "vr2");
+
+    for (long i = 0; i < 50; ++i) {
+      builder.baselineValues(null, null, null, null, i, i, null, null, null, null, null, null);
+    }
+    for (long i = 50; i < 100; ++i) {
+      builder.baselineValues(i, i, (double)i, (double)i, null, null, null, null, null, null, null, null);
+    }
+    for (long i = 100; i < 150; ++i) {
+      builder.baselineValues(null, null, null, null, null, null, i, i, null, null, null, null);
+    }
+    builder.go();
+
+    // RIGHT JOIN
+    query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kr",
+      left_dir.toPath().toString(), "right", right_dir.toPath().toString());
+
+    builder = testBuilder()
+      .sqlQuery(query)
+      .optionSettingQueriesForTestQuery("alter session set `planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = true")
+      .unOrdered()
+      .baselineColumns("kl", "vl", "kr", "vr", "kl1", "vl1", "kl2", "vl2", "kr1", "vr1", "kr2", "vr2");
+
+    for (long i = 0; i < 50; ++i) {
+      builder.baselineValues(null, null, null, null, null, null, null, null, (double)i, (double)i, null, null);
+    }
+    for (long i = 50; i < 100; ++i) {
+      builder.baselineValues(i, i, (double)i, (double)i, null, null, null, null, null, null, null, null);
+    }
+    for (long i = 100; i < 150; ++i) {
+      builder.baselineValues(null, null, null, null, null, null, null, null, null, null, (double)i, (double)i);
+    }
+    builder.go();
+  }
+
+  @Test
+  //@Ignore
+  public void testOneSideSchemaChanges() throws Exception {
+    final File left_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-left"));
+    final File right_dir = new File(BaseTestQuery.getTempDir("mergejoin-schemachanges-right"));
+    left_dir.mkdirs();
+    right_dir.mkdirs();
+    System.out.println(left_dir);
+    System.out.println(right_dir);
+
+    BufferedWriter writer = new BufferedWriter(new FileWriter(new File(left_dir, "l1.json")));
+    for (int i = 0; i < 50; ++i) {
+      writer.write(String.format("{ \"kl\" : %d , \"vl\": %d }\n", i, i));
+    }
+    for (int i = 50; i < 100; ++i) {
+      writer.write(String.format("{ \"kl\" : %f , \"vl\": %f }\n", (float) i, (float) i));
+    }
+    writer.close();
+
+    writer = new BufferedWriter(new FileWriter(new File(right_dir, "r1.json")));
+    for (int i = 0; i < 50; ++i) {
+      writer.write(String.format("{ \"kl\" : %d , \"vl\": %d }\n", i, i));
+    }
+    writer.close();
+
+    String query = String.format("select * from dfs_test.`%s` L %s join dfs_test.`%s` R on L.kl=R.kl",
+      left_dir.toPath().toString(), "inner", right_dir.toPath().toString());
+    TestBuilder builder = testBuilder()
+      .sqlQuery(query)
+      .optionSettingQueriesForTestQuery("alter session set `planner.enable_hashjoin` = false; alter session set `exec.enable_union_type` = true")
+      .unOrdered()
+      .baselineColumns("kl", "vl", "kl0", "vl0");
+
+    for (long i = 0; i < 50; ++i) {
+      builder.baselineValues(i, i, i, i);
+    }
+    builder.go();
+  }
+}

