drill-commits mailing list archives

From s..@apache.org
Subject [5/6] drill git commit: DRILL-3229: Miscellaneous Union-type fixes
Date Tue, 03 Nov 2015 08:44:38 GMT
http://git-wip-us.apache.org/repos/asf/drill/blob/bb69f220/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/FieldIdUtil.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/FieldIdUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/FieldIdUtil.java
new file mode 100644
index 0000000..468a7f3
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/FieldIdUtil.java
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex;
+
+import org.apache.drill.common.expression.PathSegment;
+import org.apache.drill.common.types.TypeProtos.DataMode;
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.exec.record.TypedFieldId;
+import org.apache.drill.exec.vector.ValueVector;
+
+public class FieldIdUtil {
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FieldIdUtil.class);
+  public static TypedFieldId getFieldIdIfMatches(ValueVector vector, TypedFieldId.Builder builder, boolean addToBreadCrumb, PathSegment seg) {
+    if (seg == null) {
+      if (addToBreadCrumb) {
+        builder.intermediateType(vector.getField().getType());
+      }
+      return builder.finalType(vector.getField().getType()).build();
+    }
+
+    if (seg.isArray()) {
+      if (seg.isLastPath()) {
+        MajorType type;
+        if (vector instanceof AbstractContainerVector) {
+          type = ((AbstractContainerVector) vector).getLastPathType();
+        } else if (vector instanceof ListVector) {
+          type = ((ListVector) vector).getDataVector().getField().getType();
+          builder.listVector();
+        } else {
+          throw new UnsupportedOperationException("FieldIdUtil does not support vector of type " + vector.getField().getType());
+        }
+        builder //
+                .withIndex() //
+                .finalType(type);
+
+        // remainder starts with the 1st array segment in SchemaPath.
+        // only set remainder when it's the only array segment.
+        if (addToBreadCrumb) {
+          addToBreadCrumb = false;
+          builder.remainder(seg);
+        }
+        return builder.build();
+      } else {
+        if (addToBreadCrumb) {
+          addToBreadCrumb = false;
+          builder.remainder(seg);
+        }
+      }
+    } else {
+      if (vector instanceof ListVector) {
+        return null;
+      }
+    }
+
+    ValueVector v;
+    if (vector instanceof AbstractContainerVector) {
+      VectorWithOrdinal vord = ((AbstractContainerVector) vector).getChildVectorWithOrdinal(seg.isArray() ? null : seg.getNameSegment().getPath());
+      if (vord == null) {
+        return null;
+      }
+      v = vord.vector;
+      if (addToBreadCrumb) {
+        builder.intermediateType(v.getField().getType());
+        builder.addId(vord.ordinal);
+      }
+    } else if (vector instanceof ListVector) {
+      v = ((ListVector) vector).getDataVector();
+    } else {
+      throw new UnsupportedOperationException("FieldIdUtil does not support vector of type " + vector.getField().getType());
+    }
+
+    if (v instanceof AbstractContainerVector) {
+      // we're looking for a multi path.
+      AbstractContainerVector c = (AbstractContainerVector) v;
+      return c.getFieldIdIfMatches(builder, addToBreadCrumb, seg.getChild());
+    } else if(v instanceof ListVector) {
+      ListVector list = (ListVector) v;
+      return list.getFieldIdIfMatches(builder, addToBreadCrumb, seg.getChild());
+    } else if (v instanceof  UnionVector) {
+      return ((UnionVector) v).getFieldIdIfMatches(builder, addToBreadCrumb, seg.getChild());
+    } else {
+      if (seg.isNamed()) {
+        if(addToBreadCrumb) {
+          builder.intermediateType(v.getField().getType());
+        }
+        builder.finalType(v.getField().getType());
+      } else {
+        builder.finalType(v.getField().getType().toBuilder().setMode(DataMode.OPTIONAL).build());
+      }
+
+      if (seg.isLastPath()) {
+        return builder.build();
+      } else {
+        PathSegment child = seg.getChild();
+        if (child.isLastPath() && child.isArray()) {
+          if (addToBreadCrumb) {
+            builder.remainder(child);
+          }
+          builder.withIndex();
+          builder.finalType(v.getField().getType().toBuilder().setMode(DataMode.OPTIONAL).build());
+          return builder.build();
+        } else {
+          logger.warn("You tried to request a complex type inside a scalar object or path or type is wrong.");
+          return null;
+        }
+      }
+    }
+  }
+}
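
The new FieldIdUtil centralizes the PathSegment-to-TypedFieldId resolution that was previously duplicated in the container vectors, so list and union vectors can share a single implementation (ListVector delegates to it further down in this commit). A minimal caller-side sketch, not part of the commit: it assumes TypedFieldId.newBuilder(), SchemaPath#getRootSegment() and the vector's ordinal in its container, all taken from the surrounding Drill code base.

import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.record.TypedFieldId;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.drill.exec.vector.complex.FieldIdUtil;

public final class FieldIdLookupSketch {
  // Resolve a possibly nested path (e.g. field2[4][1].inner7) against a top-level vector.
  // Returns null when the path does not match the vector's layout.
  public static TypedFieldId resolve(ValueVector vector, SchemaPath path, int ordinal) {
    TypedFieldId.Builder builder = TypedFieldId.newBuilder();
    builder.intermediateType(vector.getField().getType());  // record the root type
    builder.addId(ordinal);                                  // position of the vector in its container
    // Hand the remaining segments to the shared helper added above.
    return FieldIdUtil.getFieldIdIfMatches(vector, builder, true, path.getRootSegment().getChild());
  }
}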

http://git-wip-us.apache.org/repos/asf/drill/blob/bb69f220/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/ListVector.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/ListVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/ListVector.java
index a44efe3..e5d9443 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/ListVector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/ListVector.java
@@ -34,15 +34,14 @@ import org.apache.drill.exec.record.TypedFieldId;
 import org.apache.drill.exec.util.CallBack;
 import org.apache.drill.exec.util.JsonStringArrayList;
 import org.apache.drill.exec.vector.AddOrGetResult;
-import org.apache.drill.exec.vector.BaseValueVector;
 import org.apache.drill.exec.vector.UInt1Vector;
 import org.apache.drill.exec.vector.UInt4Vector;
 import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.VectorDescriptor;
+import org.apache.drill.exec.vector.ZeroVector;
 import org.apache.drill.exec.vector.complex.impl.ComplexCopier;
 import org.apache.drill.exec.vector.complex.impl.UnionListReader;
 import org.apache.drill.exec.vector.complex.impl.UnionListWriter;
-import org.apache.drill.exec.vector.complex.impl.UnionVector;
 import org.apache.drill.exec.vector.complex.reader.FieldReader;
 import org.apache.drill.exec.vector.complex.writer.FieldWriter;
 
@@ -80,16 +79,22 @@ public class ListVector extends BaseRepeatedValueVector {
   public void transferTo(ListVector target) {
     offsets.makeTransferPair(target.offsets).transfer();
     bits.makeTransferPair(target.bits).transfer();
+    if (target.getDataVector() instanceof ZeroVector) {
+      target.addOrGetVector(new VectorDescriptor(vector.getField().getType()));
+    }
     getDataVector().makeTransferPair(target.getDataVector()).transfer();
   }
 
+  public void copyFromSafe(int inIndex, int outIndex, ListVector from) {
+    copyFrom(inIndex, outIndex, from);
+  }
+
   public void copyFrom(int inIndex, int outIndex, ListVector from) {
     FieldReader in = from.getReader();
     in.setPosition(inIndex);
     FieldWriter out = getWriter();
     out.setPosition(outIndex);
-    ComplexCopier copier = new ComplexCopier(in, out);
-    copier.write();
+    ComplexCopier.copy(in, out);
   }
 
   @Override
@@ -128,7 +133,10 @@ public class ListVector extends BaseRepeatedValueVector {
 
     @Override
     public void splitAndTransfer(int startIndex, int length) {
-
+      to.allocateNew();
+      for (int i = 0; i < length; i++) {
+        copyValueSafe(startIndex + i, i);
+      }
     }
 
     @Override
@@ -176,8 +184,10 @@ public class ListVector extends BaseRepeatedValueVector {
         clear();
       }
     }
-    offsets.zeroVector();
-    bits.zeroVector();
+    if (success) {
+      offsets.zeroVector();
+      bits.zeroVector();
+    }
     return success;
   }
 
@@ -254,72 +264,7 @@ public class ListVector extends BaseRepeatedValueVector {
   }
 
   public TypedFieldId getFieldIdIfMatches(TypedFieldId.Builder builder, boolean addToBreadCrumb, PathSegment seg) {
-    if (seg == null) {
-      if (addToBreadCrumb) {
-        builder.intermediateType(this.getField().getType());
-      }
-      return builder.finalType(this.getField().getType()).build();
-    }
-
-    if (seg.isArray()) {
-      if (seg.isLastPath()) {
-        builder //
-                .withIndex() //
-                .listVector()
-                .finalType(getDataVector().getField().getType());
-
-        // remainder starts with the 1st array segment in SchemaPath.
-        // only set remainder when it's the only array segment.
-        if (addToBreadCrumb) {
-          addToBreadCrumb = false;
-          builder.remainder(seg);
-        }
-        return builder.build();
-      } else {
-        if (addToBreadCrumb) {
-          addToBreadCrumb = false;
-          builder.remainder(seg);
-        }
-      }
-    } else {
-      return null;
-    }
-
-    ValueVector v = getDataVector();
-    if (v instanceof AbstractContainerVector) {
-      // we're looking for a multi path.
-      AbstractContainerVector c = (AbstractContainerVector) v;
-      return c.getFieldIdIfMatches(builder, addToBreadCrumb, seg.getChild());
-    } else if (v instanceof ListVector) {
-      ListVector list = (ListVector) v;
-      return list.getFieldIdIfMatches(builder, addToBreadCrumb, seg.getChild());
-    } else {
-      if (seg.isNamed()) {
-        if(addToBreadCrumb) {
-          builder.intermediateType(v.getField().getType());
-        }
-        builder.finalType(v.getField().getType());
-      } else {
-        builder.finalType(v.getField().getType().toBuilder().setMode(DataMode.OPTIONAL).build());
-      }
-
-      if (seg.isLastPath()) {
-        return builder.build();
-      } else {
-        PathSegment child = seg.getChild();
-        if (child.isLastPath() && child.isArray()) {
-          if (addToBreadCrumb) {
-            builder.remainder(child);
-          }
-          builder.withIndex();
-          builder.finalType(v.getField().getType().toBuilder().setMode(DataMode.OPTIONAL).build());
-          return builder.build();
-        } else {
-//          logger.warn("You tried to request a complex type inside a scalar object or path or type is wrong.");
-          return null;
-        }
-      }
-    }
+    return FieldIdUtil.getFieldIdIfMatches(this, builder, addToBreadCrumb, seg);
   }
 
   private int lastSet;
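
With the instance-based copier gone, copying a complex value is now a single call to the static ComplexCopier.copy(FieldReader, FieldWriter) used in copyFrom above, and transferTo lazily materializes the target's data vector when it is still a ZeroVector. A small sketch of the same copy pattern from outside ListVector, assuming getReader()/getWriter() are accessible as they are inside copyFrom:

import org.apache.drill.exec.vector.complex.ListVector;
import org.apache.drill.exec.vector.complex.impl.ComplexCopier;
import org.apache.drill.exec.vector.complex.reader.FieldReader;
import org.apache.drill.exec.vector.complex.writer.FieldWriter;

public final class ListCopySketch {
  // Copy the list value at inIndex of `from` into position outIndex of `to`.
  public static void copyOne(ListVector from, int inIndex, ListVector to, int outIndex) {
    FieldReader in = from.getReader();
    in.setPosition(inIndex);
    FieldWriter out = to.getWriter();   // assumes the list writer implements FieldWriter, as in copyFrom
    out.setPosition(outIndex);
    ComplexCopier.copy(in, out);        // replaces `new ComplexCopier(in, out).write()`
  }
}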

http://git-wip-us.apache.org/repos/asf/drill/blob/bb69f220/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java
index 6ff7d90..fe43f82 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java
@@ -27,6 +27,8 @@ import org.apache.drill.exec.vector.complex.reader.FieldReader;
 import com.fasterxml.jackson.core.JsonFactory;
 import com.fasterxml.jackson.core.JsonGenerationException;
 import com.fasterxml.jackson.core.JsonGenerator;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.ComplexWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
 
 public class JsonWriter {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JsonWriter.class);

http://git-wip-us.apache.org/repos/asf/drill/blob/bb69f220/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/AbstractBaseReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/AbstractBaseReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/AbstractBaseReader.java
index 7624b1d..05fe63f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/AbstractBaseReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/AbstractBaseReader.java
@@ -93,7 +93,6 @@ abstract class AbstractBaseReader implements FieldReader{
 
   @Override
   public void copyAsValue(ListWriter writer) {
-    ComplexCopier copier = new ComplexCopier(this, (FieldWriter)writer);
-    copier.write();
+    ComplexCopier.copy(this, (FieldWriter)writer);
   }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/bb69f220/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/ComplexWriterImpl.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/ComplexWriterImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/ComplexWriterImpl.java
index 23bcfcb..5575b5e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/ComplexWriterImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/ComplexWriterImpl.java
@@ -126,7 +126,7 @@ public class ComplexWriterImpl extends AbstractFieldWriter implements ComplexWri
 
     case INIT:
       MapVector map = (MapVector) container;
-      mapRoot = new SingleMapWriter(map, this, unionEnabled, false);
+      mapRoot = new SingleMapWriter(map, this, unionEnabled);
       mapRoot.setPosition(idx());
       mode = Mode.MAP;
       break;
@@ -147,7 +147,7 @@ public class ComplexWriterImpl extends AbstractFieldWriter implements ComplexWri
 
     case INIT:
       MapVector map = container.addOrGet(name, Types.required(MinorType.MAP), MapVector.class);
-      mapRoot = new SingleMapWriter(map, this, unionEnabled, false);
+      mapRoot = new SingleMapWriter(map, this, unionEnabled);
       mapRoot.setPosition(idx());
       mode = Mode.MAP;
       break;

http://git-wip-us.apache.org/repos/asf/drill/blob/bb69f220/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/PromotableWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/PromotableWriter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/PromotableWriter.java
index d3459c3..93ce526 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/PromotableWriter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/PromotableWriter.java
@@ -28,6 +28,7 @@ import org.apache.drill.exec.vector.VectorDescriptor;
 import org.apache.drill.exec.vector.ZeroVector;
 import org.apache.drill.exec.vector.complex.AbstractMapVector;
 import org.apache.drill.exec.vector.complex.ListVector;
+import org.apache.drill.exec.vector.complex.UnionVector;
 import org.apache.drill.exec.vector.complex.writer.FieldWriter;
 
 import java.lang.reflect.Constructor;
@@ -92,7 +93,7 @@ public class PromotableWriter extends AbstractPromotableFieldWriter {
     try {
       Constructor constructor = null;
       for (Constructor c : writerClass.getConstructors()) {
-        if (c.getParameterTypes().length == 4) {
+        if (c.getParameterTypes().length == 3) {
           constructor = c;
         }
       }
@@ -100,7 +101,7 @@ public class PromotableWriter extends AbstractPromotableFieldWriter {
         constructor = writerClass.getConstructor(vectorClass, AbstractFieldWriter.class);
         writer = (FieldWriter) constructor.newInstance(vector, null);
       } else {
-        writer = (FieldWriter) constructor.newInstance(vector, null, true, false);
+        writer = (FieldWriter) constructor.newInstance(vector, null, true);
       }
     } catch (ReflectiveOperationException e) {
       throw new RuntimeException(e);
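
The reflective lookup now selects the three-argument writer constructors because the trailing boolean was dropped from the generated map writers elsewhere in this commit (see ComplexWriterImpl and VectorContainerWriter). A minimal sketch of the new signature as this patch uses it; the parameter types are assumed from the calls in the diff, and the sketch is placed in the impl package only so the (possibly package-private) AbstractFieldWriter type is visible.

package org.apache.drill.exec.vector.complex.impl;

import org.apache.drill.exec.vector.complex.MapVector;

final class MapWriterConstructionSketch {
  // (container, parent, unionEnabled) — the former fourth boolean argument is gone,
  // which is why the loop above now matches constructors with exactly three parameters.
  static SingleMapWriter newRootWriter(MapVector map, AbstractFieldWriter parent, boolean unionEnabled) {
    SingleMapWriter writer = new SingleMapWriter(map, parent, unionEnabled);
    writer.setPosition(0);
    return writer;
  }
}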

http://git-wip-us.apache.org/repos/asf/drill/blob/bb69f220/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/UnionListReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/UnionListReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/UnionListReader.java
index db2d8e4..2d351f2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/UnionListReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/UnionListReader.java
@@ -95,7 +95,6 @@ public class UnionListReader extends AbstractFieldReader {
   }
 
   public void copyAsValue(ListWriter writer) {
-    ComplexCopier copier = new ComplexCopier(this, (FieldWriter) writer);
-    copier.write();
+    ComplexCopier.copy(this, (FieldWriter) writer);
   }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/bb69f220/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/VectorContainerWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/VectorContainerWriter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/VectorContainerWriter.java
index 6bc2e05..b19b029 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/VectorContainerWriter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/VectorContainerWriter.java
@@ -37,7 +37,7 @@ public class VectorContainerWriter extends AbstractFieldWriter implements Comple
     super(null);
     this.mutator = mutator;
     mapVector = new SpecialMapVector(mutator.getCallBack());
-    mapRoot = new SingleMapWriter(mapVector, this, unionEnabled, false);
+    mapRoot = new SingleMapWriter(mapVector, this, unionEnabled);
   }
 
   public VectorContainerWriter(OutputMutator mutator) {

http://git-wip-us.apache.org/repos/asf/drill/blob/bb69f220/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java b/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
index 05bd138..55440f7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
@@ -34,15 +34,6 @@ import org.junit.Test;
 public class TestExampleQueries extends BaseTestQuery {
 //  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestExampleQueries.class);
 
-  @After
-  public void reset() throws Exception {
-    String[] options = new String[] { ExecConstants.SLICE_TARGET, PlannerSettings.HASHJOIN.getOptionName(), PlannerSettings.HASHAGG.getOptionName(),
-            PlannerSettings.STREAMAGG.getOptionName(), PlannerSettings.MERGEJOIN.getOptionName(), PlannerSettings.JOIN_ROW_COUNT_ESTIMATE_FACTOR.getOptionName() };
-    for (String option : options) {
-      testNoResult(String.format("reset `%s`", option));
-    }
-  }
-
   @Test // see DRILL-2328
   public void testConcatOnNull() throws Exception {
     try {

http://git-wip-us.apache.org/repos/asf/drill/blob/bb69f220/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
index 4069735..7c4ac1e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
@@ -678,6 +678,7 @@ public class TestParquetWriter extends BaseTestQuery {
   }
 
   @Test
+  @Ignore
   public void testSchemaChange() throws Exception {
     File dir = new File("target/" + this.getClass());
     if ((!dir.exists() && !dir.mkdirs()) || (dir.exists() && !dir.isDirectory())) {

http://git-wip-us.apache.org/repos/asf/drill/blob/bb69f220/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
index d549a19..1f5c2db 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 
+import java.io.BufferedOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
@@ -31,6 +32,7 @@ import java.io.PrintWriter;
 import java.util.List;
 import java.util.zip.GZIPOutputStream;
 
+import com.google.common.base.Joiner;
 import org.apache.drill.BaseTestQuery;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.util.FileUtils;
@@ -433,14 +435,16 @@ public class TestJsonReader extends BaseTestQuery {
 
   @Test
   public void testSelectFromListWithCase() throws Exception {
-    String query = "select a from (select case when typeOf(field2) = type('list') then asBigInt(field2[4][1].inner7) end a from cp.`jsoninput/union/a.json`) where a is not null";
+    String query = "select a, typeOf(a) `type` from " +
+            "(select case when is_list(field2) then field2[4][1].inner7 end a " +
+            "from cp.`jsoninput/union/a.json`) where a is not null";
     try {
       testBuilder()
               .sqlQuery(query)
               .ordered()
               .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
-              .baselineColumns("a")
-              .baselineValues(13L)
+              .baselineColumns("a", "type")
+              .baselineValues(13L, "BIGINT")
               .go();
     } finally {
       testNoResult("alter session set `exec.enable_union_type` = false");
@@ -449,7 +453,9 @@ public class TestJsonReader extends BaseTestQuery {
 
   @Test
   public void testTypeCase() throws Exception {
-    String query = "select case typeOf(field1) when type('bigint') then asBigInt(field1) when type('list') then asBigInt(field1[0]) when type('map') then asBigInt(t.field1.inner1) end f1 from cp.`jsoninput/union/a.json` t";
+    String query = "select case when is_bigint(field1) " +
+            "then field1 when is_list(field1) then field1[0] " +
+            "when is_map(field1) then t.field1.inner1 end f1 from cp.`jsoninput/union/a.json` t";
     try {
       testBuilder()
               .sqlQuery(query)
@@ -468,7 +474,10 @@ public class TestJsonReader extends BaseTestQuery {
 
   @Test
   public void testSumWithTypeCase() throws Exception {
-    String query = "select sum(f1) sum_f1 from (select case typeOf(field1) when type('bigint') then asBigInt(field1) when type('list') then asBigInt(field1[0]) when type('map') then asBigInt(t.field1.inner1) end f1 from cp.`jsoninput/union/a.json` t)";
+    String query = "select sum(cast(f1 as bigint)) sum_f1 from " +
+            "(select case when is_bigint(field1) then field1 " +
+            "when is_list(field1) then field1[0] when is_map(field1) then t.field1.inner1 end f1 " +
+            "from cp.`jsoninput/union/a.json` t)";
     try {
       testBuilder()
               .sqlQuery(query)
@@ -500,4 +509,62 @@ public class TestJsonReader extends BaseTestQuery {
     }
   }
 
+  @Test
+  public void testSumMultipleBatches() throws Exception {
+    String dfs_temp = getDfsTestTmpSchemaLocation();
+    System.out.println(dfs_temp);
+    File table_dir = new File(dfs_temp, "multi_batch");
+    table_dir.mkdir();
+    BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "a.json")));
+    for (int i = 0; i < 10000; i++) {
+      os.write("{ type : \"map\", data : { a : 1 } }\n".getBytes());
+      os.write("{ type : \"bigint\", data : 1 }\n".getBytes());
+    }
+    os.flush();
+    os.close();
+    String query = "select sum(cast(case when `type` = 'map' then t.data.a else data end as bigint)) `sum` from dfs_test.tmp.multi_batch t";
+    try {
+      testBuilder()
+              .sqlQuery(query)
+              .ordered()
+              .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+              .baselineColumns("sum")
+              .baselineValues(20000L)
+              .go();
+    } finally {
+      testNoResult("alter session set `exec.enable_union_type` = false");
+    }
+  }
+
+  @Test
+  public void testSumFilesWithDifferentSchema() throws Exception {
+    String dfs_temp = getDfsTestTmpSchemaLocation();
+    System.out.println(dfs_temp);
+    File table_dir = new File(dfs_temp, "multi_file");
+    table_dir.mkdir();
+    BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "a.json")));
+    for (int i = 0; i < 10000; i++) {
+      os.write("{ type : \"map\", data : { a : 1 } }\n".getBytes());
+    }
+    os.flush();
+    os.close();
+    os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "b.json")));
+    for (int i = 0; i < 10000; i++) {
+      os.write("{ type : \"bigint\", data : 1 }\n".getBytes());
+    }
+    os.flush();
+    os.close();
+    String query = "select sum(cast(case when `type` = 'map' then t.data.a else data end as bigint)) `sum` from dfs_test.tmp.multi_file t";
+    try {
+      testBuilder()
+              .sqlQuery(query)
+              .ordered()
+              .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
+              .baselineColumns("sum")
+              .baselineValues(20000L)
+              .go();
+    } finally {
+      testNoResult("alter session set `exec.enable_union_type` = false");
+    }
+  }
 }
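
The rewritten tests replace the typeOf(field) = type('...') comparisons and the asBigInt() coercion with the is_bigint()/is_list()/is_map() type-test functions plus an ordinary CAST. A sketch of the same pattern as a standalone smoke test in the context of TestJsonReader (hypothetical, not part of the commit; it only runs the query through BaseTestQuery.test() and checks nothing beyond successful execution):

  @Test // hypothetical smoke test: exercises the is_*() + CAST pattern used above
  public void testUnionTypeCasePatternSmoke() throws Exception {
    try {
      test("alter session set `exec.enable_union_type` = true");
      test("select cast(case when is_bigint(field1) then field1 " +
           "when is_list(field1) then field1[0] " +
           "when is_map(field1) then t.field1.inner1 end as bigint) f1 " +
           "from cp.`jsoninput/union/a.json` t");
    } finally {
      testNoResult("alter session set `exec.enable_union_type` = false");
    }
  }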

