drill-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From par...@apache.org
Subject drill git commit: DRILL-3745: Hive CHAR not supported
Date Mon, 14 Mar 2016 22:01:07 GMT
Repository: drill
Updated Branches:
  refs/heads/master f7197596d -> dd4f03be9


DRILL-3745: Hive CHAR not supported


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/dd4f03be
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/dd4f03be
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/dd4f03be

Branch: refs/heads/master
Commit: dd4f03be93c7c804954b2f027f6a9071d5291b38
Parents: f719759
Author: Arina Ielchiieva <arina.yelchiyeva@gmail.com>
Authored: Fri Feb 19 17:03:52 2016 +0000
Committer: Parth Chandra <parthc@apache.org>
Committed: Mon Mar 14 14:08:22 2016 -0700

----------------------------------------------------------------------
 .../core/src/main/codegen/data/HiveTypes.tdd    | 48 ++++++++++++++------
 .../templates/ObjectInspectorHelper.java        | 17 ++++++-
 .../codegen/templates/ObjectInspectors.java     |  3 +-
 ...onvertHiveParquetScanToDrillParquetScan.java |  5 ++
 .../exec/store/hive/HiveFieldConverter.java     | 16 +++++--
 .../drill/exec/store/hive/HiveUtilities.java    |  7 +--
 .../exec/store/hive/schema/DrillHiveTable.java  | 11 ++++-
 .../drill/exec/fn/hive/HiveTestUDFImpls.java    | 18 +++++++-
 .../drill/exec/fn/hive/TestSampleHiveUDFs.java  | 11 +++++
 .../apache/drill/exec/hive/TestHiveStorage.java | 26 +++++++----
 .../exec/hive/TestInfoSchemaOnHiveStorage.java  |  3 +-
 .../exec/store/hive/HiveTestDataGenerator.java  | 36 ++++++++++-----
 12 files changed, 156 insertions(+), 45 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/dd4f03be/contrib/storage-hive/core/src/main/codegen/data/HiveTypes.tdd
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/codegen/data/HiveTypes.tdd b/contrib/storage-hive/core/src/main/codegen/data/HiveTypes.tdd
index 2b4338b..73752a3 100644
--- a/contrib/storage-hive/core/src/main/codegen/data/HiveTypes.tdd
+++ b/contrib/storage-hive/core/src/main/codegen/data/HiveTypes.tdd
@@ -20,79 +20,99 @@
       hiveType: "BOOLEAN",
       hiveOI: "BooleanObjectInspector",
       javaType: "boolean",
-      drillType: "Bit"
+      drillType: "Bit",
+      needOIForDrillType: true
     },
     {
       hiveType: "BYTE",
       hiveOI: "ByteObjectInspector",
       javaType: "byte",
-      drillType: "TinyInt"
+      drillType: "TinyInt",
+      needOIForDrillType: true
     },
     {
       hiveType: "SHORT",
       hiveOI: "ShortObjectInspector",
       javaType: "short",
-      drillType: "SmallInt"
+      drillType: "SmallInt",
+      needOIForDrillType: true
     },
     {
       hiveType: "INT",
       hiveOI: "IntObjectInspector",
       javaType: "int",
-      drillType: "Int"
+      drillType: "Int",
+      needOIForDrillType: true
     },
     {
       hiveType: "LONG",
       hiveOI: "LongObjectInspector",
       javaType: "long",
-      drillType: "BigInt"
+      drillType: "BigInt",
+      needOIForDrillType: true
     },
     {
       hiveType: "FLOAT",
       hiveOI: "FloatObjectInspector",
       javaType: "float",
-      drillType: "Float4"
+      drillType: "Float4",
+      needOIForDrillType: true
     },
     {
       hiveType: "DOUBLE",
       hiveOI: "DoubleObjectInspector",
       javaType: "double",
-      drillType: "Float8"
+      drillType: "Float8",
+      needOIForDrillType: true
     },
     {
       hiveType: "VARCHAR",
       hiveOI: "HiveVarcharObjectInspector",
       javaType: "",
-      drillType: "VarChar"
+      drillType: "VarChar",
+      needOIForDrillType: true
     },
     {
-      hiveType: "STRING",
+      hiveType: "CHAR",
+      hiveOI: "HiveCharObjectInspector",
+      javaType: "",
+      drillType: "VarChar",
+      needOIForDrillType: false
+    },
+    {
+      hiveType: "STRING",
       hiveOI: "StringObjectInspector",
       javaType: "",
-      drillType: "Var16Char"
+      drillType: "Var16Char",
+      needOIForDrillType: true
     },
     {
       hiveType: "BINARY",
       hiveOI: "BinaryObjectInspector",
       javaType: "",
-      drillType: "VarBinary"
+      drillType: "VarBinary",
+      needOIForDrillType: true
     },
     {
       hiveType: "TIMESTAMP",
       hiveOI: "TimestampObjectInspector",
       javaType: "java.sql.Timestamp",
-      drillType: "TimeStamp"
+      drillType: "TimeStamp",
+      needOIForDrillType: true
     },
     {
       hiveType: "DECIMAL",
       hiveOI: "HiveDecimalObjectInspector",
       javaType: "org.apache.hadoop.hive.common.type.HiveDecimal",
-      drillType: "Decimal38Sparse"
+      drillType: "Decimal38Sparse",
+      needOIForDrillType: true
     },
     {
       hiveType: "DATE",
       hiveOI: "DateObjectInspector",
       javaType: "java.sql.Date",
-      drillType: "Date"
+      drillType: "Date",
+      needOIForDrillType: true
     }
   ]
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/dd4f03be/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectorHelper.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectorHelper.java
b/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectorHelper.java
index 961dfbb..da83c40 100644
--- a/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectorHelper.java
+++ b/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectorHelper.java
@@ -48,8 +48,10 @@ public class ObjectInspectorHelper {
   private static Map<MinorType, Class> OIMAP_OPTIONAL = new HashMap<>();
   static {
 <#list drillOI.map as entry>
+    <#if entry.needOIForDrillType == true>
     OIMAP_REQUIRED.put(MinorType.${entry.drillType?upper_case}, Drill${entry.drillType}ObjectInspector.Required.class);
     OIMAP_OPTIONAL.put(MinorType.${entry.drillType?upper_case}, Drill${entry.drillType}ObjectInspector.Optional.class);
+    </#if>
 </#list>
   }
 
@@ -85,7 +87,7 @@ public class ObjectInspectorHelper {
             JType holderClass = TypeHelper.getHolderType(m, returnType, TypeProtos.DataMode.OPTIONAL);
             block.assign(returnValueHolder, JExpr._new(holderClass));
 
-          <#if entry.hiveType == "VARCHAR" || entry.hiveType == "STRING" || entry.hiveType
== "BINARY">
+          <#if entry.hiveType == "VARCHAR" || entry.hiveType == "STRING" || entry.hiveType
== "BINARY" || entry.hiveType == "CHAR">
             block.assign( //
                 returnValueHolder.ref("buffer"), //
                 g
@@ -173,6 +175,19 @@ public class ObjectInspectorHelper {
             jc._else().assign(returnValueHolder.ref("start"), JExpr.lit(0));
             jc._else().assign(returnValueHolder.ref("end"), data.ref("length"));
 
+            <#elseif entry.hiveType == "CHAR">
+                JVar data = jc._else().decl(m.directClass(byte[].class.getCanonicalName()),
"data",
+                castedOI.invoke("getPrimitiveJavaObject").arg(returnValue)
+                    .invoke("getStrippedValue")
+                    .invoke("getBytes"));
+
+            jc._else().add(returnValueHolder.ref("buffer")
+                .invoke("setBytes").arg(JExpr.lit(0)).arg(data));
+
+
+            jc._else().assign(returnValueHolder.ref("start"), JExpr.lit(0));
+            jc._else().assign(returnValueHolder.ref("end"), data.ref("length"));
+
           <#elseif entry.hiveType == "STRING">
             JVar data = jc._else().decl(m.directClass(byte[].class.getCanonicalName()), "data",
               castedOI.invoke("getPrimitiveJavaObject").arg(returnValue)

http://git-wip-us.apache.org/repos/asf/drill/blob/dd4f03be/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectors.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectors.java b/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectors.java
index 10379ff..5c696f9 100644
--- a/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectors.java
+++ b/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectors.java
@@ -18,6 +18,7 @@
 <@pp.dropOutputFile />
 
 <#list drillOI.map as entry>
+<#if entry.needOIForDrillType == true>
 <@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/hive/Drill${entry.drillType}ObjectInspector.java"
/>
 
 <#include "/@includes/license.ftl" />
@@ -305,6 +306,6 @@ public class Drill${entry.drillType}ObjectInspector {
   }
 </#list>
 }
-
+</#if>
 </#list>
 

http://git-wip-us.apache.org/repos/asf/drill/blob/dd4f03be/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java
index f339957..c43664c 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java
@@ -70,6 +70,8 @@ public class ConvertHiveParquetScanToDrillParquetScan extends StoragePluginOptim
   private static final DrillSqlOperator INT96_TO_TIMESTAMP =
       new DrillSqlOperator("convert_fromTIMESTAMP_IMPALA", 1, true);
 
+  private static final DrillSqlOperator RTRIM = new DrillSqlOperator("RTRIM", 1, true);
+
   private ConvertHiveParquetScanToDrillParquetScan() {
     super(RelOptHelper.any(DrillScanRel.class), "ConvertHiveScanToHiveDrillNativeScan:Parquet");
   }
@@ -311,6 +313,9 @@ public class ConvertHiveParquetScanToDrillParquetScan extends StoragePluginOptim
     final RelDataTypeField inputField = nativeScanRel.getRowType().getField(dirColName, false,
false);
     final RexInputRef inputRef =
         rb.makeInputRef(rb.getTypeFactory().createSqlType(SqlTypeName.VARCHAR), inputField.getIndex());
+    if (outputType.getSqlTypeName() == SqlTypeName.CHAR) {
+      return rb.makeCall(RTRIM, inputRef);
+    }
 
     return rb.makeCast(outputType, inputRef);
   }

http://git-wip-us.apache.org/repos/asf/drill/blob/dd4f03be/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveFieldConverter.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveFieldConverter.java
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveFieldConverter.java
index aff9e22..441de21 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveFieldConverter.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveFieldConverter.java
@@ -35,9 +35,7 @@ import org.apache.drill.exec.vector.NullableDecimal9Vector;
 import org.apache.drill.exec.vector.NullableFloat4Vector;
 import org.apache.drill.exec.vector.NullableFloat8Vector;
 import org.apache.drill.exec.vector.NullableIntVector;
-import org.apache.drill.exec.vector.NullableSmallIntVector;
 import org.apache.drill.exec.vector.NullableTimeStampVector;
-import org.apache.drill.exec.vector.NullableTinyIntVector;
 import org.apache.drill.exec.vector.NullableVarBinaryVector;
 import org.apache.drill.exec.vector.NullableVarCharVector;
 import org.apache.drill.exec.vector.ValueVector;
@@ -49,6 +47,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspect
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
@@ -92,6 +91,7 @@ public abstract class HiveFieldConverter {
     primMap.put(PrimitiveCategory.VARCHAR, VarChar.class);
     primMap.put(PrimitiveCategory.TIMESTAMP, Timestamp.class);
     primMap.put(PrimitiveCategory.DATE, Date.class);
+    primMap.put(PrimitiveCategory.CHAR, Char.class);
   }
 
 
@@ -312,4 +312,14 @@ public abstract class HiveFieldConverter {
     }
   }
 
-}
+  public static class Char extends HiveFieldConverter {
+    @Override
+    public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV,
int outputIndex) {
+      final Text value = ((HiveCharObjectInspector)oi).getPrimitiveWritableObject(hiveFieldValue).getStrippedValue();
+      final byte[] valueBytes = value.getBytes();
+      final int valueLen = value.getLength();
+      ((NullableVarCharVector) outputVV).getMutator().setSafe(outputIndex, valueBytes, 0,
valueLen);
+    }
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/drill/blob/dd4f03be/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
index 98f0e58..2e23aff 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
@@ -51,8 +51,6 @@ import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.work.ExecErrorConstants;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -119,6 +117,8 @@ public class HiveUtilities {
         case STRING:
         case VARCHAR:
           return value.getBytes();
+        case CHAR:
+          return value.trim().getBytes();
         case TIMESTAMP:
           return Timestamp.valueOf(value);
         case DATE:
@@ -345,6 +345,7 @@ public class HiveUtilities {
         return TypeProtos.MinorType.BIGINT;
       case STRING:
       case VARCHAR:
+      case CHAR:
         return TypeProtos.MinorType.VARCHAR;
       case TIMESTAMP:
         return TypeProtos.MinorType.TIMESTAMP;
@@ -422,7 +423,7 @@ public class HiveUtilities {
     errMsg.append(System.getProperty("line.separator"));
     errMsg.append("Following Hive data types are supported in Drill for querying: ");
     errMsg.append(
-        "BOOLEAN, TINYINT, SMALLINT, INT, BIGINT, FLOAT, DOUBLE, DATE, TIMESTAMP, BINARY,
DECIMAL, STRING, and VARCHAR");
+        "BOOLEAN, TINYINT, SMALLINT, INT, BIGINT, FLOAT, DOUBLE, DATE, TIMESTAMP, BINARY,
DECIMAL, STRING, VARCHAR and CHAR");
 
     throw UserException.unsupportedError()
         .message(errMsg.toString())

http://git-wip-us.apache.org/repos/asf/drill/blob/dd4f03be/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/DrillHiveTable.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/DrillHiveTable.java
b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/DrillHiveTable.java
index 6583b9a..29f7757 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/DrillHiveTable.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/DrillHiveTable.java
@@ -121,6 +121,15 @@ public class DrillHiveTable extends DrillTable{
         );
       }
 
+      case CHAR: {
+        int maxLen = TypeInfoUtils.getCharacterLengthForType(pTypeInfo);
+        return typeFactory.createTypeWithCharsetAndCollation(
+            typeFactory.createSqlType(SqlTypeName.CHAR, maxLen), /*input type*/
+            Charset.forName("ISO-8859-1"), /*unicode char set*/
+            SqlCollation.IMPLICIT
+        );
+      }
+
       case UNKNOWN:
       case VOID:
       default:
@@ -175,7 +184,7 @@ public class DrillHiveTable extends DrillTable{
     errMsg.append(System.getProperty("line.separator"));
     errMsg.append("Following Hive data types are supported in Drill INFORMATION_SCHEMA: ");
     errMsg.append("BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, DATE, TIMESTAMP, BINARY,
DECIMAL, STRING, " +
-        "VARCHAR, LIST, MAP, STRUCT and UNION");
+        "VARCHAR, CHAR, LIST, MAP, STRUCT and UNION");
 
     throw new RuntimeException(errMsg.toString());
   }

http://git-wip-us.apache.org/repos/asf/drill/blob/dd4f03be/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/HiveTestUDFImpls.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/HiveTestUDFImpls.java
b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/HiveTestUDFImpls.java
index c5699eb..27cfc51 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/HiveTestUDFImpls.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/HiveTestUDFImpls.java
@@ -19,6 +19,7 @@
  ******************************************************************************/
 package org.apache.drill.exec.fn.hive;
 
+import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.Description;
@@ -36,6 +37,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspect
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
@@ -128,7 +130,14 @@ public class HiveTestUDFImpls {
         case BINARY:
           return PrimitiveObjectInspectorUtils.getBinary(input, (BinaryObjectInspector) argumentOI).getBytes();
         case VARCHAR:
-          return PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector)argumentOI);
+          if (outputType == PrimitiveCategory.CHAR) {
+            HiveVarchar hiveVarchar = PrimitiveObjectInspectorUtils.getHiveVarchar(input,
(HiveVarcharObjectInspector) argumentOI);
+            return new HiveChar(hiveVarchar.getValue(), HiveChar.MAX_CHAR_LENGTH);
+          } else {
+            return PrimitiveObjectInspectorUtils.getHiveVarchar(input, (HiveVarcharObjectInspector)argumentOI);
+          }
+        case CHAR:
+          return PrimitiveObjectInspectorUtils.getHiveChar(input, (HiveCharObjectInspector)
argumentOI);
         case DATE:
           return PrimitiveObjectInspectorUtils.getDate(input, (DateObjectInspector) argumentOI);
         case TIMESTAMP:
@@ -202,6 +211,13 @@ public class HiveTestUDFImpls {
     }
   }
 
+  @Description(name = "testHiveUDFCHAR", value = "_FUNC_(VARCHAR) - Tests varchar data as
input and char data as output")
+  public static class GenericUDFTestCHAR extends GenericUDFTestBase {
+    public GenericUDFTestCHAR() {
+      super("testHiveUDFCHAR", PrimitiveCategory.VARCHAR, PrimitiveCategory.CHAR);
+    }
+  }
+
   @Description(name = "testHiveUDFSTRING", value = "_FUNC_(STRING) - Tests string data as
input and output")
   public static class GenericUDFTestSTRING extends GenericUDFTestBase {
     public GenericUDFTestSTRING() {

http://git-wip-us.apache.org/repos/asf/drill/blob/dd4f03be/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestSampleHiveUDFs.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestSampleHiveUDFs.java
b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestSampleHiveUDFs.java
index 6a38e1c..ddcd3e4 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestSampleHiveUDFs.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestSampleHiveUDFs.java
@@ -116,6 +116,17 @@ public class TestSampleHiveUDFs extends HiveTestBase {
   }
 
   @Test
+  public void varcharInCharOut() throws Exception {
+    String query = "SELECT " +
+        "testHiveUDFChar(cast ('This is a char' as char(20))) as col1," +
+        "testHiveUDFChar(cast(null as char)) as col2 " +
+        "FROM hive.kv LIMIT 1";
+
+    String expected = "col1,col2\n" + "This is a char,null\n";
+    helper(query, expected);
+  }
+
+  @Test
   @Ignore("doesn't work across timezones")
   public void dateInOut() throws Exception{
     String query = "SELECT " +

http://git-wip-us.apache.org/repos/asf/drill/blob/dd4f03be/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java
b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java
index 3e14846..46691fb 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestHiveStorage.java
@@ -115,6 +115,7 @@ public class TestHiveStorage extends HiveTestBase {
             "varchar_field",
             "timestamp_field",
             "date_field",
+            "char_field",
             // There is a regression in Hive 1.2.1 in binary type partition columns. Disable
for now.
             //"binary_part",
             "boolean_part",
@@ -132,7 +133,8 @@ public class TestHiveStorage extends HiveTestBase {
             "string_part",
             "varchar_part",
             "timestamp_part",
-            "date_part")
+            "date_part",
+            "char_part")
         .baselineValues(
             "binaryfield".getBytes(),
             false,
@@ -151,6 +153,7 @@ public class TestHiveStorage extends HiveTestBase {
             "varcharfield",
             new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
             new DateTime(Date.valueOf("2013-07-05").getTime()),
+            "charfield",
             // There is a regression in Hive 1.2.1 in binary type partition columns. Disable
for now.
             //"binary",
             true,
@@ -168,9 +171,10 @@ public class TestHiveStorage extends HiveTestBase {
             "string",
             "varchar",
             new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
-            new DateTime(Date.valueOf("2013-07-05").getTime()))
+            new DateTime(Date.valueOf("2013-07-05").getTime()),
+            "char")
         .baselineValues( // All fields are null, but partition fields have non-null values
-            null, null, null, null, null, null, null, null, null, null, null, null, null,
null, null, null, null,
+            null, null, null, null, null, null, null, null, null, null, null, null, null,
null, null, null, null, null,
             // There is a regression in Hive 1.2.1 in binary type partition columns. Disable
for now.
             //"binary",
             true,
@@ -188,7 +192,8 @@ public class TestHiveStorage extends HiveTestBase {
             "string",
             "varchar",
             new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
-            new DateTime(Date.valueOf("2013-07-05").getTime()))
+            new DateTime(Date.valueOf("2013-07-05").getTime()),
+            "char")
         .go();
   }
 
@@ -225,6 +230,7 @@ public class TestHiveStorage extends HiveTestBase {
               "string_field",
               "varchar_field",
               "timestamp_field",
+              "char_field",
               // There is a regression in Hive 1.2.1 in binary and boolean partition columns.
Disable for now.
               //"binary_part",
               "boolean_part",
@@ -242,7 +248,8 @@ public class TestHiveStorage extends HiveTestBase {
               "string_part",
               "varchar_part",
               "timestamp_part",
-              "date_part")
+              "date_part",
+              "char_part")
           .baselineValues(
               "binaryfield".getBytes(),
               false,
@@ -260,6 +267,7 @@ public class TestHiveStorage extends HiveTestBase {
               "stringfield",
               "varcharfield",
               new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
+              "charfield",
               // There is a regression in Hive 1.2.1 in binary and boolean partition columns.
Disable for now.
               //"binary",
               true,
@@ -277,9 +285,10 @@ public class TestHiveStorage extends HiveTestBase {
               "string",
               "varchar",
               new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
-              new DateTime(Date.valueOf("2013-07-05").getTime()))
+              new DateTime(Date.valueOf("2013-07-05").getTime()),
+              "char")
           .baselineValues( // All fields are null, but partition fields have non-null values
-              null, null, null, null, null, null, null, null, null, null, null, null, null,
null, null, null,
+              null, null, null, null, null, null, null, null, null, null, null, null, null,
null, null, null, null,
               // There is a regression in Hive 1.2.1 in binary and boolean partition columns.
Disable for now.
               //"binary",
               true,
@@ -297,7 +306,8 @@ public class TestHiveStorage extends HiveTestBase {
               "string",
               "varchar",
               new DateTime(Timestamp.valueOf("2013-07-05 17:01:00").getTime()),
-              new DateTime(Date.valueOf("2013-07-05").getTime()))
+              new DateTime(Date.valueOf("2013-07-05").getTime()),
+              "char")
           .go();
     } finally {
         test(String.format("alter session set `%s` = false", ExecConstants.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS));

http://git-wip-us.apache.org/repos/asf/drill/blob/dd4f03be/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestInfoSchemaOnHiveStorage.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestInfoSchemaOnHiveStorage.java
b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestInfoSchemaOnHiveStorage.java
index 8144dc1..03acd22 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestInfoSchemaOnHiveStorage.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestInfoSchemaOnHiveStorage.java
@@ -178,7 +178,7 @@ public class TestInfoSchemaOnHiveStorage extends HiveTestBase {
         "       NUMERIC_PRECISION_RADIX, NUMERIC_PRECISION, NUMERIC_SCALE " +
         "FROM INFORMATION_SCHEMA.`COLUMNS` " +
         "WHERE TABLE_SCHEMA = 'hive.default' AND TABLE_NAME = 'infoschematest' AND " +
-        "(COLUMN_NAME = 'stringtype' OR COLUMN_NAME = 'varchartype' OR " +
+        "(COLUMN_NAME = 'stringtype' OR COLUMN_NAME = 'varchartype' OR COLUMN_NAME = 'chartype'
OR " +
         "COLUMN_NAME = 'inttype' OR COLUMN_NAME = 'decimaltype')";
 
     testBuilder()
@@ -195,6 +195,7 @@ public class TestInfoSchemaOnHiveStorage extends HiveTestBase {
         .baselineValues("decimaltype", "DECIMAL",            null,   10,   38,    2)
         .baselineValues("stringtype",  "CHARACTER VARYING", 65535, null, null, null)
         .baselineValues("varchartype", "CHARACTER VARYING",    20, null, null, null)
+        .baselineValues("chartype", "CHARACTER", 10, null, null, null)
         .go();
   }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/dd4f03be/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
index 56c768f..9c2fe1c 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
@@ -185,7 +185,8 @@ public class HiveTestDataGenerator {
         "  string_field STRING," +
         "  varchar_field VARCHAR(50)," +
         "  timestamp_field TIMESTAMP," +
-        "  date_field DATE" +
+        "  date_field DATE," +
+        "  char_field CHAR(10)" +
         ") PARTITIONED BY (" +
         // There is a regression in Hive 1.2.1 in binary type partition columns. Disable
for now.
         // "  binary_part BINARY," +
@@ -204,7 +205,8 @@ public class HiveTestDataGenerator {
         "  string_part STRING," +
         "  varchar_part VARCHAR(50)," +
         "  timestamp_part TIMESTAMP," +
-        "  date_part DATE" +
+        "  date_part DATE," +
+        "  char_part CHAR(10)" +
         ") ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' " +
         "TBLPROPERTIES ('serialization.null.format'='') "
     );
@@ -229,7 +231,8 @@ public class HiveTestDataGenerator {
         "  string_part='string', " +
         "  varchar_part='varchar', " +
         "  timestamp_part='2013-07-05 17:01:00', " +
-        "  date_part='2013-07-05')"
+        "  date_part='2013-07-05', " +
+        "  char_part='char')"
     );
 
     // Add a second partition to table 'readtest' which contains the same values as the first
partition except
@@ -253,7 +256,8 @@ public class HiveTestDataGenerator {
             "  string_part='string', " +
             "  varchar_part='varchar', " +
             "  timestamp_part='2013-07-05 17:01:00', " +
-            "  date_part='2013-07-05')"
+            "  date_part='2013-07-05', " +
+            "  char_part='char')"
     );
 
     // Load data into table 'readtest'
@@ -276,7 +280,9 @@ public class HiveTestDataGenerator {
         "  string_part='string', " +
         "  varchar_part='varchar', " +
         "  timestamp_part='2013-07-05 17:01:00', " +
-        "  date_part='2013-07-05')", testDataFile));
+        "  date_part='2013-07-05'," +
+        "  char_part='char'" +
+            ")", testDataFile));
 
     // create a table that has all Hive types. This is to test how hive tables metadata is
populated in
     // Drill's INFORMATION_SCHEMA.
@@ -298,7 +304,8 @@ public class HiveTestDataGenerator {
         "listType ARRAY<STRING>, " +
         "mapType MAP<STRING,INT>, " +
         "structType STRUCT<sint:INT,sboolean:BOOLEAN,sstring:STRING>, " +
-        "uniontypeType UNIONTYPE<int, double, array<string>>)"
+        "uniontypeType UNIONTYPE<int, double, array<string>>, " +
+        "charType CHAR(10))"
     );
 
     /**
@@ -321,7 +328,8 @@ public class HiveTestDataGenerator {
             "  smallint_field SMALLINT," +
             "  string_field STRING," +
             "  varchar_field VARCHAR(50)," +
-            "  timestamp_field TIMESTAMP" +
+            "  timestamp_field TIMESTAMP," +
+            "  char_field CHAR(10)" +
             ") PARTITIONED BY (" +
             // There is a regression in Hive 1.2.1 in binary type partition columns. Disable
for now.
             // "  binary_part BINARY," +
@@ -340,7 +348,8 @@ public class HiveTestDataGenerator {
             "  string_part STRING," +
             "  varchar_part VARCHAR(50)," +
             "  timestamp_part TIMESTAMP," +
-            "  date_part DATE" +
+            "  date_part DATE," +
+            "  char_part CHAR(10)" +
             ") STORED AS parquet "
     );
 
@@ -363,7 +372,8 @@ public class HiveTestDataGenerator {
         "  string_part='string', " +
         "  varchar_part='varchar', " +
         "  timestamp_part='2013-07-05 17:01:00', " +
-        "  date_part='2013-07-05'" +
+        "  date_part='2013-07-05', " +
+        "  char_part='char'" +
         ") " +
         " SELECT " +
         "  binary_field," +
@@ -381,7 +391,8 @@ public class HiveTestDataGenerator {
         "  smallint_field," +
         "  string_field," +
         "  varchar_field," +
-        "  timestamp_field" +
+        "  timestamp_field," +
+        "  char_field" +
         " FROM readtest WHERE tinyint_part = 64");
 
     // Add a second partition to table 'readtest_parquet' which contains the same values
as the first partition except
@@ -405,7 +416,8 @@ public class HiveTestDataGenerator {
             "  string_part='string', " +
             "  varchar_part='varchar', " +
             "  timestamp_part='2013-07-05 17:01:00', " +
-            "  date_part='2013-07-05')"
+            "  date_part='2013-07-05', " +
+            "  char_part='char')"
     );
 
     // create a Hive view to test how its metadata is populated in Drill's INFORMATION_SCHEMA
@@ -542,7 +554,7 @@ public class HiveTestDataGenerator {
     PrintWriter printWriter = new PrintWriter(file);
     printWriter.println("YmluYXJ5ZmllbGQ=,false,34,65.99,2347.923,2758725827.9999,29375892739852.7689,"
+
         "89853749534593985.7834783,8.345,4.67,123456,234235,3455,stringfield,varcharfield,"
+
-        "2013-07-05 17:01:00,2013-07-05");
+        "2013-07-05 17:01:00,2013-07-05,charfield");
     printWriter.println(",,,,,,,,,,,,,,,,");
     printWriter.close();
 


Mime
View raw message