hawq-commits mailing list archives

From m..@apache.org
Subject [70/70] incubator-hawq git commit: HAWQ-992. PXF Hive data type check in Fragmenter too restrictive
Date Thu, 08 Sep 2016 02:08:43 GMT
HAWQ-992. PXF Hive data type check in Fragmenter too restrictive


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/b0cc5556
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/b0cc5556
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/b0cc5556

Branch: refs/heads/master
Commit: b0cc5556c528dc3391e9f05b26abb8143b93fb32
Parents: 0fe6d82
Author: Oleksandr Diachenko <odiachenko@pivotal.io>
Authored: Wed Sep 7 14:47:13 2016 -0700
Committer: Oleksandr Diachenko <odiachenko@pivotal.io>
Committed: Wed Sep 7 14:47:13 2016 -0700

----------------------------------------------------------------------
 .../org/apache/hawq/pxf/api/io/DataType.java    |   7 +-
 .../pxf/api/utilities/ColumnDescriptor.java     |  57 +++++----
 .../hawq/pxf/api/utilities/EnumHawqType.java    |  54 ++++-----
 .../plugins/hive/HiveColumnarSerdeResolver.java |   3 +-
 .../plugins/hive/HiveInputFormatFragmenter.java |  70 +----------
 .../hive/utilities/EnumHiveToHawqType.java      |  80 ++++++++++++-
 .../plugins/hive/utilities/HiveUtilities.java   |  73 +++++++++++-
 .../hive/utilities/HiveUtilitiesTest.java       | 119 +++++++++++++++++++
 .../hawq/pxf/plugins/json/JsonResolver.java     |   3 -
 .../pxf/plugins/json/JsonExtensionTest.java     |   3 +-
 .../hawq/pxf/service/BridgeOutputBuilder.java   |   1 -
 .../hawq/pxf/service/io/GPDBWritable.java       |  38 +-----
 .../pxf/service/utilities/ProtocolData.java     |  30 ++++-
 .../pxf/service/BridgeOutputBuilderTest.java    |  19 ++-
 .../hawq/pxf/service/io/GPDBWritableTest.java   |  55 +++++++++
 .../pxf/service/utilities/ProtocolDataTest.java |  78 ++++++++++++
 16 files changed, 514 insertions(+), 176 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/io/DataType.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/io/DataType.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/io/DataType.java
index 9b8b869..cac700c 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/io/DataType.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/io/DataType.java
@@ -31,14 +31,19 @@ import java.util.Map;
 public enum DataType {
     BOOLEAN(16),
     BYTEA(17),
-    CHAR(18),
     BIGINT(20),
     SMALLINT(21),
     INTEGER(23),
     TEXT(25),
     REAL(700),
     FLOAT8(701),
+    /**
+     * char(length), blank-padded string, fixed storage length
+     */
     BPCHAR(1042),
+    /**
+     * varchar(length), non-blank-padded string, variable storage length
+     */
     VARCHAR(1043),
     DATE(1082),
     TIME(1083),

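For orientation, a minimal sketch of how the reshuffled character types resolve after this change; it relies only on the get(int) lookup and getOID() accessor already used elsewhere in this diff, and the demo class name is made up:

    import org.apache.hawq.pxf.api.io.DataType;

    public class DataTypeSketch {
        public static void main(String[] args) {
            // CHAR(18) is gone from the enum; fixed-length character columns are expected
            // to arrive as BPCHAR (OID 1042) and variable-length ones as VARCHAR (OID 1043).
            System.out.println(DataType.get(1042));        // BPCHAR
            System.out.println(DataType.get(1043));        // VARCHAR
            System.out.println(DataType.VARCHAR.getOID()); // 1043
        }
    }
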
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ColumnDescriptor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ColumnDescriptor.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ColumnDescriptor.java
index baaca1d..a2bc8fe 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ColumnDescriptor.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/ColumnDescriptor.java
@@ -26,10 +26,11 @@ package org.apache.hawq.pxf.api.utilities;
  */
 public class ColumnDescriptor {
 
-	int gpdbColumnTypeCode;
-    String gpdbColumnName;
-    String gpdbColumnTypeName;
-    int gpdbColumnIndex;
+    int dbColumnTypeCode;
+    String dbColumnName;
+    String dbColumnTypeName;
+    int dbColumnIndex;
+    Integer[] dbColumnTypeModifiers;
 
     /**
      * Reserved word for a table record key.
@@ -44,12 +45,14 @@ public class ColumnDescriptor {
      * @param typecode OID
      * @param index column index
      * @param typename type name
+     * @param typemods type modifiers
      */
-    public ColumnDescriptor(String name, int typecode, int index, String typename) {
-        gpdbColumnTypeCode = typecode;
-        gpdbColumnTypeName = typename;
-        gpdbColumnName = name;
-        gpdbColumnIndex = index;
+    public ColumnDescriptor(String name, int typecode, int index, String typename, Integer[] typemods) {
+        dbColumnTypeCode = typecode;
+        dbColumnTypeName = typename;
+        dbColumnName = name;
+        dbColumnIndex = index;
+        dbColumnTypeModifiers = typemods;
     }
 
     /**
@@ -58,42 +61,50 @@ public class ColumnDescriptor {
      * @param copy the ColumnDescriptor to copy
      */
     public ColumnDescriptor(ColumnDescriptor copy) {
-        this.gpdbColumnTypeCode = copy.gpdbColumnTypeCode;
-        this.gpdbColumnName = copy.gpdbColumnName;
-        this.gpdbColumnIndex = copy.gpdbColumnIndex;
-        this.gpdbColumnTypeName = copy.gpdbColumnTypeName;
+        this.dbColumnTypeCode = copy.dbColumnTypeCode;
+        this.dbColumnName = copy.dbColumnName;
+        this.dbColumnIndex = copy.dbColumnIndex;
+        this.dbColumnTypeName = copy.dbColumnTypeName;
+        this.dbColumnTypeModifiers = (copy.dbColumnTypeModifiers == null)
+                ? null
+                : copy.dbColumnTypeModifiers.clone();
     }
 
     public String columnName() {
-        return gpdbColumnName;
+        return dbColumnName;
     }
 
     public int columnTypeCode() {
-        return gpdbColumnTypeCode;
+        return dbColumnTypeCode;
     }
 
     public int columnIndex() {
-        return gpdbColumnIndex;
+        return dbColumnIndex;
     }
 
     public String columnTypeName() {
-        return gpdbColumnTypeName;
+        return dbColumnTypeName;
+    }
+
+    public Integer[] columnTypeModifiers() {
+        return dbColumnTypeModifiers;
     }
 
     /**
-     * Returns <tt>true</tt> if {@link #gpdbColumnName} is a {@link #RECORD_KEY_NAME}.
+     * Returns <tt>true</tt> if {@link #dbColumnName} is a {@link #RECORD_KEY_NAME}.
      *
      * @return whether column is a record key column
      */
     public boolean isKeyColumn() {
-        return RECORD_KEY_NAME.equalsIgnoreCase(gpdbColumnName);
+        return RECORD_KEY_NAME.equalsIgnoreCase(dbColumnName);
     }
 
     @Override
 	public String toString() {
-		return "ColumnDescriptor [gpdbColumnTypeCode=" + gpdbColumnTypeCode
-				+ ", gpdbColumnName=" + gpdbColumnName
-				+ ", gpdbColumnTypeName=" + gpdbColumnTypeName
-				+ ", gpdbColumnIndex=" + gpdbColumnIndex + "]";
+		return "ColumnDescriptor [dbColumnTypeCode=" + dbColumnTypeCode
+				+ ", dbColumnName=" + dbColumnName
+				+ ", dbColumnTypeName=" + dbColumnTypeName
+				+ ", dbColumnIndex=" + dbColumnIndex
+				+ ", dbColumnTypeModifiers=" + dbColumnTypeModifiers + "]";
 	}
 }

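A minimal usage sketch of the widened ColumnDescriptor constructor and the new columnTypeModifiers() accessor; the column name, OID and modifier values below are illustrative only, not taken from the commit:

    import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;

    public class ColumnDescriptorSketch {
        public static void main(String[] args) {
            // Hypothetical HAWQ column: numeric(10,2) at index 0 (1700 is the NUMERIC OID
            // used elsewhere in this commit's tests).
            ColumnDescriptor amount = new ColumnDescriptor("amount", 1700, 0, "numeric",
                    new Integer[] { 10, 2 });
            Integer[] mods = amount.columnTypeModifiers(); // {10, 2}
            System.out.println(mods.length + " modifier(s) for " + amount.columnName());
        }
    }
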
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java
index b5a94c6..83499f3 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java
@@ -20,6 +20,8 @@
 package org.apache.hawq.pxf.api.utilities;
 
 import java.io.IOException;
+
+import org.apache.hawq.pxf.api.io.DataType;
 import org.codehaus.jackson.JsonGenerator;
 import org.codehaus.jackson.map.JsonSerializer;
 import org.codehaus.jackson.map.annotate.JsonSerialize;
@@ -43,37 +45,32 @@ class EnumHawqTypeSerializer extends JsonSerializer<EnumHawqType> {
  */
 @JsonSerialize(using = EnumHawqTypeSerializer.class)
 public enum EnumHawqType {
-    Int2Type("int2"),
-    Int4Type("int4"),
-    Int8Type("int8"),
-    Float4Type("float4"),
-    Float8Type("float8"),
-    TextType("text"),
-    VarcharType("varchar", (byte) 1, true),
-    ByteaType("bytea"),
-    DateType("date"),
-    TimestampType("timestamp"),
-    BoolType("bool"),
-    NumericType("numeric", (byte) 2, true),
-    BpcharType("bpchar", (byte) 1, true);
+    Int2Type("int2", DataType.SMALLINT),
+    Int4Type("int4", DataType.INTEGER),
+    Int8Type("int8", DataType.BIGINT),
+    Float4Type("float4", DataType.REAL),
+    Float8Type("float8", DataType.FLOAT8),
+    TextType("text", DataType.TEXT),
+    VarcharType("varchar", DataType.VARCHAR, (byte) 1),
+    ByteaType("bytea", DataType.BYTEA),
+    DateType("date", DataType.DATE),
+    TimestampType("timestamp", DataType.TIMESTAMP),
+    BoolType("bool", DataType.BOOLEAN),
+    NumericType("numeric", DataType.NUMERIC, (byte) 2),
+    BpcharType("bpchar", DataType.BPCHAR, (byte) 1);
 
+    private DataType dataType;
     private String typeName;
     private byte modifiersNum;
-    private boolean validateIntegerModifiers;
 
-    EnumHawqType(String typeName) {
+    EnumHawqType(String typeName, DataType dataType) {
         this.typeName = typeName;
+        this.dataType = dataType;
     }
 
-    EnumHawqType(String typeName, byte modifiersNum) {
-        this(typeName);
-        this.modifiersNum = modifiersNum;
-    }
-
-    EnumHawqType(String typeName, byte modifiersNum, boolean validateIntegerModifiers) {
-        this(typeName);
+    EnumHawqType(String typeName, DataType dataType, byte modifiersNum) {
+        this(typeName, dataType);
         this.modifiersNum = modifiersNum;
-        this.validateIntegerModifiers = validateIntegerModifiers;
     }
 
     /**
@@ -94,12 +91,11 @@ public enum EnumHawqType {
 
     /**
      * 
-     * @return whether modifiers should be integers
+     * @return data type
+     * @see DataType
      */
-    public boolean getValidateIntegerModifiers() {
-        return this.validateIntegerModifiers;
+    public DataType getDataType() {
+        return this.dataType;
     }
-}
-
-
 
+}

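A short sketch of the new EnumHawqType-to-DataType linkage introduced above; it uses only getDataType(), which this diff adds, and the wrapper class is just for the sketch:

    import org.apache.hawq.pxf.api.io.DataType;
    import org.apache.hawq.pxf.api.utilities.EnumHawqType;

    public class EnumHawqTypeSketch {
        public static void main(String[] args) {
            // Each HAWQ catalog type name now carries the DataType it serializes to,
            // replacing the old validateIntegerModifiers flag.
            System.out.println(EnumHawqType.NumericType.getDataType() == DataType.NUMERIC); // true
            System.out.println(EnumHawqType.VarcharType.getDataType() == DataType.VARCHAR); // true
            System.out.println(EnumHawqType.BpcharType.getDataType() == DataType.BPCHAR);   // true
        }
    }
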
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveColumnarSerdeResolver.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveColumnarSerdeResolver.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveColumnarSerdeResolver.java
index d298bac..606ddc6 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveColumnarSerdeResolver.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveColumnarSerdeResolver.java
@@ -28,6 +28,7 @@ import org.apache.hawq.pxf.api.io.DataType;
 import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
 import org.apache.hawq.pxf.api.utilities.InputData;
 import org.apache.hawq.pxf.api.utilities.Utilities;
+import org.apache.hawq.pxf.plugins.hive.utilities.HiveUtilities;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -126,7 +127,7 @@ public class HiveColumnarSerdeResolver extends HiveResolver {
         for (int i = 0; i < numberOfDataColumns; i++) {
             ColumnDescriptor column = input.getColumn(i);
             String columnName = column.columnName();
-            String columnType = HiveInputFormatFragmenter.toHiveType(DataType.get(column.columnTypeCode()), columnName);
+            String columnType = HiveUtilities.toCompatibleHiveType(DataType.get(column.columnTypeCode()));
             columnNames.append(delim).append(columnName);
             columnTypes.append(delim).append(columnType);
             delim = ",";

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveInputFormatFragmenter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveInputFormatFragmenter.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveInputFormatFragmenter.java
index a666b8b..ccc8fa7 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveInputFormatFragmenter.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveInputFormatFragmenter.java
@@ -26,6 +26,8 @@ import org.apache.hawq.pxf.api.UserDataException;
 import org.apache.hawq.pxf.api.io.DataType;
 import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
 import org.apache.hawq.pxf.api.utilities.InputData;
+import org.apache.hawq.pxf.plugins.hive.utilities.EnumHiveToHawqType;
+import org.apache.hawq.pxf.plugins.hive.utilities.HiveUtilities;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -146,82 +148,18 @@ public class HiveInputFormatFragmenter extends HiveDataFragmenter {
         for (FieldSchema hiveCol : hiveColumns) {
             ColumnDescriptor colDesc = inputData.getColumn(index++);
             DataType colType = DataType.get(colDesc.columnTypeCode());
-            compareTypes(colType, hiveCol.getType(), colDesc.columnName());
+            HiveUtilities.validateTypeCompatible(colType, colDesc.columnTypeModifiers(), hiveCol.getType(), colDesc.columnName());
         }
         // check partition fields
         List<FieldSchema> hivePartitions = tbl.getPartitionKeys();
         for (FieldSchema hivePart : hivePartitions) {
             ColumnDescriptor colDesc = inputData.getColumn(index++);
             DataType colType = DataType.get(colDesc.columnTypeCode());
-            compareTypes(colType, hivePart.getType(), colDesc.columnName());
+            HiveUtilities.validateTypeCompatible(colType, colDesc.columnTypeModifiers(), hivePart.getType(), colDesc.columnName());
         }
 
     }
 
-    private void compareTypes(DataType type, String hiveType, String fieldName) {
-        String convertedHive = toHiveType(type, fieldName);
-        if (!convertedHive.equals(hiveType)
-                && !(convertedHive.equals("smallint") && hiveType.equals("tinyint"))) {
-            throw new UnsupportedTypeException(
-                    "Schema mismatch definition: Field " + fieldName
-                            + " (Hive type " + hiveType + ", HAWQ type "
-                            + type.toString() + ")");
-        }
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("Field " + fieldName + ": Hive type " + hiveType
-                    + ", HAWQ type " + type.toString());
-        }
-    }
-
-    /**
-     * Converts HAWQ type to hive type. The supported mappings are:<ul>
-     * <li>{@code BOOLEAN -> boolean}</li>
-     * <li>{@code SMALLINT -> smallint (tinyint is converted to smallint)}</li>
-     * <li>{@code BIGINT -> bigint}</li>
-     * <li>{@code TIMESTAMP, TIME -> timestamp}</li>
-     * <li>{@code NUMERIC -> decimal}</li>
-     * <li>{@code BYTEA -> binary}</li>
-     * <li>{@code INTERGER -> int}</li>
-     * <li>{@code TEXT -> string}</li>
-     * <li>{@code REAL -> float}</li>
-     * <li>{@code FLOAT8 -> double}</li>
-     * </ul>
-     * All other types (both in HAWQ and in HIVE) are not supported.
-     *
-     * @param type HAWQ data type
-     * @param name field name
-     * @return Hive type
-     * @throws UnsupportedTypeException if type is not supported
-     */
-    public static String toHiveType(DataType type, String name) {
-        switch (type) {
-            case BOOLEAN:
-            case SMALLINT:
-            case BIGINT:
-            case TIMESTAMP:
-                return type.toString().toLowerCase();
-            case NUMERIC:
-                return "decimal";
-            case BYTEA:
-                return "binary";
-            case INTEGER:
-                return "int";
-            case TEXT:
-                return "string";
-            case REAL:
-                return "float";
-            case FLOAT8:
-                return "double";
-            case TIME:
-                return "timestamp";
-            default:
-                throw new UnsupportedTypeException(
-                        type.toString()
-                                + " conversion is not supported by HiveInputFormatFragmenter (Field "
-                                + name + ")");
-        }
-    }
-
     /*
      * Validates that partition format corresponds to PXF supported formats and
      * transforms the class name to an enumeration for writing it to the

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
index a747bd5..0290f9c 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
@@ -19,6 +19,12 @@
 
 package org.apache.hawq.pxf.plugins.hive.utilities;
 
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import org.apache.hawq.pxf.api.io.DataType;
 import org.apache.hawq.pxf.api.utilities.EnumHawqType;
 import org.apache.hawq.pxf.api.UnsupportedTypeException;
 
@@ -29,8 +35,8 @@ import org.apache.hawq.pxf.api.UnsupportedTypeException;
  */
 public enum EnumHiveToHawqType {
 
-    TinyintType("tinyint", EnumHawqType.Int2Type),
-    SmallintType("smallint", EnumHawqType.Int2Type),
+    TinyintType("tinyint", EnumHawqType.Int2Type, (byte) 1),
+    SmallintType("smallint", EnumHawqType.Int2Type, (byte) 2),
     IntType("int", EnumHawqType.Int4Type),
     BigintType("bigint", EnumHawqType.Int8Type),
     BooleanType("boolean", EnumHawqType.BoolType),
@@ -51,11 +57,17 @@ public enum EnumHiveToHawqType {
     private String typeName;
     private EnumHawqType hawqType;
     private String splitExpression;
+    private byte size;
 
     EnumHiveToHawqType(String typeName, EnumHawqType hawqType) {
         this.typeName = typeName;
         this.hawqType = hawqType;
     }
+    
+    EnumHiveToHawqType(String typeName, EnumHawqType hawqType, byte size) {
+        this(typeName, hawqType);
+        this.size = size;
+    }
 
     EnumHiveToHawqType(String typeName, EnumHawqType hawqType, String splitExpression) {
         this(typeName, hawqType);
@@ -110,4 +122,68 @@ public enum EnumHiveToHawqType {
                 + hiveType + " to HAWQ's type");
     }
 
+
+    /**
+     * 
+     * @param dataType HAWQ data type
+     * @return Hive type compatible with the given HAWQ type; if more than one Hive type is compatible, the one with the larger size is returned
+     * @throws UnsupportedTypeException if there is no corresponding Hive type for the given HAWQ type
+     */
+    public static EnumHiveToHawqType getCompatibleHiveToHawqType(DataType dataType) {
+
+        SortedSet<EnumHiveToHawqType> types = new TreeSet<EnumHiveToHawqType>(
+                new Comparator<EnumHiveToHawqType>() {
+                    public int compare(EnumHiveToHawqType a,
+                            EnumHiveToHawqType b) {
+                        return Byte.compare(a.getSize(), b.getSize());
+                    }
+                });
+
+        for (EnumHiveToHawqType t : values()) {
+            if (t.getHawqType().getDataType().equals(dataType)) {
+                types.add(t);
+            }
+        }
+
+        if (types.size() == 0)
+            throw new UnsupportedTypeException("Unable to find compatible Hive type for given HAWQ's type: " + dataType);
+
+        return types.last();
+    }
+
+    /**
+     * 
+     * @param hiveType full Hive data type, e.g. varchar(10)
+     * @return array of type modifiers
+     * @throws UnsupportedTypeException if the Hive type is not supported
+     */
+    public static Integer[] extractModifiers(String hiveType) {
+        Integer[] result = null;
+        for (EnumHiveToHawqType t : values()) {
+            String hiveTypeName = hiveType;
+            String splitExpression = t.getSplitExpression();
+            if (splitExpression != null) {
+                String[] tokens = hiveType.split(splitExpression);
+                hiveTypeName = tokens[0];
+                result = new Integer[tokens.length - 1];
+                for (int i = 0; i < tokens.length - 1; i++)
+                    result[i] = Integer.parseInt(tokens[i+1]);
+            }
+            if (t.getTypeName().toLowerCase()
+                    .equals(hiveTypeName.toLowerCase())) {
+                return result;
+            }
+        }
+        throw new UnsupportedTypeException("Unable to map Hive's type: "
+                + hiveType + " to HAWQ's type");
+    }
+
+    /**
+     * This size is used to choose the compatible Hive type when more than one Hive type is mapped to the same HAWQ type
+     * @return size of this type in bytes, or 0 if not set
+     */
+    public byte getSize() {
+        return size;
+    }
+
 }
\ No newline at end of file

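Two short illustrations of the helpers added above; the inputs are taken from mappings visible in this commit, and the class wrapper is just for the sketch:

    import java.util.Arrays;

    import org.apache.hawq.pxf.api.io.DataType;
    import org.apache.hawq.pxf.plugins.hive.utilities.EnumHiveToHawqType;

    public class EnumHiveToHawqTypeSketch {
        public static void main(String[] args) {
            // tinyint (size 1) and smallint (size 2) both map to HAWQ int2; the helper
            // returns the larger of the compatible Hive types, i.e. smallint.
            EnumHiveToHawqType hive = EnumHiveToHawqType.getCompatibleHiveToHawqType(DataType.SMALLINT);
            System.out.println(hive.getTypeName()); // smallint

            // Modifiers are split out of the full Hive type string.
            Integer[] mods = EnumHiveToHawqType.extractModifiers("decimal(10,2)");
            System.out.println(Arrays.toString(mods)); // [10, 2]
        }
    }
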
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
index 096c0ff..6abb5ba 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hawq.pxf.api.Metadata;
 import org.apache.hawq.pxf.api.UnsupportedTypeException;
 import org.apache.hawq.pxf.api.utilities.EnumHawqType;
+import org.apache.hawq.pxf.api.io.DataType;
 import org.apache.hawq.pxf.plugins.hive.utilities.EnumHiveToHawqType;
 
 /**
@@ -138,7 +139,7 @@ public class HiveUtilities {
                                     + ", actual number of modifiers: "
                                     + modifiers.length);
                 }
-                if (hawqType.getValidateIntegerModifiers() && !verifyIntegerModifiers(modifiers)) {
+                if (!verifyIntegerModifiers(modifiers)) {
                     throw new UnsupportedTypeException("HAWQ does not support type " + hiveType + " (Field " + fieldName + "), modifiers should be integers");
                 }
             }
@@ -256,4 +257,74 @@ public class HiveUtilities {
             throw new RuntimeException("Failed connecting to Hive MetaStore service: " + cause.getMessage(), cause);
         }
     }
+
+
+    /**
+     * Converts HAWQ type to hive type.
+     * @see EnumHiveToHawqType For supported mappings
+     * @param type HAWQ data type
+     * @param name field name
+     * @return Hive type
+     * @throws UnsupportedTypeException if type is not supported
+     */
+    public static String toCompatibleHiveType(DataType type) {
+
+        EnumHiveToHawqType hiveToHawqType = EnumHiveToHawqType.getCompatibleHiveToHawqType(type);
+        return hiveToHawqType.getTypeName();
+    }
+
+
+
+    /**
+     * Validates whether given HAWQ and Hive data types are compatible.
+     * If a data type can have modifiers, the HAWQ data type is valid when it has no modifiers at all
+     * or when HAWQ's modifiers are greater than or equal to Hive's modifiers.
+     * <p>
+     * For example:
+     * <p>
+     * Hive type - varchar(20), HAWQ type varchar - valid.
+     * <p>
+     * Hive type - varchar(20), HAWQ type varchar(20) - valid.
+     * <p>
+     * Hive type - varchar(20), HAWQ type varchar(25) - valid.
+     * <p>
+     * Hive type - varchar(20), HAWQ type varchar(15) - invalid.
+     *
+     *
+     * @param hawqDataType HAWQ data type
+     * @param hawqTypeMods HAWQ type modifiers
+     * @param hiveType full Hive type, e.g. decimal(10,2)
+     * @param hawqColumnName HAWQ column name
+     * @throws UnsupportedTypeException if types are incompatible
+     */
+    public static void validateTypeCompatible(DataType hawqDataType, Integer[] hawqTypeMods, String hiveType, String hawqColumnName) {
+
+        EnumHiveToHawqType hiveToHawqType = EnumHiveToHawqType.getHiveToHawqType(hiveType);
+        EnumHawqType expectedHawqType = hiveToHawqType.getHawqType();
+
+        if (!expectedHawqType.getDataType().equals(hawqDataType)) {
+            throw new UnsupportedTypeException("Invalid definition for column " + hawqColumnName
+                                    +  ": expected HAWQ type " + expectedHawqType.getDataType() +
+                    ", actual HAWQ type " + hawqDataType);
+        }
+
+        switch (hawqDataType) {
+            case NUMERIC:
+            case VARCHAR:
+            case BPCHAR:
+                if (hawqTypeMods != null && hawqTypeMods.length > 0) {
+                    Integer[] hiveTypeModifiers = EnumHiveToHawqType
+                            .extractModifiers(hiveType);
+                    for (int i = 0; i < hiveTypeModifiers.length; i++) {
+                        if (hawqTypeMods[i] < hiveTypeModifiers[i])
+                            throw new UnsupportedTypeException(
+                                    "Invalid definition for column " + hawqColumnName
+                                            + ": modifiers are not compatible, "
+                                            + Arrays.toString(hiveTypeModifiers) + ", "
+                                            + Arrays.toString(hawqTypeMods));
+                    }
+                }
+                break;
+        }
+    }
 }

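A condensed sketch of the new validation entry point, mirroring the rules spelled out in the Javadoc above; column names and modifier values are illustrative:

    import org.apache.hawq.pxf.api.UnsupportedTypeException;
    import org.apache.hawq.pxf.api.io.DataType;
    import org.apache.hawq.pxf.plugins.hive.utilities.HiveUtilities;

    public class ValidateTypeCompatibleSketch {
        public static void main(String[] args) {
            // HAWQ numeric(11,3) is at least as wide as Hive decimal(10,2): accepted.
            HiveUtilities.validateTypeCompatible(DataType.NUMERIC, new Integer[] { 11, 3 },
                    "decimal(10,2)", "amount");

            // A HAWQ numeric with no modifiers is also accepted against decimal(10,2).
            HiveUtilities.validateTypeCompatible(DataType.NUMERIC, null, "decimal(10,2)", "amount");

            try {
                // Narrower HAWQ modifiers than the Hive column's are rejected.
                HiveUtilities.validateTypeCompatible(DataType.NUMERIC, new Integer[] { 10, 1 },
                        "decimal(10,2)", "amount");
            } catch (UnsupportedTypeException e) {
                System.out.println(e.getMessage());
            }
        }
    }
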
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
index e9b024a..8c4d6b6 100644
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
@@ -22,11 +22,16 @@ package org.apache.hawq.pxf.plugins.hive.utilities;
 
 import static org.junit.Assert.*;
 
+import java.util.Arrays;
+
+import org.apache.hawq.pxf.api.io.DataType;
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.junit.Test;
 import org.apache.hawq.pxf.api.Metadata;
 import org.apache.hawq.pxf.api.UnsupportedTypeException;
 import org.apache.hawq.pxf.plugins.hive.utilities.EnumHiveToHawqType;
+import org.apache.hawq.pxf.plugins.hive.utilities.HiveUtilities;
 
 public class HiveUtilitiesTest {
 
@@ -121,6 +126,120 @@ public class HiveUtilitiesTest {
     }
 
     @Test
+    public void testCompatibleHiveType() {
+
+        String compatibleTypeName = HiveUtilities.toCompatibleHiveType(DataType.BOOLEAN);
+        assertEquals(compatibleTypeName, EnumHiveToHawqType.BooleanType.getTypeName());
+
+        compatibleTypeName = HiveUtilities.toCompatibleHiveType(DataType.BYTEA);
+        assertEquals(compatibleTypeName, EnumHiveToHawqType.BinaryType.getTypeName());
+
+        compatibleTypeName = HiveUtilities.toCompatibleHiveType(DataType.BPCHAR);
+        assertEquals(compatibleTypeName, EnumHiveToHawqType.CharType.getTypeName());
+
+        compatibleTypeName = HiveUtilities.toCompatibleHiveType(DataType.BIGINT);
+        assertEquals(compatibleTypeName, EnumHiveToHawqType.BigintType.getTypeName());
+
+        compatibleTypeName = HiveUtilities.toCompatibleHiveType(DataType.SMALLINT);
+        assertEquals(compatibleTypeName, EnumHiveToHawqType.SmallintType.getTypeName());
+
+        compatibleTypeName = HiveUtilities.toCompatibleHiveType(DataType.INTEGER);
+        assertEquals(compatibleTypeName, EnumHiveToHawqType.IntType.getTypeName());
+
+        compatibleTypeName = HiveUtilities.toCompatibleHiveType(DataType.TEXT);
+        assertEquals(compatibleTypeName, EnumHiveToHawqType.StringType.getTypeName());
+
+        compatibleTypeName = HiveUtilities.toCompatibleHiveType(DataType.REAL);
+        assertEquals(compatibleTypeName, EnumHiveToHawqType.FloatType.getTypeName());
+
+        compatibleTypeName = HiveUtilities.toCompatibleHiveType(DataType.FLOAT8);
+        assertEquals(compatibleTypeName, EnumHiveToHawqType.DoubleType.getTypeName());
+
+        compatibleTypeName = HiveUtilities.toCompatibleHiveType(DataType.VARCHAR);
+        assertEquals(compatibleTypeName, EnumHiveToHawqType.VarcharType.getTypeName());
+
+        compatibleTypeName = HiveUtilities.toCompatibleHiveType(DataType.DATE);
+        assertEquals(compatibleTypeName, EnumHiveToHawqType.DateType.getTypeName());
+
+        compatibleTypeName = HiveUtilities.toCompatibleHiveType(DataType.TIMESTAMP);
+        assertEquals(compatibleTypeName, EnumHiveToHawqType.TimestampType.getTypeName());
+
+        compatibleTypeName = HiveUtilities.toCompatibleHiveType(DataType.NUMERIC);
+        assertEquals(compatibleTypeName, EnumHiveToHawqType.DecimalType.getTypeName());
+
+        try {
+            compatibleTypeName = HiveUtilities.toCompatibleHiveType(DataType.UNSUPPORTED_TYPE);
+            fail("should fail because there is no mapped Hive type");
+        }
+        catch (UnsupportedTypeException e) {
+            String errorMsg = "Unable to find compatible Hive type for given HAWQ's type: " + DataType.UNSUPPORTED_TYPE;
+            assertEquals(errorMsg, e.getMessage());
+        }
+
+
+    }
+
+    @Test
+    public void validateSchema() throws Exception {
+        String columnName = "abc";
+
+        Integer[] hawqModifiers = {};
+        HiveUtilities.validateTypeCompatible(DataType.SMALLINT, hawqModifiers, EnumHiveToHawqType.TinyintType.getTypeName(), columnName);
+
+        HiveUtilities.validateTypeCompatible(DataType.SMALLINT, hawqModifiers, EnumHiveToHawqType.SmallintType.getTypeName(), columnName);
+
+        //Both Hive and HAWQ types have the same modifiers
+        hawqModifiers = new Integer[]{38, 18};
+        HiveUtilities.validateTypeCompatible(DataType.NUMERIC, hawqModifiers, "decimal(38,18)", columnName);
+
+        //HAWQ datatype doesn't require modifiers, they are empty, Hive has non-empty modifiers
+        //Types are compatible in this case
+        hawqModifiers = new Integer[]{};
+        HiveUtilities.validateTypeCompatible(DataType.NUMERIC, hawqModifiers, "decimal(38,18)", columnName);
+        hawqModifiers = null;
+        HiveUtilities.validateTypeCompatible(DataType.NUMERIC, hawqModifiers, "decimal(38,18)", columnName);
+
+        //HAWQ has wider modifiers than Hive, types are compatible
+        hawqModifiers = new Integer[]{11, 3};
+        HiveUtilities.validateTypeCompatible(DataType.NUMERIC, hawqModifiers, "decimal(10,2)", columnName);
+
+
+        //HAWQ has lesser modifiers than Hive, types aren't compatible
+        try {
+            hawqModifiers = new Integer[]{38, 17};
+            HiveUtilities.validateTypeCompatible(DataType.NUMERIC, hawqModifiers, "decimal(38,18)", columnName);
+            fail("should fail with incompatible modifiers message");
+        }
+        catch (UnsupportedTypeException e) {
+            String errorMsg = "Invalid definition for column " + columnName 
+                    +  ": modifiers are not compatible, "
+                    + Arrays.toString(new String[]{"38", "18"}) + ", "
+                    + Arrays.toString(new String[]{"38", "17"});
+            assertEquals(errorMsg, e.getMessage());
+        }
+
+
+        //Different types, which are not mapped to each other
+        try {
+            hawqModifiers = new Integer[]{};
+            HiveUtilities.validateTypeCompatible(DataType.NUMERIC, hawqModifiers, "boolean", columnName);
+            fail("should fail with incompatible types message");
+        }
+        catch (UnsupportedTypeException e) {
+            String errorMsg = "Invalid definition for column " + columnName
+                    + ": expected HAWQ type " + DataType.BOOLEAN
+                    + ", actual HAWQ type " + DataType.NUMERIC;
+            assertEquals(errorMsg, e.getMessage());
+        }
+    }
+
+    @Test
+    public void extractModifiers() throws Exception {
+        Integer[] mods = EnumHiveToHawqType.extractModifiers("decimal(10,2)");
+        assertEquals(mods, new Integer[]{10, 2});
+    }
+
+    @Test
     public void mapHiveTypeWithModifiersNegative() throws Exception {
 
         String badHiveType = "decimal(2)";

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-json/src/main/java/org/apache/hawq/pxf/plugins/json/JsonResolver.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-json/src/main/java/org/apache/hawq/pxf/plugins/json/JsonResolver.java b/pxf/pxf-json/src/main/java/org/apache/hawq/pxf/plugins/json/JsonResolver.java
index 21db6b7..92e01ed 100644
--- a/pxf/pxf-json/src/main/java/org/apache/hawq/pxf/plugins/json/JsonResolver.java
+++ b/pxf/pxf-json/src/main/java/org/apache/hawq/pxf/plugins/json/JsonResolver.java
@@ -196,9 +196,6 @@ public class JsonResolver extends Plugin implements ReadResolver {
 			case BOOLEAN:
 				oneField.val = val.asBoolean();
 				break;
-			case CHAR:
-				oneField.val = val.asText().charAt(0);
-				break;
 			case BYTEA:
 				oneField.val = val.asText().getBytes();
 				break;

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-json/src/test/java/org/apache/hawq/pxf/plugins/json/JsonExtensionTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-json/src/test/java/org/apache/hawq/pxf/plugins/json/JsonExtensionTest.java b/pxf/pxf-json/src/test/java/org/apache/hawq/pxf/plugins/json/JsonExtensionTest.java
index a8161c1..33051b0 100644
--- a/pxf/pxf-json/src/test/java/org/apache/hawq/pxf/plugins/json/JsonExtensionTest.java
+++ b/pxf/pxf-json/src/test/java/org/apache/hawq/pxf/plugins/json/JsonExtensionTest.java
@@ -119,11 +119,10 @@ public class JsonExtensionTest extends PxfUnit {
 		columnDefs.add(new Pair<String, DataType>("realType", DataType.REAL));
 		columnDefs.add(new Pair<String, DataType>("float8Type", DataType.FLOAT8));
 		// The DataType.BYTEA type is left out for further validation.
-		columnDefs.add(new Pair<String, DataType>("charType", DataType.CHAR));
 		columnDefs.add(new Pair<String, DataType>("booleanType", DataType.BOOLEAN));
 		columnDefs.add(new Pair<String, DataType>("bintType", DataType.BIGINT));
 
-		output.add(",varcharType,bpcharType,777,999,3.15,3.14,x,true,666");
+		output.add(",varcharType,bpcharType,777,999,3.15,3.14,true,666");
 
 		super.assertOutput(new Path(System.getProperty("user.dir") + File.separator
 				+ "src/test/resources/datatypes-test.json"), output);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java
index c59fbea..d04a2f4 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/BridgeOutputBuilder.java
@@ -373,7 +373,6 @@ public class BridgeOutputBuilder {
                     break;
                 case VARCHAR:
                 case BPCHAR:
-                case CHAR:
                 case TEXT:
                 case NUMERIC:
                 case TIMESTAMP:

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java
index 5bc26f1..7e374e2 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/io/GPDBWritable.java
@@ -832,40 +832,10 @@ public class GPDBWritable implements Writable {
      * @return type name
      */
     public static String getTypeName(int oid) {
-        switch (DataType.get(oid)) {
-            case BOOLEAN:
-                return "BOOLEAN";
-            case BYTEA:
-                return "BYTEA";
-            case CHAR:
-                return "CHAR";
-            case BIGINT:
-                return "BIGINT";
-            case SMALLINT:
-                return "SMALLINT";
-            case INTEGER:
-                return "INTEGER";
-            case TEXT:
-                return "TEXT";
-            case REAL:
-                return "REAL";
-            case FLOAT8:
-                return "FLOAT8";
-            case BPCHAR:
-                return "BPCHAR";
-            case VARCHAR:
-                return "VARCHAR";
-            case DATE:
-                return "DATE";
-            case TIME:
-                return "TIME";
-            case TIMESTAMP:
-                return "TIMESTAMP";
-            case NUMERIC:
-                return "NUMERIC";
-            default:
-                return "TEXT";
-        }
+        DataType type = DataType.get(oid);
+        if (type == DataType.UNSUPPORTED_TYPE)
+            return DataType.TEXT.name();
+        return type.name();
     }
 
     /*

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/ProtocolData.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/ProtocolData.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/ProtocolData.java
index 0337937..1797b88 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/ProtocolData.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/utilities/ProtocolData.java
@@ -29,7 +29,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.security.UserGroupInformation;
-
 import org.apache.hawq.pxf.api.OutputFormat;
 import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
 import org.apache.hawq.pxf.api.utilities.InputData;
@@ -390,9 +389,10 @@ public class ProtocolData extends InputData {
             String columnName = getProperty("ATTR-NAME" + i);
             int columnTypeCode = getIntProperty("ATTR-TYPECODE" + i);
             String columnTypeName = getProperty("ATTR-TYPENAME" + i);
+            Integer[] columnTypeMods = parseTypeMods(i);
 
             ColumnDescriptor column = new ColumnDescriptor(columnName,
-                    columnTypeCode, i, columnTypeName);
+                    columnTypeCode, i, columnTypeName, columnTypeMods);
             tupleDescription.add(column);
 
             if (columnName.equalsIgnoreCase(ColumnDescriptor.RECORD_KEY_NAME)) {
@@ -401,6 +401,32 @@ public class ProtocolData extends InputData {
         }
     }
 
+    private Integer[] parseTypeMods(int columnIndex) {
+        String typeModeCountStr = getOptionalProperty("ATTR-TYPEMOD" + columnIndex + "-COUNT");
+        Integer[] result = null;
+        Integer typeModeCount = null;
+        if (typeModeCountStr != null) {
+            try {
+                typeModeCount = Integer.parseInt(typeModeCountStr);
+                if (typeModeCount < 0)
+                    throw new IllegalArgumentException("ATTR-TYPEMOD" + columnIndex + "-COUNT can't be negative");
+                result = new Integer[typeModeCount];
+            } catch (NumberFormatException e) {
+                throw new IllegalArgumentException("ATTR-TYPEMOD" + columnIndex + "-COUNT must be a positive integer");
+            }
+            for (int i = 0; i < typeModeCount; i++) {
+                try {
+                    result[i] = Integer.parseInt(getProperty("ATTR-TYPEMOD" + columnIndex + "-" + i));
+                    if (result[i] < 0)
+                        throw new NumberFormatException();
+                } catch (NumberFormatException e) {
+                    throw new IllegalArgumentException("ATTR-TYPEMOD" + columnIndex + "-" + i + " must be a positive integer");
+                }
+            }
+        }
+        return result;
+    }
+
     /**
      * Sets the index of the allocated data fragment
      *

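A sketch of the per-column request properties the new parseTypeMods() step consumes; judging from the tests further down, the X-GP- prefix is stripped by ProtocolData's property lookup before the ATTR-TYPEMOD names above are read. Values here are illustrative:

    import java.util.HashMap;
    import java.util.Map;

    public class TypeModHeadersSketch {
        public static void main(String[] args) {
            // Hypothetical request fragment describing a single varchar(5) column.
            Map<String, String> parameters = new HashMap<String, String>();
            parameters.put("X-GP-ATTRS", "1");
            parameters.put("X-GP-ATTR-NAME0", "vc1");
            parameters.put("X-GP-ATTR-TYPECODE0", "1043");   // VARCHAR OID
            parameters.put("X-GP-ATTR-TYPENAME0", "varchar");
            parameters.put("X-GP-ATTR-TYPEMOD0-COUNT", "1"); // number of modifiers that follow
            parameters.put("X-GP-ATTR-TYPEMOD0-0", "5");     // varchar length
            // Together with the rest of the protocol headers, new ProtocolData(parameters)
            // would record {5} as this column's type modifiers.
            System.out.println(parameters);
        }
    }
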
http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/BridgeOutputBuilderTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/BridgeOutputBuilderTest.java b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/BridgeOutputBuilderTest.java
index 7b1d136..a00910d 100644
--- a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/BridgeOutputBuilderTest.java
+++ b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/BridgeOutputBuilderTest.java
@@ -138,7 +138,7 @@ public class BridgeOutputBuilderTest {
     @Test
     public void testFillGPDBWritable() throws Exception {
         Map<String, String> parameters = new HashMap<String, String>();
-        parameters.put("X-GP-ATTRS", "14");
+        parameters.put("X-GP-ATTRS", "13");
 
         addColumn(parameters, 0, DataType.INTEGER, "col0");
         addColumn(parameters, 1, DataType.FLOAT8, "col1");
@@ -149,11 +149,10 @@ public class BridgeOutputBuilderTest {
         addColumn(parameters, 6, DataType.BYTEA, "col6");
         addColumn(parameters, 7, DataType.VARCHAR, "col7");
         addColumn(parameters, 8, DataType.BPCHAR, "col8");
-        addColumn(parameters, 9, DataType.CHAR, "col9");
-        addColumn(parameters, 10, DataType.TEXT, "col10");
-        addColumn(parameters, 11, DataType.NUMERIC, "col11");
-        addColumn(parameters, 12, DataType.TIMESTAMP, "col12");
-        addColumn(parameters, 13, DataType.DATE, "col13");
+        addColumn(parameters, 9, DataType.TEXT, "col9");
+        addColumn(parameters, 10, DataType.NUMERIC, "col10");
+        addColumn(parameters, 11, DataType.TIMESTAMP, "col11");
+        addColumn(parameters, 12, DataType.DATE, "col12");
 
         BridgeOutputBuilder builder = makeBuilder(parameters);
         output = builder.makeGPDBWritableOutput();
@@ -168,7 +167,6 @@ public class BridgeOutputBuilderTest {
                         DataType.BYTEA.getOID(), new byte[] { 0 }),
                 new OneField(DataType.VARCHAR.getOID(), "value"), new OneField(
                         DataType.BPCHAR.getOID(), "value"), new OneField(
-                        DataType.CHAR.getOID(), "value"), new OneField(
                         DataType.TEXT.getOID(), "value"), new OneField(
                         DataType.NUMERIC.getOID(), "0"), new OneField(
                         DataType.TIMESTAMP.getOID(), new Timestamp(0)),
@@ -185,10 +183,9 @@ public class BridgeOutputBuilderTest {
         assertEquals(output.getString(7), "value\0");
         assertEquals(output.getString(8), "value\0");
         assertEquals(output.getString(9), "value\0");
-        assertEquals(output.getString(10), "value\0");
-        assertEquals(output.getString(11), "0\0");
-        assertEquals(Timestamp.valueOf(output.getString(12)), new Timestamp(0));
-        assertEquals(Date.valueOf(output.getString(13).trim()).toString(),
+        assertEquals(output.getString(10), "0\0");
+        assertEquals(Timestamp.valueOf(output.getString(11)), new Timestamp(0));
+        assertEquals(Date.valueOf(output.getString(12).trim()).toString(),
                 new Date(1).toString());
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/io/GPDBWritableTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/io/GPDBWritableTest.java b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/io/GPDBWritableTest.java
index dcb89b8..f205b9c 100644
--- a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/io/GPDBWritableTest.java
+++ b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/io/GPDBWritableTest.java
@@ -21,6 +21,8 @@ package org.apache.hawq.pxf.service.io;
 
 
 import org.apache.commons.logging.Log;
+import org.apache.hawq.pxf.api.io.DataType;
+import org.apache.hawq.pxf.service.io.GPDBWritable;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -36,6 +38,7 @@ import java.io.EOFException;
 
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
@@ -123,6 +126,58 @@ public class GPDBWritableTest {
         assertFalse(gpdbWritable.isEmpty()); // len > 0
     }
 
+    @Test
+    public void testGetType() {
+        String typeName = GPDBWritable.getTypeName(-1);
+        assertEquals(typeName, DataType.TEXT.name());
+
+        typeName = GPDBWritable.getTypeName(-7777);
+        assertEquals(typeName, DataType.TEXT.name());
+
+        typeName = GPDBWritable.getTypeName(DataType.BOOLEAN.getOID());
+        assertEquals(typeName, DataType.BOOLEAN.name());
+
+        typeName = GPDBWritable.getTypeName(DataType.BYTEA.getOID());
+        assertEquals(typeName, DataType.BYTEA.name());
+
+        typeName = GPDBWritable.getTypeName(DataType.BIGINT.getOID());
+        assertEquals(typeName, DataType.BIGINT.name());
+
+        typeName = GPDBWritable.getTypeName(DataType.SMALLINT.getOID());
+        assertEquals(typeName, DataType.SMALLINT.name());
+
+        typeName = GPDBWritable.getTypeName(DataType.INTEGER.getOID());
+        assertEquals(typeName, DataType.INTEGER.name());
+
+        typeName = GPDBWritable.getTypeName(DataType.TEXT.getOID());
+        assertEquals(typeName, DataType.TEXT.name());
+
+        typeName = GPDBWritable.getTypeName(DataType.REAL.getOID());
+        assertEquals(typeName, DataType.REAL.name());
+
+        typeName = GPDBWritable.getTypeName(DataType.FLOAT8.getOID());
+        assertEquals(typeName, DataType.FLOAT8.name());
+
+        typeName = GPDBWritable.getTypeName(DataType.BPCHAR.getOID());
+        assertEquals(typeName, DataType.BPCHAR.name());
+
+        typeName = GPDBWritable.getTypeName(DataType.VARCHAR.getOID());
+        assertEquals(typeName, DataType.VARCHAR.name());
+
+        typeName = GPDBWritable.getTypeName(DataType.DATE.getOID());
+        assertEquals(typeName, DataType.DATE.name());
+
+        typeName = GPDBWritable.getTypeName(DataType.TIME.getOID());
+        assertEquals(typeName, DataType.TIME.name());
+
+        typeName = GPDBWritable.getTypeName(DataType.TIMESTAMP.getOID());
+        assertEquals(typeName, DataType.TIMESTAMP.name());
+
+        typeName = GPDBWritable.getTypeName(DataType.NUMERIC.getOID());
+        assertEquals(typeName, DataType.NUMERIC.name());
+
+    }
+
 
     /*
      * helpers functions

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/b0cc5556/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/utilities/ProtocolDataTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/utilities/ProtocolDataTest.java b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/utilities/ProtocolDataTest.java
index 8a257ff..09efe81 100644
--- a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/utilities/ProtocolDataTest.java
+++ b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/utilities/ProtocolDataTest.java
@@ -332,6 +332,84 @@ public class ProtocolDataTest {
         }
     }
 
+    @Test
+    public void typeMods() {
+
+        parameters.put("X-GP-ATTRS", "2");
+        parameters.put("X-GP-ATTR-NAME0", "vc1");
+        parameters.put("X-GP-ATTR-TYPECODE0", "1043");
+        parameters.put("X-GP-ATTR-TYPENAME0", "varchar");
+        parameters.put("X-GP-ATTR-TYPEMOD0-COUNT", "1");
+        parameters.put("X-GP-ATTR-TYPEMOD0-0", "5");
+
+        parameters.put("X-GP-ATTR-NAME1", "dec1");
+        parameters.put("X-GP-ATTR-TYPECODE1", "1700");
+        parameters.put("X-GP-ATTR-TYPENAME1", "numeric");
+        parameters.put("X-GP-ATTR-TYPEMOD1-COUNT", "2");
+        parameters.put("X-GP-ATTR-TYPEMOD1-0", "10");
+        parameters.put("X-GP-ATTR-TYPEMOD1-1", "2");
+
+        ProtocolData protocolData = new ProtocolData(parameters);
+
+        assertEquals(protocolData.getColumn(0).columnTypeModifiers(), new Integer[]{5});
+        assertEquals(protocolData.getColumn(1).columnTypeModifiers(), new Integer[]{10, 2});
+    }
+
+    @Test
+    public void typeModsNegative() {
+
+        parameters.put("X-GP-ATTRS", "1");
+        parameters.put("X-GP-ATTR-NAME0", "vc1");
+        parameters.put("X-GP-ATTR-TYPECODE0", "1043");
+        parameters.put("X-GP-ATTR-TYPENAME0", "varchar");
+        parameters.put("X-GP-ATTR-TYPEMOD0-COUNT", "X");
+        parameters.put("X-GP-ATTR-TYPEMOD0-0", "Y");
+
+
+        try {
+            ProtocolData protocolData = new ProtocolData(parameters);
+            fail("should throw IllegalArgumentException when bad value received for X-GP-ATTR-TYPEMOD0-COUNT");
+        } catch (IllegalArgumentException iae) {
+            assertEquals(
+                    "ATTR-TYPEMOD0-COUNT must be a positive integer",
+                    iae.getMessage());
+        }
+
+        parameters.put("X-GP-ATTR-TYPEMOD0-COUNT", "-1");
+
+        try {
+            ProtocolData protocolData = new ProtocolData(parameters);
+            fail("should throw IllegalArgumentException when negative value received for X-GP-ATTR-TYPEMOD0-COUNT");
+        } catch (IllegalArgumentException iae) {
+            assertEquals(
+                    "ATTR-TYPEMOD0-COUNT cann't be negative",
+                    iae.getMessage());
+        }
+
+        parameters.put("X-GP-ATTR-TYPEMOD0-COUNT", "1");
+
+        try {
+            ProtocolData protocolData = new ProtocolData(parameters);
+            fail("should throw IllegalArgumentException when bad value received for X-GP-ATTR-TYPEMOD0-0");
+        } catch (IllegalArgumentException iae) {
+            assertEquals(
+                    "ATTR-TYPEMOD0-0 must be a positive integer",
+                    iae.getMessage());
+        }
+
+        parameters.put("X-GP-ATTR-TYPEMOD0-COUNT", "2");
+        parameters.put("X-GP-ATTR-TYPEMOD0-0", "42");
+
+        try {
+            ProtocolData protocolData = new ProtocolData(parameters);
+            fail("should throw IllegalArgumentException number of actual type modifiers is less than X-GP-ATTR-TYPEMODX-COUNT");
+        } catch (IllegalArgumentException iae) {
+            assertEquals(
+                    "Internal server error. Property \"ATTR-TYPEMOD0-1\" has no value in current request",
+                    iae.getMessage());
+        }
+    }
+
     /*
      * setUp function called before each test
      */

