hawq-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From odiache...@apache.org
Subject incubator-hawq git commit: HAWQ-703. Serialize HCatalog Complex Types to plain text (as Hive profile). [Forced Update!]
Date Fri, 29 Apr 2016 22:32:59 GMT
Repository: incubator-hawq
Updated Branches:
  refs/heads/HAWQ-703 fcea48425 -> c686075f5 (forced update)


HAWQ-703. Serialize HCatalog Complex Types to plain text (as Hive profile).


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/c686075f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/c686075f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/c686075f

Branch: refs/heads/HAWQ-703
Commit: c686075f50a561ba64f87a0aa5babd84b85b0703
Parents: 649828f
Author: Oleksandr Diachenko <odiachenko@pivotal.io>
Authored: Fri Apr 22 16:34:42 2016 -0700
Committer: Oleksandr Diachenko <odiachenko@pivotal.io>
Committed: Fri Apr 29 15:32:48 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/hawq/pxf/api/Metadata.java  |  38 +++---
 .../hawq/pxf/api/utilities/EnumHawqType.java    | 105 ++++++++++++++++
 .../org/apache/hawq/pxf/api/MetadataTest.java   |  49 ++++++++
 .../hive/utilities/EnumHiveToHawqType.java      | 111 ++++++++++++++++
 .../plugins/hive/utilities/HiveUtilities.java   | 125 +++++++------------
 .../plugins/hive/HiveMetadataFetcherTest.java   |  12 +-
 .../hive/utilities/HiveUtilitiesTest.java       |  54 ++++++--
 .../hawq/pxf/service/MetadataResponse.java      |   6 +-
 .../pxf/service/MetadataResponseFormatter.java  |   3 +-
 .../service/MetadataResponseFormatterTest.java  |  60 ++++++---
 src/backend/catalog/external/externalmd.c       |   4 +
 src/backend/utils/adt/pxf_functions.c           |   9 +-
 src/bin/psql/describe.c                         |  32 ++++-
 src/include/catalog/external/itemmd.h           |   3 +
 src/include/catalog/pg_proc.h                   |   4 +-
 src/include/catalog/pg_proc.sql                 |   2 +-
 16 files changed, 473 insertions(+), 144 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
index 4fc510d..4cb7104 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
@@ -23,6 +23,7 @@ package org.apache.hawq.pxf.api;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
 import org.apache.commons.lang.StringUtils;
 
 /**
@@ -67,36 +68,43 @@ public class Metadata {
     }
 
     /**
-     * Class representing item field - name and type.
+     * Class representing item field - name, type, source type, modifiers.
+     * Type - exposed type of field
+     * Source type - type of field in underlying source
+     * Modifiers - additional attributes which describe type or field
      */
     public static class Field {
         private String name;
-        private String type; // TODO: change to enum
+        private EnumHawqType type; // field type which PXF exposes
+        private String sourceType; // field type PXF reads from
         private String[] modifiers; // type modifiers, optional field
 
-        public Field(String name, String type) {
-
-            if (StringUtils.isBlank(name) || StringUtils.isBlank(type)) {
-                throw new IllegalArgumentException("Field name and type cannot be empty");
-            }
-
-            this.name = name;
-            this.type = type;
+    public Field(String name, EnumHawqType type, String sourceType) {
+        if (StringUtils.isBlank(name) || StringUtils.isBlank(type.getTypeName())
+                || StringUtils.isBlank(sourceType)) {
+            throw new IllegalArgumentException("Field name, type and source type cannot be empty");
         }
-
-        public Field(String name, String type, String[] modifiers) {
-            this(name, type);
-            this.modifiers = modifiers;
+        this.name = name;
+        this.type = type;
+        this.sourceType = sourceType;
+    }
+    public Field(String name, EnumHawqType type, String sourceType, String[] modifiers) {
+        this(name, type, sourceType);
+        this.modifiers = modifiers;
         }
 
         public String getName() {
             return name;
         }
 
-        public String getType() {
+        public EnumHawqType getType() {
             return type;
         }
 
+        public String getSourceType() {
+            return sourceType;
+        }
+
         public String[] getModifiers() {
             return modifiers;
         }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java
new file mode 100644
index 0000000..e0fec44
--- /dev/null
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hawq.pxf.api.utilities;
+
+import java.io.IOException;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.map.JsonSerializer;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+import org.codehaus.jackson.map.SerializerProvider;
+import org.codehaus.jackson.JsonProcessingException;
+
+class EnumHawqTypeSerializer extends JsonSerializer<EnumHawqType> {
+
+    @Override
+    public void serialize(EnumHawqType value, JsonGenerator generator,
+              SerializerProvider provider) throws IOException,
+              JsonProcessingException {
+      generator.writeString(value.getTypeName());
+    }
+  }
+
+/**
+ * 
+ * HAWQ types which could be used in plugins.
+ *
+ */
+@JsonSerialize(using = EnumHawqTypeSerializer.class)
+public enum EnumHawqType {
+    Int2Type("int2"),
+    Int4Type("int4"),
+    Int8Type("int8"),
+    Float4Type("float4"),
+    Float8Type("float8"),
+    TextType("text"),
+    VarcharType("varchar", (byte) 1, true),
+    ByteaType("bytea"),
+    DateType("date"),
+    TimestampType("timestamp"),
+    BoolType("bool"),
+    NumericType("numeric", (byte) 2, true),
+    BpcharType("bpchar", (byte) 1, true);
+
+    private String typeName;
+    private byte modifiersNum;
+    private boolean isInteger;
+
+    EnumHawqType(String typeName) {
+        this.typeName = typeName;
+    }
+
+    EnumHawqType(String typeName, byte modifiersNum) {
+        this(typeName);
+        this.modifiersNum = modifiersNum;
+    }
+
+    EnumHawqType(String typeName, byte modifiersNum, boolean isInteger) {
+        this(typeName);
+        this.modifiersNum = modifiersNum;
+        this.isInteger = isInteger;
+    }
+
+    /**
+     * 
+     * @return name of type
+     */
+    public String getTypeName() {
+        return this.typeName;
+    }
+
+    /**
+     * 
+     * @return number of modifiers for type
+     */
+    public byte getModifiersNum() {
+        return this.modifiersNum;
+    }
+
+    /**
+     * 
+     * @return whether modifiers should be integers
+     */
+    public boolean isInteger() {
+        return this.isInteger;
+    }
+}
+
+
+

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java
new file mode 100644
index 0000000..d6e94e1
--- /dev/null
+++ b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hawq.pxf.api;
+
+import org.apache.hawq.pxf.api.Metadata;
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+import org.junit.Test;
+
+public class MetadataTest {
+
+    @Test
+    public void createFieldEmptyNameType() {
+        try {
+            Metadata.Field field = new Metadata.Field(null, null, null, null);
+            fail("Empty name, type and source type shouldn't be allowed.");
+        } catch (IllegalArgumentException e) {
+            assertEquals("Field name, type and source type cannot be empty", e.getMessage());
+        }
+    }
+
+    @Test
+    public void createItemEmptyNameType() {
+        try {
+            Metadata.Item item = new Metadata.Item(null, null);
+            fail("Empty item name and path shouldn't be allowed.");
+        } catch (IllegalArgumentException e) {
+            assertEquals("Item or path name cannot be empty", e.getMessage());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
new file mode 100644
index 0000000..1e35a86
--- /dev/null
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hawq.pxf.plugins.hive.utilities;
+
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
+import org.apache.hawq.pxf.api.UnsupportedTypeException;
+
+/**
+ * 
+ * Hive types, which are supported by plugin, mapped to HAWQ's types
+ * @see EnumHawqType
+ */
+public enum EnumHiveToHawqType {
+
+    TinyintType("tinyint", EnumHawqType.Int2Type),
+    SmallintType("smallint", EnumHawqType.Int2Type),
+    IntType("int", EnumHawqType.Int4Type),
+    BigintType("bigint", EnumHawqType.Int8Type),
+    BooleanType("boolean", EnumHawqType.BoolType),
+    FloatType("float", EnumHawqType.Float4Type),
+    DoubleType("double", EnumHawqType.Float8Type),
+    StringType("string", EnumHawqType.TextType),
+    BinaryType("binary", EnumHawqType.ByteaType),
+    TimestampType("timestamp", EnumHawqType.TimestampType),
+    DateType("date", EnumHawqType.DateType),
+    DecimalType("decimal", EnumHawqType.NumericType, "[(,)]"),
+    VarcharType("varchar", EnumHawqType.VarcharType, "[(,)]"),
+    CharType("char", EnumHawqType.BpcharType, "[(,)]"),
+    ArrayType("array", EnumHawqType.TextType, "[<,>]"),
+    MapType("map", EnumHawqType.TextType, "[<,>]"),
+    StructType("struct", EnumHawqType.TextType, "[<,>]");
+
+    private String typeName;
+    private EnumHawqType hawqType;
+    private String splitExpression;
+
+    EnumHiveToHawqType(String typeName, EnumHawqType hawqType) {
+        this.typeName = typeName;
+        this.hawqType = hawqType;
+    }
+
+    EnumHiveToHawqType(String typeName, EnumHawqType hawqType, String splitExpression) {
+        this(typeName, hawqType);
+        this.splitExpression = splitExpression;
+    }
+
+    /**
+     * 
+     * @return name of type
+     */
+    public String getTypeName() {
+        return this.typeName;
+    }
+
+    /**
+     * 
+     * @return corresponding HAWQ type
+     */
+    public EnumHawqType getHawqType() {
+        return this.hawqType;
+    }
+
+    /**
+     * 
+     * @return split by expression
+     */
+    public String getSplitExpression() {
+        return this.splitExpression;
+    }
+
+    /**
+     * Returns Hive to HAWQ type mapping entry for given Hive type 
+     * 
+     * @param hiveType full Hive type with modifiers, for example - decimal(10, 0), char(5), binary, array<string>, map<string,float> etc
+     * @return corresponding Hive to HAWQ type mapping entry
+     * @throws UnsupportedTypeException if there is no corresponding HAWQ type
+     */
+    public static EnumHiveToHawqType getHiveToHawqType(String hiveType) {
+        for (EnumHiveToHawqType t : values()) {
+            String hiveTypeName = hiveType;
+            if (t.getSplitExpression() != null) {
+                String[] tokens = hiveType.split(t.getSplitExpression());
+                hiveTypeName = tokens[0];
+            }
+
+            if (t.getTypeName().equals(hiveTypeName)) {
+                return t;
+            }
+        }
+        throw new UnsupportedTypeException("Unable to map Hive's type: "
+                + hiveType + " to HAWQ's type");
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
index 7dfe410..c89fe63 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
@@ -20,9 +20,12 @@ package org.apache.hawq.pxf.plugins.hive.utilities;
  */
 
 
+import java.util.Arrays;
 import java.util.List;
 import java.util.ArrayList;
 
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
+import org.apache.hawq.pxf.plugins.hive.utilities.EnumHiveToHawqType;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -32,7 +35,6 @@ import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
-
 import org.apache.hawq.pxf.api.Metadata;
 import org.apache.hawq.pxf.api.UnsupportedTypeException;
 
@@ -88,94 +90,57 @@ public class HiveUtilities {
      * Unsupported types will result in an exception.
      * <br>
      * The supported mappings are:<ul>
-     * <li>{@code tinyint -> int2}</li>
-     * <li>{@code smallint -> int2}</li>
-     * <li>{@code int -> int4}</li>
-     * <li>{@code bigint -> int8}</li>
-     * <li>{@code boolean -> bool}</li>
-     * <li>{@code float -> float4}</li>
-     * <li>{@code double -> float8}</li>
-     * <li>{@code string -> text}</li>
-     * <li>{@code binary -> bytea}</li>
-     * <li>{@code timestamp -> timestamp}</li>
-     * <li>{@code date -> date}</li>
-     * <li>{@code decimal(precision, scale) -> numeric(precision, scale)}</li>
-     * <li>{@code varchar(size) -> varchar(size)}</li>
-     * <li>{@code char(size) -> bpchar(size)}</li>
+         * <li>{@code tinyint -> int2}</li>
+         * <li>{@code smallint -> int2}</li>
+         * <li>{@code int -> int4}</li>
+         * <li>{@code bigint -> int8}</li>
+         * <li>{@code boolean -> bool}</li>
+         * <li>{@code float -> float4}</li>
+         * <li>{@code double -> float8}</li>
+         * <li>{@code string -> text}</li>
+         * <li>{@code binary -> bytea}</li>
+         * <li>{@code timestamp -> timestamp}</li>
+         * <li>{@code date -> date}</li>
+         * <li>{@code decimal(precision, scale) -> numeric(precision, scale)}</li>
+         * <li>{@code varchar(size) -> varchar(size)}</li>
+         * <li>{@code char(size) -> bpchar(size)}</li>
      * </ul>
      *
      * @param hiveColumn hive column schema
      * @return field with mapped HAWQ type and modifiers
      * @throws UnsupportedTypeException if the column type is not supported
+     * @see EnumHiveToHawqType
      */
     public static Metadata.Field mapHiveType(FieldSchema hiveColumn) throws UnsupportedTypeException {
         String fieldName = hiveColumn.getName();
-        String hiveType = hiveColumn.getType();
-        String mappedType;
-        String[] modifiers = null;
-
-        // check parameterized types:
-        if (hiveType.startsWith("varchar(") ||
-                hiveType.startsWith("char(")) {
-            String[] toks = hiveType.split("[(,)]");
-            if (toks.length != 2) {
-                throw new UnsupportedTypeException( "HAWQ does not support type " + hiveType + " (Field " + fieldName + "), " +
-                        "expected type of the form <type name>(<parameter>)");
-            }
-            mappedType = toks[0];
-            if (mappedType.equals("char")) {
-                mappedType = "bpchar";
-            }
-            modifiers = new String[] {toks[1]};
-        } else if (hiveType.startsWith("decimal(")) {
-            String[] toks = hiveType.split("[(,)]");
-            if (toks.length != 3) {
-                throw new UnsupportedTypeException( "HAWQ does not support type " + hiveType + " (Field " + fieldName + "), " +
-                        "expected type of the form <type name>(<parameter>,<parameter>)");
+        String hiveType = hiveColumn.getType(); // Type name and modifiers if any
+        String hiveTypeName; // Type name
+        String[] modifiers = null; // Modifiers
+        EnumHiveToHawqType hiveToHawqType = EnumHiveToHawqType.getHiveToHawqType(hiveType);
+        EnumHawqType hawqType = hiveToHawqType.getHawqType();
+
+        if (hiveToHawqType.getSplitExpression() != null) {
+            String[] tokens = hiveType.split(hiveToHawqType.getSplitExpression());
+            hiveTypeName = tokens[0];
+            if (hawqType.getModifiersNum() > 0) {
+                modifiers = Arrays.copyOfRange(tokens, 1, tokens.length);
+                if (modifiers.length != hawqType.getModifiersNum()) {
+                    throw new UnsupportedTypeException(
+                            "HAWQ does not support type " + hiveType
+                                    + " (Field " + fieldName + "), "
+                                    + "expected number of modifiers: "
+                                    + hawqType.getModifiersNum()
+                                    + ", actual number of modifiers: "
+                                    + modifiers.length);
+                }
+                if (hawqType.isInteger() && !verifyIntegerModifers(modifiers)) {
+                    throw new UnsupportedTypeException("HAWQ does not support type " + hiveType + " (Field " + fieldName + "), modifiers should be integers");
+                }
             }
-            mappedType = "numeric";
-            modifiers = new String[] {toks[1], toks[2]};
-        } else {
+        } else
+            hiveTypeName = hiveType;
 
-            switch (hiveType) {
-            case "tinyint":
-            case "smallint":
-            	mappedType = "int2";
-            	break;
-            case "int":
-            	mappedType = "int4";
-            	break;
-            case "bigint":
-            	mappedType = "int8";
-            	break;
-            case "boolean":
-            	mappedType = "bool";
-            	break;
-            case "timestamp":
-            case "date":
-                mappedType = hiveType;
-                break;
-            case "float":
-                mappedType = "float4";
-                break;
-            case "double":
-                mappedType = "float8";
-                break;
-            case "string":
-                mappedType = "text";
-                break;
-            case "binary":
-                mappedType = "bytea";
-                break;
-            default:
-                throw new UnsupportedTypeException(
-                        "HAWQ does not support type " + hiveType + " (Field " + fieldName + ")");
-            }
-        }
-        if (!verifyModifers(modifiers)) {
-            throw new UnsupportedTypeException("HAWQ does not support type " + hiveType + " (Field " + fieldName + "), modifiers should be integers");
-        }
-        return new Metadata.Field(fieldName, mappedType, modifiers);
+        return new Metadata.Field(fieldName, hawqType, hiveTypeName, modifiers);
     }
 
     /**
@@ -186,7 +151,7 @@ public class HiveUtilities {
      * @param modifiers type modifiers to be verified
      * @return whether modifiers are null or integers
      */
-    private static boolean verifyModifers(String[] modifiers) {
+    private static boolean verifyIntegerModifers(String[] modifiers) {
         if (modifiers == null) {
             return true;
         }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
index 1323eea..d9d97fc 100644
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
@@ -149,10 +149,10 @@ public class HiveMetadataFetcherTest {
         assertEquals(2, resultFields.size());
         Metadata.Field field = resultFields.get(0);
         assertEquals("field1", field.getName());
-        assertEquals("text", field.getType()); // converted type
+        assertEquals("text", field.getType().getTypeName()); // converted type
         field = resultFields.get(1);
         assertEquals("field2", field.getName());
-        assertEquals("int4", field.getType());
+        assertEquals("int4", field.getType().getTypeName());
     }
 
     @Test
@@ -204,10 +204,10 @@ public class HiveMetadataFetcherTest {
             assertEquals(2, resultFields.size());
             Metadata.Field field = resultFields.get(0);
             assertEquals("field1", field.getName());
-            assertEquals("text", field.getType()); // converted type
+            assertEquals("text", field.getType().getTypeName()); // converted type
             field = resultFields.get(1);
             assertEquals("field2", field.getName());
-            assertEquals("int4", field.getType());
+            assertEquals("int4", field.getType().getTypeName());
         }
     }
 
@@ -258,10 +258,10 @@ public class HiveMetadataFetcherTest {
         assertEquals(2, resultFields.size());
         Metadata.Field field = resultFields.get(0);
         assertEquals("field1", field.getName());
-        assertEquals("text", field.getType()); // converted type
+        assertEquals("text", field.getType().getTypeName()); // converted type
         field = resultFields.get(1);
         assertEquals("field2", field.getName());
-        assertEquals("int4", field.getType());
+        assertEquals("int4", field.getType().getTypeName());
     }
 
     private void prepareConstruction() throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
index 466dedb..1054d0d 100644
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
@@ -24,9 +24,9 @@ import static org.junit.Assert.*;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.junit.Test;
-
 import org.apache.hawq.pxf.api.Metadata;
 import org.apache.hawq.pxf.api.UnsupportedTypeException;
+import org.apache.hawq.pxf.plugins.hive.utilities.EnumHiveToHawqType;
 
 public class HiveUtilitiesTest {
 
@@ -54,17 +54,22 @@ public class HiveUtilitiesTest {
         {"char(40)", "bpchar", "40"},
     };
 
+    static String[][] complexTypes = {
+        {"array<string>", "text"},
+        {"map<string,float>", "text"},
+        {"struct<street:string,city:string,state:string,zip:int>", "text"},
+    };
+
     @Test
     public void mapHiveTypeUnsupported() throws Exception {
 
-        hiveColumn = new FieldSchema("complex", "array", null);
+        hiveColumn = new FieldSchema("complex", "someTypeWeDontSupport", null);
 
         try {
             HiveUtilities.mapHiveType(hiveColumn);
             fail("unsupported type");
         } catch (UnsupportedTypeException e) {
-            assertEquals("HAWQ does not support type " + hiveColumn.getType() + " (Field " + hiveColumn.getName() + ")",
-                    e.getMessage());
+            assertEquals("Unable to map Hive's type: " + hiveColumn.getType() + " to HAWQ's type", e.getMessage());
         }
     }
 
@@ -85,11 +90,11 @@ public class HiveUtilitiesTest {
          */
         for (String[] line: typesMappings) {
             String hiveType = line[0];
-            String expectedType = line[1];
+            String hawqTypeName = line[1];
             hiveColumn = new FieldSchema("field" + hiveType, hiveType, null);
             Metadata.Field result = HiveUtilities.mapHiveType(hiveColumn);
             assertEquals("field" + hiveType, result.getName());
-            assertEquals(expectedType, result.getType());
+            assertEquals(hawqTypeName, result.getType().getTypeName());
             assertNull(result.getModifiers());
         }
     }
@@ -109,7 +114,7 @@ public class HiveUtilitiesTest {
             hiveColumn = new FieldSchema("field" + hiveType, hiveType, null);
             Metadata.Field result = HiveUtilities.mapHiveType(hiveColumn);
             assertEquals("field" + hiveType, result.getName());
-            assertEquals(expectedType, result.getType());
+            assertEquals(expectedType, result.getType().getTypeName());
             assertArrayEquals(expectedModifiers, result.getModifiers());
         }
     }
@@ -124,7 +129,7 @@ public class HiveUtilitiesTest {
             fail("should fail with bad numeric type error");
         } catch (UnsupportedTypeException e) {
             String errorMsg = "HAWQ does not support type " + badHiveType + " (Field badNumeric), " +
-                "expected type of the form <type name>(<parameter>,<parameter>)";
+                "expected number of modifiers: 2, actual number of modifiers: 1";
             assertEquals(errorMsg, e.getMessage());
         }
 
@@ -135,7 +140,7 @@ public class HiveUtilitiesTest {
             fail("should fail with bad char type error");
         } catch (UnsupportedTypeException e) {
             String errorMsg = "HAWQ does not support type " + badHiveType + " (Field badChar), " +
-                "expected type of the form <type name>(<parameter>)";
+                    "expected number of modifiers: 1, actual number of modifiers: 3";
             assertEquals(errorMsg, e.getMessage());
         }
 
@@ -152,6 +157,37 @@ public class HiveUtilitiesTest {
     }
 
     @Test
+    public void mapHiveTypeInvalidModifiers() throws Exception {
+        String badHiveType = "decimal(abc, xyz)";
+        hiveColumn = new FieldSchema("numericColumn", badHiveType, null);
+        try {
+            HiveUtilities.mapHiveType(hiveColumn);
+            fail("should fail with bad modifiers error");
+        } catch (UnsupportedTypeException e) {
+            String errorMsg = "HAWQ does not support type " + badHiveType + " (Field numericColumn), modifiers should be integers";
+            assertEquals(errorMsg, e.getMessage());
+        }
+    }
+
+    @Test
+    public void mapHiveTypeComplex() throws Exception {
+        /*
+         * array<dataType> -> text
+         * map<keyDataType, valueDataType> -> text
+         * struct<fieldName1:dataType, ..., fieldNameN:dataType> -> text
+         */
+        for (String[] line: complexTypes) {
+            String hiveType = line[0];
+            String expectedType = line[1];
+            hiveColumn = new FieldSchema("field" + hiveType, hiveType, null);
+            Metadata.Field result = HiveUtilities.mapHiveType(hiveColumn);
+            assertEquals("field" + hiveType, result.getName());
+            assertEquals(expectedType, result.getType().getTypeName());
+            assertNull(result.getModifiers());
+        }
+    }
+
+    @Test
     public void parseTableQualifiedNameNoDbName() throws Exception {
         String name = "orphan";
         tblDesc = HiveUtilities.extractTableFromName(name);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java
index ff73499..741e201 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java
@@ -22,9 +22,9 @@ package org.apache.hawq.pxf.service;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
+
 import java.util.List;
 
-import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.StreamingOutput;
 
 import org.apache.commons.logging.Log;
@@ -61,10 +61,10 @@ public class MetadataResponse implements StreamingOutput {
      * Serializes the metadata list in JSON, To be used as the result string for HAWQ.
      */
     @Override
-    public void write(OutputStream output) throws IOException,
-            WebApplicationException {
+    public void write(OutputStream output) throws IOException {
         DataOutputStream dos = new DataOutputStream(output);
         ObjectMapper mapper = new ObjectMapper();
+        mapper.configure(org.codehaus.jackson.map.SerializationConfig.Feature.USE_ANNOTATIONS, true); // enable annotations for serialization
         mapper.setSerializationInclusion(Inclusion.NON_EMPTY); // ignore empty fields
 
         if(metadataList == null || metadataList.isEmpty()) {

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
index 92d11de..025797b 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
@@ -84,7 +84,8 @@ public class MetadataResponseFormatter {
                 for (Metadata.Field field : metadata.getFields()) {
                     result.append("Field #").append(++i).append(": [")
                             .append("Name: ").append(field.getName())
-                            .append(", Type: ").append(field.getType()).append("] ");
+                            .append(", Type: ").append(field.getType().getTypeName())
+                            .append(", Source type: ").append(field.getSourceType()).append("] ");
                 }
             }
             LOG.debug(result);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
index 0182835..6269bb9 100644
--- a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
+++ b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
@@ -25,8 +25,9 @@ import java.util.ArrayList;
 import java.util.List;
 
 import static org.junit.Assert.*;
-import org.apache.hawq.pxf.api.Metadata;
 
+import org.apache.hawq.pxf.api.Metadata;
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
 import org.junit.Test;
 
 public class MetadataResponseFormatterTest {
@@ -49,14 +50,14 @@ public class MetadataResponseFormatterTest {
         List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
         Metadata.Item itemName = new Metadata.Item("default", "table1");
         Metadata metadata = new Metadata(itemName, fields);
-        fields.add(new Metadata.Field("field1", "int"));
-        fields.add(new Metadata.Field("field2", "text"));
+        fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type, "bigint"));
+        fields.add(new Metadata.Field("field2", EnumHawqType.TextType, "string"));
         metadataList.add(metadata);
 
         response = MetadataResponseFormatter.formatResponse(metadataList, "path.file");
         StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
         expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
-                .append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}]}");
+                .append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\"}]}]}");
 
         assertEquals(expected.toString(), convertResponseToString(response));
     }
@@ -67,14 +68,14 @@ public class MetadataResponseFormatterTest {
         List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
         Metadata.Item itemName = new Metadata.Item("default", "table1");
         Metadata metadata = new Metadata(itemName, fields);
-        fields.add(new Metadata.Field("field1", "int", null));
-        fields.add(new Metadata.Field("field2", "text", new String[] {}));
+        fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type, "bigint", null));
+        fields.add(new Metadata.Field("field2", EnumHawqType.TextType, "string", new String[] {}));
         metadataList.add(metadata);
 
         response = MetadataResponseFormatter.formatResponse(metadataList, "path.file");
         StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
         expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
-                .append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}]}");
+                .append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\"}]}]}");
 
         assertEquals(expected.toString(), convertResponseToString(response));
     }
@@ -85,10 +86,10 @@ public class MetadataResponseFormatterTest {
         List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
         Metadata.Item itemName = new Metadata.Item("default", "table1");
         Metadata metadata = new Metadata(itemName, fields);
-        fields.add(new Metadata.Field("field1", "int"));
-        fields.add(new Metadata.Field("field2", "numeric",
+        fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type, "bigint"));
+        fields.add(new Metadata.Field("field2", EnumHawqType.NumericType, "decimal",
                 new String[] {"1349", "1789"}));
-        fields.add(new Metadata.Field("field3", "char",
+        fields.add(new Metadata.Field("field3", EnumHawqType.BpcharType, "char",
                 new String[] {"50"}));
         metadataList.add(metadata);
 
@@ -96,15 +97,34 @@ public class MetadataResponseFormatterTest {
         StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
         expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
                 .append("\"fields\":[")
-                .append("{\"name\":\"field1\",\"type\":\"int\"},")
-                .append("{\"name\":\"field2\",\"type\":\"numeric\",\"modifiers\":[\"1349\",\"1789\"]},")
-                .append("{\"name\":\"field3\",\"type\":\"char\",\"modifiers\":[\"50\"]}")
+                .append("{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},")
+                .append("{\"name\":\"field2\",\"type\":\"numeric\",\"sourceType\":\"decimal\",\"modifiers\":[\"1349\",\"1789\"]},")
+                .append("{\"name\":\"field3\",\"type\":\"bpchar\",\"sourceType\":\"char\",\"modifiers\":[\"50\"]}")
                 .append("]}]}");
 
         assertEquals(expected.toString(), convertResponseToString(response));
     }
 
     @Test
+    public void formatResponseStringWithSourceType() throws Exception {
+        List<Metadata> metadataList = new ArrayList<Metadata>();
+        List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
+        Metadata.Item itemName = new Metadata.Item("default", "table1");
+        Metadata metadata = new Metadata(itemName, fields);
+        fields.add(new Metadata.Field("field1", EnumHawqType.Float8Type, "double"));
+        metadataList.add(metadata);
+
+        response = MetadataResponseFormatter.formatResponse(metadataList, "path.file");
+        StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
+        expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
+                .append("\"fields\":[")
+                .append("{\"name\":\"field1\",\"type\":\"float8\",\"sourceType\":\"double\"}")
+                .append("]}]}");
+        // Assert the sourceType attribute is serialized; a commented-out assertion would make this test vacuous.
+        assertEquals(expected.toString(), convertResponseToString(response));
+    }
+
+    @Test
     public void formatResponseStringNull() throws Exception {
         List<Metadata> metadataList = null;
         response = MetadataResponseFormatter.formatResponse(metadataList, "path.file");
@@ -146,7 +166,7 @@ public class MetadataResponseFormatterTest {
         List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
         Metadata.Item itemName = new Metadata.Item("default", "table1");
         Metadata metadata = new Metadata(itemName, fields);
-        fields.add(new Metadata.Field("field1", "int"));
+        fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type, "bigint"));
         metadataList.add(null);
         metadataList.add(metadata);
         try {
@@ -165,8 +185,8 @@ public class MetadataResponseFormatterTest {
             List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
             Metadata.Item itemName = new Metadata.Item("default", "table"+i);
             Metadata metadata = new Metadata(itemName, fields);
-            fields.add(new Metadata.Field("field1", "int"));
-            fields.add(new Metadata.Field("field2", "text"));
+            fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type, "bigint"));
+            fields.add(new Metadata.Field("field2", EnumHawqType.TextType, "string"));
             metdataList.add(metadata);
         }
         response = MetadataResponseFormatter.formatResponse(metdataList, "path.file");
@@ -179,7 +199,7 @@ public class MetadataResponseFormatterTest {
                 expected.append(",");
             }
             expected.append("{\"item\":{\"path\":\"default\",\"name\":\"table").append(i).append("\"},");
-            expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}");
+            expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\"}]}");
         }
         expected.append("]}");
 
@@ -193,8 +213,8 @@ public class MetadataResponseFormatterTest {
             List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
             Metadata.Item itemName = new Metadata.Item("default"+i, "table"+i);
             Metadata metadata = new Metadata(itemName, fields);
-            fields.add(new Metadata.Field("field1", "int"));
-            fields.add(new Metadata.Field("field2", "text"));
+            fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type, "bigint"));
+            fields.add(new Metadata.Field("field2", EnumHawqType.TextType, "string"));
             metdataList.add(metadata);
         }
         response = MetadataResponseFormatter.formatResponse(metdataList, "path.file");
@@ -206,7 +226,7 @@ public class MetadataResponseFormatterTest {
                 expected.append(",");
             }
             expected.append("{\"item\":{\"path\":\"default").append(i).append("\",\"name\":\"table").append(i).append("\"},");
-            expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}");
+            expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\"}]}");
         }
         expected.append("]}");
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/src/backend/catalog/external/externalmd.c
----------------------------------------------------------------------
diff --git a/src/backend/catalog/external/externalmd.c b/src/backend/catalog/external/externalmd.c
index e65d741..926605f 100644
--- a/src/backend/catalog/external/externalmd.c
+++ b/src/backend/catalog/external/externalmd.c
@@ -125,6 +125,10 @@ static PxfItem *ParsePxfItem(struct json_object *pxfMD, char* profile)
 
 		struct json_object *fieldType = json_object_object_get(jsonCol, "type");
 		pxfField->type = pstrdup(json_object_get_string(fieldType));
+
+		struct json_object *sourceFieldType = json_object_object_get(jsonCol, "sourceType");
+		pxfField->sourceType = pstrdup(sourceFieldType ? json_object_get_string(sourceFieldType) : ""); /* guard: older PXF responses may omit sourceType */
+
 		pxfField->nTypeModifiers = 0;
 		
 		elog(DEBUG1, "Parsing field %s, type %s", pxfField->name, pxfField->type);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/src/backend/utils/adt/pxf_functions.c
----------------------------------------------------------------------
diff --git a/src/backend/utils/adt/pxf_functions.c b/src/backend/utils/adt/pxf_functions.c
index ee19a8b..806565a 100644
--- a/src/backend/utils/adt/pxf_functions.c
+++ b/src/backend/utils/adt/pxf_functions.c
@@ -86,8 +86,8 @@ Datum pxf_get_item_fields(PG_FUNCTION_ARGS)
 	FuncCallContext *funcctx;
 	HeapTuple tuple;
 	Datum result;
-	Datum values[4];
-	bool nulls[4];
+	Datum values[5];
+	bool nulls[5];
 
 	ItemContext *item_context;
 
@@ -126,7 +126,7 @@ Datum pxf_get_item_fields(PG_FUNCTION_ARGS)
 		 * build tupdesc for result tuples. This must match this function's
 		 * pg_proc entry!
 		 */
-		tupdesc = CreateTemplateTupleDesc(4, false);
+		tupdesc = CreateTemplateTupleDesc(5, false);
 		TupleDescInitEntry(tupdesc, (AttrNumber) 1, "path",
 		TEXTOID, -1, 0);
 		TupleDescInitEntry(tupdesc, (AttrNumber) 2, "itemname",
@@ -135,6 +135,8 @@ Datum pxf_get_item_fields(PG_FUNCTION_ARGS)
 		TEXTOID, -1, 0);
 		TupleDescInitEntry(tupdesc, (AttrNumber) 4, "fieldtype",
 		TEXTOID, -1, 0);
+		TupleDescInitEntry(tupdesc, (AttrNumber) 5, "sourcefieldtype",
+		TEXTOID, -1, 0);
 
 		funcctx->tuple_desc = BlessTupleDesc(tupdesc);
 		MemoryContextSwitchTo(oldcontext);
@@ -169,6 +171,7 @@ Datum pxf_get_item_fields(PG_FUNCTION_ARGS)
 	values[1] = CStringGetTextDatum(item->name);
 	values[2] = CStringGetTextDatum(field->name);
 	values[3] = CStringGetTextDatum(field->type);
+	values[4] = CStringGetTextDatum(field->sourceType);
 
 	tuple = heap_form_tuple(funcctx->tuple_desc, values, nulls);
 	result = HeapTupleGetDatum(tuple);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/src/bin/psql/describe.c
----------------------------------------------------------------------
diff --git a/src/bin/psql/describe.c b/src/bin/psql/describe.c
index f1de41b..ab2aa8b 100644
--- a/src/bin/psql/describe.c
+++ b/src/bin/psql/describe.c
@@ -4263,8 +4263,13 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
 	printQueryOpt myopt = pset.popt;
 	printTableContent cont;
 	int			cols = 0;
+	if (verbose)
+	{
+		cols = 3;
+	} else
+		cols = 2;
 	int			total_numrows = 0;
-	char	   *headers[2];
+	char	   *headers[cols];
 	bool		printTableInitialized = false;
 
 	char *previous_path = NULL;
@@ -4274,11 +4279,15 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
 	char *itemname;
 	char *fieldname;
 	char *fieldtype;
+	char *sourcefieldtype;
 	int total_fields = 0; //needed to know how much memory allocate for current table
 
 	initPQExpBuffer(&buf);
 
-	printfPQExpBuffer(&buf, "SELECT t.*, COUNT() OVER(PARTITION BY path, itemname) as total_fields FROM\n"
+	printfPQExpBuffer(&buf, "SELECT t.path, t.itemname, t.fieldname, t.fieldtype,");
+	if (verbose)
+		appendPQExpBuffer(&buf, " sourcefieldtype, ");
+	appendPQExpBuffer(&buf,"COUNT() OVER(PARTITION BY path, itemname) as total_fields FROM\n"
 			"pxf_get_item_fields('%s', '%s') t\n", profile, pattern);
 
 	res = PSQLexec(buf.data, false);
@@ -4294,7 +4303,9 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
 	/* Header */
 	headers[0] = gettext_noop("Column");
 	headers[1] = gettext_noop("Type");
-	cols = 2;
+	if (verbose)
+		headers[2] = gettext_noop("Source type");
+
 
 	for (int i = 0; i < total_numrows; i++)
 	{
@@ -4303,7 +4314,14 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
 		itemname = PQgetvalue(res, i, 1);
 		fieldname = PQgetvalue(res, i, 2);
 		fieldtype = PQgetvalue(res, i, 3);
-		total_fields = PQgetvalue(res, i, 4);
+		if (verbose)
+		{
+			sourcefieldtype = PQgetvalue(res, i, 4);
+			total_fields = atoi(PQgetvalue(res, i, 5)); /* PQgetvalue returns char *; total_fields is int */
+		} else
+		{
+			total_fields = atoi(PQgetvalue(res, i, 4));
+		}
 
 		/* First row for current table */
 		if (previous_itemname == NULL
@@ -4340,6 +4358,12 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
 		/* Type */
 		printTableAddCell(&cont, fieldtype, false, false);
 
+		if (verbose)
+		{
+			/* Source type */
+			printTableAddCell(&cont, sourcefieldtype, false, false);
+		}
+
 		previous_path = path;
 		previous_itemname = itemname;
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/src/include/catalog/external/itemmd.h
----------------------------------------------------------------------
diff --git a/src/include/catalog/external/itemmd.h b/src/include/catalog/external/itemmd.h
index a841d63..e6dad63 100644
--- a/src/include/catalog/external/itemmd.h
+++ b/src/include/catalog/external/itemmd.h
@@ -41,6 +41,9 @@ typedef struct PxfField
 	/* type name */
 	char *type;
 	
+	/* source type name */
+	char *sourceType;
+
 	/* type modifiers, e.g. max length or precision */
 	int typeModifiers[2];
 	

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/src/include/catalog/pg_proc.h
----------------------------------------------------------------------
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index f3c5e77..e818909 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -10129,8 +10129,8 @@ DESCR("bitmap(internal)");
 DATA(insert OID = 3011 ( bmoptions  PGNSP PGUID 12 f f t f s 2 17 f "1009 16" _null_ _null_ _null_ bmoptions - _null_ n ));
 DESCR("btree(internal)");
 
-/* pxf_get_item_fields(text, text, OUT text, OUT text, OUT text, OUT text) => SETOF pg_catalog.record */
-DATA(insert OID = 9996 ( pxf_get_item_fields  PGNSP PGUID 12 f f t t v 2 2249 f "25 25" "{25,25,25,25,25,25}" "{i,i,o,o,o,o}" "{profile,pattern,path,itemname,fieldname,fieldtype}" pxf_get_item_fields - _null_ r ));
+/* pxf_get_item_fields(text, text, OUT text, OUT text, OUT text, OUT text, OUT text) => SETOF pg_catalog.record */
+DATA(insert OID = 9996 ( pxf_get_item_fields  PGNSP PGUID 12 f f t t v 2 2249 f "25 25" "{25,25,25,25,25,25,25}" "{i,i,o,o,o,o,o}" "{profile,pattern,path,itemname,fieldname,fieldtype,sourcefieldtype}" pxf_get_item_fields - _null_ r ));
 DESCR("Returns the metadata fields of external object from PXF");
 
 /* raises deprecation error */

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/c686075f/src/include/catalog/pg_proc.sql
----------------------------------------------------------------------
diff --git a/src/include/catalog/pg_proc.sql b/src/include/catalog/pg_proc.sql
index fc475e2..987b802 100644
--- a/src/include/catalog/pg_proc.sql
+++ b/src/include/catalog/pg_proc.sql
@@ -5348,7 +5348,7 @@
 
  CREATE FUNCTION bmoptions(_text, bool) RETURNS bytea LANGUAGE internal STABLE STRICT AS 'bmoptions' WITH (OID=3011, DESCRIPTION="btree(internal)");
 
- CREATE FUNCTION pxf_get_item_fields(IN profile text, IN pattern text, OUT path text, OUT itemname text, OUT fieldname text, OUT fieldtype text) RETURNS SETOF pg_catalog.record LANGUAGE internal VOLATILE STRICT AS 'pxf_get_object_fields' WITH (OID=9996, DESCRIPTION="Returns the metadata fields of external object from PXF");
+ CREATE FUNCTION pxf_get_item_fields(IN profile text, IN pattern text, OUT path text, OUT itemname text, OUT fieldname text, OUT fieldtype text, OUT sourcefieldtype text) RETURNS SETOF pg_catalog.record LANGUAGE internal VOLATILE STRICT AS 'pxf_get_object_fields' WITH (OID=9996, DESCRIPTION="Returns the metadata fields of external object from PXF");
 
 -- raises deprecation error
  CREATE FUNCTION gp_deprecated() RETURNS void LANGUAGE internal IMMUTABLE AS 'gp_deprecated' WITH (OID=9997, DESCRIPTION="raises function deprecation error");


Mime
View raw message