hawq-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From odiache...@apache.org
Subject incubator-hawq git commit: HAWQ-703. Serialize HCatalog Complex Types to plain text (as Hive profile). [Forced Update!]
Date Thu, 05 May 2016 22:33:39 GMT
Repository: incubator-hawq
Updated Branches:
  refs/heads/HAWQ-703 bdcb854ad -> a042001ff (forced update)


HAWQ-703. Serialize HCatalog Complex Types to plain text (as Hive profile).


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/a042001f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/a042001f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/a042001f

Branch: refs/heads/HAWQ-703
Commit: a042001ff4212c03e0135d57a5cbf9864e062758
Parents: 6717e7d b357dbc
Author: Oleksandr Diachenko <odiachenko@pivotal.io>
Authored: Mon May 2 13:00:44 2016 -0700
Committer: Oleksandr Diachenko <odiachenko@pivotal.io>
Committed: Thu May 5 15:33:36 2016 -0700

----------------------------------------------------------------------
 depends/libyarn/src/CMakeLists.txt              |   2 +-
 .../libyarn/src/libyarnclient/LibYarnClient.cpp |   6 +-
 pxf/Makefile                                    |   5 +-
 pxf/build.gradle                                |   5 +
 .../java/org/apache/hawq/pxf/api/Metadata.java  |  28 +-
 .../org/apache/hawq/pxf/api/package-info.java   |  23 ++
 .../hawq/pxf/api/utilities/Utilities.java       |  11 +-
 .../org/apache/hawq/pxf/api/MetadataTest.java   |  11 +
 .../hawq/pxf/plugins/hbase/package-info.java    |  23 ++
 .../hawq/pxf/plugins/hdfs/AvroResolver.java     |   4 +-
 .../hawq/pxf/plugins/hdfs/package-info.java     |  23 ++
 .../hawq/pxf/plugins/hive/package-info.java     |  23 ++
 .../hive/utilities/EnumHiveToHawqType.java      |   7 +-
 .../plugins/hive/utilities/HiveUtilities.java   |  59 +--
 .../hive/utilities/HiveUtilitiesTest.java       |   8 +-
 .../pxf/service/MetadataResponseFormatter.java  |   1 +
 .../apache/hawq/pxf/service/package-info.java   |  23 ++
 .../pxf/service/utilities/ProtocolData.java     |   2 +-
 .../service/MetadataResponseFormatterTest.java  |   2 +-
 src/backend/catalog/external/externalmd.c       |  53 ++-
 src/backend/executor/execMain.c                 |   2 +-
 .../gpopt/translate/CTranslatorUtils.cpp        |  10 +-
 .../communication/rmcomm_RM2RMSEG.c             |  13 +-
 src/backend/utils/misc/etc/gpcheck.cnf          |  48 ++-
 src/bin/psql/describe.c                         |   5 +-
 .../data/hcatalog/invalid_numeric_range.json    |   2 +-
 .../hcatalog/invalid_typemod_timestamp.json     |   2 +-
 src/test/regress/data/hcatalog/multi_table.json |   2 +-
 .../data/hcatalog/multi_table_duplicates.json   |   2 +-
 .../regress/data/hcatalog/null_field_name.json  |   1 +
 .../data/hcatalog/null_field_source_type.json   |   1 +
 .../regress/data/hcatalog/null_field_type.json  |   1 +
 src/test/regress/data/hcatalog/null_fields.json |   1 +
 src/test/regress/data/hcatalog/null_item.json   |   1 +
 .../regress/data/hcatalog/null_item_name.json   |   1 +
 .../regress/data/hcatalog/null_item_path.json   |   1 +
 .../regress/data/hcatalog/single_table.json     |   2 +-
 src/test/regress/input/json_load.source         |  35 ++
 src/test/regress/output/hcatalog_lookup.source  |   8 +-
 src/test/regress/output/json_load.source        |  35 ++
 tools/bin/gpcheck                               | 372 ++++++++++++++++++-
 tools/bin/gppylib/gpcheckutil.py                |  17 +-
 tools/bin/hawqpylib/hawqlib.py                  |  18 +
 tools/doc/gpcheck_help                          |  44 ++-
 tools/sbin/gpcheck_hostdump                     |  50 ++-
 45 files changed, 880 insertions(+), 113 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
----------------------------------------------------------------------
diff --cc pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
index 4cb7104,4fc510d..9e1c137
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
@@@ -75,22 -71,22 +75,26 @@@ public class Metadata 
       */
      public static class Field {
          private String name;
 -        private String type; // TODO: change to enum
 +        private EnumHawqType type; // field type which PXF exposes
-         private String sourceType; // filed type PXF reads from
++        private String sourceType; // field type PXF reads from
          private String[] modifiers; // type modifiers, optional field
  
-     public Field(String name, EnumHawqType type, String sourceType) {
-         if (StringUtils.isBlank(name) || StringUtils.isBlank(type.getTypeName())
-                 || StringUtils.isBlank(sourceType)) {
-             throw new IllegalArgumentException("Field name, type and source type cannot be empty");
 -        public Field(String name, String type) {
 -
 -            if (StringUtils.isBlank(name) || StringUtils.isBlank(type)) {
 -                throw new IllegalArgumentException("Field name and type cannot be empty");
++        public Field(String name, EnumHawqType type, String sourceType) {
++            if (StringUtils.isBlank(name)
++                    || type == null
++                    || StringUtils.isBlank(sourceType)) {
++                throw new IllegalArgumentException(
++                        "Field name, type and source type cannot be empty");
+             }
 -
+             this.name = name;
+             this.type = type;
++            this.sourceType = sourceType;
          }
-         this.name = name;
-         this.type = type;
-         this.sourceType = sourceType;
-     }
-     public Field(String name, EnumHawqType type, String sourceType, String[] modifiers) {
-         this(name, type, sourceType);
-         this.modifiers = modifiers;
+ 
 -        public Field(String name, String type, String[] modifiers) {
 -            this(name, type);
++        public Field(String name, EnumHawqType type, String sourceType,
++                String[] modifiers) {
++            this(name, type, sourceType);
+             this.modifiers = modifiers;
          }
  
          public String getName() {

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java
----------------------------------------------------------------------
diff --cc pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java
index d6e94e1,0000000..327a15b
mode 100644,000000..100644
--- a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java
+++ b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java
@@@ -1,49 -1,0 +1,60 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + * 
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + * 
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.hawq.pxf.api;
 +
 +import org.apache.hawq.pxf.api.Metadata;
 +import org.apache.hawq.pxf.api.utilities.EnumHawqType;
++
 +import static org.junit.Assert.assertEquals;
 +import static org.junit.Assert.fail;
++
 +import org.junit.Test;
 +
 +public class MetadataTest {
 +
 +    @Test
 +    public void createFieldEmptyNameType() {
 +        try {
 +            Metadata.Field field = new Metadata.Field(null, null, null, null);
 +            fail("Empty name, type and source type shouldn't be allowed.");
 +        } catch (IllegalArgumentException e) {
 +            assertEquals("Field name, type and source type cannot be empty", e.getMessage());
 +        }
 +    }
 +
 +    @Test
++    public void createFieldNullType() {
++        try {
++            Metadata.Field field = new Metadata.Field("col1", null, "string");
++            fail("Empty name, type and source type shouldn't be allowed.");
++        } catch (IllegalArgumentException e) {
++            assertEquals("Field name, type and source type cannot be empty", e.getMessage());
++        }
++    }
++    @Test
 +    public void createItemEmptyNameType() {
 +        try {
 +            Metadata.Item item = new Metadata.Item(null, null);
 +            fail("Empty item name and path shouldn't be allowed.");
 +        } catch (IllegalArgumentException e) {
 +            assertEquals("Item or path name cannot be empty", e.getMessage());
 +        }
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
----------------------------------------------------------------------
diff --cc pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
index 13d402c,0000000..a747bd5
mode 100644,000000..100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
@@@ -1,112 -1,0 +1,113 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + * 
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + * 
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.hawq.pxf.plugins.hive.utilities;
 +
 +import org.apache.hawq.pxf.api.utilities.EnumHawqType;
 +import org.apache.hawq.pxf.api.UnsupportedTypeException;
 +
 +/**
 + * 
 + * Hive types, which are supported by plugin, mapped to HAWQ's types
 + * @see EnumHawqType
 + */
 +public enum EnumHiveToHawqType {
 +
 +    TinyintType("tinyint", EnumHawqType.Int2Type),
 +    SmallintType("smallint", EnumHawqType.Int2Type),
 +    IntType("int", EnumHawqType.Int4Type),
 +    BigintType("bigint", EnumHawqType.Int8Type),
 +    BooleanType("boolean", EnumHawqType.BoolType),
 +    FloatType("float", EnumHawqType.Float4Type),
 +    DoubleType("double", EnumHawqType.Float8Type),
 +    StringType("string", EnumHawqType.TextType),
 +    BinaryType("binary", EnumHawqType.ByteaType),
 +    TimestampType("timestamp", EnumHawqType.TimestampType),
 +    DateType("date", EnumHawqType.DateType),
 +    DecimalType("decimal", EnumHawqType.NumericType, "[(,)]"),
 +    VarcharType("varchar", EnumHawqType.VarcharType, "[(,)]"),
 +    CharType("char", EnumHawqType.BpcharType, "[(,)]"),
 +    ArrayType("array", EnumHawqType.TextType, "[<,>]"),
 +    MapType("map", EnumHawqType.TextType, "[<,>]"),
 +    StructType("struct", EnumHawqType.TextType, "[<,>]"),
 +    UnionType("uniontype", EnumHawqType.TextType, "[<,>]");
 +
 +    private String typeName;
 +    private EnumHawqType hawqType;
 +    private String splitExpression;
 +
 +    EnumHiveToHawqType(String typeName, EnumHawqType hawqType) {
 +        this.typeName = typeName;
 +        this.hawqType = hawqType;
 +    }
 +
 +    EnumHiveToHawqType(String typeName, EnumHawqType hawqType, String splitExpression) {
 +        this(typeName, hawqType);
 +        this.splitExpression = splitExpression;
 +    }
 +
 +    /**
 +     * 
 +     * @return name of type
 +     */
 +    public String getTypeName() {
 +        return this.typeName;
 +    }
 +
 +    /**
 +     * 
 +     * @return corresponding HAWQ type
 +     */
 +    public EnumHawqType getHawqType() {
 +        return this.hawqType;
 +    }
 +
 +    /**
 +     * 
 +     * @return split by expression
 +     */
 +    public String getSplitExpression() {
 +        return this.splitExpression;
 +    }
 +
 +    /**
 +     * Returns Hive to HAWQ type mapping entry for given Hive type 
 +     * 
 +     * @param hiveType full Hive type with modifiers, for example - decimal(10, 0), char(5), binary, array<string>, map<string,float> etc
 +     * @return corresponding Hive to HAWQ type mapping entry
 +     * @throws UnsupportedTypeException if there is no corresponding HAWQ type
 +     */
 +    public static EnumHiveToHawqType getHiveToHawqType(String hiveType) {
 +        for (EnumHiveToHawqType t : values()) {
 +            String hiveTypeName = hiveType;
-             if (t.getSplitExpression() != null) {
-                 String[] tokens = hiveType.split(t.getSplitExpression());
++            String splitExpression = t.getSplitExpression();
++            if (splitExpression != null) {
++                String[] tokens = hiveType.split(splitExpression);
 +                hiveTypeName = tokens[0];
 +            }
 +
-             if (t.getTypeName().equals(hiveTypeName)) {
++            if (t.getTypeName().toLowerCase().equals(hiveTypeName.toLowerCase())) {
 +                return t;
 +            }
 +        }
 +        throw new UnsupportedTypeException("Unable to map Hive's type: "
 +                + hiveType + " to HAWQ's type");
 +    }
 +
 +}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
----------------------------------------------------------------------
diff --cc pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
index c5751c5,7dfe410..096c0ff
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
@@@ -35,8 -32,9 +33,10 @@@ import org.apache.hadoop.hive.metastore
  import org.apache.hadoop.hive.metastore.api.FieldSchema;
  import org.apache.hadoop.hive.metastore.api.MetaException;
  import org.apache.hadoop.hive.metastore.api.Table;
 -
  import org.apache.hawq.pxf.api.Metadata;
  import org.apache.hawq.pxf.api.UnsupportedTypeException;
++import org.apache.hawq.pxf.api.utilities.EnumHawqType;
++import org.apache.hawq.pxf.plugins.hive.utilities.EnumHiveToHawqType;
  
  /**
   * Class containing helper functions connecting
@@@ -85,66 -83,99 +85,67 @@@ public class HiveUtilities 
      }
  
      /**
--     * Checks if hive type is supported, and if so
--     * return its matching HAWQ type.
--     * Unsupported types will result in an exception.
--     * <br>
--     * The supported mappings are:<ul>
-          * <li>{@code tinyint -> int2}</li>
-          * <li>{@code smallint -> int2}</li>
-          * <li>{@code int -> int4}</li>
-          * <li>{@code bigint -> int8}</li>
-          * <li>{@code boolean -> bool}</li>
-          * <li>{@code float -> float4}</li>
-          * <li>{@code double -> float8}</li>
-          * <li>{@code string -> text}</li>
-          * <li>{@code binary -> bytea}</li>
-          * <li>{@code timestamp -> timestamp}</li>
-          * <li>{@code date -> date}</li>
-          * <li>{@code decimal(precision, scale) -> numeric(precision, scale)}</li>
-          * <li>{@code varchar(size) -> varchar(size)}</li>
-          * <li>{@code char(size) -> bpchar(size)}</li>
-          * <li>{@code array<dataType> -> text}</li>
-          * <li>{@code map<keyDataType, valueDataType> -> text}</li>
-          * <li>{@code struct<field1:dataType,...,fieldN:dataType> -> text}</li>
-          * <li>{@code uniontype<...> -> text}</li>
++     * Checks if hive type is supported, and if so return its matching HAWQ
++     * type. Unsupported types will result in an exception. <br>
++     * The supported mappings are:
++     * <ul>
+      * <li>{@code tinyint -> int2}</li>
+      * <li>{@code smallint -> int2}</li>
+      * <li>{@code int -> int4}</li>
+      * <li>{@code bigint -> int8}</li>
+      * <li>{@code boolean -> bool}</li>
+      * <li>{@code float -> float4}</li>
+      * <li>{@code double -> float8}</li>
+      * <li>{@code string -> text}</li>
+      * <li>{@code binary -> bytea}</li>
+      * <li>{@code timestamp -> timestamp}</li>
+      * <li>{@code date -> date}</li>
+      * <li>{@code decimal(precision, scale) -> numeric(precision, scale)}</li>
+      * <li>{@code varchar(size) -> varchar(size)}</li>
+      * <li>{@code char(size) -> bpchar(size)}</li>
++     * <li>{@code array<dataType> -> text}</li>
++     * <li>{@code map<keyDataType, valueDataType> -> text}</li>
++     * <li>{@code struct<field1:dataType,...,fieldN:dataType> -> text}</li>
++     * <li>{@code uniontype<...> -> text}</li>
       * </ul>
       *
--     * @param hiveColumn hive column schema
++     * @param hiveColumn
++     *            hive column schema
       * @return field with mapped HAWQ type and modifiers
--     * @throws UnsupportedTypeException if the column type is not supported
++     * @throws UnsupportedTypeException
++     *             if the column type is not supported
 +     * @see EnumHiveToHawqType
       */
     public static Metadata.Field mapHiveType(FieldSchema hiveColumn) throws UnsupportedTypeException {
          String fieldName = hiveColumn.getName();
 -        String hiveType = hiveColumn.getType();
 -        String mappedType;
 -        String[] modifiers = null;
 -
 -        // check parameterized types:
 -        if (hiveType.startsWith("varchar(") ||
 -                hiveType.startsWith("char(")) {
 -            String[] toks = hiveType.split("[(,)]");
 -            if (toks.length != 2) {
 -                throw new UnsupportedTypeException( "HAWQ does not support type " + hiveType + " (Field " + fieldName + "), " +
 -                        "expected type of the form <type name>(<parameter>)");
 -            }
 -            mappedType = toks[0];
 -            if (mappedType.equals("char")) {
 -                mappedType = "bpchar";
 -            }
 -            modifiers = new String[] {toks[1]};
 -        } else if (hiveType.startsWith("decimal(")) {
 -            String[] toks = hiveType.split("[(,)]");
 -            if (toks.length != 3) {
 -                throw new UnsupportedTypeException( "HAWQ does not support type " + hiveType + " (Field " + fieldName + "), " +
 -                        "expected type of the form <type name>(<parameter>,<parameter>)");
 -            }
 -            mappedType = "numeric";
 -            modifiers = new String[] {toks[1], toks[2]};
 -        } else {
 -
 -            switch (hiveType) {
 -            case "tinyint":
 -            case "smallint":
 -            	mappedType = "int2";
 -            	break;
 -            case "int":
 -            	mappedType = "int4";
 -            	break;
 -            case "bigint":
 -            	mappedType = "int8";
 -            	break;
 -            case "boolean":
 -            	mappedType = "bool";
 -            	break;
 -            case "timestamp":
 -            case "date":
 -                mappedType = hiveType;
 -                break;
 -            case "float":
 -                mappedType = "float4";
 -                break;
 -            case "double":
 -                mappedType = "float8";
 -                break;
 -            case "string":
 -                mappedType = "text";
 -                break;
 -            case "binary":
 -                mappedType = "bytea";
 -                break;
 -            default:
 -                throw new UnsupportedTypeException(
 -                        "HAWQ does not support type " + hiveType + " (Field " + fieldName + ")");
 +        String hiveType = hiveColumn.getType(); // Type name and modifiers if any
 +        String hiveTypeName; // Type name
 +        String[] modifiers = null; // Modifiers
 +        EnumHiveToHawqType hiveToHawqType = EnumHiveToHawqType.getHiveToHawqType(hiveType);
 +        EnumHawqType hawqType = hiveToHawqType.getHawqType();
 +
 +        if (hiveToHawqType.getSplitExpression() != null) {
 +            String[] tokens = hiveType.split(hiveToHawqType.getSplitExpression());
 +            hiveTypeName = tokens[0];
 +            if (hawqType.getModifiersNum() > 0) {
 +                modifiers = Arrays.copyOfRange(tokens, 1, tokens.length);
 +                if (modifiers.length != hawqType.getModifiersNum()) {
 +                    throw new UnsupportedTypeException(
 +                            "HAWQ does not support type " + hiveType
 +                                    + " (Field " + fieldName + "), "
 +                                    + "expected number of modifiers: "
 +                                    + hawqType.getModifiersNum()
 +                                    + ", actual number of modifiers: "
 +                                    + modifiers.length);
 +                }
-                 if (hawqType.getValidateIntegerModifiers() && !verifyIntegerModifers(modifiers)) {
++                if (hawqType.getValidateIntegerModifiers() && !verifyIntegerModifiers(modifiers)) {
 +                    throw new UnsupportedTypeException("HAWQ does not support type " + hiveType + " (Field " + fieldName + "), modifiers should be integers");
 +                }
              }
 -        }
 -        if (!verifyModifers(modifiers)) {
 -            throw new UnsupportedTypeException("HAWQ does not support type " + hiveType + " (Field " + fieldName + "), modifiers should be integers");
 -        }
 -        return new Metadata.Field(fieldName, mappedType, modifiers);
 +        } else
 +            hiveTypeName = hiveType;
 +
 +        return new Metadata.Field(fieldName, hawqType, hiveTypeName, modifiers);
      }
  
      /**
@@@ -155,7 -186,7 +156,7 @@@
       * @param modifiers type modifiers to be verified
       * @return whether modifiers are null or integers
       */
-     private static boolean verifyIntegerModifers(String[] modifiers) {
 -    private static boolean verifyModifers(String[] modifiers) {
++    private static boolean verifyIntegerModifiers(String[] modifiers) {
          if (modifiers == null) {
              return true;
          }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
----------------------------------------------------------------------
diff --cc pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
index 1054d0d,466dedb..e9b024a
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
@@@ -54,12 -54,6 +54,13 @@@ public class HiveUtilitiesTest 
          {"char(40)", "bpchar", "40"},
      };
  
 +    static String[][] complexTypes = {
-         {"array<string>", "text"},
-         {"map<string,float>", "text"},
-         {"struct<street:string,city:string,state:string,zip:int>", "text"},
++        {"ArraY<string>", "text"},
++        {"MaP<stRing, float>", "text"},
++        {"Struct<street:string, city:string, state:string, zip:int>", "text"},
++        {"UnionType<array<string>, string,int>", "text"}
 +    };
 +
      @Test
      public void mapHiveTypeUnsupported() throws Exception {
  
@@@ -157,37 -152,6 +158,38 @@@
      }
  
      @Test
 +    public void mapHiveTypeInvalidModifiers() throws Exception {
 +        String badHiveType = "decimal(abc, xyz)";
 +        hiveColumn = new FieldSchema("numericColumn", badHiveType, null);
 +        try {
 +            HiveUtilities.mapHiveType(hiveColumn);
 +            fail("should fail with bad modifiers error");
 +        } catch (UnsupportedTypeException e) {
 +            String errorMsg = "HAWQ does not support type " + badHiveType + " (Field numericColumn), modifiers should be integers";
 +            assertEquals(errorMsg, e.getMessage());
 +        }
 +    }
 +
 +    @Test
 +    public void mapHiveTypeComplex() throws Exception {
 +        /*
 +         * array<dataType> -> text
 +         * map<keyDataType, valueDataType> -> text
 +         * struct<fieldName1:dataType, ..., fieldNameN:dataType> -> text
++         * uniontype<...> -> text
 +         */
 +        for (String[] line: complexTypes) {
 +            String hiveType = line[0];
 +            String expectedType = line[1];
 +            hiveColumn = new FieldSchema("field" + hiveType, hiveType, null);
 +            Metadata.Field result = HiveUtilities.mapHiveType(hiveColumn);
 +            assertEquals("field" + hiveType, result.getName());
 +            assertEquals(expectedType, result.getType().getTypeName());
 +            assertNull(result.getModifiers());
 +        }
 +    }
 +
 +    @Test
      public void parseTableQualifiedNameNoDbName() throws Exception {
          String name = "orphan";
          tblDesc = HiveUtilities.extractTableFromName(name);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
----------------------------------------------------------------------
diff --cc pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
index 6269bb9,0182835..21bf423
--- a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
+++ b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
@@@ -97,31 -96,12 +97,31 @@@ public class MetadataResponseFormatterT
          StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
          expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
                  .append("\"fields\":[")
 -                .append("{\"name\":\"field1\",\"type\":\"int\"},")
 -                .append("{\"name\":\"field2\",\"type\":\"numeric\",\"modifiers\":[\"1349\",\"1789\"]},")
 -                .append("{\"name\":\"field3\",\"type\":\"char\",\"modifiers\":[\"50\"]}")
 +                .append("{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},")
 +                .append("{\"name\":\"field2\",\"type\":\"numeric\",\"sourceType\":\"decimal\",\"modifiers\":[\"1349\",\"1789\"]},")
 +                .append("{\"name\":\"field3\",\"type\":\"bpchar\",\"sourceType\":\"char\",\"modifiers\":[\"50\"]}")
 +                .append("]}]}");
 +
 +        assertEquals(expected.toString(), convertResponseToString(response));
 +    }
 +
 +    @Test
 +    public void formatResponseStringWithSourceType() throws Exception {
 +        List<Metadata> metadataList = new ArrayList<Metadata>();
 +        List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
 +        Metadata.Item itemName = new Metadata.Item("default", "table1");
 +        Metadata metadata = new Metadata(itemName, fields);
 +        fields.add(new Metadata.Field("field1", EnumHawqType.Float8Type, "double"));
 +        metadataList.add(metadata);
 +
 +        response = MetadataResponseFormatter.formatResponse(metadataList, "path.file");
 +        StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
 +        expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
 +                .append("\"fields\":[")
 +                .append("{\"name\":\"field1\",\"type\":\"float8\",\"sourceType\":\"double\"}")
                  .append("]}]}");
  
- //        assertEquals(expected.toString(), convertResponseToString(response));
+         assertEquals(expected.toString(), convertResponseToString(response));
      }
  
      @Test

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/backend/catalog/external/externalmd.c
----------------------------------------------------------------------
diff --cc src/backend/catalog/external/externalmd.c
index 926605f,e65d741..aca07c4
--- a/src/backend/catalog/external/externalmd.c
+++ b/src/backend/catalog/external/externalmd.c
@@@ -96,6 -96,6 +96,11 @@@ List *ParsePxfEntries(StringInfo json, 
   * ParsePxfItem
   * 		Parse the given json object representing a single PXF item into the internal
   * 		representation
++ * 		Reports error and exits if any of mandatory attributes in given json are missing
++ * 		Input JSON schema:
++ *
++ * 		{"PXFMetadata":[{"item":{"path":"<ITEM_PATH>","name":"<ITEM_NAME>"},"fields":[{"name":"<FIELD_NAME>","type":"<FIELD_TYPE>","sourceType":"<SOURCE_TYPE>"[,"modifiers":["<MODIFIER1>","<MODIFIER2>"]]},...]},
...]}
++ *
   */
  static PxfItem *ParsePxfItem(struct json_object *pxfMD, char* profile)
  {
@@@ -103,17 -103,17 +108,37 @@@
  
  	/* parse item name */
  	struct json_object *jsonItem = json_object_object_get(pxfMD, "item");
--	char *itemPath = pstrdup(json_object_get_string(json_object_object_get(jsonItem, "path")));
--	char *itemName = pstrdup(json_object_get_string(json_object_object_get(jsonItem, "name")));
--	
++	if (NULL == jsonItem)
++		ereport(ERROR,
++			(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
++			 errmsg("Could not parse PXF item, expected not null value for attribute \"item\"")));
++
++	struct json_object *itemPath = json_object_object_get(jsonItem, "path");
++	if (NULL == itemPath)
++		ereport(ERROR,
++			(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
++			 errmsg("Could not parse PXF item, expected not null value for attribute \"path\"")));
++
++	struct json_object *itemName = json_object_object_get(jsonItem, "name");
++	if (NULL == itemName)
++		ereport(ERROR,
++			(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
++			 errmsg("Could not parse PXF item, expected not null value for attribute \"name\"")));
++
  	pxfItem->profile = profile;
--	pxfItem->path = itemPath;
--	pxfItem->name = itemName;
++	pxfItem->path = pstrdup(json_object_get_string(itemPath));
++	pxfItem->name = pstrdup(json_object_get_string(itemName));
  	
  	elog(DEBUG1, "Parsed item %s, namespace %s", itemName, itemPath);
  		
  	/* parse columns */
  	struct json_object *jsonFields = json_object_object_get(pxfMD, "fields");
++
++	if (NULL == jsonFields)
++		ereport(ERROR,
++			(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
++			 errmsg("Could not parse PXF item, expected not null value for attribute \"fields\"")));
++
  	const int numFields = json_object_array_length(jsonFields);
  	for (int i = 0; i < numFields; i++)
  	{
@@@ -121,14 -121,10 +146,32 @@@
  		struct json_object *jsonCol = json_object_array_get_idx(jsonFields, i);
  
  		struct json_object *fieldName = json_object_object_get(jsonCol, "name");
++
++		if (NULL == fieldName)
++			ereport(ERROR,
++				(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
++				 errmsg("Could not parse PXF item, expected not null value for attribute \"name\"")));
++
  		pxfField->name = pstrdup(json_object_get_string(fieldName));
  
  		struct json_object *fieldType = json_object_object_get(jsonCol, "type");
++
++		if (NULL == fieldType)
++			ereport(ERROR,
++				(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
++				 errmsg("Could not parse PXF item, expected not null value for attribute \"type\"")));
++
  		pxfField->type = pstrdup(json_object_get_string(fieldType));
 +
 +		struct json_object *sourceFieldType = json_object_object_get(jsonCol, "sourceType");
++
++		if (NULL == sourceFieldType)
++			ereport(ERROR,
++				(errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
++				 errmsg("Could not parse PXF item, expected not null value for attribute \"sourceType\"")));
++
 +		pxfField->sourceType = pstrdup(json_object_get_string(sourceFieldType));
 +
  		pxfField->nTypeModifiers = 0;
  		
  		elog(DEBUG1, "Parsing field %s, type %s", pxfField->name, pxfField->type);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/bin/psql/describe.c
----------------------------------------------------------------------
diff --cc src/bin/psql/describe.c
index ab2aa8b,00cfe33..85012b2
--- a/src/bin/psql/describe.c
+++ b/src/bin/psql/describe.c
@@@ -4263,13 -4263,8 +4263,12 @@@ describePxfTable(const char *profile, c
  	printQueryOpt myopt = pset.popt;
  	printTableContent cont;
  	int			cols = 0;
 +	if (verbose)
- 	{
 +		cols = 3;
- 	} else
++	else
 +		cols = 2;
  	int			total_numrows = 0;
 -	char	   *headers[2];
 +	char	   *headers[cols];
  	bool		printTableInitialized = false;
  
  	char *previous_path = NULL;

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/data/hcatalog/invalid_numeric_range.json
----------------------------------------------------------------------
diff --cc src/test/regress/data/hcatalog/invalid_numeric_range.json
index 1888777,1888777..41149eb
--- a/src/test/regress/data/hcatalog/invalid_numeric_range.json
+++ b/src/test/regress/data/hcatalog/invalid_numeric_range.json
@@@ -1,1 -1,1 +1,1 @@@
--{"PXFMetadata":[{"item":{"path":"default","name":"mytable2"},"fields":[{"name":"n5","type":"numeric","modifiers":["30","40"]}]}]}
++{"PXFMetadata":[{"item":{"path":"default","name":"mytable2"},"fields":[{"name":"n5","type":"numeric","modifiers":["30","40"],"sourceType":"decimal"}]}]}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/data/hcatalog/invalid_typemod_timestamp.json
----------------------------------------------------------------------
diff --cc src/test/regress/data/hcatalog/invalid_typemod_timestamp.json
index 8e3c570,8e3c570..66e9041
--- a/src/test/regress/data/hcatalog/invalid_typemod_timestamp.json
+++ b/src/test/regress/data/hcatalog/invalid_typemod_timestamp.json
@@@ -1,1 -1,1 +1,1 @@@
--{"PXFMetadata":[{"item":{"path":"default","name":"mytable3"},"fields":[{"name":"n5","type":"timestamp","modifiers":["30","40"]}]}]}
++{"PXFMetadata":[{"item":{"path":"default","name":"mytable3"},"fields":[{"name":"n5","type":"timestamp","modifiers":["30","40"],"sourceType":"decimal"}]}]}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/data/hcatalog/multi_table.json
----------------------------------------------------------------------
diff --cc src/test/regress/data/hcatalog/multi_table.json
index 5d5c0ff,5d5c0ff..82e70b0
--- a/src/test/regress/data/hcatalog/multi_table.json
+++ b/src/test/regress/data/hcatalog/multi_table.json
@@@ -1,1 -1,1 +1,1 @@@
--{"PXFMetadata":[{"item":{"path":"db1","name":"ht1"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"]},{"name":"vc2","type":"varchar","modifiers":["3"]}]},{"item":{"path":"db2","name":"ht1"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"]},{"name":"vc2","type":"varchar","modifiers":["3"]}]},{"item":{"path":"db2","name":"ht2"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"]},{"name":"vc2","type":"varchar","modifiers":["3"]}]}]}
++{"PXFMetadata":[{"item":{"path":"db1","name":"ht1"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"],"sourceType":"char"},{"name":"vc2","type":"varchar","modifiers":["3"],"sourceType":"varchar"}]},{"item":{"path":"db2","name":"ht1"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"],"sourceType":"char"},{"name":"vc2","type":"varchar","modifiers":["3"],"sourceType":"varchar"}]},{"item":{"path":"db2","name":"ht2"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"],"sourceType":"char"},{"name":"vc2","type":"varchar","modifiers":["3"],"sourceType":"varchar"}]}]}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/data/hcatalog/multi_table_duplicates.json
----------------------------------------------------------------------
diff --cc src/test/regress/data/hcatalog/multi_table_duplicates.json
index 45db598,45db598..3c3acc0
--- a/src/test/regress/data/hcatalog/multi_table_duplicates.json
+++ b/src/test/regress/data/hcatalog/multi_table_duplicates.json
@@@ -1,1 -1,1 +1,1 @@@
--{"PXFMetadata":[{"item":{"path":"db","name":"t"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"]},{"name":"vc2","type":"varchar","modifiers":["3"]}]},{"item":{"path":"db","name":"t"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"]},{"name":"vc2","type":"varchar","modifiers":["3"]}]}]}
++{"PXFMetadata":[{"item":{"path":"db","name":"t"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"],"sourceType":"char"},{"name":"vc2","type":"varchar","modifiers":["3"],"sourceType":"varchar"}]},{"item":{"path":"db","name":"t"},"fields":[{"name":"c1","type":"bpchar","modifiers":["3"],"sourceType":"char"},{"name":"vc2","type":"varchar","modifiers":["3"],"sourceType":"varchar"}]}]}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/data/hcatalog/null_field_name.json
----------------------------------------------------------------------
diff --cc src/test/regress/data/hcatalog/null_field_name.json
index 0000000,0000000..e627865
new file mode 100644
--- /dev/null
+++ b/src/test/regress/data/hcatalog/null_field_name.json
@@@ -1,0 -1,0 +1,1 @@@
++{"PXFMetadata":[{"item":{"path":"default","name":"mytable"},"fields":[{"name":"col1","type":"bytea","sourceType":"binary"},{"type":"bytea","sourceType":"binary"}]}]}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/data/hcatalog/null_field_source_type.json
----------------------------------------------------------------------
diff --cc src/test/regress/data/hcatalog/null_field_source_type.json
index 0000000,0000000..6d17592
new file mode 100644
--- /dev/null
+++ b/src/test/regress/data/hcatalog/null_field_source_type.json
@@@ -1,0 -1,0 +1,1 @@@
++{"PXFMetadata":[{"item":{"path":"default","name":"mytable"},"fields":[{"name":"col1","type":"bytea","sourceType":"binary"},{"name":"col2", "type":"bytea"}]}]}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/data/hcatalog/null_field_type.json
----------------------------------------------------------------------
diff --cc src/test/regress/data/hcatalog/null_field_type.json
index 0000000,0000000..9dffd56
new file mode 100644
--- /dev/null
+++ b/src/test/regress/data/hcatalog/null_field_type.json
@@@ -1,0 -1,0 +1,1 @@@
++{"PXFMetadata":[{"item":{"path":"default","name":"mytable"},"fields":[{"name":"col1","type":"bytea","sourceType":"binary"},{"name":"col2","sourceType":"binary"}]}]}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/data/hcatalog/null_fields.json
----------------------------------------------------------------------
diff --cc src/test/regress/data/hcatalog/null_fields.json
index 0000000,0000000..a20a447
new file mode 100644
--- /dev/null
+++ b/src/test/regress/data/hcatalog/null_fields.json
@@@ -1,0 -1,0 +1,1 @@@
++{"PXFMetadata":[{"item":{"name": "mytable", "path" : "default"}}]}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/data/hcatalog/null_item.json
----------------------------------------------------------------------
diff --cc src/test/regress/data/hcatalog/null_item.json
index 0000000,0000000..b613cd8
new file mode 100644
--- /dev/null
+++ b/src/test/regress/data/hcatalog/null_item.json
@@@ -1,0 -1,0 +1,1 @@@
++{"PXFMetadata":[{}]}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/data/hcatalog/null_item_name.json
----------------------------------------------------------------------
diff --cc src/test/regress/data/hcatalog/null_item_name.json
index 0000000,0000000..0c580c9
new file mode 100644
--- /dev/null
+++ b/src/test/regress/data/hcatalog/null_item_name.json
@@@ -1,0 -1,0 +1,1 @@@
++{"PXFMetadata":[{"item":{"path":"default"},"fields":[{"name":"s1","type":"text","sourceType":"string"}]}]}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/data/hcatalog/null_item_path.json
----------------------------------------------------------------------
diff --cc src/test/regress/data/hcatalog/null_item_path.json
index 0000000,0000000..fdba035
new file mode 100644
--- /dev/null
+++ b/src/test/regress/data/hcatalog/null_item_path.json
@@@ -1,0 -1,0 +1,1 @@@
++{"PXFMetadata":[{"item":{"name": "mytable"},"fields":[{"name":"s1","type":"text","sourceType":"string"}]}]}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/data/hcatalog/single_table.json
----------------------------------------------------------------------
diff --cc src/test/regress/data/hcatalog/single_table.json
index 7df3427,7df3427..b571e5d
--- a/src/test/regress/data/hcatalog/single_table.json
+++ b/src/test/regress/data/hcatalog/single_table.json
@@@ -1,1 -1,1 +1,1 @@@
--{"PXFMetadata":[{"item":{"path":"default","name":"mytable"},"fields":[{"name":"s1","type":"text"},{"name":"s2","type":"text"},{"name":"n1","type":"int4"},{"name":"d1","type":"float8"},{"name":"dc1","type":"numeric","modifiers":["38","18"]},{"name":"tm","type":"timestamp"},{"name":"f","type":"float4"},{"name":"bg","type":"int8"},{"name":"b","type":"bool"},{"name":"tn","type":"int2"},{"name":"sml","type":"int2"},{"name":"dt","type":"date"},{"name":"vc1","type":"varchar","modifiers":["5"]},{"name":"c1","type":"bpchar","modifiers":["3"]},{"name":"bin","type":"bytea"}]}]}
++{"PXFMetadata":[{"item":{"path":"default","name":"mytable"},"fields":[{"name":"s1","type":"text","sourceType":"string"},{"name":"s2","type":"text","sourceType":"string"},{"name":"n1","type":"int4","sourceType":"int"},{"name":"d1","type":"float8","sourceType":"double"},{"name":"dc1","type":"numeric","modifiers":["38","18"],"sourceType":"decimal"},{"name":"tm","type":"timestamp","sourceType":"timestamp"},{"name":"f","type":"float4","sourceType":"float"},{"name":"bg","type":"int8","sourceType":"bigint"},{"name":"b","type":"bool","sourceType":"boolean"},{"name":"tn","type":"int2","sourceType":"tinyint"},{"name":"sml","type":"int2","sourceType":"tinyint"},{"name":"dt","type":"date","sourceType":"date"},{"name":"vc1","type":"varchar","modifiers":["5"],"sourceType":"varchar"},{"name":"c1","type":"bpchar","modifiers":["3"],"sourceType":"char"},{"name":"bin","type":"bytea","sourceType":"binary"}]}]}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/input/json_load.source
----------------------------------------------------------------------
diff --cc src/test/regress/input/json_load.source
index d0430d2,d0430d2..6dcef8a
--- a/src/test/regress/input/json_load.source
+++ b/src/test/regress/input/json_load.source
@@@ -91,6 -91,6 +91,41 @@@ BEGIN TRANSACTION
  SELECT load_json_data('@abs_builddir@/data/hcatalog/invalid_typemod_timestamp.json');
  END TRANSACTION;
  
++-- negative test case: null "item" attribute
++BEGIN TRANSACTION;
++SELECT load_json_data('@abs_builddir@/data/hcatalog/null_item.json');
++END TRANSACTION;
++
++-- negative test case: null "name" attribute
++BEGIN TRANSACTION;
++SELECT load_json_data('@abs_builddir@/data/hcatalog/null_item_name.json');
++END TRANSACTION;
++
++-- negative test case: null "path" attribute
++BEGIN TRANSACTION;
++SELECT load_json_data('@abs_builddir@/data/hcatalog/null_item_path.json');
++END TRANSACTION;
++
++-- negative test case: null "fields" attribute
++BEGIN TRANSACTION;
++SELECT load_json_data('@abs_builddir@/data/hcatalog/null_fields.json');
++END TRANSACTION;
++
++-- negative test case: null "name" attribute
++BEGIN TRANSACTION;
++SELECT load_json_data('@abs_builddir@/data/hcatalog/null_field_name.json');
++END TRANSACTION;
++
++-- negative test case: null "type" attribute
++BEGIN TRANSACTION;
++SELECT load_json_data('@abs_builddir@/data/hcatalog/null_field_type.json');
++END TRANSACTION;
++
++-- negative test case: null "sourceType" attribute
++BEGIN TRANSACTION;
++SELECT load_json_data('@abs_builddir@/data/hcatalog/null_field_source_type.json');
++END TRANSACTION;
++
  -- cleanup
  drop table mytable_internal;
  DROP FUNCTION load_json_data(filename text);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/output/hcatalog_lookup.source
----------------------------------------------------------------------
diff --cc src/test/regress/output/hcatalog_lookup.source
index ac2a4f6,ac2a4f6..a79ac93
--- a/src/test/regress/output/hcatalog_lookup.source
+++ b/src/test/regress/output/hcatalog_lookup.source
@@@ -2,14 -2,14 +2,14 @@@
  -- test hcatalog lookup
  -- --------------------------------------
  SELECT * FROM pxf_get_item_fields('Hive', '*');
-- path | itemname | fieldname | fieldtype 
--------+----------+-----------+-----------
++ path | itemname | fieldname | fieldtype | sourcefieldtype 
++------+----------+-----------+-----------+-----------------
  (0 rows)
  
  \d hcatalog.*.*
  SELECT * FROM pxf_get_item_fields('Hive', '*abc*abc*');
-- path | itemname | fieldname | fieldtype 
--------+----------+-----------+-----------
++ path | itemname | fieldname | fieldtype | sourcefieldtype 
++------+----------+-----------+-----------+-----------------
  (0 rows)
  
  \d hcatalog.*abc*.*abc*

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/a042001f/src/test/regress/output/json_load.source
----------------------------------------------------------------------
diff --cc src/test/regress/output/json_load.source
index 3e7bea2,3e7bea2..5bec43e
--- a/src/test/regress/output/json_load.source
+++ b/src/test/regress/output/json_load.source
@@@ -196,6 -196,6 +196,41 @@@ BEGIN TRANSACTION
  SELECT load_json_data('@abs_builddir@/data/hcatalog/invalid_typemod_timestamp.json');
  ERROR:  Invalid typemod for imported column n5
  END TRANSACTION;
++-- negative test case: null "item" attribute
++BEGIN TRANSACTION;
++SELECT load_json_data('@abs_builddir@/data/hcatalog/null_item.json');
++ERROR:  Could not parse PXF item, expected not null value for attribute "item"
++END TRANSACTION;
++-- negative test case: null "name" attribute
++BEGIN TRANSACTION;
++SELECT load_json_data('@abs_builddir@/data/hcatalog/null_item_name.json');
++ERROR:  Could not parse PXF item, expected not null value for attribute "name"
++END TRANSACTION;
++-- negative test case: null "path" attribute
++BEGIN TRANSACTION;
++SELECT load_json_data('@abs_builddir@/data/hcatalog/null_item_path.json');
++ERROR:  Could not parse PXF item, expected not null value for attribute "path"
++END TRANSACTION;
++-- negative test case: null "fields" attribute
++BEGIN TRANSACTION;
++SELECT load_json_data('@abs_builddir@/data/hcatalog/null_fields.json');
++ERROR:  Could not parse PXF item, expected not null value for attribute "fields"
++END TRANSACTION;
++-- negative test case: null "name" attribute
++BEGIN TRANSACTION;
++SELECT load_json_data('@abs_builddir@/data/hcatalog/null_field_name.json');
++ERROR:  Could not parse PXF item, expected not null value for attribute "name"
++END TRANSACTION;
++-- negative test case: null "type" attribute
++BEGIN TRANSACTION;
++SELECT load_json_data('@abs_builddir@/data/hcatalog/null_field_type.json');
++ERROR:  Could not parse PXF item, expected not null value for attribute "type"
++END TRANSACTION;
++-- negative test case: null "sourceType" attribute
++BEGIN TRANSACTION;
++SELECT load_json_data('@abs_builddir@/data/hcatalog/null_field_source_type.json');
++ERROR:  Could not parse PXF item, expected not null value for attribute "sourceType"
++END TRANSACTION;
  -- cleanup
  drop table mytable_internal;
  DROP FUNCTION load_json_data(filename text);



Mime
View raw message