hawq-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From shiv...@apache.org
Subject [1/4] incubator-hawq git commit: HAWQ-931. Removed redundant function overrides
Date Thu, 08 Sep 2016 20:56:03 GMT
Repository: incubator-hawq
Updated Branches:
  refs/heads/HAWQ-931 [created] 00db7776a


HAWQ-931. Removed redundant function overrides


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/00db7776
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/00db7776
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/00db7776

Branch: refs/heads/HAWQ-931
Commit: 00db7776ac100fd26261807e18afba76104df124
Parents: 1094271
Author: Shivram Mani <shivram.mani@gmail.com>
Authored: Tue Aug 2 14:57:56 2016 -0700
Committer: Shivram Mani <shivram.mani@gmail.com>
Committed: Tue Aug 2 15:12:58 2016 -0700

----------------------------------------------------------------------
 .../pxf/plugins/hive/HiveORCSerdeResolver.java  | 289 -------------------
 .../hawq/pxf/plugins/hive/HiveResolver.java     |   3 +-
 2 files changed, 2 insertions(+), 290 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00db7776/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCSerdeResolver.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCSerdeResolver.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCSerdeResolver.java
index 6ac4e70..67c009b 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCSerdeResolver.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCSerdeResolver.java
@@ -147,293 +147,4 @@ public class HiveORCSerdeResolver extends HiveResolver {
         deserializer.initialize(new JobConf(new Configuration(), HiveORCSerdeResolver.class), serdeProperties);
     }
 
-    /*
-     * If the object representing the whole record is null or if an object
-     * representing a composite sub-object (map, list,..) is null - then
-     * BadRecordException will be thrown. If a primitive field value is null,
-     * then a null will appear for the field in the record in the query result.
-     */
-    private void traverseTuple(Object obj, ObjectInspector objInspector,
-                               List<OneField> record, boolean toFlatten)
-            throws IOException, BadRecordException {
-        ObjectInspector.Category category = objInspector.getCategory();
-        if ((obj == null) && (category != ObjectInspector.Category.PRIMITIVE)) {
-            throw new BadRecordException("NULL Hive composite object");
-        }
-        switch (category) {
-            case PRIMITIVE:
-                resolvePrimitive(obj, (PrimitiveObjectInspector) objInspector,
-                        record, toFlatten);
-                break;
-            case LIST:
-                List<OneField> listRecord = traverseList(obj,
-                        (ListObjectInspector) objInspector);
-                addOneFieldToRecord(record, TEXT, String.format("[%s]",
-                        HdfsUtilities.toString(listRecord, collectionDelim)));
-                break;
-            case MAP:
-                List<OneField> mapRecord = traverseMap(obj,
-                        (MapObjectInspector) objInspector);
-                addOneFieldToRecord(record, TEXT, String.format("{%s}",
-                        HdfsUtilities.toString(mapRecord, collectionDelim)));
-                break;
-            case STRUCT:
-                List<OneField> structRecord = traverseStruct(obj,
-                        (StructObjectInspector) objInspector, true);
-                addOneFieldToRecord(record, TEXT, String.format("{%s}",
-                        HdfsUtilities.toString(structRecord, collectionDelim)));
-                break;
-            case UNION:
-                List<OneField> unionRecord = traverseUnion(obj,
-                        (UnionObjectInspector) objInspector);
-                addOneFieldToRecord(record, TEXT, String.format("[%s]",
-                        HdfsUtilities.toString(unionRecord, collectionDelim)));
-                break;
-            default:
-                throw new UnsupportedTypeException("Unknown category type: "
-                        + objInspector.getCategory());
-        }
-    }
-
-    private List<OneField> traverseUnion(Object obj, UnionObjectInspector uoi)
-            throws BadRecordException, IOException {
-        List<OneField> unionRecord = new LinkedList<>();
-        List<? extends ObjectInspector> ois = uoi.getObjectInspectors();
-        if (ois == null) {
-            throw new BadRecordException(
-                    "Illegal value NULL for Hive data type Union");
-        }
-        traverseTuple(uoi.getField(obj), ois.get(uoi.getTag(obj)), unionRecord,
-                true);
-        return unionRecord;
-    }
-
-    private List<OneField> traverseList(Object obj, ListObjectInspector loi)
-            throws BadRecordException, IOException {
-        List<OneField> listRecord = new LinkedList<>();
-        List<?> list = loi.getList(obj);
-        ObjectInspector eoi = loi.getListElementObjectInspector();
-        if (list == null) {
-            throw new BadRecordException(
-                    "Illegal value NULL for Hive data type List");
-        }
-        for (Object object : list) {
-            traverseTuple(object, eoi, listRecord, true);
-        }
-        return listRecord;
-    }
-
-    private List<OneField> traverseStruct(Object struct,
-                                          StructObjectInspector soi,
-                                          boolean toFlatten)
-            throws BadRecordException, IOException {
-        List<? extends StructField> fields = soi.getAllStructFieldRefs();
-        List<Object> structFields = soi.getStructFieldsDataAsList(struct);
-        if (structFields == null) {
-            throw new BadRecordException(
-                    "Illegal value NULL for Hive data type Struct");
-        }
-        List<OneField> structRecord = new LinkedList<>();
-        List<OneField> complexRecord = new LinkedList<>();
-        for (int i = 0; i < structFields.size(); i++) {
-            if (toFlatten) {
-                complexRecord.add(new OneField(TEXT.getOID(), String.format(
-                        "\"%s\"", fields.get(i).getFieldName())));
-            }
-            traverseTuple(structFields.get(i),
-                    fields.get(i).getFieldObjectInspector(), complexRecord,
-                    toFlatten);
-            if (toFlatten) {
-                addOneFieldToRecord(structRecord, TEXT,
-                        HdfsUtilities.toString(complexRecord, mapkeyDelim));
-                complexRecord.clear();
-            }
-        }
-        return toFlatten ? structRecord : complexRecord;
-    }
-
-    private List<OneField> traverseMap(Object obj, MapObjectInspector moi)
-            throws BadRecordException, IOException {
-        List<OneField> complexRecord = new LinkedList<>();
-        List<OneField> mapRecord = new LinkedList<>();
-        ObjectInspector koi = moi.getMapKeyObjectInspector();
-        ObjectInspector voi = moi.getMapValueObjectInspector();
-        Map<?, ?> map = moi.getMap(obj);
-        if (map == null) {
-            throw new BadRecordException(
-                    "Illegal value NULL for Hive data type Map");
-        } else if (map.isEmpty()) {
-            traverseTuple(null, koi, complexRecord, true);
-            traverseTuple(null, voi, complexRecord, true);
-            addOneFieldToRecord(mapRecord, TEXT,
-                    HdfsUtilities.toString(complexRecord, mapkeyDelim));
-        } else {
-            for (Map.Entry<?, ?> entry : map.entrySet()) {
-                traverseTuple(entry.getKey(), koi, complexRecord, true);
-                traverseTuple(entry.getValue(), voi, complexRecord, true);
-                addOneFieldToRecord(mapRecord, TEXT,
-                        HdfsUtilities.toString(complexRecord, mapkeyDelim));
-                complexRecord.clear();
-            }
-        }
-        return mapRecord;
-    }
-
-    private void resolvePrimitive(Object o, PrimitiveObjectInspector oi,
-                                  List<OneField> record, boolean toFlatten)
-            throws IOException {
-        Object val;
-        switch (oi.getPrimitiveCategory()) {
-            case BOOLEAN: {
-                val = (o != null) ? ((BooleanObjectInspector) oi).get(o) : null;
-                addOneFieldToRecord(record, BOOLEAN, val);
-                break;
-            }
-            case SHORT: {
-                val = (o != null) ? ((ShortObjectInspector) oi).get(o) : null;
-                addOneFieldToRecord(record, SMALLINT, val);
-                break;
-            }
-            case INT: {
-                val = (o != null) ? ((IntObjectInspector) oi).get(o) : null;
-                addOneFieldToRecord(record, INTEGER, val);
-                break;
-            }
-            case LONG: {
-                val = (o != null) ? ((LongObjectInspector) oi).get(o) : null;
-                addOneFieldToRecord(record, BIGINT, val);
-                break;
-            }
-            case FLOAT: {
-                val = (o != null) ? ((FloatObjectInspector) oi).get(o) : null;
-                addOneFieldToRecord(record, REAL, val);
-                break;
-            }
-            case DOUBLE: {
-                val = (o != null) ? ((DoubleObjectInspector) oi).get(o) : null;
-                addOneFieldToRecord(record, FLOAT8, val);
-                break;
-            }
-            case DECIMAL: {
-                String sVal = null;
-                if (o != null) {
-                    HiveDecimal hd = ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o);
-                    if (hd != null) {
-                        BigDecimal bd = hd.bigDecimalValue();
-                        sVal = bd.toString();
-                    }
-                }
-                addOneFieldToRecord(record, NUMERIC, sVal);
-                break;
-            }
-            case STRING: {
-                val = (o != null) ? ((StringObjectInspector) oi).getPrimitiveJavaObject(o)
-                        : null;
-                addOneFieldToRecord(record, TEXT,
-                        toFlatten ? String.format("\"%s\"", val) : val);
-                break;
-            }
-            case VARCHAR:
-                val = (o != null) ? ((HiveVarcharObjectInspector) oi).getPrimitiveJavaObject(o)
-                        : null;
-                addOneFieldToRecord(record, VARCHAR,
-                        toFlatten ? String.format("\"%s\"", val) : val);
-                break;
-            case CHAR:
-                val = (o != null) ? ((HiveCharObjectInspector) oi).getPrimitiveJavaObject(o)
-                        : null;
-                addOneFieldToRecord(record, BPCHAR,
-                        toFlatten ? String.format("\"%s\"", val) : val);
-                break;
-            case BINARY: {
-                byte[] toEncode = null;
-                if (o != null) {
-                    BytesWritable bw = ((BinaryObjectInspector) oi).getPrimitiveWritableObject(o);
-                    toEncode = new byte[bw.getLength()];
-                    System.arraycopy(bw.getBytes(), 0, toEncode, 0,
-                            bw.getLength());
-                }
-                addOneFieldToRecord(record, BYTEA, toEncode);
-                break;
-            }
-            case TIMESTAMP: {
-                val = (o != null) ? ((TimestampObjectInspector) oi).getPrimitiveJavaObject(o)
-                        : null;
-                addOneFieldToRecord(record, TIMESTAMP, val);
-                break;
-            }
-            case DATE:
-                val = (o != null) ? ((DateObjectInspector) oi).getPrimitiveJavaObject(o)
-                        : null;
-                addOneFieldToRecord(record, DATE, val);
-                break;
-            case BYTE: { /* TINYINT */
-                val = (o != null) ? new Short(((ByteObjectInspector) oi).get(o))
-                        : null;
-                addOneFieldToRecord(record, SMALLINT, val);
-                break;
-            }
-            default: {
-                throw new UnsupportedTypeException(oi.getTypeName()
-                        + " conversion is not supported by "
-                        + getClass().getSimpleName());
-            }
-        }
-    }
-
-    private void addOneFieldToRecord(List<OneField> record,
-                                     DataType gpdbWritableType, Object val) {
-        record.add(new OneField(gpdbWritableType.getOID(), val));
-    }
-
-    /*
-     * Gets the delimiter character from the URL, verify and store it. Must be a
-     * single ascii character (same restriction as Hawq's). If a hex
-     * representation was passed, convert it to its char.
-     */
-    void parseDelimiterChar(InputData input) {
-
-        String userDelim = input.getUserProperty("DELIMITER");
-
-        if (userDelim == null) {
-            throw new IllegalArgumentException("DELIMITER is a required option");
-        }
-
-        final int VALID_LENGTH = 1;
-        final int VALID_LENGTH_HEX = 4;
-
-        if (userDelim.startsWith("\\x")) { // hexadecimal sequence
-
-            if (userDelim.length() != VALID_LENGTH_HEX) {
-                throw new IllegalArgumentException(
-                        "Invalid hexdecimal value for delimiter (got"
-                                + userDelim + ")");
-            }
-
-            delimiter = (char) Integer.parseInt(
-                    userDelim.substring(2, VALID_LENGTH_HEX), 16);
-
-            if (!CharUtils.isAscii(delimiter)) {
-                throw new IllegalArgumentException(
-                        "Invalid delimiter value. Must be a single ASCII character, or a hexadecimal sequence (got non ASCII "
-                                + delimiter + ")");
-            }
-
-            return;
-        }
-
-        if (userDelim.length() != VALID_LENGTH) {
-            throw new IllegalArgumentException(
-                    "Invalid delimiter value. Must be a single ASCII character, or a hexadecimal sequence (got "
-                            + userDelim + ")");
-        }
-
-        if (!CharUtils.isAscii(userDelim.charAt(0))) {
-            throw new IllegalArgumentException(
-                    "Invalid delimiter value. Must be a single ASCII character, or a hexadecimal sequence (got non ASCII "
-                            + userDelim + ")");
-        }
-
-        delimiter = userDelim.charAt(0);
-    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/00db7776/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
index 2562d3d..1639742 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
@@ -346,6 +346,7 @@ public class HiveResolver extends Plugin implements ReadResolver {
      * representing a composite sub-object (map, list,..) is null - then
      * BadRecordException will be thrown. If a primitive field value is null,
      * then a null will appear for the field in the record in the query result.
+     * flatten is true only when we are dealing with a non primitive field
      */
     private void traverseTuple(Object obj, ObjectInspector objInspector,
                                List<OneField> record, boolean toFlatten)
@@ -417,7 +418,7 @@ public class HiveResolver extends Plugin implements ReadResolver {
         return listRecord;
     }
 
-    private List<OneField> traverseStruct(Object struct,
+    protected List<OneField> traverseStruct(Object struct,
                                           StructObjectInspector soi,
                                           boolean toFlatten)
             throws BadRecordException, IOException {


Mime
View raw message