hawq-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From kavin...@apache.org
Subject incubator-hawq git commit: HAWQ-964. Support for OR and NOT Logical Operators
Date Thu, 13 Oct 2016 21:17:20 GMT
Repository: incubator-hawq
Updated Branches:
  refs/heads/master cb050d153 -> 2cc152eb0


HAWQ-964. Support for OR and NOT Logical Operators

Signed-off-by: Leslie Chang <hi.lesliechang@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/2cc152eb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/2cc152eb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/2cc152eb

Branch: refs/heads/master
Commit: 2cc152eb05514df1b6815a5a32174a71ef5c8ca4
Parents: cb050d1
Author: Kavinder Dhaliwal <kavinderd@gmail.com>
Authored: Thu Sep 15 10:56:20 2016 -0700
Committer: Kavinder Dhaliwal <kavinderd@gmail.com>
Committed: Thu Oct 13 14:14:34 2016 -0700

----------------------------------------------------------------------
 pxf/build.gradle                                |   4 -
 .../org/apache/hawq/pxf/api/BasicFilter.java    |  56 +++++++
 .../org/apache/hawq/pxf/api/FilterParser.java   | 125 +++++++--------
 .../org/apache/hawq/pxf/api/LogicalFilter.java  |  30 ++++
 .../apache/hawq/pxf/api/FilterParserTest.java   | 126 +++++++++++++--
 .../pxf/plugins/hbase/HBaseFilterBuilder.java   |  69 ++++----
 .../plugins/hbase/HBaseFilterBuilderTest.java   |  34 ++++
 .../hawq/pxf/plugins/hive/HiveAccessor.java     | 157 +++++++++++++++----
 .../pxf/plugins/hive/HiveDataFragmenter.java    |  75 +++++++--
 .../pxf/plugins/hive/HiveFilterBuilder.java     |  80 +++++-----
 .../hawq/pxf/plugins/hive/HiveORCAccessor.java  |  47 ++++--
 .../pxf/plugins/hive/HiveFilterBuilderTest.java |  46 ++++--
 12 files changed, 637 insertions(+), 212 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/2cc152eb/pxf/build.gradle
----------------------------------------------------------------------
diff --git a/pxf/build.gradle b/pxf/build.gradle
index e70104c..aed48b1 100644
--- a/pxf/build.gradle
+++ b/pxf/build.gradle
@@ -350,10 +350,6 @@ project('pxf-hive') {
         compile "org.apache.hive:hive-common:$hiveVersion"
         compile "org.apache.hive:hive-serde:$hiveVersion"
         testCompile 'pl.pragmatists:JUnitParams:1.0.2'
-        configurations {
-            // Remove hive-exec from unit tests as it causes VerifyError
-            testRuntime.exclude module: 'hive-exec'
-        }
     }
 
     ospackage {

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/2cc152eb/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/BasicFilter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/BasicFilter.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/BasicFilter.java
new file mode 100644
index 0000000..61c1206
--- /dev/null
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/BasicFilter.java
@@ -0,0 +1,56 @@
+package org.apache.hawq.pxf.api;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Basic filter provided for cases where the target storage system does not provide it own filter
+ * For example: Hbase storage provides its own filter but for a Writable based record in a
+ * SequenceFile there is no filter provided and so we need to have a default
+ */
+public class BasicFilter {
+    private FilterParser.Operation oper;
+    private FilterParser.ColumnIndex column;
+    private FilterParser.Constant constant;
+
+    /**
+     * Constructs a BasicFilter.
+     *
+     * @param oper the parse operation to perform
+     * @param column the column index
+     * @param constant the constant object
+     */
+    public BasicFilter(FilterParser.Operation oper, FilterParser.ColumnIndex column, FilterParser.Constant constant) {
+        this.oper = oper;
+        this.column = column;
+        this.constant = constant;
+    }
+
+    public FilterParser.Operation getOperation() {
+        return oper;
+    }
+
+    public FilterParser.ColumnIndex getColumn() {
+        return column;
+    }
+
+    public FilterParser.Constant getConstant() {
+        return constant;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/2cc152eb/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FilterParser.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FilterParser.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FilterParser.java
index 00fbf2b..80ab198 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FilterParser.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/FilterParser.java
@@ -51,36 +51,65 @@ public class FilterParser {
     private Stack<Object> operandsStack;
     private FilterBuilder filterBuilder;
 
-    private static Map<Integer, Operation> operatorTranslationMap = initOperatorTransMap();
-
     /** Supported operations by the parser. */
     public enum Operation {
+        NOOP,
         HDOP_LT,
         HDOP_GT,
         HDOP_LE,
         HDOP_GE,
         HDOP_EQ,
         HDOP_NE,
-        HDOP_AND,
         HDOP_LIKE
     }
 
     /**
+     * This enum was added to support filter pushdown with the logical operators OR and NOT
+     * HAWQ-964
+     */
+    public enum LogicalOperation {
+        HDOP_AND,
+        HDOP_OR,
+        HDOP_NOT
+    }
+
+    /**
      * Interface a user of FilterParser should implement.
      * This is used to let the user build filter expressions in the manner she sees fit.
      * When an operator is parsed, this function is called to let the user decide what to do with its operands.
      */
     public interface FilterBuilder {
         /**
-         * Builds the filter.
+         * Builds the filter for an operation
          *
-         * @param operation the parse operation to perform
+         * @param operation the parsed operation to perform
          * @param left the left operand
          * @param right the right operand
          * @return the built filter
          * @throws Exception if building the filter failed
          */
         public Object build(Operation operation, Object left, Object right) throws Exception;
+
+        /**
+         * Builds the filter for a logical operation and two operands
+         *
+         * @param operation the parsed logical operation to perform
+         * @param left the left operand
+         * @param right the right operand
+         * @return the built filter
+         * @throws Exception if building the filter failed
+         */
+        public Object build(LogicalOperation operation, Object left, Object right) throws Exception;
+
+        /**
+         * Builds the filter for a logical operation and one operand
+         *
+         * @param operation the parsed unary logical operation to perform
+         * @param filter the single operand
+         * @return the built filter
+         * @throws Exception if building the filter failed
+         */
+        public Object build(LogicalOperation operation, Object filter) throws Exception;
     }
 
     /** Represents a column index. */
@@ -110,42 +139,6 @@ public class FilterParser {
     }
 
     /**
-     * Basic filter provided for cases where the target storage system does not provide it own filter
-     * For example: Hbase storage provides its own filter but for a Writable based record in a
-     * SequenceFile there is no filter provided and so we need to have a default
-     */
-    static public class BasicFilter {
-        private Operation oper;
-        private ColumnIndex column;
-        private Constant constant;
-
-        /**
-         * Constructs a BasicFilter.
-         *
-         * @param oper the parse operation to perform
-         * @param column the column index
-         * @param constant the constant object
-         */
-        public BasicFilter(Operation oper, ColumnIndex column, Constant constant) {
-            this.oper = oper;
-            this.column = column;
-            this.constant = constant;
-        }
-
-        public Operation getOperation() {
-            return oper;
-        }
-
-        public ColumnIndex getColumn() {
-            return column;
-        }
-
-        public Constant getConstant() {
-            return constant;
-        }
-    }
-
-    /**
      * Thrown when a filter's parsing exception occurs.
      */
     @SuppressWarnings("serial")
@@ -175,6 +168,7 @@ public class FilterParser {
     public Object parse(String filter) throws Exception {
         index = 0;
         filterString = filter;
+        int opNumber;
 
         if (filter == null) {
             throw new FilterStringSyntaxException("filter parsing ended with no result");
@@ -191,8 +185,8 @@ public class FilterParser {
                     operandsStack.push(new Constant(parseParameter()));
                     break;
                 case 'o':
-                    // Parse and translate opcode
-                    Operation operation = operatorTranslationMap.get(safeToInt(parseNumber()));
+                    opNumber = safeToInt(parseNumber());
+                    Operation operation = opNumber < Operation.values().length ? Operation.values()[opNumber] : null;
                     if (operation == null) {
                         throw new FilterStringSyntaxException("unknown op ending at " + index);
                     }
@@ -209,6 +203,10 @@ public class FilterParser {
                     }
                     Object leftOperand = operandsStack.pop();
 
+                    if (leftOperand instanceof BasicFilter || rightOperand instanceof BasicFilter) {
+                        throw new FilterStringSyntaxException("missing logical operator before op " + operation + " at " + index);
+                    }
+
                     // Normalize order, evaluate
                     // Column should be on the left
                     Object result = (leftOperand instanceof Constant)
@@ -220,6 +218,28 @@ public class FilterParser {
                     // Store result on stack
                     operandsStack.push(result);
                     break;
+                // Handle parsing logical operator (HAWQ-964)
+                case 'l':
+                    opNumber = safeToInt(parseNumber());
+                    LogicalOperation logicalOperation = opNumber < LogicalOperation.values().length ? LogicalOperation.values()[opNumber] : null;
+
+                    if (logicalOperation == null) {
+                        throw new FilterStringSyntaxException("unknown op ending at " + index);
+                    }
+
+                    if (logicalOperation == LogicalOperation.HDOP_NOT) {
+                        Object exp = operandsStack.pop();
+                        result = filterBuilder.build(logicalOperation, exp);
+                    } else if (logicalOperation == LogicalOperation.HDOP_AND || logicalOperation == LogicalOperation.HDOP_OR){
+                        rightOperand  = operandsStack.pop();
+                        leftOperand = operandsStack.pop();
+
+                        result = filterBuilder.build(logicalOperation, leftOperand, rightOperand);
+                    } else {
+                        throw new FilterStringSyntaxException("unknown logical op code " + opNumber);
+                    }
+                    operandsStack.push(result);
+                    break;
                 default:
                     index--; // move index back to operand location
                     throw new FilterStringSyntaxException("unknown opcode " + op +
@@ -377,23 +397,4 @@ public class FilterParser {
 
         return operation;
     }
-
-    /**
-     * Create a translation table of opcodes to their enum meaning.
-     *
-     * These codes correspond to the codes in GPDB C code
-     * see gphdfilters.h in pxf protocol.
-     */
-    static private Map<Integer, Operation> initOperatorTransMap() {
-        Map<Integer, Operation> operatorTranslationMap = new HashMap<Integer, Operation>();
-        operatorTranslationMap.put(1, Operation.HDOP_LT);
-        operatorTranslationMap.put(2, Operation.HDOP_GT);
-        operatorTranslationMap.put(3, Operation.HDOP_LE);
-        operatorTranslationMap.put(4, Operation.HDOP_GE);
-        operatorTranslationMap.put(5, Operation.HDOP_EQ);
-        operatorTranslationMap.put(6, Operation.HDOP_NE);
-        operatorTranslationMap.put(7, Operation.HDOP_AND);
-        operatorTranslationMap.put(8, Operation.HDOP_LIKE);
-        return operatorTranslationMap;
-    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/2cc152eb/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/LogicalFilter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/LogicalFilter.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/LogicalFilter.java
new file mode 100644
index 0000000..d7e570e
--- /dev/null
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/LogicalFilter.java
@@ -0,0 +1,30 @@
+package org.apache.hawq.pxf.api;
+
+
+import java.util.List;
+
+public class LogicalFilter {
+    private FilterParser.LogicalOperation operator;
+    private List<Object> filterList;
+
+    public LogicalFilter(FilterParser.LogicalOperation operator, List<Object> result) {
+        this.operator = operator;
+        this.filterList = result;
+    }
+
+    public FilterParser.LogicalOperation getOperator() {
+        return operator;
+    }
+
+    public void setOperator(FilterParser.LogicalOperation operator) {
+        this.operator = operator;
+    }
+
+    public List<Object> getFilterList() {
+        return filterList;
+    }
+
+    public void setFilterList(List<Object> filterList) {
+        this.filterList = filterList;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/2cc152eb/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java
index 1ded4a3..53f37a0 100644
--- a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java
+++ b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/FilterParserTest.java
@@ -22,13 +22,17 @@ package org.apache.hawq.pxf.api;
 
 import org.apache.hawq.pxf.api.FilterParser.FilterBuilder;
 import org.apache.hawq.pxf.api.FilterParser.Operation;
+import org.apache.hawq.pxf.api.FilterParser.LogicalOperation;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.eq;
@@ -213,10 +217,6 @@ public class FilterParserTest {
         runParseOneOperation("this filter was build from HDOP_NE", filter, op);
 
         filter = "a1c2o7";
-        op = Operation.HDOP_AND;
-        runParseOneOperation("this filter was build from HDOP_AND", filter, op);
-        
-        filter = "a1c2o8";
         op = Operation.HDOP_LIKE;
         runParseOneOperation("this filter was built from HDOP_LIKE", filter, op);
     }
@@ -249,17 +249,13 @@ public class FilterParserTest {
         runParseOneOperation("this filter was build from HDOP_NE using reverse!", filter, op);
 
         filter = "c2a1o7";
-        op = Operation.HDOP_AND;
-        runParseOneOperation("this filter was build from HDOP_AND using reverse!", filter, op);
-        
-        filter = "c2a1o8";
         op = Operation.HDOP_LIKE;
         runParseOneOperation("this filter was build from HDOP_LIKE using reverse!", filter, op);
     }
 
     @Test
     public void parseFilterWith2Operations() throws Exception {
-        filter = "a1c\"first\"o5a2c2o2o7";
+        filter = "a1c\"first\"o5a2c2o2l0";
 
         Object firstOp = "first operation HDOP_EQ";
         Object secondOp = "second operation HDOP_GT";
@@ -273,7 +269,7 @@ public class FilterParserTest {
                 any(),
                 any())).thenReturn(secondOp);
 
-        when(filterBuilder.build(eq(Operation.HDOP_AND),
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_AND),
                 eq(firstOp),
                 eq(secondOp))).thenReturn(lastOp);
 
@@ -282,6 +278,116 @@ public class FilterParserTest {
         assertEquals(lastOp, result);
     }
 
+    @Test
+    public void parseLogicalAndOperator() throws Exception {
+        filter = "a1c0o5a2c3o2l0";
+
+        Object firstOp = "first operation HDOP_EQ";
+        Object secondOp = "second operation HDOP_GT";
+        Object lastOp = "filter with 2 operations connected by AND";
+
+        when(filterBuilder.build(eq(Operation.HDOP_EQ),
+                any(),
+                any())).thenReturn(firstOp);
+
+        when(filterBuilder.build(eq(Operation.HDOP_GT),
+                any(),
+                any())).thenReturn(secondOp);
+
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_AND),
+                any(),
+                any())).thenReturn(lastOp);
+
+        Object result = filterParser.parse(filter);
+
+        assertEquals(lastOp, result);
+    }
+
+    @Test
+    public void parseLogicalOrOperator() throws Exception {
+        filter = "a1c0o5a2c3o2l1";
+
+        Object firstOp = "first operation HDOP_EQ";
+        Object secondOp = "second operation HDOP_GT";
+        Object lastOp = "filter with 1 OR operator";
+
+        when(filterBuilder.build(eq(Operation.HDOP_EQ),
+                any(),
+                any())).thenReturn(firstOp);
+
+        when(filterBuilder.build(eq(Operation.HDOP_GT),
+                any(),
+                any())).thenReturn(secondOp);
+
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_OR),
+                any(),
+                any())).thenReturn(lastOp);
+
+        Object result = filterParser.parse(filter);
+        assertEquals(lastOp, result);
+    }
+
+    @Test
+    public void parseLogicalNotOperator() throws Exception {
+        filter = "a1c0o5l2";
+
+        Object firstOp = "first operation HDOP_EQ";
+        Object op = "filter with NOT operator";
+
+        when(filterBuilder.build(eq(Operation.HDOP_EQ),
+                any(),
+                any())).thenReturn(firstOp);
+
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_NOT),
+                any())).thenReturn(op);
+
+        Object result = filterParser.parse(filter);
+        assertEquals(op, result);
+    }
+
+    @Rule
+    public ExpectedException thrown = ExpectedException.none();
+    @Test
+    public void parseLogicalUnknownCodeError() throws Exception {
+        thrown.expect(FilterParser.FilterStringSyntaxException.class);
+        thrown.expectMessage("unknown op ending at 2");
+
+        filter = "l7";
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_AND),
+                any(),
+                any())).thenReturn(null);
+
+        Object result = filterParser.parse(filter);
+    }
+
+    @Test
+    public void parseLogicalOperatorNotExpression() throws Exception {
+        filter = "a1c\"first\"o5a2c2o2l0l2";
+        Object firstOp = "first operation HDOP_EQ";
+        Object secondOp = "second operation HDOP_GT";
+        Object thirdOp = "filter with 2 operations connected by AND";
+        Object lastOp = "filter with 1 NOT operation";
+
+        when(filterBuilder.build(eq(Operation.HDOP_EQ),
+                any(),
+                any())).thenReturn(firstOp);
+
+
+        when(filterBuilder.build(eq(Operation.HDOP_GT),
+                any(),
+                any())).thenReturn(secondOp);
+
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_AND),
+                any(),
+                any())).thenReturn(thirdOp);
+
+        when(filterBuilder.build(eq(LogicalOperation.HDOP_NOT),
+                any())).thenReturn(lastOp);
+
+        Object result = filterParser.parse(filter);
+        assertEquals(lastOp, result);
+    }
+
 	/*
      * Helper functions
 	 */

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/2cc152eb/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java b/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java
index 8eadc88..1b06f81 100644
--- a/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java
+++ b/pxf/pxf-hbase/src/main/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilder.java
@@ -29,8 +29,11 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.filter.*;
 import org.apache.hadoop.hbase.util.Bytes;
 
+import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import static org.apache.hawq.pxf.api.io.DataType.TEXT;
 
@@ -52,17 +55,32 @@ import static org.apache.hawq.pxf.api.io.DataType.TEXT;
  */
 public class HBaseFilterBuilder implements FilterParser.FilterBuilder {
     private Map<FilterParser.Operation, CompareFilter.CompareOp> operatorsMap;
+    private Map<FilterParser.LogicalOperation, FilterList.Operator> logicalOperatorsMap;
     private byte[] startKey;
     private byte[] endKey;
     private HBaseTupleDescription tupleDescription;
+    private static final String NOT_OP = "l2";
 
     public HBaseFilterBuilder(HBaseTupleDescription tupleDescription) {
         initOperatorsMap();
+        initLogicalOperatorsMap();
         startKey = HConstants.EMPTY_START_ROW;
         endKey = HConstants.EMPTY_END_ROW;
         this.tupleDescription = tupleDescription;
     }
 
+    private boolean filterNotOpPresent(String filterString) {
+        if (filterString.contains(NOT_OP)) {
+            String regex = ".*[o\\d|l\\d]l2.*";
+            Pattern p = Pattern.compile(regex);
+            Matcher m = p.matcher(filterString);
+            return m.matches();
+        } else {
+            return false;
+        }
+
+    }
+
     /**
      * Translates a filterString into a HBase {@link Filter} object.
      *
@@ -71,11 +89,15 @@ public class HBaseFilterBuilder implements FilterParser.FilterBuilder {
      * @throws Exception if parsing failed
      */
     public Filter getFilterObject(String filterString) throws Exception {
+        // First check for NOT, HBase does not support this
+        if (filterNotOpPresent(filterString))
+            return null;
+
         FilterParser parser = new FilterParser(this);
         Object result = parser.parse(filterString);
 
         if (!(result instanceof Filter)) {
-            throw new Exception("String " + filterString + " resolved to no filter");
+            throw new Exception("String " + filterString + " couldn't be resolved to any supported filter");
         }
 
         return (Filter) result;
@@ -122,18 +144,6 @@ public class HBaseFilterBuilder implements FilterParser.FilterBuilder {
     public Object build(FilterParser.Operation opId,
                         Object leftOperand,
                         Object rightOperand) throws Exception {
-        if (leftOperand instanceof Filter) {
-            if (opId != FilterParser.Operation.HDOP_AND ||
-                    !(rightOperand instanceof Filter)) {
-                throw new Exception("Only AND is allowed between compound expressions");
-            }
-
-            return handleCompoundOperations((Filter) leftOperand, (Filter) rightOperand);
-        }
-
-        if (!(rightOperand instanceof FilterParser.Constant)) {
-            throw new Exception("expressions of column-op-column are not supported");
-        }
 
         // Assume column is on the left
         return handleSimpleOperations(opId,
@@ -141,11 +151,21 @@ public class HBaseFilterBuilder implements FilterParser.FilterBuilder {
                 (FilterParser.Constant) rightOperand);
     }
 
+    @Override
+    public Object build(FilterParser.LogicalOperation opId, Object leftOperand, Object rightOperand) {
+        return handleCompoundOperations(opId, (Filter) leftOperand, (Filter) rightOperand);
+    }
+
+    @Override
+    public Object build(FilterParser.LogicalOperation opId, Object leftOperand) {
+        return null;
+    }
+
     /**
      * Initializes the {@link #operatorsMap} with appropriate values.
      */
     private void initOperatorsMap() {
-        operatorsMap = new HashMap<FilterParser.Operation, CompareFilter.CompareOp>();
+        operatorsMap = new EnumMap<FilterParser.Operation, CompareFilter.CompareOp>(FilterParser.Operation.class);
         operatorsMap.put(FilterParser.Operation.HDOP_LT, CompareFilter.CompareOp.LESS); // "<"
         operatorsMap.put(FilterParser.Operation.HDOP_GT, CompareFilter.CompareOp.GREATER); // ">"
         operatorsMap.put(FilterParser.Operation.HDOP_LE, CompareFilter.CompareOp.LESS_OR_EQUAL); // "<="
@@ -154,6 +174,12 @@ public class HBaseFilterBuilder implements FilterParser.FilterBuilder {
         operatorsMap.put(FilterParser.Operation.HDOP_NE, CompareFilter.CompareOp.NOT_EQUAL); // "!="
     }
 
+    private void initLogicalOperatorsMap() {
+        logicalOperatorsMap = new EnumMap<>(FilterParser.LogicalOperation.class);
+        logicalOperatorsMap.put(FilterParser.LogicalOperation.HDOP_AND, FilterList.Operator.MUST_PASS_ALL);
+        logicalOperatorsMap.put(FilterParser.LogicalOperation.HDOP_OR, FilterList.Operator.MUST_PASS_ONE);
+    }
+
     /**
      * Handles simple column-operator-constant expressions.
      * Creates a special filter in the case the column is the row key column.
@@ -227,19 +253,8 @@ public class HBaseFilterBuilder implements FilterParser.FilterBuilder {
      * <p>
      * Currently, 1, 2 can occur, since no parenthesis are used.
      */
-    private Filter handleCompoundOperations(Filter left, Filter right) {
-        FilterList result;
-
-        if (left instanceof FilterList) {
-            result = (FilterList) left;
-            result.addFilter(right);
-
-            return result;
-        }
-
-        result = new FilterList(FilterList.Operator.MUST_PASS_ALL, new Filter[] {left, right});
-
-        return result;
+    private Filter handleCompoundOperations(FilterParser.LogicalOperation opId, Filter left, Filter right) {
+        return new FilterList(logicalOperatorsMap.get(opId), new Filter[] {left, right});
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/2cc152eb/pxf/pxf-hbase/src/test/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilderTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hbase/src/test/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilderTest.java b/pxf/pxf-hbase/src/test/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilderTest.java
new file mode 100644
index 0000000..d8f270c
--- /dev/null
+++ b/pxf/pxf-hbase/src/test/java/org/apache/hawq/pxf/plugins/hbase/HBaseFilterBuilderTest.java
@@ -0,0 +1,34 @@
+package org.apache.hawq.pxf.plugins.hbase;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import static org.junit.Assert.*;
+
+public class HBaseFilterBuilderTest {
+    @Rule
+    public ExpectedException thrown = ExpectedException.none();
+
+    @Test
+    public void parseNOTExpressionIgnored() throws Exception {
+        String filter = "a1c2o1a1c2o2l0l2";
+        HBaseFilterBuilder builder = new HBaseFilterBuilder(null);
+        assertNull(builder.getFilterObject(filter));
+
+        filter = "a1c2o1a1c2o2l2l0";
+        builder = new HBaseFilterBuilder(null);
+        assertNull(builder.getFilterObject(filter));
+    }
+
+    @Test
+    public void parseNOTOpCodeInConstant() throws Exception {
+        String filter = "a1c\"l2\"o1a1c2o2l0";
+        HBaseFilterBuilder builder = new HBaseFilterBuilder(null);
+        //Testing that we get past the parsing stage
+        //Very crude but it avoids instantiating all the necessary dependencies
+        thrown.expect(NullPointerException.class);
+        builder.getFilterObject(filter);
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/2cc152eb/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
index ab40b3c..1722093 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveAccessor.java
@@ -19,7 +19,12 @@ package org.apache.hawq.pxf.plugins.hive;
  * under the License.
  */
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hawq.pxf.api.BasicFilter;
 import org.apache.hawq.pxf.api.FilterParser;
+import org.apache.hawq.pxf.api.LogicalFilter;
+import org.apache.hawq.pxf.api.UnsupportedTypeException;
 import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
 import org.apache.hawq.pxf.api.utilities.InputData;
 import org.apache.hawq.pxf.plugins.hdfs.HdfsSplittableDataAccessor;
@@ -31,9 +36,16 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
 
 import java.io.IOException;
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
+import static org.apache.hawq.pxf.api.io.DataType.*;
+import static org.apache.hawq.pxf.api.io.DataType.BPCHAR;
+import static org.apache.hawq.pxf.api.io.DataType.BYTEA;
+
 /**
  * Accessor for Hive tables. The accessor will open and read a split belonging
  * to a Hive table. Opening a split means creating the corresponding InputFormat
@@ -47,6 +59,7 @@ import java.util.List;
 public class HiveAccessor extends HdfsSplittableDataAccessor {
     private static final Log LOG = LogFactory.getLog(HiveAccessor.class);
     List<HivePartition> partitions;
+    String HIVE_DEFAULT_PARTITION = "__HIVE_DEFAULT_PARTITION__";
 
     class HivePartition {
         public String name;
@@ -205,55 +218,139 @@ public class HiveAccessor extends HdfsSplittableDataAccessor {
         return testOneFilter(partitionFields, filter, inputData);
     }
 
-    /*
-     * We are testing one filter against all the partition fields. The filter
-     * has the form "fieldA = valueA". The partitions have the form
-     * partitionOne=valueOne/partitionTwo=ValueTwo/partitionThree=valueThree 1.
-     * For a filter to match one of the partitions, lets say partitionA for
-     * example, we need: fieldA = partittionOne and valueA = valueOne. If this
-     * condition occurs, we return true. 2. If fieldA does not match any one of
-     * the partition fields we also return true, it means we ignore this filter
-     * because it is not on a partition field. 3. If fieldA = partittionOne and
-     * valueA != valueOne, then we return false.
-     */
-    private boolean testOneFilter(List<HivePartition> partitionFields,
-                                  Object filter, InputData input) {
-        // Let's look first at the filter
-        FilterParser.BasicFilter bFilter = (FilterParser.BasicFilter) filter;
+    private boolean testForUnsupportedOperators(List<Object> filterList) {
+        boolean nonAndOp = true;
+        for (Object filter : filterList) {
+            if (filter instanceof LogicalFilter) {
+                if (((LogicalFilter) filter).getOperator() != FilterParser.LogicalOperation.HDOP_AND)
+                    return false;
+                if (((LogicalFilter) filter).getFilterList() != null)
+                    nonAndOp = testForUnsupportedOperators(((LogicalFilter) filter).getFilterList());
+            }
+        }
+        return nonAndOp;
+    }
 
-        boolean isFilterOperationEqual = (bFilter.getOperation() == FilterParser.Operation.HDOP_EQ);
-        if (!isFilterOperationEqual) /*
+    private boolean testForPartitionEquality(List<HivePartition> partitionFields, List<Object> filterList, InputData input) {
+        boolean partitionAllowed = true;
+        for (Object filter : filterList) {
+            if (filter instanceof BasicFilter) {
+                BasicFilter bFilter = (BasicFilter) filter;
+                boolean isFilterOperationEqual = (bFilter.getOperation() == FilterParser.Operation.HDOP_EQ);
+                if (!isFilterOperationEqual) /*
                                       * in case this is not an "equality filter"
                                       * we ignore it here - in partition
                                       * filtering
                                       */{
-            return true;
-        }
+                    return true;
+                }
+
+                int filterColumnIndex = bFilter.getColumn().index();
+                String filterValue = bFilter.getConstant().constant().toString();
+                ColumnDescriptor filterColumn = input.getColumn(filterColumnIndex);
+                String filterColumnName = filterColumn.columnName();
 
-        int filterColumnIndex = bFilter.getColumn().index();
-        String filterValue = bFilter.getConstant().constant().toString();
-        ColumnDescriptor filterColumn = input.getColumn(filterColumnIndex);
-        String filterColumnName = filterColumn.columnName();
+                for (HivePartition partition : partitionFields) {
+                    if (filterColumnName.equals(partition.name)) {
 
-        for (HivePartition partition : partitionFields) {
-            if (filterColumnName.equals(partition.name)) {
                 /*
                  * the filter field matches a partition field, but the values do
                  * not match
                  */
-                return filterValue.equals(partition.val);
-            }
-        }
+                        boolean keepPartition = filterValue.equals(partition.val);
+
+                        /*
+                         * If the string comparison fails then we should check the comparison of
+                         * the two operands as typed values
+                         * If the partition value equals HIVE_DEFAULT_PARTITION just skip
+                         */
+                        if (!keepPartition && !partition.val.equals(HIVE_DEFAULT_PARTITION)){
+                            keepPartition = testFilterByType(filterValue, partition);
+                        }
+                        return keepPartition;
+                    }
+                }
 
         /*
          * filter field did not match any partition field, so we ignore this
          * filter and hence return true
          */
-        return true;
+            } else if (filter instanceof LogicalFilter) {
+                partitionAllowed = testForPartitionEquality(partitionFields, ((LogicalFilter) filter).getFilterList(), input);
+            }
+        }
+        return partitionAllowed;
+    }
+
+    /*
+     * Given two values in String form and their type, convert each to the same type and do an equality check
+     */
+    private boolean testFilterByType(String filterValue, HivePartition partition) {
+        boolean result;
+        switch (partition.type) {
+            case serdeConstants.BOOLEAN_TYPE_NAME:
+                result = Boolean.valueOf(filterValue).equals(Boolean.valueOf(partition.val));
+                break;
+            case serdeConstants.TINYINT_TYPE_NAME:
+            case serdeConstants.SMALLINT_TYPE_NAME:
+                result = (Short.parseShort(filterValue) == Short.parseShort(partition.val));
+                break;
+            case serdeConstants.INT_TYPE_NAME:
+                result = (Integer.parseInt(filterValue) == Integer.parseInt(partition.val));
+                break;
+            case serdeConstants.BIGINT_TYPE_NAME:
+                result = (Long.parseLong(filterValue) == Long.parseLong(partition.val));
+                break;
+            case serdeConstants.FLOAT_TYPE_NAME:
+                result = (Float.parseFloat(filterValue) == Float.parseFloat(partition.val));
+                break;
+            case serdeConstants.DOUBLE_TYPE_NAME:
+                result = (Double.parseDouble(filterValue) == Double.parseDouble(partition.val));
+                break;
+            case serdeConstants.TIMESTAMP_TYPE_NAME:
+                result = Timestamp.valueOf(filterValue).equals(Timestamp.valueOf(partition.val));
+                break;
+            case serdeConstants.DATE_TYPE_NAME:
+                result = Date.valueOf(filterValue).equals(Date.valueOf(partition.val));
+                break;
+            case serdeConstants.DECIMAL_TYPE_NAME:
+                result = HiveDecimal.create(filterValue).bigDecimalValue().equals(HiveDecimal.create(partition.val).bigDecimalValue());
+                break;
+            case serdeConstants.BINARY_TYPE_NAME:
+                result = Arrays.equals(filterValue.getBytes(), partition.val.getBytes());
+                break;
+            case serdeConstants.STRING_TYPE_NAME:
+            case serdeConstants.VARCHAR_TYPE_NAME:
+            case serdeConstants.CHAR_TYPE_NAME:
+            default:
+               result = false;
+        }
+
+        return result;
+    }
+
+    /*
+     * We are testing one filter against all the partition fields. The filter
+     * has the form "fieldA = valueA". The partitions have the form
+     * partitionOne=valueOne/partitionTwo=ValueTwo/partitionThree=valueThree 1.
+     * For a filter to match one of the partitions, let's say partitionA for
+     * example, we need: fieldA = partitionOne and valueA = valueOne. If this
+     * condition occurs, we return true. 2. If fieldA does not match any one of
+     * the partition fields we also return true, it means we ignore this filter
+     * because it is not on a partition field. 3. If fieldA = partitionOne and
+     * valueA != valueOne, then we return false.
+     */
+    private boolean testOneFilter(List<HivePartition> partitionFields,
+                                  Object filter, InputData input) {
+        // Let's look first at the filter and escape if there are any OR or NOT ops
+        if (!testForUnsupportedOperators(Arrays.asList(filter)))
+            return true;
+
+        return testForPartitionEquality(partitionFields, Arrays.asList(filter), input);
     }
 
     private void printOneBasicFilter(Object filter) {
-        FilterParser.BasicFilter bFilter = (FilterParser.BasicFilter) filter;
+        BasicFilter bFilter = (BasicFilter) filter;
         boolean isOperationEqual = (bFilter.getOperation() == FilterParser.Operation.HDOP_EQ);
         int columnIndex = bFilter.getColumn().index();
         String value = bFilter.getConstant().constant().toString();

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/2cc152eb/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
index 2fe31cd..97a297e 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
@@ -46,10 +46,12 @@ import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hawq.pxf.api.BasicFilter;
 import org.apache.hawq.pxf.api.FilterParser;
 import org.apache.hawq.pxf.api.Fragment;
 import org.apache.hawq.pxf.api.Fragmenter;
 import org.apache.hawq.pxf.api.FragmentsStats;
+import org.apache.hawq.pxf.api.LogicalFilter;
 import org.apache.hawq.pxf.api.Metadata;
 import org.apache.hawq.pxf.api.io.DataType;
 import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
@@ -80,6 +82,11 @@ public class HiveDataFragmenter extends Fragmenter {
     static final String HIVE_NO_PART_TBL = "!HNPT!";
 
     static final String HIVE_API_EQ = " = ";
+    static final String HIVE_API_LT = " < ";
+    static final String HIVE_API_GT = " > ";
+    static final String HIVE_API_LTE = " <= ";
+    static final String HIVE_API_GTE = " >= ";
+    static final String HIVE_API_NE = " != ";
     static final String HIVE_API_DQUOTE = "\"";
 
     private JobConf jobConf;
@@ -404,24 +411,40 @@ public class HiveDataFragmenter extends Fragmenter {
         HiveFilterBuilder eval = new HiveFilterBuilder(inputData);
         Object filter = eval.getFilterObject(filterInput);
 
-        String prefix = "";
-
-        if (filter instanceof List) {
-
-            for (Object f : (List<?>) filter) {
-                if (buildSingleFilter(f, filtersString, prefix)) {
-                    // Set 'and' operator between each matched partition filter.
-                    prefix = " and ";
-                }
-            }
-
+        if (filter instanceof LogicalFilter) {
+            buildCompoundFilter((LogicalFilter) filter, filtersString);
         } else {
-            buildSingleFilter(filter, filtersString, prefix);
+            buildSingleFilter(filter, filtersString, "");
         }
 
         return filtersString.toString();
     }
 
+    private void buildCompoundFilter(LogicalFilter filter, StringBuilder filterString) throws Exception{
+        String prefix;
+        switch(filter.getOperator()) {
+            case HDOP_AND:
+                prefix = " and ";
+                break;
+            case HDOP_OR:
+                prefix = " or ";
+                break;
+            case HDOP_NOT:
+                prefix = " not ";
+                break;
+            default:
+                prefix = "";
+        }
+
+        for (Object f : filter.getFilterList()) {
+            if (f instanceof LogicalFilter) {
+                buildCompoundFilter((LogicalFilter) f, filterString);
+            } else {
+                buildSingleFilter(f, filterString, prefix);
+            }
+        }
+    }
+
     /*
      * Build filter string for a single filter and append to the filters string.
      * Filter string shell be added if filter name match hive partition name
@@ -433,7 +456,7 @@ public class HiveDataFragmenter extends Fragmenter {
             throws Exception {
 
         // Let's look first at the filter
-        FilterParser.BasicFilter bFilter = (FilterParser.BasicFilter) filter;
+        BasicFilter bFilter = (BasicFilter) filter;
 
         // In case this is not an "equality filter", we ignore this filter (no
         // add to filter list)
@@ -463,9 +486,31 @@ public class HiveDataFragmenter extends Fragmenter {
             return false;
         }
 
-        filtersString.append(prefix);
+        if (filtersString.length() != 0)
+            filtersString.append(prefix);
         filtersString.append(filterColumnName);
-        filtersString.append(HIVE_API_EQ);
+
+        switch(((BasicFilter) filter).getOperation()) {
+            case HDOP_EQ:
+                filtersString.append(HIVE_API_EQ);
+                break;
+            case HDOP_LT:
+                filtersString.append(HIVE_API_LT);
+                break;
+            case HDOP_GT:
+                filtersString.append(HIVE_API_GT);
+                break;
+            case HDOP_LE:
+                filtersString.append(HIVE_API_LTE);
+                break;
+            case HDOP_GE:
+                filtersString.append(HIVE_API_GTE);
+                break;
+            case HDOP_NE:
+                filtersString.append(HIVE_API_NE);
+                break;
+        }
+
         filtersString.append(HIVE_API_DQUOTE);
         filtersString.append(filterValue);
         filtersString.append(HIVE_API_DQUOTE);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/2cc152eb/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilder.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilder.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilder.java
index da20f74..bd82a3b 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilder.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilder.java
@@ -20,17 +20,20 @@ package org.apache.hawq.pxf.plugins.hive;
  */
 
 
+import org.apache.hawq.pxf.api.BasicFilter;
 import org.apache.hawq.pxf.api.FilterParser;
+import org.apache.hawq.pxf.api.LogicalFilter;
 import org.apache.hawq.pxf.api.utilities.InputData;
 
+import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
 /**
  * Uses the filter parser code to build a filter object, either simple - a
- * single {@link org.apache.hawq.pxf.api.FilterParser.BasicFilter} object or a
+ * single {@link BasicFilter} object or a
  * compound - a {@link java.util.List} of
- * {@link org.apache.hawq.pxf.api.FilterParser.BasicFilter} objects.
+ * {@link BasicFilter} objects.
  * {@link org.apache.hawq.pxf.plugins.hive.HiveAccessor} will use the filter for
  * partition filtering.
  */
@@ -47,13 +50,13 @@ public class HiveFilterBuilder implements FilterParser.FilterBuilder {
     }
 
     /**
-     * Translates a filterString into a {@link org.apache.hawq.pxf.api.FilterParser.BasicFilter} or a
+     * Translates a filterString into a {@link BasicFilter} or a
      * list of such filters.
      *
      * @param filterString the string representation of the filter
-     * @return a single {@link org.apache.hawq.pxf.api.FilterParser.BasicFilter}
+     * @return a single {@link BasicFilter}
      *         object or a {@link java.util.List} of
-     *         {@link org.apache.hawq.pxf.api.FilterParser.BasicFilter} objects.
+     *         {@link BasicFilter} objects.
      * @throws Exception if parsing the filter failed or filter is not a basic
      *             filter or list of basic filters
      */
@@ -61,7 +64,7 @@ public class HiveFilterBuilder implements FilterParser.FilterBuilder {
         FilterParser parser = new FilterParser(this);
         Object result = parser.parse(filterString);
 
-        if (!(result instanceof FilterParser.BasicFilter)
+        if (!(result instanceof LogicalFilter) && !(result instanceof BasicFilter)
                 && !(result instanceof List)) {
             throw new Exception("String " + filterString
                     + " resolved to no filter");
@@ -71,33 +74,19 @@ public class HiveFilterBuilder implements FilterParser.FilterBuilder {
     }
 
     @Override
+    public Object build(FilterParser.LogicalOperation op, Object leftOperand, Object rightOperand) {
+        return handleLogicalOperation(op, leftOperand, rightOperand);
+    }
+
+    @Override
+    public Object build(FilterParser.LogicalOperation op, Object filter) {
+        return handleLogicalOperation(op, filter);
+    }
+
+    @Override
     @SuppressWarnings("unchecked")
     public Object build(FilterParser.Operation opId, Object leftOperand,
                         Object rightOperand) throws Exception {
-        if (leftOperand instanceof FilterParser.BasicFilter
-                || leftOperand instanceof List) {
-            if (opId != FilterParser.Operation.HDOP_AND
-                    || !(rightOperand instanceof FilterParser.BasicFilter)) {
-                throw new Exception(
-                        "Only AND is allowed between compound expressions");
-            }
-
-            if (leftOperand instanceof List) {
-                return handleCompoundOperations(
-                        (List<FilterParser.BasicFilter>) leftOperand,
-                        (FilterParser.BasicFilter) rightOperand);
-            } else {
-                return handleCompoundOperations(
-                        (FilterParser.BasicFilter) leftOperand,
-                        (FilterParser.BasicFilter) rightOperand);
-            }
-        }
-
-        if (!(rightOperand instanceof FilterParser.Constant)) {
-            throw new Exception(
-                    "expressions of column-op-column are not supported");
-        }
-
         // Assume column is on the left
         return handleSimpleOperations(opId,
                 (FilterParser.ColumnIndex) leftOperand,
@@ -108,10 +97,10 @@ public class HiveFilterBuilder implements FilterParser.FilterBuilder {
      * Handles simple column-operator-constant expressions Creates a special
      * filter in the case the column is the row key column
      */
-    private FilterParser.BasicFilter handleSimpleOperations(FilterParser.Operation opId,
-                                                            FilterParser.ColumnIndex column,
-                                                            FilterParser.Constant constant) {
-        return new FilterParser.BasicFilter(opId, column, constant);
+    private BasicFilter handleSimpleOperations(FilterParser.Operation opId,
+                                               FilterParser.ColumnIndex column,
+                                               FilterParser.Constant constant) {
+        return new BasicFilter(opId, column, constant);
     }
 
     /**
@@ -131,19 +120,32 @@ public class HiveFilterBuilder implements FilterParser.FilterBuilder {
      * @param right right hand filter
      * @return list of filters constructing the filter tree
      */
-    private List<FilterParser.BasicFilter> handleCompoundOperations(List<FilterParser.BasicFilter> left,
-                                                                    FilterParser.BasicFilter right) {
+    private List<BasicFilter> handleCompoundOperations(List<BasicFilter> left,
+                                                       BasicFilter right) {
         left.add(right);
         return left;
     }
 
-    private List<FilterParser.BasicFilter> handleCompoundOperations(FilterParser.BasicFilter left,
-                                                                    FilterParser.BasicFilter right) {
-        List<FilterParser.BasicFilter> result = new LinkedList<FilterParser.BasicFilter>();
+    private List<BasicFilter> handleCompoundOperations(BasicFilter left,
+                                                       BasicFilter right) {
+        List<BasicFilter> result = new LinkedList<BasicFilter>();
 
         result.add(left);
         result.add(right);
 
         return result;
     }
+
+    private Object handleLogicalOperation(FilterParser.LogicalOperation operator, Object leftOperand, Object rightOperand) {
+
+        List<Object> result = new LinkedList<>();
+
+        result.add(leftOperand);
+        result.add(rightOperand);
+        return new LogicalFilter(operator, result);
+    }
+
+    private Object handleLogicalOperation(FilterParser.LogicalOperation operator, Object filter) {
+        return new LogicalFilter(operator, Arrays.asList(filter));
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/2cc152eb/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
index 23fc66e..ab2f96e 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
@@ -23,11 +23,13 @@ package org.apache.hawq.pxf.plugins.hive;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
-import org.apache.hawq.pxf.api.FilterParser;
+import org.apache.hawq.pxf.api.BasicFilter;
+import org.apache.hawq.pxf.api.LogicalFilter;
 import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
 import org.apache.hawq.pxf.api.utilities.InputData;
 import org.apache.commons.lang.StringUtils;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import static org.apache.hawq.pxf.plugins.hive.HiveInputFormatFragmenter.PXF_HIVE_SERDES;
@@ -97,24 +99,49 @@ public class HiveORCAccessor extends HiveAccessor {
         String filterStr = inputData.getFilterString();
         HiveFilterBuilder eval = new HiveFilterBuilder(inputData);
         Object filter = eval.getFilterObject(filterStr);
-
         SearchArgument.Builder filterBuilder = SearchArgumentFactory.newBuilder();
-        filterBuilder.startAnd();
-        if (filter instanceof List) {
-            for (Object f : (List<?>) filter) {
-                buildArgument(filterBuilder, f);
-            }
-        } else {
+
+        /*
+         * If there is only a single filter it will be of type BasicFilter;
+         * we need special-case logic to make sure to still wrap the filter in a
+         * startAnd() & end() block
+         */
+        if (filter instanceof LogicalFilter)
+            buildExpression(filterBuilder, Arrays.asList(filter));
+        else {
+            filterBuilder.startAnd();
             buildArgument(filterBuilder, filter);
+            filterBuilder.end();
         }
-        filterBuilder.end();
         SearchArgument sarg = filterBuilder.build();
         jobConf.set(SARG_PUSHDOWN, sarg.toKryo());
     }
 
+    private void buildExpression(SearchArgument.Builder builder, List<Object> filterList) {
+        for (Object f : filterList) {
+            if (f instanceof LogicalFilter) {
+                switch(((LogicalFilter) f).getOperator()) {
+                    case HDOP_OR:
+                        builder.startOr();
+                        break;
+                    case HDOP_AND:
+                        builder.startAnd();
+                        break;
+                    case HDOP_NOT:
+                        builder.startNot();
+                        break;
+                }
+                buildExpression(builder, ((LogicalFilter) f).getFilterList());
+                builder.end();
+            } else {
+                buildArgument(builder, f);
+            }
+        }
+    }
+
     private void buildArgument(SearchArgument.Builder builder, Object filterObj) {
         /* The below functions will not be compatible and requires update  with Hive 2.0 APIs */
-        FilterParser.BasicFilter filter = (FilterParser.BasicFilter) filterObj;
+        BasicFilter filter = (BasicFilter) filterObj;
         int filterColumnIndex = filter.getColumn().index();
         Object filterValue = filter.getConstant().constant();
         ColumnDescriptor filterColumn = inputData.getColumn(filterColumnIndex);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/2cc152eb/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilderTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilderTest.java b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilderTest.java
index bfbfaa4..e0e6536 100755
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilderTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveFilterBuilderTest.java
@@ -20,11 +20,12 @@ package org.apache.hawq.pxf.plugins.hive;
  */
 
 
+import org.apache.hawq.pxf.api.FilterParser.LogicalOperation;
+import org.apache.hawq.pxf.api.LogicalFilter;
 import org.junit.Test;
 
-import java.util.List;
+import org.apache.hawq.pxf.api.BasicFilter;
 
-import static org.apache.hawq.pxf.api.FilterParser.BasicFilter;
 import static org.apache.hawq.pxf.api.FilterParser.Operation;
 import static org.apache.hawq.pxf.api.FilterParser.Operation.*;
 import static org.junit.Assert.assertEquals;
@@ -33,18 +34,33 @@ public class HiveFilterBuilderTest {
     @Test
     public void parseFilterWithThreeOperations() throws Exception {
         HiveFilterBuilder builder = new HiveFilterBuilder(null);
-        String[] consts = new String[] {"first", "2", "3"};
-        Operation[] ops = new Operation[] {HDOP_EQ, HDOP_GT, HDOP_LT};
-        int[] idx = new int[] {1, 2, 3};
-
-        @SuppressWarnings("unchecked")
-        List<BasicFilter> filterList = (List) builder.getFilterObject("a1c\"first\"o5a2c2o2o7a3c3o1o7");
-        assertEquals(consts.length, filterList.size());
-        for (int i = 0; i < filterList.size(); i++) {
-            BasicFilter filter = filterList.get(i);
-            assertEquals(filter.getConstant().constant().toString(), consts[i]);
-            assertEquals(filter.getOperation(), ops[i]);
-            assertEquals(filter.getColumn().index(), idx[i]);
-        }
+        String[] consts = new String[] {"first", "2"};
+        Operation[] ops = new Operation[] {HDOP_EQ, HDOP_GT};
+        int[] idx = new int[] {1, 2};
+
+        LogicalFilter filterList = (LogicalFilter) builder.getFilterObject("a1c\"first\"o5a2c2o2l0");
+        assertEquals(LogicalOperation.HDOP_AND, filterList.getOperator());
+        BasicFilter leftOperand = (BasicFilter) filterList.getFilterList().get(0);
+        assertEquals(consts[0], leftOperand.getConstant().constant());
+        assertEquals(idx[0], leftOperand.getColumn().index());
+        assertEquals(ops[0], leftOperand.getOperation());
+    }
+
+    @Test
+    public void parseFilterWithLogicalOperation() throws Exception {
+        HiveFilterBuilder builder = new HiveFilterBuilder(null);
+        LogicalFilter filter = (LogicalFilter) builder.getFilterObject("a1c\"first\"o5a2c2o2l0");
+        assertEquals(LogicalOperation.HDOP_AND, filter.getOperator());
+        assertEquals(2, filter.getFilterList().size());
     }
+
+    @Test
+    public void parseNestedExpressionWithLogicalOperation() throws Exception {
+        HiveFilterBuilder builder = new HiveFilterBuilder(null);
+        LogicalFilter filter = (LogicalFilter) builder.getFilterObject("a1c\"first\"o5a2c2o2l0a1c1o1l1");
+        assertEquals(LogicalOperation.HDOP_OR, filter.getOperator());
+        assertEquals(LogicalOperation.HDOP_AND, ((LogicalFilter) filter.getFilterList().get(0)).getOperator());
+        assertEquals(HDOP_LT, ((BasicFilter) filter.getFilterList().get(1)).getOperation());
+    }
+
 }


Mime
View raw message