hawq-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From odiache...@apache.org
Subject incubator-hawq git commit: HAWQ-1057. Skip SARG_PUSHDOWN parameter if any of filters isn't supported.
Date Tue, 18 Oct 2016 22:57:54 GMT
Repository: incubator-hawq
Updated Branches:
  refs/heads/master c8ef3a0ac -> 6e921df29


HAWQ-1057. Skip SARG_PUSHDOWN parameter if any of filters isn't supported.


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/6e921df2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/6e921df2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/6e921df2

Branch: refs/heads/master
Commit: 6e921df29d25d846b5d4916994c807b8b619662a
Parents: c8ef3a0
Author: Oleksandr Diachenko <odiachenko@pivotal.io>
Authored: Tue Oct 18 15:57:02 2016 -0700
Committer: Oleksandr Diachenko <odiachenko@pivotal.io>
Committed: Tue Oct 18 15:57:20 2016 -0700

----------------------------------------------------------------------
 .../hawq/pxf/plugins/hive/HiveORCAccessor.java  | 37 +++++++++++++++-----
 1 file changed, 28 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/6e921df2/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
index ab2f96e..1cea9c7 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessor.java
@@ -20,6 +20,8 @@ package org.apache.hawq.pxf.plugins.hive;
  */
 
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
@@ -41,6 +43,8 @@ import static org.apache.hawq.pxf.plugins.hive.HiveInputFormatFragmenter.PXF_HIV
  */
 public class HiveORCAccessor extends HiveAccessor {
 
+    private static final Log LOG = LogFactory.getLog(HiveORCAccessor.class);
+
     private final String READ_COLUMN_IDS_CONF_STR = "hive.io.file.readcolumn.ids";
     private final String READ_ALL_COLUMNS = "hive.io.file.read.all.columns";
     private final String READ_COLUMN_NAMES_CONF_STR = "hive.io.file.readcolumn.names";
@@ -106,18 +110,23 @@ public class HiveORCAccessor extends HiveAccessor {
          * need special case logic to make sure to still wrap the filter in a
          * startAnd() & end() block
          */
-        if (filter instanceof LogicalFilter)
-            buildExpression(filterBuilder, Arrays.asList(filter));
+        if (filter instanceof LogicalFilter) {
+            if (!buildExpression(filterBuilder, Arrays.asList(filter))) {
+                return;
+            }
+        }
         else {
             filterBuilder.startAnd();
-            buildArgument(filterBuilder, filter);
+            if(!buildArgument(filterBuilder, filter)) {
+                return;
+            }
             filterBuilder.end();
         }
         SearchArgument sarg = filterBuilder.build();
         jobConf.set(SARG_PUSHDOWN, sarg.toKryo());
     }
 
-    private void buildExpression(SearchArgument.Builder builder, List<Object> filterList) {
+    private boolean buildExpression(SearchArgument.Builder builder, List<Object> filterList) {
         for (Object f : filterList) {
             if (f instanceof LogicalFilter) {
                 switch(((LogicalFilter) f).getOperator()) {
@@ -131,15 +140,21 @@ public class HiveORCAccessor extends HiveAccessor {
                         builder.startNot();
                         break;
                 }
-                buildExpression(builder, ((LogicalFilter) f).getFilterList());
-                builder.end();
+                if (buildExpression(builder, ((LogicalFilter) f).getFilterList())) {
+                    builder.end();
+                } else {
+                    return false;
+                }
             } else {
-                buildArgument(builder, f);
+                if (!buildArgument(builder, f)) {
+                    return false;
+                }
             }
         }
+        return true;
     }
 
-    private void buildArgument(SearchArgument.Builder builder, Object filterObj) {
+    private boolean buildArgument(SearchArgument.Builder builder, Object filterObj) {
         /* The below functions will not be compatible and requires update  with Hive 2.0 APIs */
         BasicFilter filter = (BasicFilter) filterObj;
         int filterColumnIndex = filter.getColumn().index();
@@ -166,8 +181,12 @@ public class HiveORCAccessor extends HiveAccessor {
             case HDOP_NE:
                 builder.startNot().equals(filterColumnName, filterValue).end();
                 break;
+            default: {
+                LOG.debug("Filter push-down is not supported for " + filter.getOperation() + "operation.");
+                return false;
+            }
         }
-        return;
+        return true;
     }
 
 }


Mime
View raw message