hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jcama...@apache.org
Subject hive git commit: HIVE-13831: Error pushing predicates to HBase storage handler (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
Date Tue, 31 May 2016 14:19:13 GMT
Repository: hive
Updated Branches:
  refs/heads/branch-2.1 bb5fb1039 -> c20a113e4


HIVE-13831: Error pushing predicates to HBase storage handler (Jesus Camacho Rodriguez, reviewed
by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c20a113e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c20a113e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c20a113e

Branch: refs/heads/branch-2.1
Commit: c20a113e494387f62867906caf101bc9032004e7
Parents: bb5fb10
Author: Jesus Camacho Rodriguez <jcamacho@apache.org>
Authored: Tue May 31 15:19:03 2016 +0100
Committer: Jesus Camacho Rodriguez <jcamacho@apache.org>
Committed: Tue May 31 15:19:03 2016 +0100

----------------------------------------------------------------------
 .../predicate/AccumuloPredicateHandler.java     |   4 +-
 .../hive/accumulo/predicate/PushdownTuple.java  |   2 +-
 .../hive/hbase/AbstractHBaseKeyFactory.java     |   6 +-
 .../hadoop/hive/hbase/HBaseStorageHandler.java  | 117 +++++++++++++++++--
 .../results/positive/external_table_ppd.q.out   |   1 -
 .../test/results/positive/hbase_pushdown.q.out  |   2 +-
 .../test/results/positive/hbase_timestamp.q.out |  12 +-
 .../test/results/positive/ppd_key_ranges.q.out  |   2 +-
 .../hive/ql/index/IndexPredicateAnalyzer.java   |  36 +++++-
 .../hive/ql/index/IndexSearchCondition.java     |  33 ++++--
 .../metadata/HiveStoragePredicateHandler.java   |   4 +-
 11 files changed, 178 insertions(+), 41 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/c20a113e/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java
index d5cc9a5..a7ec7c5 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/AccumuloPredicateHandler.java
@@ -149,7 +149,7 @@ public class AccumuloPredicateHandler {
       return clz.newInstance();
     } catch (ClassCastException e) {
       throw new SerDeException("Column type mismatch in WHERE clause "
-          + sc.getComparisonExpr().getExprString() + " found type "
+          + sc.getIndexExpr().getExprString() + " found type "
           + sc.getConstantDesc().getTypeString() + " instead of "
           + sc.getColumnDesc().getTypeString());
     } catch (IllegalAccessException e) {
@@ -181,7 +181,7 @@ public class AccumuloPredicateHandler {
       return clz.newInstance();
     } catch (ClassCastException e) {
       throw new SerDeException("Column type mismatch in WHERE clause "
-          + sc.getComparisonExpr().getExprString() + " found type "
+          + sc.getIndexExpr().getExprString() + " found type "
           + sc.getConstantDesc().getTypeString() + " instead of "
           + sc.getColumnDesc().getTypeString());
     } catch (IllegalAccessException e) {

http://git-wip-us.apache.org/repos/asf/hive/blob/c20a113e/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PushdownTuple.java
----------------------------------------------------------------------
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PushdownTuple.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PushdownTuple.java
index f326d52..085146d 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PushdownTuple.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PushdownTuple.java
@@ -60,7 +60,7 @@ public class PushdownTuple {
     } catch (ClassCastException cce) {
       log.info(StringUtils.stringifyException(cce));
       throw new SerDeException(" Column type mismatch in where clause "
-          + sc.getComparisonExpr().getExprString() + " found type "
+          + sc.getIndexExpr().getExprString() + " found type "
           + sc.getConstantDesc().getTypeString() + " instead of "
           + sc.getColumnDesc().getTypeString());
     } catch (HiveException e) {

http://git-wip-us.apache.org/repos/asf/hive/blob/c20a113e/hbase-handler/src/java/org/apache/hadoop/hive/hbase/AbstractHBaseKeyFactory.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/AbstractHBaseKeyFactory.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/AbstractHBaseKeyFactory.java
index 18fb5ea..4cc9619 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/AbstractHBaseKeyFactory.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/AbstractHBaseKeyFactory.java
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.hive.hbase;
 
+import java.io.IOException;
+import java.util.Properties;
+
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -25,9 +28,6 @@ import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.mapred.JobConf;
 
-import java.io.IOException;
-import java.util.Properties;
-
 public abstract class AbstractHBaseKeyFactory implements HBaseKeyFactory {
 
   protected HBaseSerDeParameters hbaseParams;

http://git-wip-us.apache.org/repos/asf/hive/blob/c20a113e/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
----------------------------------------------------------------------
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
index 42ea24e..1a1f780 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
@@ -29,8 +29,6 @@ import java.util.Properties;
 import java.util.Set;
 
 import org.apache.commons.io.IOUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -53,6 +51,7 @@ import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer;
 import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
 import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
@@ -60,14 +59,27 @@ import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.yammer.metrics.core.MetricsRegistry;
 
@@ -549,6 +561,7 @@ public class HBaseStorageHandler extends DefaultStorageHandler
         keyMapping.columnName, keyMapping.isComparable(),
         tsMapping == null ? null : tsMapping.columnName);
     List<IndexSearchCondition> conditions = new ArrayList<IndexSearchCondition>();
+    ExprNodeGenericFuncDesc pushedPredicate = null;
     ExprNodeGenericFuncDesc residualPredicate =
         (ExprNodeGenericFuncDesc)analyzer.analyzePredicate(predicate, conditions);
 
@@ -562,21 +575,107 @@ public class HBaseStorageHandler extends DefaultStorageHandler
         // 1. key < 20                        (size = 1)
         // 2. key = 20                        (size = 1)
         // 3. key < 20 and key > 10           (size = 2)
-        return null;
+        // Add to residual
+        residualPredicate =
+                extractResidualCondition(analyzer, searchConditions, residualPredicate);
+        continue;
       }
      if (scSize == 2 &&
-          (searchConditions.get(0).getComparisonOp()
-              .equals("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual") ||
-              searchConditions.get(1).getComparisonOp()
-                  .equals("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual"))) {
+          (searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqual.class.getName()) ||
+              searchConditions.get(1).getComparisonOp().equals(GenericUDFOPEqual.class.getName()))) {
         // If one of the predicates is =, then any other predicate with it is illegal.
-        return null;
+        // Add to residual
+        residualPredicate =
+                extractResidualCondition(analyzer, searchConditions, residualPredicate);
+        continue;
       }
+      boolean sameType = sameTypeIndexSearchConditions(searchConditions);
+      if (!sameType) {
+        // If type for column and constant are different, we currently do not support pushing them
+        residualPredicate =
+                extractResidualCondition(analyzer, searchConditions, residualPredicate);
+        continue;
+      }
+      TypeInfo typeInfo = searchConditions.get(0).getColumnDesc().getTypeInfo();
+      if (typeInfo.getCategory() == Category.PRIMITIVE && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(
+              ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) == PrimitiveGrouping.NUMERIC_GROUP) {
+        // If the predicate is on a numeric column, and it specifies an
+        // open range e.g. key < 20 , we do not support conversion, as negative
+        // values are lexicographically stored after positive values and thus they
+        // would be returned.
+        if (scSize == 2) {
+          boolean lowerBound = false;
+          boolean upperBound = false;
+          if (searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqualOrLessThan.class.getName()) ||
+                searchConditions.get(0).getComparisonOp().equals(GenericUDFOPLessThan.class.getName())) {
+            lowerBound = true;
+          } else {
+            upperBound = true;
+          }
+          if (searchConditions.get(1).getComparisonOp().equals(GenericUDFOPEqualOrGreaterThan.class.getName()) ||
+                searchConditions.get(1).getComparisonOp().equals(GenericUDFOPGreaterThan.class.getName())) {
+            upperBound = true;
+          } else {
+            lowerBound = true;
+          }
+          if (!upperBound || !lowerBound) {
+            // Not valid range, add to residual
+            residualPredicate =
+                    extractResidualCondition(analyzer, searchConditions, residualPredicate);
+            continue;
+          }
+        } else {
+          // scSize == 1
+          if (!searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqual.class.getName())) {
+            // Not valid range, add to residual
+            residualPredicate =
+                    extractResidualCondition(analyzer, searchConditions, residualPredicate);
+            continue;
+          }
+        }
+      }
+
+      // This one can be pushed
+      pushedPredicate =
+              extractStorageHandlerCondition(analyzer, searchConditions, pushedPredicate);
     }
 
     DecomposedPredicate decomposedPredicate = new DecomposedPredicate();
-    decomposedPredicate.pushedPredicate = analyzer.translateSearchConditions(conditions);
+    decomposedPredicate.pushedPredicate = pushedPredicate;
     decomposedPredicate.residualPredicate = residualPredicate;
     return decomposedPredicate;
   }
+
+  private static ExprNodeGenericFuncDesc extractStorageHandlerCondition(IndexPredicateAnalyzer analyzer,
+          List<IndexSearchCondition> searchConditions, ExprNodeGenericFuncDesc inputExpr) {
+    if (inputExpr == null) {
+      return analyzer.translateSearchConditions(searchConditions);
+    }
+    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
+    children.add(analyzer.translateSearchConditions(searchConditions));
+    children.add(inputExpr);
+    return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
+            FunctionRegistry.getGenericUDFForAnd(), children);
+  }
+
+  private static ExprNodeGenericFuncDesc extractResidualCondition(IndexPredicateAnalyzer analyzer,
+          List<IndexSearchCondition> searchConditions, ExprNodeGenericFuncDesc inputExpr) {
+    if (inputExpr == null) {
+      return analyzer.translateOriginalConditions(searchConditions);
+    }
+    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
+    children.add(analyzer.translateOriginalConditions(searchConditions));
+    children.add(inputExpr);
+    return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
+            FunctionRegistry.getGenericUDFForAnd(), children);
+  }
+
+  private static boolean sameTypeIndexSearchConditions(List<IndexSearchCondition> searchConditions) {
+    for (IndexSearchCondition isc : searchConditions) {
+      if (!isc.getColumnDesc().getTypeInfo().equals(isc.getConstantDesc().getTypeInfo())) {
+        return false;
+      }
+    }
+    return true;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/c20a113e/hbase-handler/src/test/results/positive/external_table_ppd.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/external_table_ppd.q.out b/hbase-handler/src/test/results/positive/external_table_ppd.q.out
index 590906b..f09b880 100644
--- a/hbase-handler/src/test/results/positive/external_table_ppd.q.out
+++ b/hbase-handler/src/test/results/positive/external_table_ppd.q.out
@@ -133,7 +133,6 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: t_hbase
-            filterExpr: (int_col > 0) (type: boolean)
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
               predicate: (int_col > 0) (type: boolean)

http://git-wip-us.apache.org/repos/asf/hive/blob/c20a113e/hbase-handler/src/test/results/positive/hbase_pushdown.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_pushdown.q.out b/hbase-handler/src/test/results/positive/hbase_pushdown.q.out
index a42e36f..d5661be 100644
--- a/hbase-handler/src/test/results/positive/hbase_pushdown.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_pushdown.q.out
@@ -233,7 +233,7 @@ STAGE PLANS:
             alias: hbase_pushdown
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
-              predicate: ((key = 80) and (key = 90) and (value like '%90%')) (type: boolean)
+              predicate: (((key = 80) and (key = 90)) and (value like '%90%')) (type: boolean)
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: 90 (type: int), value (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/c20a113e/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/hbase_timestamp.q.out b/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
index 29a04f9..3918121 100644
--- a/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
+++ b/hbase-handler/src/test/results/positive/hbase_timestamp.q.out
@@ -171,10 +171,9 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_table
-            filterExpr: (((key > 100.0) and (key < 400.0)) and (time < 200000000000)) (type: boolean)
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
-              predicate: ((UDFToDouble(key) > 100.0) and (UDFToDouble(key) < 400.0) and (time < 200000000000)) (type: boolean)
+              predicate: (((UDFToDouble(key) > 100.0) and (UDFToDouble(key) < 400.0)) and (time < 200000000000)) (type: boolean)
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP)
(type: timestamp)
@@ -220,10 +219,9 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_table
-            filterExpr: (((key > 100.0) and (key < 400.0)) and (time > 100000000000)) (type: boolean)
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
-              predicate: ((UDFToDouble(key) > 100.0) and (UDFToDouble(key) < 400.0) and (time > 100000000000)) (type: boolean)
+              predicate: (((UDFToDouble(key) > 100.0) and (UDFToDouble(key) < 400.0)) and (time > 100000000000)) (type: boolean)
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP)
(type: timestamp)
@@ -271,10 +269,9 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_table
-            filterExpr: (((key > 100.0) and (key < 400.0)) and (time <= 100000000000)) (type: boolean)
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
-              predicate: ((UDFToDouble(key) > 100.0) and (UDFToDouble(key) < 400.0) and (time <= 100000000000)) (type: boolean)
+              predicate: (((UDFToDouble(key) > 100.0) and (UDFToDouble(key) < 400.0)) and (time <= 100000000000)) (type: boolean)
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP)
(type: timestamp)
@@ -320,10 +317,9 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: hbase_table
-            filterExpr: (((key > 100.0) and (key < 400.0)) and (time >= 200000000000)) (type: boolean)
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
-              predicate: ((UDFToDouble(key) > 100.0) and (UDFToDouble(key) < 400.0) and (time >= 200000000000)) (type: boolean)
+              predicate: (((UDFToDouble(key) > 100.0) and (UDFToDouble(key) < 400.0)) and (time >= 200000000000)) (type: boolean)
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: key (type: string), value (type: string), CAST( time AS TIMESTAMP)
(type: timestamp)

http://git-wip-us.apache.org/repos/asf/hive/blob/c20a113e/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out
----------------------------------------------------------------------
diff --git a/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out b/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out
index 34c3b23..812ce95 100644
--- a/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out
+++ b/hbase-handler/src/test/results/positive/ppd_key_ranges.q.out
@@ -189,7 +189,7 @@ STAGE PLANS:
             alias: hbase_ppd_keyrange
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
             Filter Operator
-              predicate: ((key >= 9) and (key < 17) and (key = 11)) (type: boolean)
+              predicate: (((key >= 9) and (key < 17)) and (key = 11)) (type: boolean)
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
               Select Operator
                 expressions: 11 (type: int), value (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/c20a113e/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
index 2f0deae..1a107d7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/IndexPredicateAnalyzer.java
@@ -278,13 +278,15 @@ public class IndexPredicateAnalyzer {
     List<ExprNodeDesc> list = new ArrayList<ExprNodeDesc>();
     list.add(expr1);
     list.add(expr2);
-    expr = new ExprNodeGenericFuncDesc(expr.getTypeInfo(), expr.getGenericUDF(), list);
+    ExprNodeGenericFuncDesc indexExpr =
+            new ExprNodeGenericFuncDesc(expr.getTypeInfo(), expr.getGenericUDF(), list);
 
     searchConditions.add(
       new IndexSearchCondition(
         columnDesc,
         udfName,
         constantDesc,
+        indexExpr,
         expr,
         fields));
 
@@ -311,12 +313,40 @@ public class IndexPredicateAnalyzer {
     ExprNodeGenericFuncDesc expr = null;
     for (IndexSearchCondition searchCondition : searchConditions) {
       if (expr == null) {
-        expr = searchCondition.getComparisonExpr();
+        expr = searchCondition.getIndexExpr();
         continue;
       }
       List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
       children.add(expr);
-      children.add(searchCondition.getComparisonExpr());
+      children.add(searchCondition.getIndexExpr());
+      expr = new ExprNodeGenericFuncDesc(
+        TypeInfoFactory.booleanTypeInfo,
+        FunctionRegistry.getGenericUDFForAnd(),
+        children);
+    }
+    return expr;
+  }
+
+  /**
+   * Translates original conditions back to ExprNodeDesc form (as
+   * a left-deep conjunction).
+   *
+   * @param searchConditions (typically produced by analyzePredicate)
+   *
+   * @return ExprNodeGenericFuncDesc form of search conditions
+   */
+  public ExprNodeGenericFuncDesc translateOriginalConditions(
+    List<IndexSearchCondition> searchConditions) {
+
+    ExprNodeGenericFuncDesc expr = null;
+    for (IndexSearchCondition searchCondition : searchConditions) {
+      if (expr == null) {
+        expr = searchCondition.getOriginalExpr();
+        continue;
+      }
+      List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
+      children.add(expr);
+      children.add(searchCondition.getOriginalExpr());
       expr = new ExprNodeGenericFuncDesc(
         TypeInfoFactory.booleanTypeInfo,
         FunctionRegistry.getGenericUDFForAnd(),

http://git-wip-us.apache.org/repos/asf/hive/blob/c20a113e/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java b/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java
index 3a2ecb7..dcdae2a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/IndexSearchCondition.java
@@ -31,7 +31,8 @@ public class IndexSearchCondition
   private ExprNodeColumnDesc columnDesc;
   private String comparisonOp;
   private ExprNodeConstantDesc constantDesc;
-  private ExprNodeGenericFuncDesc comparisonExpr;
+  private ExprNodeGenericFuncDesc indexExpr;
+  private ExprNodeGenericFuncDesc originalExpr;
 
   private String[] fields;
 
@@ -40,7 +41,7 @@ public class IndexSearchCondition
       String comparisonOp,
       ExprNodeConstantDesc constantDesc,
       ExprNodeGenericFuncDesc comparisonExpr) {
-    this(columnDesc, comparisonOp, constantDesc, comparisonExpr, null);
+    this(columnDesc, comparisonOp, constantDesc, comparisonExpr, comparisonExpr, null);
   }
 
   /**
@@ -54,19 +55,23 @@ public class IndexSearchCondition
    *
    * @param constantDesc constant value to search for
    *
-   * @param comparisonExpr the original comparison expression
+   * @param indexExpr the comparison expression for the index
+   * 
+   * @param originalExpr the original comparison expression
    */
   public IndexSearchCondition(
     ExprNodeColumnDesc columnDesc,
     String comparisonOp,
     ExprNodeConstantDesc constantDesc,
-    ExprNodeGenericFuncDesc comparisonExpr,
+    ExprNodeGenericFuncDesc indexExpr,
+    ExprNodeGenericFuncDesc originalExpr,
     String[] fields) {
 
     this.columnDesc = columnDesc;
     this.comparisonOp = comparisonOp;
     this.constantDesc = constantDesc;
-    this.comparisonExpr = comparisonExpr;
+    this.indexExpr = indexExpr;
+    this.originalExpr = originalExpr;
     this.fields = fields;
   }
 
@@ -94,12 +99,20 @@ public class IndexSearchCondition
     return constantDesc;
   }
 
-  public void setComparisonExpr(ExprNodeGenericFuncDesc comparisonExpr) {
-    this.comparisonExpr = comparisonExpr;
+  public void setIndexExpr(ExprNodeGenericFuncDesc indexExpr) {
+    this.indexExpr = indexExpr;
   }
 
-  public ExprNodeGenericFuncDesc getComparisonExpr() {
-    return comparisonExpr;
+  public ExprNodeGenericFuncDesc getIndexExpr() {
+    return indexExpr;
+  }
+
+  public void setOriginalExpr(ExprNodeGenericFuncDesc originalExpr) {
+    this.originalExpr = originalExpr;
+  }
+
+  public ExprNodeGenericFuncDesc getOriginalExpr() {
+    return originalExpr;
   }
 
   public String[] getFields() {
@@ -108,6 +121,6 @@ public class IndexSearchCondition
 
   @Override
   public String toString() {
-    return comparisonExpr.getExprString();
+    return indexExpr.getExprString();
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/c20a113e/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStoragePredicateHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStoragePredicateHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStoragePredicateHandler.java
index 7d7c764..9324628 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStoragePredicateHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStoragePredicateHandler.java
@@ -18,13 +18,13 @@
 
 package org.apache.hadoop.hive.ql.metadata;
 
+import java.io.Serializable;
+
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.mapred.JobConf;
 
-import java.io.Serializable;
-
 /**
  * HiveStoragePredicateHandler is an optional companion to {@link
  * HiveStorageHandler}; it should only be implemented by handlers which


Mime
View raw message