drill-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From par...@apache.org
Subject [10/10] drill git commit: DRILL-1950: Parquet rowgroup level filter pushdown in query planning time.
Date Sat, 05 Nov 2016 00:11:37 GMT
DRILL-1950: Parquet rowgroup level filter pushdown in query planning time.

Implement Parquet rowgroup level filter pushdown. The filter pushdown is performed
in the Drill physical planning phase.

Only a local filter, which refers to columns in a single table, is qualified for filter pushdown.

A filter may be qualified if it is a simple comparison filter, or a compound "and/or" filter consisting of
simple comparison filters. Data types allowed in comparison filters are int, bigint, float, double, date,
timestamp, and time. Comparison operators are =, !=, <, <=, >, >=. Each operand has to be a column of one of the
above data types, an explicit or implicit cast function, or a constant expression.

This closes #637


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/9411b26e
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/9411b26e
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/9411b26e

Branch: refs/heads/master
Commit: 9411b26ece34ed8b2f498deea5e41f1901eb1013
Parents: 0d4319b
Author: Jinfeng Ni <jni@apache.org>
Authored: Fri Nov 4 14:56:52 2016 -0700
Committer: Parth Chandra <parthc@apache.org>
Committed: Fri Nov 4 16:07:00 2016 -0700

----------------------------------------------------------------------
 .../org/apache/drill/common/types/Types.java    |   1 +
 .../exec/expr/ExpressionTreeMaterializer.java   |  78 ++-
 .../exec/expr/fn/FunctionGenerationHelper.java  |   7 +
 .../fn/interpreter/InterpreterEvaluator.java    |  83 ++--
 .../exec/expr/stat/ParquetFilterPredicate.java  |  23 +
 .../drill/exec/expr/stat/ParquetPredicates.java | 352 ++++++++++++++
 .../exec/expr/stat/RangeExprEvaluator.java      | 275 +++++++++++
 .../drill/exec/expr/stat/TypedFieldExpr.java    |  63 +++
 .../apache/drill/exec/planner/PlannerPhase.java |  39 +-
 .../exec/planner/physical/PlannerSettings.java  |  15 +
 .../planner/sql/handlers/DefaultSqlHandler.java |   6 +
 .../server/options/SystemOptionManager.java     |   2 +
 .../exec/store/ImplicitColumnExplorer.java      |  25 +-
 .../ParquetCompareFunctionProcessor.java        | 280 -----------
 .../store/parquet/ParquetFilterBuilder.java     | 481 ++++++++++---------
 .../exec/store/parquet/ParquetGroupScan.java    | 168 ++++++-
 .../store/parquet/ParquetPushDownFilter.java    | 189 ++++----
 .../store/parquet/ParquetRGFilterEvaluator.java | 115 +++++
 .../exec/store/parquet/ParquetRowGroupScan.java |  17 +-
 .../store/parquet/ParquetScanBatchCreator.java  |   3 +-
 .../ParquetToDrillTypeConverter.java            |  25 +-
 .../store/parquet/stat/ColumnStatCollector.java |  32 ++
 .../store/parquet/stat/ColumnStatistics.java    |  40 ++
 .../stat/ParquetFooterStatCollector.java        | 199 ++++++++
 .../parquet/stat/ParquetMetaStatCollector.java  | 170 +++++++
 .../org/apache/drill/TestPartitionFilter.java   |  12 +-
 .../java/org/apache/drill/TestUnionAll.java     |   2 +-
 .../java/org/apache/drill/exec/ExecTest.java    |  14 +
 .../apache/drill/exec/expr/ExpressionTest.java  |   8 -
 .../fn/interp/ExpressionInterpreterTest.java    |   8 -
 .../physical/impl/join/TestNestedLoopJoin.java  |   2 +-
 .../exec/store/parquet/TestFileGenerator.java   |   2 +-
 .../parquet/TestParquetFilterPushDown.java      | 413 ++++++++++++++++
 .../dateTbl1_9/t1/0_0_0.parquet                 | Bin 0 -> 337 bytes
 .../dateTblCorrupted/t1/0_0_0.parquet           | Bin 0 -> 304 bytes
 .../dateTblCorrupted/t2/0_0_0.parquet           | Bin 0 -> 307 bytes
 .../dateTblCorrupted/t3/0_0_0.parquet           | Bin 0 -> 292 bytes
 .../parquetFilterPush/intTbl/intAllNull.parquet | Bin 0 -> 232 bytes
 .../parquetFilterPush/intTbl/intTbl.parquet     | Bin 0 -> 341 bytes
 .../parquetFilterPush/tsTbl/t1/0_0_0.parquet    | Bin 0 -> 339 bytes
 .../parquetFilterPush/tsTbl/t2/0_0_0.parquet    | Bin 0 -> 353 bytes
 .../parquetFilterPush/tsTbl/t3/0_0_0.parquet    | Bin 0 -> 337 bytes
 .../common/expression/ValueExpressions.java     |  15 +
 .../common/expression/fn/CastFunctions.java     |   9 +
 44 files changed, 2447 insertions(+), 726 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/9411b26e/common/src/main/java/org/apache/drill/common/types/Types.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/types/Types.java b/common/src/main/java/org/apache/drill/common/types/Types.java
index 116d0d1..ce2abf9 100644
--- a/common/src/main/java/org/apache/drill/common/types/Types.java
+++ b/common/src/main/java/org/apache/drill/common/types/Types.java
@@ -35,6 +35,7 @@ public class Types {
   public static final MajorType LATE_BIND_TYPE = optional(MinorType.LATE);
   public static final MajorType REQUIRED_BIT = required(MinorType.BIT);
   public static final MajorType OPTIONAL_BIT = optional(MinorType.BIT);
+  public static final MajorType OPTIONAL_INT = optional(MinorType.INT);
 
   public static boolean isUnion(MajorType toType) {
     return toType.getMinorType() == MinorType.UNION;

http://git-wip-us.apache.org/repos/asf/drill/blob/9411b26e/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
index bece93d..b70ad26 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
@@ -24,6 +24,7 @@ import java.util.Deque;
 import java.util.HashSet;
 import java.util.IdentityHashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.Queue;
 import java.util.Set;
 
@@ -78,6 +79,8 @@ import org.apache.drill.exec.expr.fn.DrillComplexWriterFuncHolder;
 import org.apache.drill.exec.expr.fn.DrillFuncHolder;
 import org.apache.drill.exec.expr.fn.ExceptionFunction;
 import org.apache.drill.exec.expr.fn.FunctionLookupContext;
+import org.apache.drill.exec.expr.stat.TypedFieldExpr;
+import org.apache.drill.exec.record.MaterializeVisitor;
 import org.apache.drill.exec.record.TypedFieldId;
 import org.apache.drill.exec.record.VectorAccessible;
 import org.apache.drill.exec.resolver.FunctionResolver;
@@ -89,6 +92,7 @@ import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
+import org.apache.drill.exec.store.parquet.stat.ColumnStatistics;
 import org.apache.drill.exec.util.DecimalUtility;
 
 public class ExpressionTreeMaterializer {
@@ -116,6 +120,12 @@ public class ExpressionTreeMaterializer {
     return materialize(expr, batch, errorCollector, functionLookupContext, allowComplexWriterExpr, false);
   }
 
+  public static LogicalExpression materializeFilterExpr(LogicalExpression expr, Map<SchemaPath, ColumnStatistics> fieldTypes, ErrorCollector errorCollector, FunctionLookupContext functionLookupContext) {
+    final FilterMaterializeVisitor filterMaterializeVisitor = new FilterMaterializeVisitor(fieldTypes, errorCollector);
+    LogicalExpression out =  expr.accept(filterMaterializeVisitor, functionLookupContext);
+    return out;
+  }
+
   public static LogicalExpression materialize(LogicalExpression expr, VectorAccessible batch, ErrorCollector errorCollector, FunctionLookupContext functionLookupContext,
       boolean allowComplexWriterExpr, boolean unionTypeEnabled) {
     LogicalExpression out =  expr.accept(new MaterializeVisitor(batch, errorCollector, allowComplexWriterExpr, unionTypeEnabled), functionLookupContext);
@@ -214,12 +224,58 @@ public class ExpressionTreeMaterializer {
     errorCollector.addGeneralError(call.getPosition(), sb.toString());
   }
 
+  private static class MaterializeVisitor extends AbstractMaterializeVisitor {
+    private final VectorAccessible batch;
+
+    public MaterializeVisitor(VectorAccessible batch, ErrorCollector errorCollector, boolean allowComplexWriter, boolean unionTypeEnabled) {
+      super(errorCollector, allowComplexWriter, unionTypeEnabled);
+      this.batch = batch;
+    }
 
-  private static class MaterializeVisitor extends AbstractExprVisitor<LogicalExpression, FunctionLookupContext, RuntimeException> {
+    @Override
+    public LogicalExpression visitSchemaPath(SchemaPath path, FunctionLookupContext functionLookupContext) {
+      //      logger.debug("Visiting schema path {}", path);
+      TypedFieldId tfId = batch.getValueVectorId(path);
+      if (tfId == null) {
+        logger.warn("Unable to find value vector of path {}, returning null instance.", path);
+        return NullExpression.INSTANCE;
+      } else {
+        ValueVectorReadExpression e = new ValueVectorReadExpression(tfId);
+        return e;
+      }
+    }
+  }
+
+  private static class FilterMaterializeVisitor extends AbstractMaterializeVisitor {
+    private final Map<SchemaPath, ColumnStatistics> stats;
+
+    public FilterMaterializeVisitor(Map<SchemaPath, ColumnStatistics> stats, ErrorCollector errorCollector) {
+      super(errorCollector, false, false);
+      this.stats = stats;
+    }
+
+    @Override
+    public LogicalExpression visitSchemaPath(SchemaPath path, FunctionLookupContext functionLookupContext) {
+      MajorType type = null;
+
+      if (stats.containsKey(path)) {
+        type = stats.get(path).getMajorType();
+      }
+
+      if (type != null) {
+        return new TypedFieldExpr(path, type);
+      } else {
+        logger.warn("Unable to find value vector of path {}, returning null-int instance.", path);
+        return new TypedFieldExpr(path, Types.OPTIONAL_INT);
+        // return NullExpression.INSTANCE;
+      }
+    }
+  }
+
+  private static abstract class AbstractMaterializeVisitor extends AbstractExprVisitor<LogicalExpression, FunctionLookupContext, RuntimeException> {
     private ExpressionValidator validator = new ExpressionValidator();
     private ErrorCollector errorCollector;
     private Deque<ErrorCollector> errorCollectors = new ArrayDeque<>();
-    private final VectorAccessible batch;
     private final boolean allowComplexWriter;
     /**
      * If this is false, the materializer will not handle or create UnionTypes
@@ -231,8 +287,7 @@ public class ExpressionTreeMaterializer {
      */
     private Set<LogicalExpression> materializedExpressions = Sets.newIdentityHashSet();
 
-    public MaterializeVisitor(VectorAccessible batch, ErrorCollector errorCollector, boolean allowComplexWriter, boolean unionTypeEnabled) {
-      this.batch = batch;
+    public AbstractMaterializeVisitor(ErrorCollector errorCollector, boolean allowComplexWriter, boolean unionTypeEnabled) {
       this.errorCollector = errorCollector;
       this.allowComplexWriter = allowComplexWriter;
       this.unionTypeEnabled = unionTypeEnabled;
@@ -243,6 +298,8 @@ public class ExpressionTreeMaterializer {
       return newExpr;
     }
 
+    abstract public LogicalExpression visitSchemaPath(SchemaPath path, FunctionLookupContext functionLookupContext);
+
     @Override
     public LogicalExpression visitUnknown(LogicalExpression e, FunctionLookupContext functionLookupContext)
       throws RuntimeException {
@@ -635,19 +692,6 @@ public class ExpressionTreeMaterializer {
     }
 
     @Override
-    public LogicalExpression visitSchemaPath(SchemaPath path, FunctionLookupContext functionLookupContext) {
-//      logger.debug("Visiting schema path {}", path);
-      TypedFieldId tfId = batch.getValueVectorId(path);
-      if (tfId == null) {
-        logger.warn("Unable to find value vector of path {}, returning null instance.", path);
-        return NullExpression.INSTANCE;
-      } else {
-        ValueVectorReadExpression e = new ValueVectorReadExpression(tfId);
-        return e;
-      }
-    }
-
-    @Override
     public LogicalExpression visitIntConstant(IntExpression intExpr, FunctionLookupContext functionLookupContext) {
       return intExpr;
     }

http://git-wip-us.apache.org/repos/asf/drill/blob/9411b26e/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionGenerationHelper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionGenerationHelper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionGenerationHelper.java
index 90b0816..b83350d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionGenerationHelper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionGenerationHelper.java
@@ -44,6 +44,13 @@ public class FunctionGenerationHelper {
   public static final String COMPARE_TO_NULLS_HIGH = "compare_to_nulls_high";
   public static final String COMPARE_TO_NULLS_LOW = "compare_to_nulls_low";
 
+  public static final String EQ = "equal";
+  public static final String NE = "not_equal";
+  public static final String GT = "greater_than";
+  public static final String GE = "greater_than_or_equal_to";
+  public static final String LT = "less_than";
+  public static final String LE = "less_than_or_equal_to";
+
   /**
    * Finds ordering comparator ("compare_to...") FunctionHolderExpression with
    * a specified ordering for NULL (and considering NULLS <i>equal</i>).

http://git-wip-us.apache.org/repos/asf/drill/blob/9411b26e/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/interpreter/InterpreterEvaluator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/interpreter/InterpreterEvaluator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/interpreter/InterpreterEvaluator.java
index d3a5573..d106887 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/interpreter/InterpreterEvaluator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/interpreter/InterpreterEvaluator.java
@@ -85,6 +85,49 @@ public class InterpreterEvaluator {
 
   }
 
+  public static ValueHolder evaluateFunction(DrillSimpleFunc interpreter, ValueHolder[] args, String funcName) throws Exception {
+    Preconditions.checkArgument(interpreter != null, "interpreter could not be null when use interpreted model to evaluate function " + funcName);
+
+    // the current input index to assign into the next available parameter, found using the @Param notation
+    // the order parameters are declared in the java class for the DrillFunc is meaningful
+    int currParameterIndex = 0;
+    Field outField = null;
+    try {
+      Field[] fields = interpreter.getClass().getDeclaredFields();
+      for (Field f : fields) {
+        // if this is annotated as a parameter to the function
+        if ( f.getAnnotation(Param.class) != null ) {
+          f.setAccessible(true);
+          if (currParameterIndex < args.length) {
+            f.set(interpreter, args[currParameterIndex]);
+          }
+          currParameterIndex++;
+        } else if ( f.getAnnotation(Output.class) != null ) {
+          f.setAccessible(true);
+          outField = f;
+          // create an instance of the holder for the output to be stored in
+          f.set(interpreter, f.getType().newInstance());
+        }
+      }
+    } catch (IllegalAccessException e) {
+      throw new RuntimeException(e);
+    }
+    if (args.length != currParameterIndex ) {
+      throw new DrillRuntimeException(
+          String.format("Wrong number of parameters provided to interpreted expression evaluation " +
+                  "for function %s, expected %d parameters, but received %d.",
+              funcName, currParameterIndex, args.length));
+    }
+    if (outField == null) {
+      throw new DrillRuntimeException("Malformed DrillFunction without a return type: " + funcName);
+    }
+    interpreter.setup();
+    interpreter.eval();
+    ValueHolder out = (ValueHolder) outField.get(interpreter);
+
+    return out;
+  }
+
   private static class InitVisitor extends AbstractExprVisitor<LogicalExpression, VectorAccessible, RuntimeException> {
 
     private UdfUtilities udfUtilities;
@@ -270,44 +313,7 @@ public class InterpreterEvaluator {
       try {
         DrillSimpleFunc interpreter =  ((DrillFuncHolderExpr) holderExpr).getInterpreter();
 
-        Preconditions.checkArgument(interpreter != null, "interpreter could not be null when use interpreted model to evaluate function " + holder.getRegisteredNames()[0]);
-
-        // the current input index to assign into the next available parameter, found using the @Param notation
-        // the order parameters are declared in the java class for the DrillFunc is meaningful
-        int currParameterIndex = 0;
-        Field outField = null;
-        try {
-          Field[] fields = interpreter.getClass().getDeclaredFields();
-          for (Field f : fields) {
-            // if this is annotated as a parameter to the function
-            if ( f.getAnnotation(Param.class) != null ) {
-              f.setAccessible(true);
-              if (currParameterIndex < args.length) {
-                f.set(interpreter, args[currParameterIndex]);
-              }
-              currParameterIndex++;
-            } else if ( f.getAnnotation(Output.class) != null ) {
-              f.setAccessible(true);
-              outField = f;
-              // create an instance of the holder for the output to be stored in
-              f.set(interpreter, f.getType().newInstance());
-            }
-          }
-        } catch (IllegalAccessException e) {
-            throw new RuntimeException(e);
-        }
-        if (args.length != currParameterIndex ) {
-          throw new DrillRuntimeException(
-              String.format("Wrong number of parameters provided to interpreted expression evaluation " +
-                  "for function %s, expected %d parameters, but received %d.",
-                  holderExpr.getName(), currParameterIndex, args.length));
-        }
-        if (outField == null) {
-          throw new DrillRuntimeException("Malformed DrillFunction without a return type: " + holderExpr.getName());
-        }
-        interpreter.setup();
-        interpreter.eval();
-        ValueHolder out = (ValueHolder) outField.get(interpreter);
+        ValueHolder out = evaluateFunction(interpreter, args, holderExpr.getName());
 
         if (TypeHelper.getValueHolderType(out).getMode() == TypeProtos.DataMode.OPTIONAL &&
             holderExpr.getMajorType().getMode() == TypeProtos.DataMode.REQUIRED) {
@@ -325,6 +331,7 @@ public class InterpreterEvaluator {
 
     }
 
+
     @Override
     public ValueHolder visitBooleanOperator(BooleanOperator op, Integer inIndex) {
       // Apply short circuit evaluation to boolean operator.

http://git-wip-us.apache.org/repos/asf/drill/blob/9411b26e/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetFilterPredicate.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetFilterPredicate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetFilterPredicate.java
new file mode 100644
index 0000000..2711faa
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetFilterPredicate.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.exec.expr.stat;
+
+public interface ParquetFilterPredicate {
+  boolean canDrop(RangeExprEvaluator evaluator);
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/9411b26e/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetPredicates.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetPredicates.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetPredicates.java
new file mode 100644
index 0000000..54f703a
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetPredicates.java
@@ -0,0 +1,352 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.stat;
+
+import org.apache.drill.common.expression.BooleanOperator;
+import org.apache.drill.common.expression.ExpressionPosition;
+import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.common.expression.LogicalExpressionBase;
+import org.apache.drill.common.expression.visitors.ExprVisitor;
+import org.apache.parquet.column.statistics.Statistics;
+import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+public abstract  class ParquetPredicates {
+  public static abstract  class ParquetCompPredicate extends LogicalExpressionBase implements ParquetFilterPredicate {
+    protected final LogicalExpression left;
+    protected final LogicalExpression right;
+
+    public ParquetCompPredicate(LogicalExpression left, LogicalExpression right) {
+      super(left.getPosition());
+      this.left = left;
+      this.right = right;
+    }
+
+    @Override
+    public Iterator<LogicalExpression> iterator() {
+      final List<LogicalExpression> args = new ArrayList<>();
+      args.add(left);
+      args.add(right);
+      return args.iterator();
+    }
+
+    @Override
+    public <T, V, E extends Exception> T accept(ExprVisitor<T, V, E> visitor, V value) throws E {
+      return visitor.visitUnknown(this, value);
+    }
+
+  }
+
+  public static abstract class ParquetBooleanPredicate extends BooleanOperator implements ParquetFilterPredicate {
+    public ParquetBooleanPredicate(String name, List<LogicalExpression> args, ExpressionPosition pos) {
+      super(name, args, pos);
+    }
+
+    @Override
+    public <T, V, E extends Exception> T accept(ExprVisitor<T, V, E> visitor, V value) throws E {
+      return visitor.visitBooleanOperator(this, value);
+    }
+  }
+
+  public static class AndPredicate extends ParquetBooleanPredicate {
+    public AndPredicate(String name, List<LogicalExpression> args, ExpressionPosition pos) {
+      super(name, args, pos);
+    }
+
+    @Override
+    public boolean canDrop(RangeExprEvaluator evaluator) {
+      // "and" : as long as one branch is OK to drop, we can drop it.
+      for (LogicalExpression child : this) {
+        if (((ParquetFilterPredicate) child).canDrop(evaluator)) {
+          return true;
+        }
+      }
+      return false;
+    }
+  }
+
+  public static class OrPredicate extends ParquetBooleanPredicate {
+    public OrPredicate(String name, List<LogicalExpression> args, ExpressionPosition pos) {
+      super(name, args, pos);
+    }
+
+    @Override
+    public boolean canDrop(RangeExprEvaluator evaluator) {
+      for (LogicalExpression child : this) {
+        // "long" : as long as one branch is NOT ok to drop, we can NOT drop it.
+        if (! ((ParquetFilterPredicate) child).canDrop(evaluator)) {
+          return false;
+        }
+      }
+
+      return true;
+    }
+  }
+
+  // is this column chunk composed entirely of nulls?
+  // assumes the column chunk's statistics is not empty
+  protected static boolean isAllNulls(Statistics stat, long rowCount) {
+    return stat.getNumNulls() == rowCount;
+  }
+
+  // are there any nulls in this column chunk?
+  // assumes the column chunk's statistics is not empty
+  protected static boolean hasNulls(Statistics stat) {
+    return stat.getNumNulls() > 0;
+  }
+
+  /**
+   * EQ (=) predicate
+   */
+  public static class EqualPredicate extends ParquetCompPredicate {
+    public EqualPredicate(LogicalExpression left, LogicalExpression right) {
+      super(left, right);
+    }
+
+    /**
+        Semantics of canDrop() is very similar to what is implemented in Parquet library's
+        {@link org.apache.parquet.filter2.statisticslevel.StatisticsFilter} and
+        {@link org.apache.parquet.filter2.predicate.FilterPredicate}
+
+        Main difference :
+     1. A RangeExprEvaluator is used to compute the min/max of an expression, such as CAST function
+        of a column. CAST function could be explicitly added by Drill user (It's recommended to use CAST
+        function after DRILL-4372, if user wants to reduce planning time for limit 0 query), or implicitly
+        inserted by Drill, when the types of compare operands are not identical. Therefore, it's important
+         to allow CAST function to appear in the filter predicate.
+     2. We do not require list of ColumnChunkMetaData to do the evaluation, while Parquet library's
+        StatisticsFilter has such requirement. Drill's ParquetTableMetaData does not maintain ColumnChunkMetaData,
+        making it impossible to directly use Parquet library's StatisticFilter in query planning time.
+     3. We allows both sides of comparison operator to be a min/max range. As such, we support
+           expression_of(Column1)   <   expression_of(Column2),
+        where Column1 and Column2 are from same parquet table.
+     */
+    @Override
+    public boolean canDrop(RangeExprEvaluator evaluator) {
+      Statistics leftStat = left.accept(evaluator, null);
+      Statistics rightStat = right.accept(evaluator, null);
+
+      if (leftStat == null ||
+          rightStat == null ||
+          leftStat.isEmpty() ||
+          rightStat.isEmpty()) {
+        return false;
+      }
+
+      // if either side is ALL null, = is evaluated to UNKNOW -> canDrop
+      if (isAllNulls(leftStat, evaluator.getRowCount()) ||
+          isAllNulls(rightStat, evaluator.getRowCount())) {
+        return true;
+      }
+
+      // can drop when left's max < right's min, or right's max < left's min
+      if ( ( leftStat.genericGetMax().compareTo(rightStat.genericGetMin()) < 0
+            || rightStat.genericGetMax().compareTo(leftStat.genericGetMin()) < 0)) {
+        return true;
+      } else {
+        return false;
+      }
+    }
+
+    @Override
+    public String toString() {
+      return left.toString()  + " = " + right.toString();
+    }
+  }
+
+  /**
+   * GT (>) predicate.
+   */
+  public static class GTPredicate extends ParquetCompPredicate {
+    public GTPredicate(LogicalExpression left, LogicalExpression right) {
+      super(left, right);
+    }
+
+    @Override
+    public boolean canDrop(RangeExprEvaluator evaluator) {
+      Statistics leftStat = left.accept(evaluator, null);
+      Statistics rightStat = right.accept(evaluator, null);
+
+      if (leftStat == null ||
+          rightStat == null ||
+          leftStat.isEmpty() ||
+          rightStat.isEmpty()) {
+        return false;
+      }
+
+      // if either side is ALL null, = is evaluated to UNKNOW -> canDrop
+      if (isAllNulls(leftStat, evaluator.getRowCount()) ||
+          isAllNulls(rightStat, evaluator.getRowCount())) {
+        return true;
+      }
+
+      // can drop when left's max <= right's min.
+      if ( leftStat.genericGetMax().compareTo(rightStat.genericGetMin()) <= 0 ) {
+        return true;
+      } else {
+        return false;
+      }
+    }
+  }
+
+  /**
+   * GE (>=) predicate.
+   */
+  public static class GEPredicate extends ParquetCompPredicate {
+    public GEPredicate(LogicalExpression left, LogicalExpression right) {
+      super(left, right);
+    }
+
+    @Override
+    public boolean canDrop(RangeExprEvaluator evaluator) {
+      Statistics leftStat = left.accept(evaluator, null);
+      Statistics rightStat = right.accept(evaluator, null);
+
+      if (leftStat == null ||
+          rightStat == null ||
+          leftStat.isEmpty() ||
+          rightStat.isEmpty()) {
+        return false;
+      }
+
+      // if either side is ALL null, = is evaluated to UNKNOW -> canDrop
+      if (isAllNulls(leftStat, evaluator.getRowCount()) ||
+          isAllNulls(rightStat, evaluator.getRowCount())) {
+        return true;
+      }
+
+      // can drop when left's max < right's min.
+      if ( leftStat.genericGetMax().compareTo(rightStat.genericGetMin()) < 0 ) {
+        return true;
+      } else {
+        return false;
+      }
+    }
+  }
+
+  /**
+   * LT (<) predicate.
+   */
+  public static class LTPredicate extends ParquetCompPredicate {
+    public LTPredicate(LogicalExpression left, LogicalExpression right) {
+      super(left, right);
+    }
+
+    @Override
+    public boolean canDrop(RangeExprEvaluator evaluator) {
+      Statistics leftStat = left.accept(evaluator, null);
+      Statistics rightStat = right.accept(evaluator, null);
+
+      if (leftStat == null ||
+          rightStat == null ||
+          leftStat.isEmpty() ||
+          rightStat.isEmpty()) {
+        return false;
+      }
+
+      // if either side is ALL null, = is evaluated to UNKNOW -> canDrop
+      if (isAllNulls(leftStat, evaluator.getRowCount()) ||
+          isAllNulls(rightStat, evaluator.getRowCount())) {
+        return true;
+      }
+
+      // can drop when right's max <= left's min.
+      if ( rightStat.genericGetMax().compareTo(leftStat.genericGetMin()) <= 0 ) {
+        return true;
+      } else {
+        return false;
+      }
+    }
+  }
+
+  /**
+   * LE (<=) predicate.
+   */
+  public static class LEPredicate extends ParquetCompPredicate {
+    public LEPredicate(LogicalExpression left, LogicalExpression right) {
+      super(left, right);
+    }
+
+    @Override
+    public boolean canDrop(RangeExprEvaluator evaluator) {
+      Statistics leftStat = left.accept(evaluator, null);
+      Statistics rightStat = right.accept(evaluator, null);
+
+      if (leftStat == null ||
+          rightStat == null ||
+          leftStat.isEmpty() ||
+          rightStat.isEmpty()) {
+        return false;
+      }
+
+      // if either side is ALL null, = is evaluated to UNKNOW -> canDrop
+      if (isAllNulls(leftStat, evaluator.getRowCount()) ||
+          isAllNulls(rightStat, evaluator.getRowCount())) {
+        return true;
+      }
+
+      // can drop when right's max < left's min.
+      if ( rightStat.genericGetMax().compareTo(leftStat.genericGetMin()) < 0 ) {
+        return true;
+      } else {
+        return false;
+      }
+    }
+  }
+
+  /**
+   * NE (!=) predicate.
+   */
+  public static class NEPredicate extends ParquetCompPredicate {
+    public NEPredicate(LogicalExpression left, LogicalExpression right) {
+      super(left, right);
+    }
+
+    @Override
+    public boolean canDrop(RangeExprEvaluator evaluator) {
+      Statistics leftStat = left.accept(evaluator, null);
+      Statistics rightStat = right.accept(evaluator, null);
+
+      if (leftStat == null ||
+          rightStat == null ||
+          leftStat.isEmpty() ||
+          rightStat.isEmpty()) {
+        return false;
+      }
+
+      // if either side is ALL null, comparison is evaluated to UNKNOW -> canDrop
+      if (isAllNulls(leftStat, evaluator.getRowCount()) ||
+          isAllNulls(rightStat, evaluator.getRowCount())) {
+        return true;
+      }
+
+      // can drop when there is only one unique value.
+      if ( leftStat.genericGetMin().compareTo(leftStat.genericGetMax()) == 0 &&
+           rightStat.genericGetMin().compareTo(rightStat.genericGetMax()) ==0 &&
+           leftStat.genericGetMax().compareTo(rightStat.genericGetMax()) == 0) {
+        return true;
+      } else {
+        return false;
+      }
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/9411b26e/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/RangeExprEvaluator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/RangeExprEvaluator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/RangeExprEvaluator.java
new file mode 100644
index 0000000..8f77070
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/RangeExprEvaluator.java
@@ -0,0 +1,275 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.stat;
+
+import com.google.common.base.Preconditions;
+import org.apache.drill.common.exceptions.DrillRuntimeException;
+import org.apache.drill.common.expression.FunctionHolderExpression;
+import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.expression.ValueExpressions;
+import org.apache.drill.common.expression.fn.CastFunctions;
+import org.apache.drill.common.expression.fn.FuncHolder;
+import org.apache.drill.common.expression.visitors.AbstractExprVisitor;
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.fn.DrillSimpleFuncHolder;
+import org.apache.drill.exec.expr.fn.interpreter.InterpreterEvaluator;
+import org.apache.drill.exec.expr.holders.BigIntHolder;
+import org.apache.drill.exec.expr.holders.Float4Holder;
+import org.apache.drill.exec.expr.holders.Float8Holder;
+import org.apache.drill.exec.expr.holders.IntHolder;
+import org.apache.drill.exec.expr.holders.ValueHolder;
+import org.apache.drill.exec.store.parquet.stat.ColumnStatistics;
+import org.apache.drill.exec.vector.ValueHolderHelper;
+import org.apache.parquet.column.statistics.DoubleStatistics;
+import org.apache.parquet.column.statistics.FloatStatistics;
+import org.apache.parquet.column.statistics.IntStatistics;
+import org.apache.parquet.column.statistics.LongStatistics;
+import org.apache.parquet.column.statistics.Statistics;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Evaluates a materialized filter expression against Parquet rowgroup column
+ * statistics. Each sub-expression is folded into a min/max {@link Statistics}
+ * range; a null result means the sub-expression cannot be evaluated from
+ * statistics alone (and the enclosing predicate must not prune the rowgroup).
+ */
+public class RangeExprEvaluator extends AbstractExprVisitor<Statistics, Void, RuntimeException> {
+  static final Logger logger = LoggerFactory.getLogger(RangeExprEvaluator.class);
+
+  // Per-column statistics for the rowgroup under consideration.
+  private final Map<SchemaPath, ColumnStatistics> columnStatMap;
+  // Total row count of the rowgroup; used to model an absent column as all-null.
+  private final long rowCount;
+
+  public RangeExprEvaluator(final Map<SchemaPath, ColumnStatistics> columnStatMap, long rowCount) {
+    this.columnStatMap = columnStatMap;
+    this.rowCount = rowCount;
+  }
+
+  public long getRowCount() {
+    return this.rowCount;
+  }
+
+  /**
+   * Resolves a {@link TypedFieldExpr} (dispatched here because it is not part of
+   * the standard visitor interface) to its column statistics. A column missing
+   * from the rowgroup must have been typed OPTIONAL_INT by materialization and is
+   * modeled as an all-null INT column. Any other unknown expression yields null.
+   */
+  @Override
+  public Statistics visitUnknown(LogicalExpression e, Void value) throws RuntimeException {
+    if (e instanceof TypedFieldExpr) {
+      TypedFieldExpr fieldExpr = (TypedFieldExpr) e;
+      final ColumnStatistics columnStatistics = columnStatMap.get(fieldExpr.getPath());
+      if (columnStatistics != null) {
+        return columnStatistics.getStatistics();
+      } else {
+        // field does not exist.
+        Preconditions.checkArgument(fieldExpr.getMajorType().equals(Types.OPTIONAL_INT));
+        IntStatistics intStatistics = new IntStatistics();
+        intStatistics.setNumNulls(rowCount); // all values are nulls
+        return intStatistics;
+      }
+    }
+    return null;
+  }
+
+  // --- Constant leaves evaluate to a degenerate [v, v] range. ---
+
+  @Override
+  public Statistics visitIntConstant(ValueExpressions.IntExpression expr, Void value) throws RuntimeException {
+    return getStatistics(expr.getInt());
+  }
+
+  @Override
+  public Statistics visitLongConstant(ValueExpressions.LongExpression expr, Void value) throws RuntimeException {
+    return getStatistics(expr.getLong());
+  }
+
+  @Override
+  public Statistics visitFloatConstant(ValueExpressions.FloatExpression expr, Void value) throws RuntimeException {
+    return getStatistics(expr.getFloat());
+  }
+
+  @Override
+  public Statistics visitDoubleConstant(ValueExpressions.DoubleExpression expr, Void value) throws RuntimeException {
+    return getStatistics(expr.getDouble());
+  }
+
+  // Date/timestamp/time constants are represented as their millisecond values,
+  // matching how Parquet stores the corresponding column statistics.
+
+  @Override
+  public Statistics visitDateConstant(ValueExpressions.DateExpression expr, Void value) throws RuntimeException {
+    long dateInMillis = expr.getDate();
+    return getStatistics(dateInMillis);
+  }
+
+  @Override
+  public Statistics visitTimeStampConstant(ValueExpressions.TimeStampExpression tsExpr, Void value) throws RuntimeException {
+    long tsInMillis = tsExpr.getTimeStamp();
+    return getStatistics(tsInMillis);
+  }
+
+  @Override
+  public Statistics visitTimeConstant(ValueExpressions.TimeExpression timeExpr, Void value) throws RuntimeException {
+    int milliSeconds = timeExpr.getTime();
+    return getStatistics(milliSeconds);
+  }
+
+  /**
+   * Only Drill simple-function casts are evaluable: the cast is applied to the
+   * argument's statistics range via {@link #evalCastFunc}. Anything else
+   * (non-Drill holders, non-cast functions, un-evaluable arguments) yields null.
+   */
+  @Override
+  public Statistics visitFunctionHolderExpression(FunctionHolderExpression holderExpr, Void value) throws RuntimeException {
+    FuncHolder funcHolder = holderExpr.getHolder();
+
+    if (! (funcHolder instanceof DrillSimpleFuncHolder)) {
+      // Only Drill function is allowed.
+      return null;
+    }
+
+    final String funcName = ((DrillSimpleFuncHolder) funcHolder).getRegisteredNames()[0];
+
+    if (CastFunctions.isCastFunction(funcName)) {
+      Statistics stat = holderExpr.args.get(0).accept(this, null);
+      if (stat != null && ! stat.isEmpty()) {
+        return evalCastFunc(holderExpr, stat);
+      }
+    }
+    return null;
+  }
+
+  // --- Factory helpers building typed Statistics for a value or a [min, max] range. ---
+
+  private IntStatistics getStatistics(int value) {
+    return getStatistics(value, value);
+  }
+
+  private IntStatistics getStatistics(int min, int max) {
+    final IntStatistics intStatistics = new IntStatistics();
+    intStatistics.setMinMax(min, max);
+    return intStatistics;
+  }
+
+  private LongStatistics getStatistics(long value) {
+    return getStatistics(value, value);
+  }
+
+  private LongStatistics getStatistics(long min, long max) {
+    final LongStatistics longStatistics = new LongStatistics();
+    longStatistics.setMinMax(min, max);
+    return longStatistics;
+  }
+
+  private DoubleStatistics getStatistics(double value) {
+    return getStatistics(value, value);
+  }
+
+  private DoubleStatistics getStatistics(double min, double max) {
+    final DoubleStatistics doubleStatistics = new DoubleStatistics();
+    doubleStatistics.setMinMax(min, max);
+    return doubleStatistics;
+  }
+
+  private FloatStatistics getStatistics(float value) {
+    return getStatistics(value, value);
+  }
+
+  private FloatStatistics getStatistics(float min, float max) {
+    final FloatStatistics floatStatistics = new FloatStatistics();
+    floatStatistics.setMinMax(min, max);
+    return floatStatistics;
+  }
+
+  /**
+   * Evaluates a cast over a statistics range by running the cast's interpreter
+   * on the input's min and max independently. Only the numeric src/dest pairs
+   * registered in {@code CAST_FUNC} are supported; a same-type cast is a no-op.
+   * Returns null when the cast cannot be evaluated from statistics.
+   *
+   * NOTE(review): applying a cast to min and max separately assumes the cast is
+   * monotonic over the range — true for the numeric widening/narrowing casts
+   * allowed here, but worth keeping in mind if new cast pairs are added.
+   */
+  private Statistics evalCastFunc(FunctionHolderExpression holderExpr, Statistics input) {
+    try {
+      DrillSimpleFuncHolder funcHolder = (DrillSimpleFuncHolder) holderExpr.getHolder();
+
+      DrillSimpleFunc interpreter = funcHolder.createInterpreter();
+
+      final ValueHolder minHolder, maxHolder;
+
+      TypeProtos.MinorType srcType = holderExpr.args.get(0).getMajorType().getMinorType();
+      TypeProtos.MinorType destType = holderExpr.getMajorType().getMinorType();
+
+      if (srcType.equals(destType)) {
+        // same type cast ==> NoOp.
+        return input;
+      } else if (!CAST_FUNC.containsKey(srcType) || !CAST_FUNC.get(srcType).contains(destType)) {
+        return null; // cast func between srcType and destType is NOT allowed.
+      }
+
+      // Wrap the input's min and max in the source type's value holders.
+      switch (srcType) {
+      case INT :
+        minHolder = ValueHolderHelper.getIntHolder(((IntStatistics)input).getMin());
+        maxHolder = ValueHolderHelper.getIntHolder(((IntStatistics)input).getMax());
+        break;
+      case BIGINT:
+        minHolder = ValueHolderHelper.getBigIntHolder(((LongStatistics)input).getMin());
+        maxHolder = ValueHolderHelper.getBigIntHolder(((LongStatistics)input).getMax());
+        break;
+      case FLOAT4:
+        minHolder = ValueHolderHelper.getFloat4Holder(((FloatStatistics)input).getMin());
+        maxHolder = ValueHolderHelper.getFloat4Holder(((FloatStatistics)input).getMax());
+        break;
+      case FLOAT8:
+        minHolder = ValueHolderHelper.getFloat8Holder(((DoubleStatistics)input).getMin());
+        maxHolder = ValueHolderHelper.getFloat8Holder(((DoubleStatistics)input).getMax());
+        break;
+      default:
+        return null;
+      }
+
+      final ValueHolder[] args1 = {minHolder};
+      final ValueHolder[] args2 = {maxHolder};
+
+      // Interpret the cast function once per bound.
+      final ValueHolder minFuncHolder = InterpreterEvaluator.evaluateFunction(interpreter, args1, holderExpr.getName());
+      final ValueHolder maxFuncHolder = InterpreterEvaluator.evaluateFunction(interpreter, args2, holderExpr.getName());
+
+      switch (destType) {
+      //TODO : need handle # of nulls.
+      case INT:
+        return getStatistics( ((IntHolder)minFuncHolder).value, ((IntHolder)maxFuncHolder).value);
+      case BIGINT:
+        return getStatistics( ((BigIntHolder)minFuncHolder).value, ((BigIntHolder)maxFuncHolder).value);
+      case FLOAT4:
+        return getStatistics( ((Float4Holder)minFuncHolder).value, ((Float4Holder)maxFuncHolder).value);
+      case FLOAT8:
+        return getStatistics( ((Float8Holder)minFuncHolder).value, ((Float8Holder)maxFuncHolder).value);
+      default:
+        return null;
+      }
+    } catch (Exception e) {
+      // NOTE(review): the cause `e` is dropped; chaining it into the
+      // DrillRuntimeException would make failures much easier to diagnose.
+      throw new DrillRuntimeException("Error in evaluating function of " + holderExpr.getName() );
+    }
+  }
+
+  // Allowed (source -> destination) numeric cast pairs for statistics evaluation.
+  // NOTE(review): could be declared private static final (e.g. an ImmutableMap)
+  // since it is never mutated after this static initializer.
+  static Map<TypeProtos.MinorType, Set<TypeProtos.MinorType>> CAST_FUNC = new HashMap<>();
+  static {
+    // float -> double , int, bigint
+    CAST_FUNC.put(TypeProtos.MinorType.FLOAT4, new HashSet<TypeProtos.MinorType>());
+    CAST_FUNC.get(TypeProtos.MinorType.FLOAT4).add(TypeProtos.MinorType.FLOAT8);
+    CAST_FUNC.get(TypeProtos.MinorType.FLOAT4).add(TypeProtos.MinorType.INT);
+    CAST_FUNC.get(TypeProtos.MinorType.FLOAT4).add(TypeProtos.MinorType.BIGINT);
+
+    // double -> float, int, bigint
+    CAST_FUNC.put(TypeProtos.MinorType.FLOAT8, new HashSet<TypeProtos.MinorType>());
+    CAST_FUNC.get(TypeProtos.MinorType.FLOAT8).add(TypeProtos.MinorType.FLOAT4);
+    CAST_FUNC.get(TypeProtos.MinorType.FLOAT8).add(TypeProtos.MinorType.INT);
+    CAST_FUNC.get(TypeProtos.MinorType.FLOAT8).add(TypeProtos.MinorType.BIGINT);
+
+    // int -> float, double, bigint
+    CAST_FUNC.put(TypeProtos.MinorType.INT, new HashSet<TypeProtos.MinorType>());
+    CAST_FUNC.get(TypeProtos.MinorType.INT).add(TypeProtos.MinorType.FLOAT4);
+    CAST_FUNC.get(TypeProtos.MinorType.INT).add(TypeProtos.MinorType.FLOAT8);
+    CAST_FUNC.get(TypeProtos.MinorType.INT).add(TypeProtos.MinorType.BIGINT);
+
+    // bigint -> int, float, double
+    CAST_FUNC.put(TypeProtos.MinorType.BIGINT, new HashSet<TypeProtos.MinorType>());
+    CAST_FUNC.get(TypeProtos.MinorType.BIGINT).add(TypeProtos.MinorType.INT);
+    CAST_FUNC.get(TypeProtos.MinorType.BIGINT).add(TypeProtos.MinorType.FLOAT4);
+    CAST_FUNC.get(TypeProtos.MinorType.BIGINT).add(TypeProtos.MinorType.FLOAT8);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/9411b26e/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/TypedFieldExpr.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/TypedFieldExpr.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/TypedFieldExpr.java
new file mode 100644
index 0000000..4287929
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/TypedFieldExpr.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.stat;
+
+import com.google.common.collect.Iterators;
+import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.common.expression.LogicalExpressionBase;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.expression.visitors.ExprVisitor;
+import org.apache.drill.common.types.TypeProtos;
+
+import java.util.Iterator;
+
+/**
+ * A column reference ({@link SchemaPath}) paired with its resolved major type.
+ * Produced during expression materialization for Parquet filter pushdown so that
+ * downstream evaluators (e.g. RangeExprEvaluator) know the referenced column's type.
+ */
+public class TypedFieldExpr extends LogicalExpressionBase {
+  // NOTE(review): both fields could be final; they are never reassigned after construction.
+  TypeProtos.MajorType type;
+  SchemaPath path;
+
+  public TypedFieldExpr(SchemaPath path, TypeProtos.MajorType type) {
+    super(path.getPosition());
+    this.path = path;
+    this.type = type;
+  }
+
+  // Not part of the standard visitor interface, so dispatch through visitUnknown;
+  // visitors that understand TypedFieldExpr downcast there.
+  @Override
+  public <T, V, E extends Exception> T accept(ExprVisitor<T, V, E> visitor, V value) throws E {
+    return visitor.visitUnknown(this, value);
+  }
+
+  // A field reference is a leaf expression: it has no child expressions.
+  @Override
+  public Iterator<LogicalExpression> iterator() {
+    return Iterators.emptyIterator();
+  }
+
+  @Override
+  public TypeProtos.MajorType getMajorType() {
+    return this.type;
+  }
+
+  // e.g. "colName(INT_OPTIONAL)" -- root segment name plus minor type and data mode.
+  @Override
+  public String toString() {
+    return this.path.getRootSegment().getPath() + "(" + type.getMinorType() + "_" + type.getMode() +")";
+  }
+
+  public SchemaPath getPath() {
+    return this.path;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/9411b26e/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java
index 22a8b6f..1551040 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java
@@ -17,16 +17,13 @@
  */
 package org.apache.drill.exec.planner;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableSet.Builder;
 import org.apache.calcite.plan.RelOptRule;
 import org.apache.calcite.plan.volcano.AbstractConverter.ExpandConversionRule;
 import org.apache.calcite.rel.core.RelFactories;
 import org.apache.calcite.rel.rules.AggregateExpandDistinctAggregatesRule;
 import org.apache.calcite.rel.rules.AggregateRemoveRule;
-import org.apache.calcite.rel.rules.FilterAggregateTransposeRule;
 import org.apache.calcite.rel.rules.FilterMergeRule;
 import org.apache.calcite.rel.rules.JoinPushExpressionsRule;
 import org.apache.calcite.rel.rules.JoinPushThroughJoinRule;
@@ -88,9 +85,11 @@ import org.apache.drill.exec.planner.physical.WindowPrule;
 import org.apache.drill.exec.planner.physical.WriterPrule;
 import org.apache.drill.exec.store.AbstractStoragePlugin;
 import org.apache.drill.exec.store.StoragePlugin;
+import org.apache.drill.exec.store.parquet.ParquetPushDownFilter;
 
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.ImmutableSet.Builder;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
 
 public enum PlannerPhase {
   //private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillRuleSets.class);
@@ -154,6 +153,12 @@ public enum PlannerPhase {
     }
   },
 
+  PHYSICAL_PARTITION_PRUNING("Physical Partition Prune Planning") {
+    public RuleSet getRules(OptimizerRulesContext context, Collection<StoragePlugin> plugins) {
+      return PlannerPhase.mergedRuleSets(getPhysicalPruneScanRules(context), getStorageRules(context, plugins, this));
+    }
+  },
+
   DIRECTORY_PRUNING("Directory Prune Planning") {
     public RuleSet getRules(OptimizerRulesContext context, Collection<StoragePlugin> plugins) {
       return PlannerPhase.mergedRuleSets(getDirPruneScanRules(context), getStorageRules(context, plugins, this));
@@ -346,6 +351,26 @@ public enum PlannerPhase {
   }
 
   /**
+   *   Get an immutable list of pruning rules that will be used post physical planning.
+   */
+  static RuleSet getPhysicalPruneScanRules(OptimizerRulesContext optimizerRulesContext) {
+    final ImmutableSet<RelOptRule> pruneRules = ImmutableSet.<RelOptRule>builder()
+        .add(
+            // See DRILL-4998 for more detail.
+            // Main reason for doing this is we want to reduce the performance regression possibility
+            // caused by a different join order, as a result of reduced row count in scan operator.
+            // Ideally this should be done in logical planning, before join order planning is done.
+            // Before we can make such change, we have to figure out how to adjust the selectivity
+            // estimation of filter operator, after filter is pushed down to scan.
+            ParquetPushDownFilter.getFilterOnProject(optimizerRulesContext),
+            ParquetPushDownFilter.getFilterOnScan(optimizerRulesContext)
+        )
+        .build();
+
+    return RuleSets.ofList(pruneRules);
+  }
+
+  /**
   *  Get an immutable list of directory-based partition pruning rules that will be used in Calcite logical planning.
    * @param optimizerRulesContext
    * @return

http://git-wip-us.apache.org/repos/asf/drill/blob/9411b26e/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PlannerSettings.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PlannerSettings.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PlannerSettings.java
index 218bf5b..b3dc7d6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PlannerSettings.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PlannerSettings.java
@@ -99,6 +99,13 @@ public class PlannerSettings implements Context{
   public static final LongValidator IN_SUBQUERY_THRESHOLD =
       new PositiveLongValidator("planner.in_subquery_threshold", Integer.MAX_VALUE, 20); /* Same as Calcite's default IN List subquery size */
 
+  public static final String PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING_KEY = "planner.store.parquet.rowgroup.filter.pushdown";
+  public static final BooleanValidator PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING = new BooleanValidator(PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING_KEY, true);
+  public static final String PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING_THRESHOLD_KEY = "planner.store.parquet.rowgroup.filter.pushdown.threshold";
+  public static final PositiveLongValidator PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING_THRESHOLD = new PositiveLongValidator(PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING_THRESHOLD_KEY,
+      Long.MAX_VALUE, 10000);
+
+
   public OptionManager options = null;
   public FunctionImplementationRegistry functionImplementationRegistry = null;
 
@@ -247,6 +254,14 @@ public class PlannerSettings implements Context{
     return options.getOption(UNIONALL_DISTRIBUTE);
   }
 
+  public boolean isParquetRowGroupFilterPushdownPlanningEnabled() {
+    return options.getOption(PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING);
+  }
+
+  public long getParquetRowGroupFilterPushDownThreshold() {
+    return options.getOption(PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING_THRESHOLD);
+  }
+
   @Override
   public <T> T unwrap(Class<T> clazz) {
     if(clazz == PlannerSettings.class){

http://git-wip-us.apache.org/repos/asf/drill/blob/9411b26e/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java
index 2d0c069..15702e3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java
@@ -486,6 +486,12 @@ public class DefaultSqlHandler extends AbstractSqlHandler {
           .getHashJoinSwapMarginFactor()));
     }
 
+    /* Parquet row group filter pushdown in planning time */
+
+    if (context.getPlannerSettings().isParquetRowGroupFilterPushdownPlanningEnabled()) {
+      phyRelNode = (Prel) transform(PlannerType.HEP_BOTTOM_UP, PlannerPhase.PHYSICAL_PARTITION_PRUNING, phyRelNode);
+    }
+
     /*
      * 1.2) Break up all expressions with complex outputs into their own project operations
      */

http://git-wip-us.apache.org/repos/asf/drill/blob/9411b26e/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
index d803fa3..2c322c7 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
@@ -90,6 +90,8 @@ public class SystemOptionManager extends BaseOptionManager implements AutoClosea
       PlannerSettings.TYPE_INFERENCE,
       PlannerSettings.IN_SUBQUERY_THRESHOLD,
       PlannerSettings.UNIONALL_DISTRIBUTE,
+      PlannerSettings.PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING,
+      PlannerSettings.PARQUET_ROWGROUP_FILTER_PUSHDOWN_PLANNING_THRESHOLD,
       ExecConstants.CAST_TO_NULLABLE_NUMERIC_OPTION,
       ExecConstants.OUTPUT_FORMAT_VALIDATOR,
       ExecConstants.PARQUET_BLOCK_SIZE_VALIDATOR,

http://git-wip-us.apache.org/repos/asf/drill/blob/9411b26e/exec/java-exec/src/main/java/org/apache/drill/exec/store/ImplicitColumnExplorer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ImplicitColumnExplorer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ImplicitColumnExplorer.java
index b67d8b5..42ff827 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ImplicitColumnExplorer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ImplicitColumnExplorer.java
@@ -52,12 +52,21 @@ public class ImplicitColumnExplorer {
    * Also populates map with implicit columns names as keys and their values
    */
   public ImplicitColumnExplorer(FragmentContext context, List<SchemaPath> columns) {
-    this.partitionDesignator = context.getOptions().getOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL).string_val;
+    this(context.getOptions(), columns);
+  }
+
+  /**
+   * Helper class that encapsulates logic for sorting out columns
+   * between actual table columns, partition columns and implicit file columns.
+   * Also populates map with implicit columns names as keys and their values
+   */
+  public ImplicitColumnExplorer(OptionManager optionManager, List<SchemaPath> columns) {
+    this.partitionDesignator = optionManager.getOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL).string_val;
     this.columns = columns;
     this.isStarQuery = columns != null && AbstractRecordReader.isStarQuery(columns);
     this.selectedPartitionColumns = Lists.newArrayList();
     this.tableColumns = Lists.newArrayList();
-    this.allImplicitColumns = initImplicitFileColumns(context.getOptions());
+    this.allImplicitColumns = initImplicitFileColumns(optionManager);
     this.selectedImplicitColumns = CaseInsensitiveMap.newHashMap();
 
     init();
@@ -84,10 +93,20 @@ public class ImplicitColumnExplorer {
    * @return map with columns names as keys and their values
    */
   public Map<String, String> populateImplicitColumns(FileWork work, String selectionRoot) {
+    return populateImplicitColumns(work.getPath(), selectionRoot);
+  }
+
+  /**
+   * Compares selection root and actual file path to determine partition columns values.
+   * Adds implicit file columns according to columns list.
+   *
+   * @return map with columns names as keys and their values
+   */
+  public Map<String, String> populateImplicitColumns(String filePath, String selectionRoot) {
     Map<String, String> implicitValues = Maps.newLinkedHashMap();
     if (selectionRoot != null) {
       String[] r = Path.getPathWithoutSchemeAndAuthority(new Path(selectionRoot)).toString().split("/");
-      Path path = Path.getPathWithoutSchemeAndAuthority(new Path(work.getPath()));
+      Path path = Path.getPathWithoutSchemeAndAuthority(new Path(filePath));
       String[] p = path.toString().split("/");
       if (p.length > r.length) {
         String[] q = ArrayUtils.subarray(p, r.length, p.length - 1);

http://git-wip-us.apache.org/repos/asf/drill/blob/9411b26e/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetCompareFunctionProcessor.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetCompareFunctionProcessor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetCompareFunctionProcessor.java
deleted file mode 100644
index bd59021..0000000
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetCompareFunctionProcessor.java
+++ /dev/null
@@ -1,280 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.store.parquet;
-
-import org.apache.drill.common.expression.CastExpression;
-import org.apache.drill.common.expression.ConvertExpression;
-import org.apache.drill.common.expression.FunctionCall;
-import org.apache.drill.common.expression.LogicalExpression;
-import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.expression.ValueExpressions.BooleanExpression;
-import org.apache.drill.common.expression.ValueExpressions.DateExpression;
-import org.apache.drill.common.expression.ValueExpressions.DoubleExpression;
-import org.apache.drill.common.expression.ValueExpressions.FloatExpression;
-import org.apache.drill.common.expression.ValueExpressions.IntExpression;
-import org.apache.drill.common.expression.ValueExpressions.LongExpression;
-import org.apache.drill.common.expression.ValueExpressions.QuotedString;
-import org.apache.drill.common.expression.ValueExpressions.TimeExpression;
-import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
-import org.apache.drill.common.expression.visitors.AbstractExprVisitor;
-
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
-import org.joda.time.DateTimeUtils;
-
-public class ParquetCompareFunctionProcessor extends
-        AbstractExprVisitor<Boolean, LogicalExpression, RuntimeException> {
-    private Object value;
-    private boolean success;
-    private boolean isEqualityFn;
-    private SchemaPath path;
-    private String functionName;
-
-    public static final long JULIAN_DAY_EPOC = DateTimeUtils.toJulianDayNumber(0);
-
-    public static boolean isCompareFunction(String functionName) {
-        return COMPARE_FUNCTIONS_TRANSPOSE_MAP.keySet().contains(functionName);
-    }
-
-    public static ParquetCompareFunctionProcessor process(FunctionCall call) {
-        String functionName = call.getName();
-        LogicalExpression nameArg = call.args.get(0);
-        LogicalExpression valueArg = call.args.size() == 2 ? call.args.get(1)
-                : null;
-        ParquetCompareFunctionProcessor evaluator = new ParquetCompareFunctionProcessor(
-                functionName);
-
-        if (valueArg != null) { // binary function
-            if (VALUE_EXPRESSION_CLASSES.contains(nameArg.getClass())) {
-                LogicalExpression swapArg = valueArg;
-                valueArg = nameArg;
-                nameArg = swapArg;
-                evaluator.functionName = COMPARE_FUNCTIONS_TRANSPOSE_MAP
-                        .get(functionName);
-            }
-            evaluator.success = nameArg.accept(evaluator, valueArg);
-        } else if (call.args.get(0) instanceof SchemaPath) {
-            evaluator.success = true;
-            evaluator.path = (SchemaPath) nameArg;
-        }
-
-        return evaluator;
-    }
-
-    public ParquetCompareFunctionProcessor(String functionName) {
-        this.success = false;
-        this.functionName = functionName;
-        this.isEqualityFn = COMPARE_FUNCTIONS_TRANSPOSE_MAP
-                .containsKey(functionName)
-                && COMPARE_FUNCTIONS_TRANSPOSE_MAP.get(functionName).equals(
-                functionName);
-    }
-
-    public Object getValue() {
-        return value;
-    }
-
-    public boolean isSuccess() {
-        return success;
-    }
-
-    public SchemaPath getPath() {
-        return path;
-    }
-
-    public String getFunctionName() {
-        return functionName;
-    }
-
-    @Override
-    public Boolean visitCastExpression(CastExpression e,
-                                       LogicalExpression valueArg) throws RuntimeException {
-        if (e.getInput() instanceof CastExpression
-                || e.getInput() instanceof SchemaPath) {
-            return e.getInput().accept(this, valueArg);
-        }
-        return false;
-    }
-
-    @Override
-    public Boolean visitConvertExpression(ConvertExpression e,
-                                          LogicalExpression valueArg) throws RuntimeException {
-        if (e.getConvertFunction() == ConvertExpression.CONVERT_FROM
-                && e.getInput() instanceof SchemaPath) {
-            String encodingType = e.getEncodingType();
-            switch (encodingType) {
-                case "INT_BE":
-                case "INT":
-                case "UINT_BE":
-                case "UINT":
-                case "UINT4_BE":
-                case "UINT4":
-                    if (valueArg instanceof IntExpression
-                            && (isEqualityFn || encodingType.startsWith("U"))) {
-                        this.value = ((IntExpression) valueArg).getInt();
-                    }
-                    break;
-                case "BIGINT_BE":
-                case "BIGINT":
-                case "UINT8_BE":
-                case "UINT8":
-                    if (valueArg instanceof LongExpression
-                            && (isEqualityFn || encodingType.startsWith("U"))) {
-                        this.value = ((LongExpression) valueArg).getLong();
-                    }
-                    break;
-                case "FLOAT":
-                    if (valueArg instanceof FloatExpression && isEqualityFn) {
-                        this.value = ((FloatExpression) valueArg).getFloat();
-                    }
-                    break;
-                case "DOUBLE":
-                    if (valueArg instanceof DoubleExpression && isEqualityFn) {
-                        this.value = ((DoubleExpression) valueArg).getDouble();
-                    }
-                    break;
-                case "TIME_EPOCH":
-                case "TIME_EPOCH_BE":
-                    if (valueArg instanceof TimeExpression) {
-                        this.value = ((TimeExpression) valueArg).getTime();
-                    }
-                    break;
-                case "DATE_EPOCH":
-                case "DATE_EPOCH_BE":
-                    if (valueArg instanceof DateExpression) {
-                        long dateInMillis = ((DateExpression) valueArg).getDate();
-                        this.value = (int) (DateTimeUtils.toJulianDayNumber(dateInMillis) + JULIAN_DAY_EPOC);
-                    }
-                    break;
-                case "BOOLEAN_BYTE":
-                    if (valueArg instanceof BooleanExpression) {
-                        this.value = ((BooleanExpression) valueArg).getBoolean();
-                    }
-                    break;
-                case "UTF8":
-                    // let visitSchemaPath() handle this.
-                    return e.getInput().accept(this, valueArg);
-            }
-
-            if (value != null) {
-                this.path = (SchemaPath) e.getInput();
-                return true;
-            }
-        }
-        return false;
-    }
-
-    @Override
-    public Boolean visitUnknown(LogicalExpression e, LogicalExpression valueArg)
-            throws RuntimeException {
-        return false;
-    }
-
-    @Override
-    public Boolean visitSchemaPath(SchemaPath path, LogicalExpression valueArg)
-            throws RuntimeException {
-        if (valueArg instanceof QuotedString) {
-            this.value = ((QuotedString) valueArg).value;
-            this.path = path;
-            return true;
-        }
-
-        if (valueArg instanceof IntExpression) {
-            this.value = ((IntExpression) valueArg).getInt();
-            this.path = path;
-            return true;
-        }
-
-        if (valueArg instanceof LongExpression) {
-            this.value = ((LongExpression) valueArg).getLong();
-            this.path = path;
-            return true;
-        }
-
-        if (valueArg instanceof FloatExpression) {
-            this.value = ((FloatExpression) valueArg).getFloat();
-            this.path = path;
-            return true;
-        }
-
-        if (valueArg instanceof DoubleExpression) {
-            this.value = ((DoubleExpression) valueArg).getDouble();
-            this.path = path;
-            return true;
-        }
-
-        if (valueArg instanceof BooleanExpression) {
-            this.value = ((BooleanExpression) valueArg).getBoolean();
-            this.path = path;
-            return true;
-        }
-
-        if (valueArg instanceof TimeExpression) {
-            this.value = ((TimeExpression) valueArg).getTime();
-            this.path = path;
-            return true;
-        }
-
-        if (valueArg instanceof TimeStampExpression) {
-            this.value = ((TimeStampExpression) valueArg).getTimeStamp();
-            this.path = path;
-            return true;
-        }
-
-        if (valueArg instanceof DateExpression) {
-            long dateInMillis = ((DateExpression) valueArg).getDate();
-            this.value = (int) (DateTimeUtils.toJulianDayNumber(dateInMillis) + JULIAN_DAY_EPOC);
-            this.path = path;
-            return true;
-        }
-
-        return false;
-    }
-
-    private static final ImmutableSet<Class<? extends LogicalExpression>> VALUE_EXPRESSION_CLASSES;
-    static {
-        ImmutableSet.Builder<Class<? extends LogicalExpression>> builder = ImmutableSet
-                .builder();
-        VALUE_EXPRESSION_CLASSES = builder.add(BooleanExpression.class)
-                .add(DateExpression.class).add(DoubleExpression.class)
-                .add(FloatExpression.class).add(IntExpression.class)
-                .add(LongExpression.class).add(QuotedString.class)
-                .add(TimeExpression.class).add(TimeStampExpression.class)
-                .add(DateExpression.class).build();
-    }
-
-    private static final ImmutableMap<String, String> COMPARE_FUNCTIONS_TRANSPOSE_MAP;
-    static {
-        ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
-        COMPARE_FUNCTIONS_TRANSPOSE_MAP = builder
-                // unary functions
-                .put("isnotnull", "isnotnull")
-                .put("isNotNull", "isNotNull")
-                .put("is not null", "is not null")
-                .put("isnull", "isnull")
-                .put("isNull", "isNull")
-                .put("is null", "is null")
-                        // binary functions
-                .put("equal", "equal").put("not_equal", "not_equal")
-                .put("greater_than_or_equal_to", "less_than_or_equal_to")
-                .put("greater_than", "less_than")
-                .put("less_than_or_equal_to", "greater_than_or_equal_to")
-                .put("less_than", "greater_than").build();
-    }
-
-}


Mime
View raw message