hadoop-hive-commits mailing list archives

From na...@apache.org
Subject svn commit: r769826 [2/3] - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/lib/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/apache/hadoop/hive/ql/plan/ ql/src/java/org/apache...
Date Wed, 29 Apr 2009 16:47:37 GMT
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentTypeException.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentTypeException.java?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentTypeException.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentTypeException.java Wed Apr 29 16:47:34 2009
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Exception thrown when a UDF or GenericUDF argument has the wrong type.
+ * argumentId records the zero-based position of the offending argument.
+ */
+
+public class UDFArgumentTypeException extends HiveException {
+  
+  int argumentId;
+  
+  public UDFArgumentTypeException() {
+    super();
+  }
+  
+  public UDFArgumentTypeException(int argumentId, String message) {
+    super(message);
+    this.argumentId = argumentId;
+  }
+
+  public int getArgumentId() {
+    return argumentId;
+  }
+}
+
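
The new exception carries the zero-based position of the offending argument, which callers below translate back into the AST child for error reporting (expr.getChild(childrenBegin + e.getArgumentId())). A minimal, hypothetical sketch of the throwing side (the class name and helper are illustrative, not part of this commit):

    import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

    public class ArgChecks {
      // Throw with the argument's position so the semantic analyzer can point
      // the user at the exact expression that has the wrong type.
      public static void expectType(ObjectInspector[] arguments, int i, String typeName)
          throws UDFArgumentTypeException {
        if (!arguments[i].getTypeName().equals(typeName)) {
          throw new UDFArgumentTypeException(i, "\"" + typeName
              + "\" is expected but \"" + arguments[i].getTypeName() + "\" is found");
        }
      }
    }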

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java?rev=769826&r1=769825&r2=769826&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java Wed Apr 29 16:47:34 2009
@@ -34,9 +34,9 @@
   private static final long serialVersionUID = 1L;
 
   @Override
-  public void process(Object row, ObjectInspector rowInspector)
+  public void process(Object row, ObjectInspector rowInspector, int tag)
       throws HiveException {
-    forward(row, rowInspector);    
+    forward(row, rowInspector);
   }
 
   /**

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java?rev=769826&r1=769825&r2=769826&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java Wed Apr 29 16:47:34 2009
@@ -35,7 +35,7 @@
 
   /**
    * constructor
-   * @param defaultProc defualt processor to be fired if no rule matches
+   * @param defaultProc default processor to be fired if no rule matches
    * @param rules operator processor that handles actual processing of the node
    * @param procCtx operator processor context, which is opaque to the dispatcher
    */

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java?rev=769826&r1=769825&r2=769826&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java Wed Apr 29 16:47:34 2009
@@ -38,6 +38,7 @@
   INVALID_FUNCTION("Invalid Function"),
   INVALID_FUNCTION_SIGNATURE("Function Argument Type Mismatch"),
   INVALID_OPERATOR_SIGNATURE("Operator Argument Type Mismatch"),
+  INVALID_ARGUMENT_TYPE("Argument Type Mismatch"),
   INVALID_JOIN_CONDITION_1("Both Left and Right Aliases Encountered in Join"),
   INVALID_JOIN_CONDITION_2("Neither Left nor Right Aliases Encountered in Join"),
   INVALID_JOIN_CONDITION_3("OR not supported in Join currently"),

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=769826&r1=769825&r2=769826&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Wed Apr 29 16:47:34 2009
@@ -859,6 +859,26 @@
     RPAREN -> ^(TOK_FUNCTION primitiveType expression)
     ;
     
+caseExpression
+@init { msgs.push("case expression"); }
+@after { msgs.pop(); }
+    :
+    KW_CASE expression
+    (KW_WHEN expression KW_THEN expression)+
+    (KW_ELSE expression)?
+    KW_END -> ^(TOK_FUNCTION KW_CASE expression*)
+    ;
+    
+whenExpression
+@init { msgs.push("case expression"); }
+@after { msgs.pop(); }
+    :
+    KW_CASE
+     ( KW_WHEN expression KW_THEN expression)+
+    (KW_ELSE expression)?
+    KW_END -> ^(TOK_FUNCTION KW_WHEN expression*)
+    ;
+    
 constant
 @init { msgs.push("constant"); }
 @after { msgs.pop(); }
@@ -889,6 +909,8 @@
     | constant
     | function
     | castExpression
+    | caseExpression
+    | whenExpression
     | tableOrColumn
     | LPAREN! expression RPAREN!
     ;
@@ -1157,6 +1179,11 @@
 KW_PROPERTIES: 'TBLPROPERTIES';
 KW_VALUE_TYPE: '$VALUE$';
 KW_ELEM_TYPE: '$ELEM$';
+KW_CASE: 'CASE';
+KW_WHEN: 'WHEN';
+KW_THEN: 'THEN';
+KW_ELSE: 'ELSE';
+KW_END: 'END';
 
 // Operators
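
The two new rules cover the two SQL CASE forms: caseExpression matches a CASE with a leading scrutinee expression, while whenExpression matches the searched form whose WHEN branches are boolean conditions. Both flatten into a TOK_FUNCTION tree whose first child (KW_CASE vs. KW_WHEN) tells the type checker which GenericUDF to dispatch to (see GenericUDFCase and GenericUDFWhen below). Two illustrative queries, with the AST shape implied by the rewrite rules (table and column names are assumed):

    -- caseExpression: ^(TOK_FUNCTION KW_CASE x 1 'one' 'other')
    SELECT CASE x WHEN 1 THEN 'one' ELSE 'other' END FROM t;

    -- whenExpression: ^(TOK_FUNCTION KW_WHEN (x = 1) 'one' 'other')
    SELECT CASE WHEN x = 1 THEN 'one' ELSE 'other' END FROM t;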
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java?rev=769826&r1=769825&r2=769826&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/PartitionPruner.java Wed Apr 29 16:47:34 2009
@@ -22,6 +22,7 @@
 
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.*;
 import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;
@@ -39,7 +40,6 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.ql.udf.UDFType;
 import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -82,7 +82,6 @@
     return onlyContainsPartCols;
   }
   
-  
   /** Class to store the return result of genExprNodeDesc.
    * 
    *  TODO: In the future when we refactor the PartitionPruner code, we should
@@ -227,6 +226,7 @@
           ExprNodeTempDesc child = genExprNodeDesc((ASTNode)expr.getChild(ci));
           tempChildren.add(child);
         }
+
         // Is it a special case: table DOT column?
         if (expr.getType() == HiveParser.DOT && tempChildren.get(0).getIsTableName()) {
           String tabAlias = tempChildren.get(0).getTableName();
@@ -241,7 +241,13 @@
           }
           
           // Create function desc
-          exprNodeDesc desc = TypeCheckProcFactory.DefaultExprProcessor.getXpathOrFuncExprNodeDesc(expr, isFunction, children);
+          exprNodeDesc desc = null;
+          try {
+            desc = TypeCheckProcFactory.DefaultExprProcessor.getXpathOrFuncExprNodeDesc(expr, isFunction, children);
+          } catch (UDFArgumentTypeException e) {
+            throw new SemanticException(ErrorMsg.INVALID_ARGUMENT_TYPE
+                .getMsg(expr.getChild(childrenBegin + e.getArgumentId()), e.getMessage()));
+          }
           
           if (desc instanceof exprNodeFuncDesc && (
               ((exprNodeFuncDesc)desc).getUDFMethod().getDeclaringClass().equals(UDFOPAnd.class) 
@@ -463,11 +469,7 @@
     try {
       StructObjectInspector rowObjectInspector = (StructObjectInspector)this.tab.getDeserializer().getObjectInspector();
       Object[] rowWithPart = new Object[2];
-      InspectableObject inspectableObject = new InspectableObject();
-     
-      ExprNodeEvaluator evaluator = null;
-      if (this.prunerExpr != null)
-        evaluator = ExprNodeEvaluatorFactory.get(this.prunerExpr);
+      
       for(Partition part: Hive.get().getPartitions(this.tab)) {
         // Set all the variables here
         LinkedHashMap<String, String> partSpec = part.getSpec();
@@ -490,9 +492,11 @@
         StructObjectInspector rowWithPartObjectInspector = ObjectInspectorFactory.getUnionStructObjectInspector(ois);
         
         // evaluate the expression tree
-        if (evaluator != null) {
-          evaluator.evaluate(rowWithPart, rowWithPartObjectInspector, inspectableObject);
-          Boolean r = (Boolean) ((PrimitiveObjectInspector)inspectableObject.oi).getPrimitiveJavaObject(inspectableObject.o);
+        if (this.prunerExpr != null) {
+          ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(this.prunerExpr);
+          ObjectInspector evaluateResultOI = evaluator.initialize(rowWithPartObjectInspector);
+          Object evaluateResultO = evaluator.evaluate(rowWithPart);
+          Boolean r = (Boolean) ((PrimitiveObjectInspector)evaluateResultOI).getPrimitiveJavaObject(evaluateResultO);
           LOG.trace("prune result for partition " + partSpec + ": " + r);
           if (Boolean.TRUE.equals(r)) {
             LOG.debug("retained partition: " + partSpec);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=769826&r1=769825&r2=769826&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Apr 29 16:47:34 2009
@@ -52,6 +52,7 @@
 import org.apache.hadoop.hive.ql.exec.UDAF;
 import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
@@ -82,8 +83,6 @@
 import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink3;
 import org.apache.hadoop.hive.ql.plan.*;
 import org.apache.hadoop.hive.ql.udf.UDFOPPositive;
-import org.apache.hadoop.hive.ql.exec.MapRedTask;
-import org.apache.hadoop.hive.ql.exec.ExecDriver;
 import org.apache.hadoop.hive.ql.exec.*;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
@@ -3182,19 +3181,24 @@
       }
     }
 
-    exprNodeDesc hashfnExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("default_sample_hashfn", args);
-    assert(hashfnExpr != null);
-    LOG.info("hashfnExpr = " + hashfnExpr);
-    exprNodeDesc andExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("&", hashfnExpr, intMaxExpr);
-    assert(andExpr != null);
-    LOG.info("andExpr = " + andExpr);
-    exprNodeDesc modExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("%", andExpr, denominatorExpr);
-    assert(modExpr != null);
-    LOG.info("modExpr = " + modExpr);
-    LOG.info("numeratorExpr = " + numeratorExpr);
-    exprNodeDesc equalsExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", modExpr, numeratorExpr);
-    LOG.info("equalsExpr = " + equalsExpr);
-    assert(equalsExpr != null);
+    exprNodeDesc equalsExpr = null;
+    try {
+      exprNodeDesc hashfnExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("default_sample_hashfn", args);
+      assert(hashfnExpr != null);
+      LOG.info("hashfnExpr = " + hashfnExpr);
+      exprNodeDesc andExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("&", hashfnExpr, intMaxExpr);
+      assert(andExpr != null);
+      LOG.info("andExpr = " + andExpr);
+      exprNodeDesc modExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("%", andExpr, denominatorExpr);
+      assert(modExpr != null);
+      LOG.info("modExpr = " + modExpr);
+      LOG.info("numeratorExpr = " + numeratorExpr);
+      equalsExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", modExpr, numeratorExpr);
+      LOG.info("equalsExpr = " + equalsExpr);
+      assert(equalsExpr != null);
+    } catch (UDFArgumentTypeException e) {
+      throw new RuntimeException("Hive 2 internal exception", e);
+    }
     return equalsExpr;
   }
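
The tree assembled here encodes the per-row TABLESAMPLE test: hash the sampling columns with default_sample_hashfn, mask to a non-negative int, and keep the row when the remainder modulo the denominator equals the numerator. A plain-Java sketch of the predicate the tree computes (class and method names are illustrative):

    public final class SampleFilter {
      // Row is kept when ((hash & Integer.MAX_VALUE) % denominator) == numerator.
      // The mask clears the sign bit so the modulus is never negative.
      public static boolean sampled(int hash, int numerator, int denominator) {
        return ((hash & Integer.MAX_VALUE) % denominator) == numerator;
      }
    }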
   
@@ -3646,8 +3650,10 @@
     opRules.put(new RuleRegExp("R2", HiveParser.Number + "%"), TypeCheckProcFactory.getNumExprProcessor());
     opRules.put(new RuleRegExp("R3", HiveParser.Identifier + "%|" + 
                                      HiveParser.StringLiteral + "%|" + 
+                                     HiveParser.TOK_CHARSETLITERAL + "%|" +
                                      HiveParser.KW_IF + "%|" + 
-                                     HiveParser.TOK_CHARSETLITERAL + "%"), 
+                                     HiveParser.KW_CASE + "%|" +
+                                     HiveParser.KW_WHEN + "%"),
                                TypeCheckProcFactory.getStrExprProcessor());
     opRules.put(new RuleRegExp("R4", HiveParser.KW_TRUE + "%|" + HiveParser.KW_FALSE + "%"), 
                                TypeCheckProcFactory.getBoolExprProcessor());
@@ -3670,7 +3676,6 @@
     return desc;
   }
   
-  
   /**
    * Gets the table Alias for the column from the column name. This function throws
 an exception in case the same column name is present in multiple tables. The exception 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=769826&r1=769825&r2=769826&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java Wed Apr 29 16:47:34 2009
@@ -28,8 +28,10 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.NodeProcessor;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
@@ -38,6 +40,7 @@
 import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeFieldDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc;
+import org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeNullDesc;
 import org.apache.hadoop.hive.serde.Constants;
@@ -48,9 +51,11 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.ql.udf.UDFOPPositive;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.util.ReflectionUtils;
 
 /**
  * The Factory for creating typecheck processors. The typecheck processors are used to
@@ -408,9 +413,14 @@
      * @param name
      * @param children
      * @return The expression node descriptor
+     * @throws UDFArgumentTypeException 
      */
     public static exprNodeDesc getFuncExprNodeDesc(String name, exprNodeDesc... children) {
-      return getFuncExprNodeDesc(name, Arrays.asList(children));
+      try {
+        return getFuncExprNodeDesc(name, Arrays.asList(children));
+      } catch (UDFArgumentTypeException e) {
+        throw new RuntimeException("Hive 2 internal error", e);
+      }
     }
     
     /**
@@ -418,8 +428,19 @@
      * It will insert implicit type conversion functions if necessary. 
      * @throws SemanticException 
      */
-    public static exprNodeDesc getFuncExprNodeDesc(String udfName, List<exprNodeDesc> children) {
+    public static exprNodeDesc getFuncExprNodeDesc(String udfName, List<exprNodeDesc> children)
+        throws UDFArgumentTypeException {
 
+      FunctionInfo fi = FunctionRegistry.getFunctionInfo(udfName);
+      if (fi == null) return null;
+      
+      // Is it a generic UDF?
+      Class<? extends GenericUDF> genericUDFClass = fi.getGenericUDFClass();
+      if (genericUDFClass != null) {
+        return exprNodeGenericFuncDesc.newInstance(genericUDFClass, children);
+      }
+      
+      // TODO: extract as a function
       // Find the corresponding method
       ArrayList<TypeInfo> argumentTypeInfos = new ArrayList<TypeInfo>(children.size());
       for(int i=0; i<children.size(); i++) {
@@ -430,6 +451,7 @@
       Method udfMethod = FunctionRegistry.getUDFMethod(udfName, argumentTypeInfos);
       if (udfMethod == null) return null;
 
+      // Convert the parameters if the type of parameters do not exactly match.
       ArrayList<exprNodeDesc> ch = SemanticAnalyzer.convertParameters(udfMethod, children);
 
       // The return type of a function can be of either Java Primitive Type/Class or Writable Class.
@@ -449,7 +471,7 @@
 
     static exprNodeDesc getXpathOrFuncExprNodeDesc(ASTNode expr, boolean isFunction,
         ArrayList<exprNodeDesc> children)
-        throws SemanticException {
+        throws SemanticException, UDFArgumentTypeException {
       // return the child directly if the conversion is redundant.
       if (isRedundantConversionFunction(expr, isFunction, children)) {
         assert(children.size() == 1);
@@ -524,8 +546,9 @@
         }
       } else {
         // other operators or functions
-        Class<? extends UDF> udf = FunctionRegistry.getUDFClass(funcText);
-        if (udf == null) {
+        FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcText);
+        
+        if (fi == null) {
           if (isFunction)
             throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg((ASTNode)expr.getChild(0)));
           else
@@ -626,7 +649,12 @@
       }
       
       // Create function desc
-      return getXpathOrFuncExprNodeDesc(expr, isFunction, children);
+      try {
+        return getXpathOrFuncExprNodeDesc(expr, isFunction, children);
+      } catch (UDFArgumentTypeException e) {
+        throw new SemanticException(ErrorMsg.INVALID_ARGUMENT_TYPE
+            .getMsg(expr.getChild(childrenBegin + e.getArgumentId()), e.getMessage()));
+      }
     }
     
   }
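
The net effect is a two-way dispatch: getFuncExprNodeDesc consults the FunctionRegistry first, and a function backed by a GenericUDF goes straight to exprNodeGenericFuncDesc.newInstance (type-checked by GenericUDF.initialize), while classic UDFs still go through Java-method resolution and implicit parameter conversion. The varargs overload now wraps the checked UDFArgumentTypeException in a RuntimeException, so internal callers such as the TABLESAMPLE code above stay unchanged. A hedged usage sketch (colA, colB and ten are assumed pre-built exprNodeDesc instances):

    // Build the expression tree for "(colA + colB) == ten" through the registry;
    // the varargs overload converts any UDFArgumentTypeException into a RuntimeException.
    exprNodeDesc sum = TypeCheckProcFactory.DefaultExprProcessor
        .getFuncExprNodeDesc("+", colA, colB);
    exprNodeDesc eq = TypeCheckProcFactory.DefaultExprProcessor
        .getFuncExprNodeDesc("==", sum, ten);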

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java?rev=769826&r1=769825&r2=769826&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeConstantDesc.java Wed Apr 29 16:47:34 2009
@@ -24,7 +24,9 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
-
+/**
+ * A constant expression.
+ */
 public class exprNodeConstantDesc extends exprNodeDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private Object value;

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java Wed Apr 29 16:47:34 2009
@@ -0,0 +1,140 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * Describes a GenericFunc node.
+ */
+public class exprNodeGenericFuncDesc extends exprNodeDesc implements Serializable {
+
+  private static final long serialVersionUID = 1L;
+  private Class<? extends GenericUDF> genericUDFClass;
+  private List<exprNodeDesc> childExprs; 
+  
+  public exprNodeGenericFuncDesc() {}
+  public exprNodeGenericFuncDesc(TypeInfo typeInfo, Class<? extends GenericUDF> genericUDFClass, 
+                          List<exprNodeDesc> children) {
+    super(typeInfo);
+    assert(genericUDFClass != null);
+    this.genericUDFClass = genericUDFClass;
+    this.childExprs = children;
+  }
+  
+  public Class<? extends GenericUDF> getGenericUDFClass() {
+    return genericUDFClass;
+  }
+  
+  public void setGenericUDFClass(Class<? extends GenericUDF> GenericUDFClass) {
+    this.genericUDFClass = GenericUDFClass;
+  }
+  
+  public List<exprNodeDesc> getChildExprs() {
+    return this.childExprs;
+  }
+  public void setChildExprs(List<exprNodeDesc> children) {
+    this.childExprs = children;
+  }
+  @Override
+  public List<? extends Node> getChildren() {
+    return (List<? extends Node>)this.childExprs;
+  }
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append(genericUDFClass.toString());
+    sb.append("(");
+    for(int i=0; i<childExprs.size(); i++) {
+      if (i>0) sb.append(", ");
+      sb.append(childExprs.get(i).toString());
+    }
+    sb.append("(");
+    sb.append(")");
+    return sb.toString();
+  }
+  
+  @explain(displayName="expr")
+  @Override
+  public String getExprString() {
+    // Get the children expr strings
+    String[] childrenExprStrings = new String[childExprs.size()];
+    for (int i=0; i<childrenExprStrings.length; i++) {
+      childrenExprStrings[i] = childExprs.get(i).getExprString();
+    }
+    
+    GenericUDF genericUDF = (GenericUDF) ReflectionUtils.newInstance(genericUDFClass, null);
+    return genericUDF.getDisplayString(childrenExprStrings);
+  }
+
+  public List<String> getCols() {
+    List<String> colList = new ArrayList<String>();
+    if (childExprs != null) {
+      int pos = 0;
+      while (pos < childExprs.size()) {
+        List<String> colCh = childExprs.get(pos).getCols();
+        colList = Utilities.mergeUniqElems(colList, colCh);
+        pos++;
+      }
+    }
+
+    return colList;
+  }
+  
+  @Override
+  public exprNodeDesc clone() {
+    List<exprNodeDesc> cloneCh = new ArrayList<exprNodeDesc>(childExprs.size());
+    for(exprNodeDesc ch :  childExprs) {
+      cloneCh.add(ch.clone());
+    }
+    exprNodeGenericFuncDesc clone = new exprNodeGenericFuncDesc(this.typeInfo,
+        this.genericUDFClass, cloneCh);
+    return clone;
+  }
+  
+  /**
+   * Create a exprNodeGenericFuncDesc based on the genericUDFClass and the children
+   * parameters.
+   * @throws UDFArgumentTypeException
+   */
+  public static exprNodeGenericFuncDesc newInstance(Class<? extends GenericUDF> genericUDFClass, 
+      List<exprNodeDesc> children) throws UDFArgumentTypeException {
+    ObjectInspector[] childrenOIs = new ObjectInspector[children.size()];
+    for(int i=0; i<childrenOIs.length; i++) {
+      childrenOIs[i] = TypeInfoUtils.getStandardObjectInspectorFromTypeInfo(
+          children.get(i).getTypeInfo());
+    }
+    GenericUDF genericUDF = (GenericUDF) ReflectionUtils.newInstance(genericUDFClass, null);
+    
+    ObjectInspector oi = genericUDF.initialize(childrenOIs);
+    return new exprNodeGenericFuncDesc(TypeInfoUtils.getTypeInfoFromObjectInspector(oi),
+        genericUDFClass, children);
+  }
+  
+}
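
newInstance is the compile-time half of the GenericUDF life cycle: it instantiates the UDF reflectively, hands the children's standard ObjectInspectors to initialize(), and stores the resulting TypeInfo in the descriptor, so type errors surface as UDFArgumentTypeException at compile time rather than at run time. A hypothetical construction of a searched-CASE descriptor (the exprNodeConstantDesc(Object) constructor usage is an assumption):

    // "CASE WHEN true THEN 1 ELSE 2 END" as a descriptor:
    List<exprNodeDesc> children = new ArrayList<exprNodeDesc>();
    children.add(new exprNodeConstantDesc(Boolean.TRUE));       // WHEN condition
    children.add(new exprNodeConstantDesc(Integer.valueOf(1))); // THEN value
    children.add(new exprNodeConstantDesc(Integer.valueOf(2))); // ELSE value
    exprNodeGenericFuncDesc desc =
        exprNodeGenericFuncDesc.newInstance(GenericUDFWhen.class, children);
    // desc.getTypeInfo() is now int; a THEN/ELSE type mismatch would instead
    // have thrown UDFArgumentTypeException from GenericUDFWhen.initialize().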

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java?rev=769826&r1=769825&r2=769826&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java Wed Apr 29 16:47:34 2009
@@ -64,7 +64,7 @@
    * information for each node is the value which is used while walking the tree by 
    * its parent
    */
-  private Map<String, List<exprNodeFuncDesc>> pushdownPreds;
+  private Map<String, List<exprNodeDesc>> pushdownPreds;
   /**
    *  Values the expression sub-trees (predicates) that can be pushed down for root 
    * expression tree. Since there can be more than one alias in an expression tree, 
@@ -73,7 +73,7 @@
   private Map<exprNodeDesc,ExprInfo> exprInfoMap;
   
   public ExprWalkerInfo() {
-    this.pushdownPreds = new HashMap<String, List<exprNodeFuncDesc>>();
+    this.pushdownPreds = new HashMap<String, List<exprNodeDesc>>();
     this.exprInfoMap   = new HashMap<exprNodeDesc, ExprInfo>();
   }
   
@@ -81,7 +81,7 @@
     this.op = op;
     this.toRR   = toRR;
     
-    this.pushdownPreds = new HashMap<String, List<exprNodeFuncDesc>>();
+    this.pushdownPreds = new HashMap<String, List<exprNodeDesc>>();
     this.exprInfoMap   = new HashMap<exprNodeDesc, ExprInfo>();
   }
   
@@ -179,12 +179,12 @@
    * Adds the specified expr as the top-most pushdown expr (ie all its children can be pushed)
    * @param expr
    */
-  public void addFinalCandidate(exprNodeFuncDesc expr) {
+  public void addFinalCandidate(exprNodeDesc expr) {
     String alias = this.getAlias(expr);
     if(pushdownPreds.get(alias) == null) {
-      pushdownPreds.put(alias, new ArrayList<exprNodeFuncDesc>());
+      pushdownPreds.put(alias, new ArrayList<exprNodeDesc>());
     }
-    pushdownPreds.get(alias).add((exprNodeFuncDesc) expr.clone());
+    pushdownPreds.get(alias).add((exprNodeDesc) expr.clone());
   }
 
   /**
@@ -192,7 +192,7 @@
    * RowResolver. The exprs in each list can be combined using conjunction (AND)
    * @return the map of alias to a list of pushdown predicates
    */
-  public Map<String, List<exprNodeFuncDesc>> getFinalCandidates() {
+  public Map<String, List<exprNodeDesc>> getFinalCandidates() {
     return pushdownPreds;
   }
 
@@ -203,8 +203,8 @@
   public void merge(ExprWalkerInfo ewi) {
     if(ewi == null)
       return;
-    for(Entry<String, List<exprNodeFuncDesc>> e :  ewi.getFinalCandidates().entrySet()) {
-      List<exprNodeFuncDesc> predList = pushdownPreds.get(e.getKey());
+    for(Entry<String, List<exprNodeDesc>> e :  ewi.getFinalCandidates().entrySet()) {
+      List<exprNodeDesc> predList = pushdownPreds.get(e.getKey());
       if( predList != null ) {
         predList.addAll(e.getValue());
       } else {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java?rev=769826&r1=769825&r2=769826&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java Wed Apr 29 16:47:34 2009
@@ -40,6 +40,7 @@
 import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeFieldDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc;
+import org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc;
 import org.apache.hadoop.hive.ql.udf.UDFOPAnd;
 import org.apache.hadoop.hive.ql.udf.UDFType;
@@ -139,6 +140,57 @@
 
   }
 
+  /**
+   * If all children are candidates and refer only to one table alias, then this expr is a
+   * candidate; otherwise it is not, but its children may still be final candidates.
+   */
+  public static class GenericFuncExprProcessor implements NodeProcessor {
+
+    @Override
+    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+        Object... nodeOutputs) throws SemanticException {
+      ExprWalkerInfo ctx = (ExprWalkerInfo) procCtx;
+      String alias = null;
+      exprNodeGenericFuncDesc expr = (exprNodeGenericFuncDesc) nd;
+
+      UDFType note = expr.getGenericUDFClass().getAnnotation(UDFType.class);
+      if(note != null && !note.deterministic()) {
+        // this GenericUDF can't be pushed down
+        ctx.setIsCandidate(expr, false);
+        return false;
+      }
+      
+      boolean isCandidate = true;
+      for (int i=0; i < nd.getChildren().size(); i++) {
+        exprNodeDesc ch = (exprNodeDesc) nd.getChildren().get(i);
+        exprNodeDesc newCh = ctx.getConvertedNode(ch);
+        if (newCh != null) {
+          expr.getChildExprs().set(i, newCh);
+          ch = newCh;
+        }
+        String chAlias = ctx.getAlias(ch);
+        
+        isCandidate = isCandidate && ctx.isCandidate(ch);
+        // need to iterate through all children even if one is found to be not a candidate,
+        // in case the other children can be pushed down individually
+        if (isCandidate && chAlias != null) {
+          if (alias == null) {
+            alias = chAlias;
+          } else if (!chAlias.equalsIgnoreCase(alias)) {
+            isCandidate = false;
+          }
+        }
+        
+        if(!isCandidate)
+          break;
+      }
+      ctx.addAlias(expr, alias);
+      ctx.setIsCandidate(expr, isCandidate);
+      return isCandidate;
+    }
+
+  }
+  
   public static class IndexExprProcessor implements NodeProcessor {
 
     @Override
@@ -205,6 +257,10 @@
     return new FuncExprProcessor();
   }
 
+  public static NodeProcessor getGenericFuncProcessor() {
+    return new GenericFuncExprProcessor();
+  }
+
   public static NodeProcessor getIndexProcessor() {
     return new IndexExprProcessor();
   }
@@ -215,8 +271,8 @@
 
   public static ExprWalkerInfo extractPushdownPreds(OpWalkerInfo opContext, 
       Operator<? extends Serializable> op,
-      exprNodeFuncDesc pred) throws SemanticException {
-    List<exprNodeFuncDesc> preds = new ArrayList<exprNodeFuncDesc>();
+      exprNodeDesc pred) throws SemanticException {
+    List<exprNodeDesc> preds = new ArrayList<exprNodeDesc>();
     preds.add(pred);
     return extractPushdownPreds(opContext, op, preds);
   }
@@ -231,7 +287,7 @@
    */
   public static ExprWalkerInfo extractPushdownPreds(OpWalkerInfo opContext, 
       Operator<? extends Serializable> op,
-      List<exprNodeFuncDesc> preds) throws SemanticException {
+      List<exprNodeDesc> preds) throws SemanticException {
     // Create the walker, the rules dispatcher and the context.
     ExprWalkerInfo exprContext = new ExprWalkerInfo(op, opContext.getRowResolver(op));
     
@@ -242,22 +298,23 @@
     exprRules.put(new RuleRegExp("R2", exprNodeFieldDesc.class.getName() + "%"), getFuncProcessor());
     exprRules.put(new RuleRegExp("R3", exprNodeFuncDesc.class.getName() + "%"), getFuncProcessor());
     exprRules.put(new RuleRegExp("R4", exprNodeIndexDesc.class.getName() + "%"), getIndexProcessor());
+    exprRules.put(new RuleRegExp("R5", exprNodeGenericFuncDesc.class.getName() + "%"), getGenericFuncProcessor());
   
     // The dispatcher fires the processor corresponding to the closest matching rule and passes the context along
     Dispatcher disp = new DefaultRuleDispatcher(getDefaultExprProcessor(), exprRules, exprContext);
     GraphWalker egw = new DefaultGraphWalker(disp);
   
     List<Node> startNodes = new ArrayList<Node>();
-    List<exprNodeFuncDesc> clonedPreds = new ArrayList<exprNodeFuncDesc>();
-    for (exprNodeFuncDesc node : preds) {
-      clonedPreds.add((exprNodeFuncDesc) node.clone());
+    List<exprNodeDesc> clonedPreds = new ArrayList<exprNodeDesc>();
+    for (exprNodeDesc node : preds) {
+      clonedPreds.add((exprNodeDesc) node.clone());
     }
     startNodes.addAll(clonedPreds);
     
     egw.startWalking(startNodes, null);
     
     // check the root expression for final candidates
-    for (exprNodeFuncDesc pred : clonedPreds) {
+    for (exprNodeDesc pred : clonedPreds) {
       extractFinalCandidates(pred, exprContext);
     }
     return exprContext;
@@ -266,20 +323,20 @@
   /**
    * Walks through the top AND nodes and determine which of them are final candidates
    */
-  private static void extractFinalCandidates(exprNodeFuncDesc expr, ExprWalkerInfo ctx) {
+  private static void extractFinalCandidates(exprNodeDesc expr, ExprWalkerInfo ctx) {
     if (ctx.isCandidate(expr)) {
       ctx.addFinalCandidate(expr);
       return;
     }
     
-    if (!UDFOPAnd.class.isAssignableFrom(expr.getUDFClass())) {
-      return;
+    // If the operator is AND, we can try to push down its children
+    if (expr instanceof exprNodeFuncDesc
+        && ((exprNodeFuncDesc)expr).getUDFClass().equals(UDFOPAnd.class)) {
+      // now determine if any of the children are final candidates
+      for (Node ch : expr.getChildren()) {
+        extractFinalCandidates((exprNodeDesc) ch, ctx);
+      }        
     }
-    // now determine if any of the children are final candidates
-    for (Node ch : expr.getChildren()) {
-      if(ch instanceof exprNodeFuncDesc)
-        extractFinalCandidates((exprNodeFuncDesc) ch, ctx);
-    }        
+    
   }
-
 }
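
With exprNodeGenericFuncDesc now in the walk, the pushdown containers are widened from exprNodeFuncDesc to the common exprNodeDesc, and extractFinalCandidates only recurses through AND roots: a conjunct that references a single alias can be pushed even when its sibling cannot. A toy model of that walk (not Hive code), assuming candidacy has already been decided per node:

    import java.util.*;

    // Toy model of extractFinalCandidates: push a whole predicate if it is a
    // candidate; otherwise, if the root is AND, try each conjunct separately.
    public class PushdownSketch {
      static class Pred {
        boolean isCandidate; boolean isAnd;
        List<Pred> children = new ArrayList<Pred>(); String text;
        Pred(String text, boolean isCandidate) { this.text = text; this.isCandidate = isCandidate; }
      }
      static void extract(Pred p, List<String> out) {
        if (p.isCandidate) { out.add(p.text); return; }
        if (p.isAnd) for (Pred ch : p.children) extract(ch, out); // only AND splits
      }
      public static void main(String[] args) {
        Pred left = new Pred("a.x = 1", true), right = new Pred("b.y = 2", true);
        Pred and = new Pred("a.x = 1 AND b.y = 2", false); // two aliases: not a candidate
        and.isAnd = true; and.children.addAll(Arrays.asList(left, right));
        List<String> out = new ArrayList<String>();
        extract(and, out);
        System.out.println(out); // [a.x = 1, b.y = 2]
      }
    }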

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java?rev=769826&r1=769825&r2=769826&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java Wed Apr 29 16:47:34 2009
@@ -104,9 +104,9 @@
       }
       
       // combine all predicates into a single expression
-      List<exprNodeFuncDesc> preds = null;
+      List<exprNodeDesc> preds = null;
       exprNodeFuncDesc condn = null; 
-      Iterator<List<exprNodeFuncDesc>> iterator = pushDownPreds.getFinalCandidates().values().iterator();
+      Iterator<List<exprNodeDesc>> iterator = pushDownPreds.getFinalCandidates().values().iterator();
       while (iterator.hasNext()) {
         preds = iterator.next();
         int i = 0;
@@ -285,9 +285,9 @@
      * @param ewi
      */
     protected void logExpr(Node nd, ExprWalkerInfo ewi) {
-      for (Entry<String, List<exprNodeFuncDesc>> e : ewi.getFinalCandidates().entrySet()) {
+      for (Entry<String, List<exprNodeDesc>> e : ewi.getFinalCandidates().entrySet()) {
         LOG.info("Pushdown Predicates of " + nd.getName() + " For Alias : " + e.getKey() );
-        for (exprNodeFuncDesc n : e.getValue()) {
+        for (exprNodeDesc n : e.getValue()) {
         LOG.info("\t" + n.getExprString());
         }
       }
@@ -316,7 +316,7 @@
       if(ewi == null) {
         ewi = new ExprWalkerInfo();
       }
-      for (Entry<String, List<exprNodeFuncDesc>> e : childPreds.getFinalCandidates().entrySet()) {
+      for (Entry<String, List<exprNodeDesc>> e : childPreds.getFinalCandidates().entrySet()) {
         if(ignoreAliases || aliases == null || aliases.contains(e.getKey()) || e.getKey() == null) {
           // e.getKey() (alias) can be null in case of constant expressions. see input8.q
           ExprWalkerInfo extractPushdownPreds = ExprWalkerProcFactory.extractPushdownPreds(owi, op, e.getValue());

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java Wed Apr 29 16:47:34 2009
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+
+/**
+ * A Generic User-defined function (GenericUDF) for use with Hive.
+ * 
+ * New GenericUDF classes need to inherit from this GenericUDF class.
+ * 
+ * GenericUDFs are superior to normal UDFs in the following ways:
+ * 1. They can accept arguments of complex types, and return complex types.
+ * 2. They can accept a variable number of arguments.
+ * 3. They can accept an unbounded number of function signatures - for example, 
+ *    it's easy to write a GenericUDF that accepts array<int>, 
+ *    array<array<int>> and so on (arbitrary levels of nesting).
+ * 4. They can do short-circuit evaluation using DeferredObject.
+ */
+@UDFType(deterministic=true)
+public abstract class GenericUDF {
+  
+  /**
+   * A DeferredObject allows lazy evaluation and short-circuiting:
+   * the argument is only computed when get() is called.
+   * GenericUDF uses DeferredObject to pass arguments.
+   */
+  public static interface DeferredObject {
+    public Object get() throws HiveException; 
+  };
+  
+  /**
+   * The constructor
+   */
+  public GenericUDF() {
+  }
+
+  /**
+   * Initialize this GenericUDF. This will be called once and only once per
+   * GenericUDF instance.
+   * 
+   * @param arguments     The ObjectInspector for the arguments
+   * @throws UDFArgumentTypeException
+   *                      Thrown when arguments have wrong types
+   * @return              The ObjectInspector for the return value
+   */
+  public abstract ObjectInspector initialize(ObjectInspector[] arguments) 
+      throws UDFArgumentTypeException;
+  
+  /**
+   * Evaluate the GenericUDF with the arguments.
+   * @param arguments  The arguments as DeferredObject; use DeferredObject.get() to
+   *                   get the actual argument Object.  The Objects can be inspected
+   *                   by the ObjectInspectors passed in the initialize call.
+   * @return The object representing the return value, to be inspected with the
+   *         ObjectInspector returned by initialize().
+   */
+  public abstract Object evaluate(DeferredObject[] arguments) throws HiveException;
+  
+  /**
+   * Get the String to be displayed in explain.
+   */
+  public abstract String getDisplayString(String[] children);
+  
+}
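
To make the contract concrete, here is a minimal, hypothetical GenericUDF (not part of this commit): a two-argument nvl that returns the first non-null argument, exercising initialize, evaluate and getDisplayString as well as the DeferredObject short-circuiting described above.

    package org.apache.hadoop.hive.ql.udf.generic;

    import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

    public class GenericUDFExampleNvl extends GenericUDF {

      private ObjectInspector returnOI;

      @Override
      public ObjectInspector initialize(ObjectInspector[] arguments)
          throws UDFArgumentTypeException {
        if (arguments.length != 2) {
          throw new UDFArgumentTypeException(0, "Exactly two arguments are expected");
        }
        if (!arguments[0].getTypeName().equals(arguments[1].getTypeName())) {
          // A production version would unify differing ObjectInspectors with
          // GenericUDFUtils.ReturnObjectInspectorResolver (see below).
          throw new UDFArgumentTypeException(1, "\"" + arguments[0].getTypeName()
              + "\" is expected but \"" + arguments[1].getTypeName() + "\" is found");
        }
        returnOI = arguments[0];
        return returnOI;
      }

      @Override
      public Object evaluate(DeferredObject[] arguments) throws HiveException {
        Object first = arguments[0].get();
        // Short-circuit: the second argument is only computed when the first is null.
        return (first != null) ? first : arguments[1].get();
      }

      @Override
      public String getDisplayString(String[] children) {
        return "nvl(" + children[0] + ", " + children[1] + ")";
      }
    }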

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCase.java Wed Apr 29 16:47:34 2009
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+/**
+ * GenericUDF Class for SQL construct "CASE WHEN a THEN b WHEN c THEN d [ELSE f] END".
+ * 
+ * NOTES:
+ * 1. a and c should be boolean, or an exception will be thrown.
+ * 2. b, d and f should have the same TypeInfo, or an exception will be thrown.
+ */
+public class GenericUDFCase extends GenericUDF {
+
+  private static Log LOG = LogFactory.getLog(GenericUDFCase.class.getName());
+
+  ObjectInspector[] argumentOIs;
+  GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
+  GenericUDFUtils.ReturnObjectInspectorResolver caseOIResolver;
+  
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+      throws UDFArgumentTypeException {
+    
+    this.argumentOIs = arguments;
+    caseOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver();
+    returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver();
+    
+    boolean r = caseOIResolver.update(arguments[0]);
+    assert(r);
+    for (int i=1; i+1<arguments.length; i+=2) {
+      if (!caseOIResolver.update(arguments[i])) {
+        throw new UDFArgumentTypeException(i,
+            "The expressions after WHEN should have the same type with that after CASE: \""
+            + caseOIResolver.get().getTypeName() + "\" is expected but \"" 
+            + arguments[i].getTypeName() + "\" is found");
+      }
+      if (!returnOIResolver.update(arguments[i+1])) {
+        throw new UDFArgumentTypeException(i+1,
+            "The expressions after THEN should have the same type: \""
+            + returnOIResolver.get().getTypeName() + "\" is expected but \""
+            + arguments[i+1].getTypeName() + "\" is found");
+      }
+    }
+    if (arguments.length % 2 == 0) {
+      int i = arguments.length - 2;
+      if (!returnOIResolver.update(arguments[i+1])) {
+        throw new UDFArgumentTypeException(i+1,
+            "The expression after ELSE should have the same type as those after THEN: \""
+            + returnOIResolver.get().getTypeName() + "\" is expected but \""
+            + arguments[i+1].getTypeName() + "\" is found");
+      }
+    }
+    
+    return returnOIResolver.get();
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    Object exprValue = arguments[0].get(); 
+    for (int i=1; i+1<arguments.length; i+=2) {
+      Object caseKey = arguments[i].get();
+      if (PrimitiveObjectInspectorUtils.comparePrimitiveObjects(
+          exprValue, (PrimitiveObjectInspector)argumentOIs[0],
+          caseKey, (PrimitiveObjectInspector)argumentOIs[i])) {
+        Object caseValue = arguments[i+1].get();
+        return returnOIResolver.convertIfNecessary(caseValue,
+            argumentOIs[i+1]);
+      }
+    }
+    // Process else statement
+    if (arguments.length % 2 == 0) {
+      int i = arguments.length - 2;
+      Object elseValue = arguments[i+1].get();
+      return returnOIResolver.convertIfNecessary(elseValue,
+          argumentOIs[i+1]);
+    }
+    return null;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    assert(children.length >= 3);
+    StringBuilder sb = new StringBuilder();
+    sb.append("CASE (");
+    sb.append(children[0]);
+    sb.append(")");
+    for(int i=1; i+1<children.length; i+=2) {
+      sb.append(" WHEN (");
+      sb.append(children[i]);
+      sb.append(") THEN (");
+      sb.append(children[i+1]);
+      sb.append(")");
+    }
+    if (children.length % 2 == 0) {
+      sb.append(" ELSE (");
+      sb.append(children[children.length - 1]);
+      sb.append(")");
+    }
+    sb.append(" END");
+    return sb.toString();
+  }
+
+}
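
The argument layout implied by the grammar is worth spelling out: arguments[0] is the CASE scrutinee, followed by WHEN/THEN pairs, with an optional trailing ELSE value, so an even arguments.length signals that an ELSE branch is present. A self-contained toy that mirrors evaluate(), using plain Java equality in place of DeferredObjects and ObjectInspector comparison:

    public class CaseLayoutDemo {
      // args = { scrutinee, when1, then1, when2, then2, ..., [else] }
      static Object caseEval(Object[] args) {
        for (int i = 1; i + 1 < args.length; i += 2) {
          if (args[i].equals(args[0])) {
            return args[i + 1];                      // first matching WHEN wins
          }
        }
        return args.length % 2 == 0 ? args[args.length - 1] : null; // ELSE or NULL
      }

      public static void main(String[] argv) {
        Object[] args = { 2, 1, "one", 2, "two", "other" };
        System.out.println(caseEval(args)); // "two"
        args[0] = 9;
        System.out.println(caseEval(args)); // "other" (falls through to ELSE)
      }
    }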

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java Wed Apr 29 16:47:34 2009
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+/**
+ * Util functions for GenericUDF classes.
+ */
+public class GenericUDFUtils {
+
+  private static Log LOG = LogFactory.getLog(GenericUDFUtils.class.getName());
+
+
+  /**
+   * This class helps to find the return ObjectInspector for a GenericUDF.
+   * 
+   * In many cases like CASE and IF, the GenericUDF is returning a value out
+ * of several possibilities.  However, these possibilities may not always 
+   * have the same ObjectInspector, although they should have the same 
+   * TypeInfo.
+   * 
+   * This class will help detect whether all possibilities have exactly the
+   * same ObjectInspector.  If not, then we need to convert the Objects to
+   * the same ObjectInspector.
+   * 
+   * A special case is when some values are constant NULL. In this case we 
+   * can use the same ObjectInspector.
+   */
+  public static class ReturnObjectInspectorResolver {
+    boolean valueInspectorsAreTheSame;
+    ObjectInspector returnObjectInspector;
+    
+    ReturnObjectInspectorResolver() {
+      valueInspectorsAreTheSame = true;
+    }
+    /**
+     * Update returnObjectInspector and valueInspectorsAreTheSame based on the
+     * ObjectInspector seen.
+     * @return false if there is a type mismatch
+     */
+    public boolean update(ObjectInspector oi)
+        throws UDFArgumentTypeException {
+      if (!(oi instanceof VoidObjectInspector)) {
+        if (returnObjectInspector == null) {
+          returnObjectInspector = oi;
+        } else if (TypeInfoUtils.getTypeInfoFromObjectInspector(oi)
+            != TypeInfoUtils.getTypeInfoFromObjectInspector(returnObjectInspector)) {
+          LOG.debug("Type mismatch: " + TypeInfoUtils.getTypeInfoFromObjectInspector(oi).getTypeName()
+              + " vs " + TypeInfoUtils.getTypeInfoFromObjectInspector(returnObjectInspector).getTypeName());
+          return false;
+        } else {
+          valueInspectorsAreTheSame = valueInspectorsAreTheSame &&
+              oi == returnObjectInspector;
+        }
+      }
+      return true;
+    }
+    
+    /**
+     * Returns the ObjectInspector of the return value.
+     */
+    public ObjectInspector get() {
+      return valueInspectorsAreTheSame
+          ? returnObjectInspector
+          : ObjectInspectorUtils.getStandardObjectInspector(returnObjectInspector,
+              ObjectInspectorCopyOption.WRITABLE);
+    }
+    
+    /**
+     * Convert the return Object if necessary (when the ObjectInspectors of
+     * different possibilities are not all the same).
+     */
+    public Object convertIfNecessary(Object o, ObjectInspector oi) {
+      if (valueInspectorsAreTheSame || oi instanceof VoidObjectInspector) {
+        return o;
+      } else {
+        return ObjectInspectorUtils.copyToStandardObject(
+            o, oi, ObjectInspectorCopyOption.WRITABLE);
+      }   
+    }
+    
+  }
+  
+  /**
+   * This class helps to make sure that the TypeInfos of all possible
+   * return values are the same. 
+   */
+  public static class ReturnTypeInfoResolver {
+    
+    TypeInfo returnTypeInfo = null;
+    /**
+     * Update the return TypeInfo based on the new value TypeInfo.
+     * @return  false if there is a type mismatch
+     */
+    public boolean updateReturnTypeInfo(TypeInfo newValueTypeInfo) 
+        throws UDFArgumentTypeException {
+      if (newValueTypeInfo == TypeInfoFactory.voidTypeInfo) {
+        // do nothing
+      } else if (returnTypeInfo == null) {
+        returnTypeInfo = newValueTypeInfo;
+      } else if (returnTypeInfo != newValueTypeInfo) {
+        return false;
+      } else {
+        // do nothing
+      }
+      return true;
+    }
+    
+    public TypeInfo getReturnTypeInfo() {
+      return returnTypeInfo;
+    }
+    
+  }
+  
+  
+  
+}
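
The resolver's intended protocol: call update() once for every possible return value while initializing, get() for the unified ObjectInspector (falling back to a standard writable inspector when the inputs differ as instances but agree on TypeInfo), and convertIfNecessary() per produced value at evaluation time. A condensed sketch, callable from this package since the constructor is package-private (thenOI and elseOI are assumed inputs):

    GenericUDFUtils.ReturnObjectInspectorResolver resolver =
        new GenericUDFUtils.ReturnObjectInspectorResolver();
    if (!resolver.update(thenOI) || !resolver.update(elseOI)) {
      throw new UDFArgumentTypeException(1, "THEN and ELSE disagree on type");
    }
    ObjectInspector returnOI = resolver.get();
    // per row, after computing a raw branch value:
    Object result = resolver.convertIfNecessary(rawValue, elseOI);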

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java Wed Apr 29 16:47:34 2009
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+/**
+ * GenericUDF Class for SQL construct "CASE WHEN a THEN b [WHEN c THEN d]* [ELSE e] END".
+ * 
+ * NOTES:
+ * 1. a and c should be boolean, or an exception will be thrown.
+ * 2. b, d and e should have the same TypeInfo, or an exception will be thrown.
+ */
+public class GenericUDFWhen extends GenericUDF {
+
+  private static Log LOG = LogFactory.getLog(GenericUDFWhen.class.getName());
+
+  ObjectInspector[] argumentOIs;
+  GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
+  GenericUDFUtils.ReturnObjectInspectorResolver caseOIResolver;
+  
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+      throws UDFArgumentTypeException {
+    
+    this.argumentOIs = arguments;
+    returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver();
+    
+    for (int i=0; i+1<arguments.length; i+=2) {
+      if (!arguments[i].getTypeName().equals(Constants.BOOLEAN_TYPE_NAME)) {
+        throw new UDFArgumentTypeException(i,
+            "\"" + Constants.BOOLEAN_TYPE_NAME + "\" is expected after WHEN, "
+            + "but \"" + arguments[i].getTypeName() + "\" is found");
+      }
+      if (!returnOIResolver.update(arguments[i+1])) {
+        throw new UDFArgumentTypeException(i+1,
+            "The expressions after THEN should have the same type: \""
+            + returnOIResolver.get().getTypeName() + "\" is expected but \""
+            + arguments[i+1].getTypeName() + "\" is found");
+      }
+    }
+    if (arguments.length % 2 == 1) {
+      int i = arguments.length - 2;
+      if (!returnOIResolver.update(arguments[i+1])) {
+        throw new UDFArgumentTypeException(i+1,
+            "The expression after ELSE should have the same type as those after THEN: \""
+            + returnOIResolver.get().getTypeName() + "\" is expected but \""
+            + arguments[i+1].getTypeName() + "\" is found");
+      }
+    }
+    
+    return returnOIResolver.get();
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    for (int i=0; i+1<arguments.length; i+=2) {
+      Object caseKey = arguments[i].get();
+      if (((BooleanObjectInspector)argumentOIs[i]).get(caseKey)) {
+        Object caseValue = arguments[i+1].get();
+        return returnOIResolver.convertIfNecessary(caseValue,
+            argumentOIs[i+1]);
+      }
+    }
+    // Process else statement
+    if (arguments.length % 2 == 1) {
+      int i = arguments.length - 2;
+      Object elseValue = arguments[i+1].get();
+      return returnOIResolver.convertIfNecessary(elseValue,
+          argumentOIs[i+1]);
+    }
+    return null;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    assert(children.length >= 2);
+    StringBuilder sb = new StringBuilder();
+    sb.append("CASE");
+    for(int i=0; i+1<children.length; i+=2) {
+      sb.append(" WHEN (");
+      sb.append(children[i]);
+      sb.append(") THEN (");
+      sb.append(children[i+1]);
+      sb.append(")");
+    }
+    if (children.length % 2 == 1) {
+      sb.append(" ELSE (");
+      sb.append(children[children.length - 1]);
+      sb.append(")");
+    }
+    sb.append(" END");
+    return sb.toString();
+  }
+
+}
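
The argument layout GenericUDFWhen assumes: boolean conditions sit at even
indices, their result values at the following odd indices, and an odd total
argument count means the final argument is the ELSE value. Illustrated with
an example query (the literals are arbitrary):

    // CASE WHEN 1=1 THEN 2 WHEN 1=3 THEN 4 ELSE 5 END
    //   arguments[0] = (1=1)  -- condition, checked for boolean in initialize()
    //   arguments[1] = 2      -- value returned when arguments[0] is true
    //   arguments[2] = (1=3)  -- next condition
    //   arguments[3] = 4      -- its value
    //   arguments[4] = 5      -- ELSE value (arguments.length % 2 == 1)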

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java?rev=769826&r1=769825&r2=769826&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java Wed Apr 29 16:47:34 2009
@@ -24,6 +24,7 @@
 
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -100,9 +101,10 @@
       ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(exprDesc);
 
       // evaluate on row
-      InspectableObject result = new InspectableObject();
-      eval.evaluate(r.o, r.oi, result);
-      Object standardResult = ObjectInspectorUtils.copyToStandardObject(result.o, result.oi, ObjectInspectorCopyOption.WRITABLE);   
+      ObjectInspector resultOI = eval.initialize(r.oi);
+      Object resultO = eval.evaluate(r.o);
+      
+      Object standardResult = ObjectInspectorUtils.copyToStandardObject(resultO, resultOI, ObjectInspectorCopyOption.WRITABLE);   
       assertEquals(cola, standardResult);
       System.out.println("ExprNodeColumnEvaluator ok");
     } catch (Throwable e) {
@@ -122,10 +124,10 @@
       ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
 
       // evaluate on row
-      InspectableObject result = new InspectableObject();
-      eval.evaluate(r.o, r.oi, result);
+      ObjectInspector resultOI = eval.initialize(r.oi);
+      Object resultO = eval.evaluate(r.o);
       assertEquals("1a",
-          ObjectInspectorUtils.copyToStandardObject(result.o, result.oi, ObjectInspectorCopyOption.JAVA));
+          ObjectInspectorUtils.copyToStandardObject(resultO, resultOI, ObjectInspectorCopyOption.JAVA));
       System.out.println("ExprNodeFuncEvaluator ok");
     } catch (Throwable e) {
       e.printStackTrace();
@@ -142,10 +144,10 @@
       ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
 
       // evaluate on row
-      InspectableObject result = new InspectableObject();
-      eval.evaluate(r.o, r.oi, result);
+      ObjectInspector resultOI = eval.initialize(r.oi);
+      Object resultO = eval.evaluate(r.o);
       assertEquals(Double.valueOf("1"),
-          ObjectInspectorUtils.copyToStandardObject(result.o, result.oi, ObjectInspectorCopyOption.JAVA));
+          ObjectInspectorUtils.copyToStandardObject(resultO, resultOI, ObjectInspectorCopyOption.JAVA));
       System.out.println("testExprNodeConversionEvaluator ok");
     } catch (Throwable e) {
       e.printStackTrace();
@@ -157,13 +159,15 @@
     System.out.println("Evaluating " + expr + " for " + times + " times");
     // evaluate on row
     InspectableObject output = new InspectableObject(); 
+    ObjectInspector resultOI = eval.initialize(input.oi);
+    Object resultO = null;
     long start = System.currentTimeMillis();
     for (int i=0; i<times; i++) {
-      eval.evaluate(input.o, input.oi, output);
+      resultO = eval.evaluate(input.o);
     }
     long end = System.currentTimeMillis();
     assertEquals(standardJavaOutput,
-        ObjectInspectorUtils.copyToStandardObject(output.o, output.oi, ObjectInspectorCopyOption.JAVA));
+        ObjectInspectorUtils.copyToStandardObject(resultO, resultOI, ObjectInspectorCopyOption.JAVA));
     System.out.println("Evaluation finished: " + String.format("%2.3f", (end - start)*0.001) + " seconds, " 
         + String.format("%2.3f", (end - start)*1000.0/times) + " seconds/million call.");
   }
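
The test updates above follow the new ExprNodeEvaluator calling convention:
rather than filling an InspectableObject on every call, an evaluator is now
initialized once with the input row's ObjectInspector and then returns only
the value for each row. Condensed (exprDesc, r.o and r.oi as in the tests):

    ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(exprDesc);
    // One-time setup: the returned ObjectInspector describes every
    // subsequent result produced by this evaluator.
    ObjectInspector resultOI = eval.initialize(r.oi);
    // Per row: only the value comes back; its inspector is already known.
    Object resultO = eval.evaluate(r.o);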

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=769826&r1=769825&r2=769826&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Wed Apr 29 16:47:34 2009
@@ -92,7 +92,7 @@
       op.initialize(null, null);
 
       for(InspectableObject oner: r) {
-        op.process(oner.o, oner.oi);
+        op.process(oner.o, oner.oi, 0);
       }
 
       Map<Enum<?>, Long> results = op.getStats();
@@ -137,17 +137,13 @@
       // fileSinkOperator to dump the output of the select
       fileSinkDesc fsd = new fileSinkDesc ("file:///tmp" + File.separator + System.getProperty("user.name") + File.separator + "TestFileSinkOperator",
                                            Utilities.defaultTd, false);
-      Operator<fileSinkDesc> flop = OperatorFactory.get(fileSinkDesc.class);
-      flop.setConf(fsd);
-      ArrayList<Operator<? extends Serializable>> nextOp = new ArrayList<Operator<? extends Serializable>> ();
-      nextOp.add(flop);
-
-      op.setChildOperators(nextOp);
+      Operator<fileSinkDesc> flop = OperatorFactory.getAndMakeChild(fsd, op);
+      
       op.initialize(new JobConf(TestOperators.class), Reporter.NULL);
 
       // evaluate on row
       for(int i=0; i<5; i++) {
-        op.process(r[i].o, r[i].oi);
+        op.process(r[i].o, r[i].oi, 0);
       }
       op.close(false);
 
@@ -183,31 +179,17 @@
       tableDesc scriptOutput = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
       tableDesc scriptInput  = PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "a,b");
       scriptDesc sd = new scriptDesc("cat", scriptOutput, scriptInput);
-      Operator<scriptDesc> sop = OperatorFactory.get(scriptDesc.class);
-      sop.setConf(sd);
-      ArrayList<Operator<? extends Serializable>> nextScriptOp = new ArrayList<Operator<? extends Serializable>> ();
-      nextScriptOp.add(sop);
-
+      Operator<scriptDesc> sop = OperatorFactory.getAndMakeChild(sd, op);
 
       // Collect operator to observe the output of the script
       collectDesc cd = new collectDesc (Integer.valueOf(10));
-      CollectOperator cdop = (CollectOperator) OperatorFactory.get(collectDesc.class);
-      cdop.setConf(cd);
-      ArrayList<Operator<? extends Serializable>> nextCollectOp = new ArrayList<Operator<? extends Serializable>> ();
-      nextCollectOp.add(cdop);
-
-
-      // chain the scriptOperator to the select operator
-      op.setChildOperators(nextScriptOp);
-      // chain the collect operator to the script operator
-      sop.setChildOperators(nextCollectOp);
-
+      CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(cd, sop);
 
       op.initialize(new JobConf(TestOperators.class), null);
 
       // evaluate on row
       for(int i=0; i<5; i++) {
-        op.process(r[i].o, r[i].oi);
+        op.process(r[i].o, r[i].oi, 0);
       }
       op.close(false);
 

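Similarly, the TestOperators changes replace hand-written operator wiring
with OperatorFactory.getAndMakeChild, which builds an operator from its
descriptor and attaches it as a child of the given parent in one call:

    // Old pattern (removed above): construct, configure, wire by hand.
    //   Operator<fileSinkDesc> flop = OperatorFactory.get(fileSinkDesc.class);
    //   flop.setConf(fsd);
    //   ArrayList<Operator<? extends Serializable>> children =
    //       new ArrayList<Operator<? extends Serializable>>();
    //   children.add(flop);
    //   op.setChildOperators(children);

    // New pattern: one call does all of the above.
    Operator<fileSinkDesc> flop = OperatorFactory.getAndMakeChild(fsd, op);
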
Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_case_type_wrong.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_case_type_wrong.q?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_case_type_wrong.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_case_type_wrong.q Wed Apr 29 16:47:34 2009
@@ -0,0 +1,6 @@
+SELECT CASE '1'
+        WHEN 1 THEN 2
+        WHEN 3 THEN 4
+        ELSE 5
+       END
+FROM src LIMIT 1;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_case_type_wrong2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_case_type_wrong2.q?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_case_type_wrong2.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_case_type_wrong2.q Wed Apr 29 16:47:34 2009
@@ -0,0 +1,6 @@
+SELECT CASE 1
+        WHEN 1 THEN '2'
+        WHEN 3 THEN 4
+        ELSE 5
+       END
+FROM src LIMIT 1;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_case_type_wrong3.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_case_type_wrong3.q?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_case_type_wrong3.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_case_type_wrong3.q Wed Apr 29 16:47:34 2009
@@ -0,0 +1,6 @@
+SELECT CASE 1
+        WHEN 1 THEN NULL
+        WHEN 3 THEN '2'
+        ELSE 7
+       END
+FROM src LIMIT 1;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_when_type_wrong.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_when_type_wrong.q?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_when_type_wrong.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_when_type_wrong.q Wed Apr 29 16:47:34 2009
@@ -0,0 +1,6 @@
+SELECT CASE
+        WHEN TRUE THEN 2
+        WHEN '1' THEN 4
+        ELSE 5
+       END
+FROM src LIMIT 1;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_when_type_wrong2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_when_type_wrong2.q?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_when_type_wrong2.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_when_type_wrong2.q Wed Apr 29 16:47:34 2009
@@ -0,0 +1,6 @@
+SELECT CASE
+        WHEN 1=2 THEN '2'
+        WHEN 3=4 THEN 4
+        ELSE 5
+       END
+FROM src LIMIT 1;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_when_type_wrong3.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_when_type_wrong3.q?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_when_type_wrong3.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/udf_when_type_wrong3.q Wed Apr 29 16:47:34 2009
@@ -0,0 +1,6 @@
+SELECT CASE
+        WHEN 1=2 THEN '2'
+        WHEN 3=4 THEN '5'
+        ELSE 5.3
+       END
+FROM src LIMIT 1;

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/ppd_udf_case.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/ppd_udf_case.q?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/ppd_udf_case.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/ppd_udf_case.q Wed Apr 29 16:47:34 2009
@@ -0,0 +1,27 @@
+set hive.optimize.ppd=true;
+
+EXPLAIN
+SELECT *
+FROM srcpart a JOIN srcpart b
+ON a.key = b.key
+WHERE a.ds = '2008-04-08' AND
+      b.ds = '2008-04-08' AND
+      CASE a.key
+        WHEN '27' THEN TRUE
+        WHEN '38' THEN FALSE
+        ELSE NULL
+       END
+ORDER BY a.key, a.value, a.ds, a.hr, b.key, b.value, b.ds, b.hr;
+
+SELECT *
+FROM srcpart a JOIN srcpart b
+ON a.key = b.key
+WHERE a.ds = '2008-04-08' AND
+      b.ds = '2008-04-08' AND
+      CASE a.key
+        WHEN '27' THEN TRUE
+        WHEN '38' THEN FALSE
+        ELSE NULL
+       END
+ORDER BY a.key, a.value, a.ds, a.hr, b.key, b.value, b.ds, b.hr;
+

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_case.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_case.q?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_case.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_case.q Wed Apr 29 16:47:34 2009
@@ -0,0 +1,54 @@
+EXPLAIN
+SELECT CASE 1
+        WHEN 1 THEN 2
+        WHEN 3 THEN 4
+        ELSE 5
+       END,
+       CASE 2
+        WHEN 1 THEN 2
+        ELSE 5
+       END,
+       CASE 14
+        WHEN 12 THEN 13
+        WHEN 14 THEN 15
+       END,
+       CASE 16
+        WHEN 12 THEN 13
+        WHEN 14 THEN 15
+       END,
+       CASE 17
+        WHEN 18 THEN NULL
+        WHEN 17 THEN 20
+       END,
+       CASE 21
+        WHEN 22 THEN 23
+        WHEN 21 THEN 24
+       END
+FROM src LIMIT 1;
+
+SELECT CASE 1
+        WHEN 1 THEN 2
+        WHEN 3 THEN 4
+        ELSE 5
+       END,
+       CASE 2
+        WHEN 1 THEN 2
+        ELSE 5
+       END,
+       CASE 14
+        WHEN 12 THEN 13
+        WHEN 14 THEN 15
+       END,
+       CASE 16
+        WHEN 12 THEN 13
+        WHEN 14 THEN 15
+       END,
+       CASE 17
+        WHEN 18 THEN NULL
+        WHEN 17 THEN 20
+       END,
+       CASE 21
+        WHEN 22 THEN 23
+        WHEN 21 THEN 24
+       END
+FROM src LIMIT 1;

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_case_column_pruning.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_case_column_pruning.q?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_case_column_pruning.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_case_column_pruning.q Wed Apr 29 16:47:34 2009
@@ -0,0 +1,18 @@
+EXPLAIN
+SELECT CASE a.key
+        WHEN '1' THEN 2
+        WHEN '3' THEN 4
+        ELSE 5
+       END as key
+FROM src a JOIN src b
+ON a.key = b.key
+ORDER BY key LIMIT 10;
+
+SELECT CASE a.key
+        WHEN '1' THEN 2
+        WHEN '3' THEN 4
+        ELSE 5
+       END as key
+FROM src a JOIN src b
+ON a.key = b.key
+ORDER BY key LIMIT 10;

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_case_thrift.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_case_thrift.q?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_case_thrift.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_case_thrift.q Wed Apr 29 16:47:34 2009
@@ -0,0 +1,34 @@
+EXPLAIN
+SELECT CASE src_thrift.lint[0]
+        WHEN 0 THEN src_thrift.lint[0] + 1
+        WHEN 1 THEN src_thrift.lint[0] + 2
+        WHEN 2 THEN 100
+        ELSE 5
+       END,
+       CASE src_thrift.lstring[0]
+        WHEN '0' THEN 'zero'
+        WHEN '10' THEN CONCAT(src_thrift.lstring[0], " is ten")
+        ELSE 'default'
+       END,
+       (CASE src_thrift.lstring[0]
+        WHEN '0' THEN src_thrift.lstring
+        ELSE NULL
+       END)[0]
+FROM src_thrift LIMIT 3;
+
+SELECT CASE src_thrift.lint[0]
+        WHEN 0 THEN src_thrift.lint[0] + 1
+        WHEN 1 THEN src_thrift.lint[0] + 2
+        WHEN 2 THEN 100
+        ELSE 5
+       END,
+       CASE src_thrift.lstring[0]
+        WHEN '0' THEN 'zero'
+        WHEN '10' THEN CONCAT(src_thrift.lstring[0], " is ten")
+        ELSE 'default'
+       END,
+       (CASE src_thrift.lstring[0]
+        WHEN '0' THEN src_thrift.lstring
+        ELSE NULL
+       END)[0]
+FROM src_thrift LIMIT 3;

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_when.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_when.q?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_when.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_when.q Wed Apr 29 16:47:34 2009
@@ -0,0 +1,54 @@
+EXPLAIN
+SELECT CASE
+        WHEN 1=1 THEN 2
+        WHEN 1=3 THEN 4
+        ELSE 5
+       END,
+       CASE
+        WHEN 6=7 THEN 8
+        ELSE 9
+       END,
+       CASE
+        WHEN 10=11 THEN 12
+        WHEN 13=13 THEN 14
+       END,
+       CASE
+        WHEN 15=16 THEN 17
+        WHEN 18=19 THEN 20
+       END,
+       CASE
+        WHEN 21=22 THEN NULL
+        WHEN 23=23 THEN 24
+       END,
+       CASE
+        WHEN 25=26 THEN 27
+        WHEN 28=28 THEN NULL
+       END
+FROM src LIMIT 1;
+
+SELECT CASE
+        WHEN 1=1 THEN 2
+        WHEN 1=3 THEN 4
+        ELSE 5
+       END,
+       CASE
+        WHEN 6=7 THEN 8
+        ELSE 9
+       END,
+       CASE
+        WHEN 10=11 THEN 12
+        WHEN 13=13 THEN 14
+       END,
+       CASE
+        WHEN 15=16 THEN 17
+        WHEN 18=19 THEN 20
+       END,
+       CASE
+        WHEN 21=22 THEN NULL
+        WHEN 23=23 THEN 24
+       END,
+       CASE
+        WHEN 25=26 THEN 27
+        WHEN 28=28 THEN NULL
+       END
+FROM src LIMIT 1;

Added: hadoop/hive/trunk/ql/src/test/queries/positive/udf_case.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/positive/udf_case.q?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/positive/udf_case.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/positive/udf_case.q Wed Apr 29 16:47:34 2009
@@ -0,0 +1,10 @@
+SELECT CASE 1
+        WHEN 1 THEN 2
+        WHEN 3 THEN 4
+        ELSE 5
+       END,
+       CASE 11
+        WHEN 12 THEN 13
+        WHEN 14 THEN 15
+       END
+FROM src LIMIT 1

Added: hadoop/hive/trunk/ql/src/test/queries/positive/udf_when.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/positive/udf_when.q?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/positive/udf_when.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/positive/udf_when.q Wed Apr 29 16:47:34 2009
@@ -0,0 +1,10 @@
+SELECT CASE
+        WHEN 1=1 THEN 2
+        WHEN 3=5 THEN 4
+        ELSE 5
+       END,
+       CASE
+        WHEN 12=11 THEN 13
+        WHEN 14=10 THEN 15
+       END
+FROM src LIMIT 1

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_case_type_wrong.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_case_type_wrong.q.out?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_case_type_wrong.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_case_type_wrong.q.out Wed Apr 29 16:47:34 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 2:13 Argument Type Mismatch 1: The expressions after WHEN should have the same type with that after CASE: "string" is expected but "int" is found

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_case_type_wrong2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_case_type_wrong2.q.out?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_case_type_wrong2.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_case_type_wrong2.q.out Wed Apr 29 16:47:34 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 3:20 Argument Type Mismatch 4: The expressions after THEN should have the same type: "string" is expected but "int" is found

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_case_type_wrong3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_case_type_wrong3.q.out?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_case_type_wrong3.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_case_type_wrong3.q.out Wed Apr 29 16:47:34 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 4:13 Argument Type Mismatch 7: The expression after ELSE should have the same type as those after THEN: "string" is expected but "int" is found

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_when_type_wrong.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_when_type_wrong.q.out?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_when_type_wrong.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_when_type_wrong.q.out Wed Apr 29 16:47:34 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 3:13 Argument Type Mismatch '1': "boolean" is expected after WHEN, but "string" is found

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_when_type_wrong2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_when_type_wrong2.q.out?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_when_type_wrong2.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_when_type_wrong2.q.out Wed Apr 29 16:47:34 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 3:22 Argument Type Mismatch 4: The expressions after THEN should have the same type: "string" is expected but "int" is found

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_when_type_wrong3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_when_type_wrong3.q.out?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_when_type_wrong3.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/udf_when_type_wrong3.q.out Wed Apr 29 16:47:34 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 4:13 Argument Type Mismatch 5.3: The expression after ELSE should have the same type as those after THEN: "string" is expected but "double" is found

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_udf_case.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_udf_case.q.out?rev=769826&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_udf_case.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_udf_case.q.out Wed Apr 29 16:47:34 2009
@@ -0,0 +1,154 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF srcpart a) (TOK_TABREF srcpart b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (AND (AND (= (. (TOK_TABLE_OR_COL a) ds) '2008-04-08') (= (. (TOK_TABLE_OR_COL b) ds) '2008-04-08')) (TOK_FUNCTION CASE (. (TOK_TABLE_OR_COL a) key) '27' TRUE '38' FALSE TOK_NULL))) (TOK_ORDERBY (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL a) value) (. (TOK_TABLE_OR_COL a) ds) (. (TOK_TABLE_OR_COL a) hr) (. (TOK_TABLE_OR_COL b) key) (. (TOK_TABLE_OR_COL b) value) (. (TOK_TABLE_OR_COL b) ds) (. (TOK_TABLE_OR_COL b) hr))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        b 
+            Filter Operator
+              predicate:
+                  expr: (ds = '2008-04-08')
+                  type: boolean
+              Reduce Output Operator
+                key expressions:
+                      expr: key
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: key
+                      type: string
+                tag: 1
+                value expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                      expr: ds
+                      type: string
+                      expr: hr
+                      type: string
+        a 
+            Filter Operator
+              predicate:
+                  expr: ((ds = '2008-04-08') and CASE (key) WHEN ('27') THEN (true) WHEN ('38') THEN (false) ELSE (null) END)
+                  type: boolean
+              Reduce Output Operator
+                key expressions:
+                      expr: key
+                      type: string
+                sort order: +
+                Map-reduce partition columns:
+                      expr: key
+                      type: string
+                tag: 0
+                value expressions:
+                      expr: key
+                      type: string
+                      expr: value
+                      type: string
+                      expr: ds
+                      type: string
+                      expr: hr
+                      type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE.0} {VALUE.1} {VALUE.2} {VALUE.3}
+            1 {VALUE.0} {VALUE.1} {VALUE.2} {VALUE.3}
+          Filter Operator
+            predicate:
+                expr: (((2 = '2008-04-08') and (6 = '2008-04-08')) and CASE (0) WHEN ('27') THEN (true) WHEN ('38') THEN (false) ELSE (null) END)
+                type: boolean
+            Select Operator
+              expressions:
+                    expr: 0
+                    type: string
+                    expr: 1
+                    type: string
+                    expr: 2
+                    type: string
+                    expr: 3
+                    type: string
+                    expr: 4
+                    type: string
+                    expr: 5
+                    type: string
+                    expr: 6
+                    type: string
+                    expr: 7
+                    type: string
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    name: binary_table
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        /data/users/zshao/tools/405-trunk-apache-hive/build/ql/tmp/454747503/14473320.10002 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: string
+                  expr: 2
+                  type: string
+                  expr: 3
+                  type: string
+                  expr: 4
+                  type: string
+                  expr: 5
+                  type: string
+                  expr: 6
+                  type: string
+                  expr: 7
+                  type: string
+            sort order: ++++++++
+            tag: -1
+            value expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: string
+                  expr: 2
+                  type: string
+                  expr: 3
+                  type: string
+                  expr: 4
+                  type: string
+                  expr: 5
+                  type: string
+                  expr: 6
+                  type: string
+                  expr: 7
+                  type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+27	val_27	2008-04-08	11	27	val_27	2008-04-08	11
+27	val_27	2008-04-08	11	27	val_27	2008-04-08	12
+27	val_27	2008-04-08	12	27	val_27	2008-04-08	11
+27	val_27	2008-04-08	12	27	val_27	2008-04-08	12


