hadoop-hive-commits mailing list archives

From: na...@apache.org
Subject: svn commit: r788269 [1/4] - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/apache/hadoop/hive/ql/plan/ ql/src/test/resu...
Date: Thu, 25 Jun 2009 05:50:51 GMT
Author: namit
Date: Thu Jun 25 05:50:50 2009
New Revision: 788269

URL: http://svn.apache.org/viewvc?rev=788269&view=rev
Log:
HIVE-560. Column Pruning for mapjoins
(He Yongqiang via namit)
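
For a sketch of the effect, consider an illustrative map-join query (not
part of this commit; src(key, value) as used throughout the test suite):

  SELECT /*+ MAPJOIN(y) */ x.key, y.value
  FROM src x JOIN src y ON (x.key = y.key);

The hinted table y is loaded into an in-memory hash table on the map
side; with this patch the column pruner also drops any of its columns
the query never reads (here y.key is needed only as the join key). The
same pruning shows up in the plan diffs below, where join condition
expressions shrink from {VALUE._col0} {VALUE._col1} to a single column.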


Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java
    hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join10.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join11.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join12.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join13.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join14.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join16.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join19.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join22.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join25.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join26.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join27.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join28.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join29.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join30.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join31.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join32.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join33.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join34.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join35.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join36.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join9.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join_thrift.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_clusterby.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_gby_join.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_join2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_join3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_multi_insert.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_random.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/regex_col.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_case_column_pruning.q.out
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join3.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join5.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join7.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Thu Jun 25 05:50:50 2009
@@ -270,6 +270,9 @@
     HIVE-472. HiveFileFormatUtils's checkInputFormat does not include RCFile.
     (He Yongqiang via namit)
 
+    HIVE-560. Column Pruning for mapjoins
+    (He Yongqiang via namit)
+
 Release 0.3.1 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java Thu Jun 25 05:50:50 2009
@@ -24,16 +24,9 @@
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.joinDesc;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.Reporter;
 
 
@@ -47,8 +40,6 @@
   public void initializeOp(Configuration hconf, Reporter reporter, ObjectInspector[] inputObjInspector) throws HiveException {
     super.initializeOp(hconf, reporter, inputObjInspector);
 
-    ArrayList<ObjectInspector> structFieldObjectInspectors = new ArrayList<ObjectInspector>(totalSz);
-
     initializeChildren(hconf, reporter, new ObjectInspector[]{joinOutputObjectInspector});
   }
   

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java Thu Jun 25 05:50:50 2009
@@ -40,7 +40,10 @@
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -169,6 +172,23 @@
       
       mapJoinRowsKey = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVEMAPJOINROWSIZE);
       
+      List<? extends StructField> structFields = ((StructObjectInspector)joinOutputObjectInspector).getAllStructFieldRefs();
+      if (conf.getOutputColumnNames().size() < structFields.size()) {
+        List<ObjectInspector> structFieldObjectInspectors = new ArrayList<ObjectInspector>();
+        for (Byte alias : order) {
+          int sz = conf.getExprs().get(alias).size();
+          List<Integer> retained = conf.getRetainList().get(alias);
+          for (int i = 0; i < sz; i++) {
+            int pos = retained.get(i);
+            structFieldObjectInspectors.add(structFields.get(pos)
+                .getFieldObjectInspector());
+          }
+        }
+        joinOutputObjectInspector = ObjectInspectorFactory
+            .getStandardStructObjectInspector(conf.getOutputColumnNames(),
+                structFieldObjectInspectors);
+      }
+      
       initializeChildren(hconf, reporter, new ObjectInspector[]{joinOutputObjectInspector});
     } catch (IOException e) {
       e.printStackTrace();

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPruner.java Thu Jun 25 05:50:50 2009
@@ -91,6 +91,8 @@
     opRules.put(new RuleRegExp("R2", "GBY%"), ColumnPrunerProcFactory.getGroupByProc());
     opRules.put(new RuleRegExp("R3", "RS%"), ColumnPrunerProcFactory.getReduceSinkProc());
     opRules.put(new RuleRegExp("R4", "SEL%"), ColumnPrunerProcFactory.getSelectProc());
+    opRules.put(new RuleRegExp("R5", "JOIN%"), ColumnPrunerProcFactory.getJoinProc());
+    opRules.put(new RuleRegExp("R6", "MAPJOIN%"), ColumnPrunerProcFactory.getMapJoinProc());
 
     // The dispatcher fires the processor corresponding to the closest matching rule and passes the context along
     Dispatcher disp = new DefaultRuleDispatcher(ColumnPrunerProcFactory.getDefaultProc(), opRules, cppCtx);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcCtx.java Thu Jun 25 05:50:50 2009
@@ -24,6 +24,9 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.exec.CommonJoinOperator;
+import org.apache.hadoop.hive.ql.exec.JoinOperator;
+import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.SelectOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -41,12 +44,20 @@
   private  Map<Operator<? extends Serializable>,List<String>> prunedColLists;
   
   private HashMap<Operator<? extends Serializable>, OpParseContext> opToParseCtxMap;
+  
+  private  Map<CommonJoinOperator,Map<Byte,List<String>>> joinPrunedColLists;
     
+
   public ColumnPrunerProcCtx(HashMap<Operator<? extends Serializable>, OpParseContext> opToParseContextMap) {
     prunedColLists = new HashMap<Operator<? extends Serializable>, List<String>>();
     this.opToParseCtxMap = opToParseContextMap;
+    joinPrunedColLists = new HashMap<CommonJoinOperator,Map<Byte,List<String>>>();
   }
 
+  public Map<CommonJoinOperator, Map<Byte, List<String>>> getJoinPrunedColLists() {
+    return joinPrunedColLists;
+  }
+  
   /**
    * @return the prunedColLists
    */
@@ -74,8 +85,18 @@
   public List<String> genColLists(Operator<? extends Serializable> curOp) throws SemanticException {
     List<String> colList = new ArrayList<String>();
     if(curOp.getChildOperators() != null) {
-      for(Operator<? extends Serializable> child: curOp.getChildOperators())
-        colList = Utilities.mergeUniqElems(colList, prunedColLists.get(child));
+      for (Operator<? extends Serializable> child : curOp.getChildOperators()) {
+        if (child instanceof CommonJoinOperator) {
+          int tag = child.getParentOperators().indexOf(curOp);
+          List<String> prunList = joinPrunedColLists.get((CommonJoinOperator) child).get(
+              (byte) tag);
+          colList = Utilities
+              .mergeUniqElems(colList, prunList);
+        } else {
+          colList = Utilities
+              .mergeUniqElems(colList, prunedColLists.get(child));
+        }
+      }
     }
     return colList;
   }
@@ -107,6 +128,12 @@
   public List<String> getSelectColsFromChildren(SelectOperator op, List<String> colList) {
     List<String> cols = new ArrayList<String>();
     selectDesc conf = op.getConf();
+    
+    if(conf.isSelStarNoCompute()){
+      cols.addAll(colList);
+      return cols;
+    }
+    
     ArrayList<exprNodeDesc> selectExprs = conf.getColList();
     
     // The colList is the output columns used by child operators, they are different

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java Thu Jun 25 05:50:50 2009
@@ -20,21 +20,25 @@
 
 import java.io.Serializable;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;
 import java.util.Stack;
 import java.util.Vector;
 
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
-import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.exec.CommonJoinOperator;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
 import org.apache.hadoop.hive.ql.exec.GroupByOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.LimitOperator;
+import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
-import org.apache.hadoop.hive.ql.exec.RowSchema;
 import org.apache.hadoop.hive.ql.exec.ScriptOperator;
 import org.apache.hadoop.hive.ql.exec.SelectOperator;
 import org.apache.hadoop.hive.ql.exec.UnionOperator;
@@ -42,15 +46,18 @@
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.NodeProcessor;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcessor;
 import org.apache.hadoop.hive.ql.parse.OpParseContext;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.aggregationDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.groupByDesc;
+import org.apache.hadoop.hive.ql.plan.joinDesc;
+import org.apache.hadoop.hive.ql.plan.mapJoinDesc;
 import org.apache.hadoop.hive.ql.plan.reduceSinkDesc;
 import org.apache.hadoop.hive.ql.plan.selectDesc;
+import org.apache.hadoop.hive.ql.plan.tableDesc;
 
 /**
  * Factory for generating the different node processors used by ColumnPruner.
@@ -148,10 +155,6 @@
       reduceSinkDesc conf = op.getConf();
       List<Operator<? extends Serializable>> childOperators = op.getChildOperators();
       List<Operator<? extends Serializable>> parentOperators = op.getParentOperators();
-      List<String> childColLists = new ArrayList<String>();
-
-      for(Operator<? extends Serializable> child: childOperators)
-        childColLists = Utilities.mergeUniqElems(childColLists, cppCtx.getPrunedColLists().get(child));
 
       List<String> colLists = new ArrayList<String>();
       ArrayList<exprNodeDesc> keys = conf.getKeyCols();
@@ -161,18 +164,28 @@
       if ((childOperators.size() == 1) && (childOperators.get(0) instanceof JoinOperator)) {
         assert parentOperators.size() == 1;
         Operator<? extends Serializable> par = parentOperators.get(0);
+        JoinOperator childJoin = (JoinOperator)childOperators.get(0);
         RowResolver parRR = opToParseCtxMap.get(par).getRR();
-        RowResolver childRR = opToParseCtxMap.get(childOperators.get(0)).getRR();
-
-        for (String childCol : childColLists) {
-          String [] nm = childRR.reverseLookup(childCol);
-          ColumnInfo cInfo = redSinkRR.get(nm[0],nm[1]);
-          if (cInfo != null) {
-            cInfo = parRR.get(nm[0], nm[1]);
-            if (!colLists.contains(cInfo.getInternalName()))
-              colLists.add(cInfo.getInternalName());
+        List<String> childJoinCols = cppCtx.getJoinPrunedColLists().get(childJoin).get((byte)conf.getTag());
+        boolean[] flags = new boolean[conf.getValueCols().size()];
+        for (int i = 0; i < flags.length; i++)
+          flags[i] = false;
+        if (childJoinCols != null && childJoinCols.size() > 0) {
+          Map<String,exprNodeDesc> exprMap = op.getColumnExprMap();
+          for (String childCol : childJoinCols) {
+            exprNodeDesc desc = exprMap.get(childCol);
+            int index = conf.getValueCols().indexOf(desc);
+            flags[index] = true;
+            String[] nm = redSinkRR.reverseLookup(childCol);
+            if (nm != null) {
+              ColumnInfo cInfo = parRR.get(nm[0], nm[1]);
+              if (!colLists.contains(cInfo.getInternalName()))
+                colLists.add(cInfo.getInternalName());
+            }
           }
         }
+        Collections.sort(colLists);
+        pruneReduceSinkOperator(flags, op, cppCtx);
       }
       else {
         // Reduce Sink contains the columns needed - no need to aggregate from children
@@ -214,15 +227,18 @@
             cppCtx.getPrunedColLists().put(op, cppCtx.getColsFromSelectExpr(op));
             return null;
           }
-          cols = Utilities.mergeUniqElems(cols, cppCtx.getPrunedColLists().get(child));
         }
       }
+      cols = cppCtx.genColLists(op);
 
       selectDesc conf = op.getConf();
       // The input to the select does not matter. Go over the expressions 
       // and return the ones which have a marked column
       cppCtx.getPrunedColLists().put(op, cppCtx.getSelectColsFromChildren(op, cols));
       
+      if(conf.isSelStarNoCompute())
+        return null;
+      
       // do we need to prune the select operator?
       List<exprNodeDesc> originalColList = op.getConf().getColList();
       List<String> columns = new ArrayList<String>();
@@ -251,7 +267,7 @@
         op.getSchema().setSignature(rs_newsignature);
         conf.setColList(newColList);
         conf.setOutputColumnNames(newOutputColumnNames);
-        handleChildren(op, cols);
+        handleChildren(op, cols, cppCtx);
       }
       return null;
     }
@@ -264,54 +280,102 @@
      * 
      * @param op
      * @param retainedSelOutputCols
+     * @throws SemanticException 
      */
     private void handleChildren(SelectOperator op,
-        List<String> retainedSelOutputCols) {
+        List<String> retainedSelOutputCols, ColumnPrunerProcCtx cppCtx) throws SemanticException {
       for(Operator<? extends Serializable> child: op.getChildOperators()) {
         if (child instanceof ReduceSinkOperator) {
-          pruneReduceSinkOperator(retainedSelOutputCols, (ReduceSinkOperator)child);
+          boolean[] flags = getPruneReduceSinkOpRetainFlags(retainedSelOutputCols, (ReduceSinkOperator)child);
+          pruneReduceSinkOperator(flags, (ReduceSinkOperator)child, cppCtx);
         }else if (child instanceof FilterOperator){
           //filter operator has the same output columns as its parent
           for(Operator<? extends Serializable> filterChild: child.getChildOperators()){
-            if (filterChild instanceof ReduceSinkOperator)
-              pruneReduceSinkOperator(retainedSelOutputCols, (ReduceSinkOperator)filterChild);
+            if (filterChild instanceof ReduceSinkOperator) {
+              boolean[] flags = getPruneReduceSinkOpRetainFlags(retainedSelOutputCols, (ReduceSinkOperator)filterChild);
+              pruneReduceSinkOperator(flags, (ReduceSinkOperator)filterChild, cppCtx);
+            }
           }
         }
       }
     }
-
-    private void pruneReduceSinkOperator(List<String> retainedSelOpOutputCols,
-        ReduceSinkOperator child) {
-      ReduceSinkOperator reduce = (ReduceSinkOperator) child;
-      reduceSinkDesc reduceConf = reduce.getConf();
-      ArrayList<String> originalValueOutputColNames = reduceConf
-          .getOutputValueColumnNames();
-      java.util.ArrayList<exprNodeDesc> originalValueEval = reduceConf
-          .getValueCols();
-      ArrayList<String> newOutputColNames = new ArrayList<String>();
-      java.util.ArrayList<exprNodeDesc> newValueEval = new ArrayList<exprNodeDesc>();
-      for (int i = 0; i < originalValueEval.size(); i++) {
-        boolean retain = false;
-        List<String> current = originalValueEval.get(i).getCols();
-        if (current != null) {
-          for (int j = 0; j < current.size(); j++) {
-            if (retainedSelOpOutputCols.contains(current.get(j))) {
-              retain = true;
-              break;
-            }
+  }
+  
+  private static boolean[] getPruneReduceSinkOpRetainFlags(List<String> retainedParentOpOutputCols, ReduceSinkOperator reduce){
+    reduceSinkDesc reduceConf = reduce.getConf();
+    java.util.ArrayList<exprNodeDesc> originalValueEval = reduceConf.getValueCols();
+    boolean[] flags = new boolean[originalValueEval.size()];
+    for (int i = 0; i < originalValueEval.size(); i++) {
+      flags[i] = false;
+      List<String> current = originalValueEval.get(i).getCols();
+      if (current != null) {
+        for (int j = 0; j < current.size(); j++) {
+          if (retainedParentOpOutputCols.contains(current.get(j))) {
+            flags[i] = true;
+            break;
           }
         }
-        if (retain) {
-          newOutputColNames.add(originalValueOutputColNames.get(i));
-          newValueEval.add(originalValueEval.get(i));
+      }
+    }
+    return flags;
+  }
+  
+  private static void pruneReduceSinkOperator(boolean[] retainFlags,
+      ReduceSinkOperator reduce, ColumnPrunerProcCtx cppCtx) throws SemanticException {
+    reduceSinkDesc reduceConf = reduce.getConf();
+    Map<String, exprNodeDesc> oldMap = reduce.getColumnExprMap();
+    Map<String, exprNodeDesc> newMap = new HashMap<String, exprNodeDesc>();
+    Vector<ColumnInfo> sig = new Vector<ColumnInfo>();
+    RowResolver oldRR = cppCtx.getOpToParseCtxMap().get(reduce).getRR();
+    RowResolver newRR = new RowResolver();
+    ArrayList<String> originalValueOutputColNames = reduceConf
+        .getOutputValueColumnNames();
+    java.util.ArrayList<exprNodeDesc> originalValueEval = reduceConf
+        .getValueCols();
+    ArrayList<String> newOutputColNames = new ArrayList<String>();
+    java.util.ArrayList<exprNodeDesc> newValueEval = new ArrayList<exprNodeDesc>();
+    for (int i = 0; i < retainFlags.length; i++) {
+      if (retainFlags[i]) {
+        newValueEval.add(originalValueEval.get(i));
+        String outputCol = originalValueOutputColNames.get(i);
+        newOutputColNames.add(outputCol);
+        String[] nm = oldRR.reverseLookup(outputCol);
+        if (nm == null) {
+          outputCol = Utilities.ReduceField.VALUE.toString() + "." + outputCol;
+          nm = oldRR.reverseLookup(outputCol);
         }
+        newMap.put(outputCol, oldMap.get(outputCol));
+        ColumnInfo colInfo = oldRR.get(nm[0], nm[1]);
+        newRR.put(nm[0], nm[1], colInfo);
+        sig.add(colInfo);
       }
-      reduceConf.setOutputValueColumnNames(newOutputColNames);
-      reduceConf.setValueCols(newValueEval);
     }
-
+    
+    ArrayList<exprNodeDesc> keyCols = reduceConf.getKeyCols();
+    List<String> keys = new ArrayList<String>();
+    RowResolver parResover = cppCtx.getOpToParseCtxMap().get(reduce.getParentOperators().get(0)).getRR();
+    for (int i = 0; i < keyCols.size(); i++) {
+      keys = Utilities.mergeUniqElems(keys, keyCols.get(i).getCols());
+    }
+    for (int i = 0; i < keys.size(); i++) {
+      String outputCol = keys.get(i);
+      String[] nm = parResover.reverseLookup(outputCol);
+      ColumnInfo colInfo = oldRR.get(nm[0], nm[1]);
+      if (colInfo != null)
+        newRR.put(nm[0], nm[1], colInfo);
+    }
+    
+    cppCtx.getOpToParseCtxMap().get(reduce).setRR(newRR);
+    reduce.setColumnExprMap(newMap);
+    reduce.getSchema().setSignature(sig);
+    reduceConf.setOutputValueColumnNames(newOutputColNames);
+    reduceConf.setValueCols(newValueEval);
+    tableDesc newValueTable = PlanUtils.getLazySimpleSerDeTableDesc(PlanUtils.getFieldSchemasFromColumnList(
+        reduceConf.getValueCols(), newOutputColNames, 0, ""));
+    reduceConf.setValueSerializeInfo(newValueTable);
   }
 
+
   /**
    * The Factory method to get the ColumnPrunerSelectProc class.
    * @return ColumnPrunerSelectProc
@@ -320,4 +384,150 @@
     return new ColumnPrunerSelectProc();
   }
   
+  /**
+   * The Node Processor for Column Pruning on Join Operators.
+   */
+  public static class ColumnPrunerJoinProc implements NodeProcessor {
+    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+        Object... nodeOutputs) throws SemanticException {
+      JoinOperator op = (JoinOperator) nd;
+      pruneJoinOperator(ctx, op, op.getConf(), op.getColumnExprMap(), null, false);
+      return null;
+    }
+  }
+
+  /**
+   * The Factory method to get ColumnJoinProc class.
+   * 
+   * @return ColumnPrunerJoinProc
+   */
+  public static ColumnPrunerJoinProc getJoinProc() {
+    return new ColumnPrunerJoinProc();
+  }
+  
+  /**
+   * The Node Processor for Column Pruning on Join Operators.
+   */
+  public static class ColumnPrunerMapJoinProc implements NodeProcessor {
+    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
+        Object... nodeOutputs) throws SemanticException {
+      MapJoinOperator op = (MapJoinOperator) nd;
+      pruneJoinOperator(ctx, op, op.getConf(), op.getColumnExprMap(), op.getConf().getRetainList(), true);
+      return null;
+    }
+  }
+  
+  private static void pruneJoinOperator(NodeProcessorCtx ctx,
+      CommonJoinOperator op, joinDesc conf,
+      Map<String, exprNodeDesc> columnExprMap,
+      Map<Byte, List<Integer>> retainMap, boolean mapJoin) throws SemanticException {
+    ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
+    Map<Byte, List<String>> prunedColLists = new HashMap<Byte, List<String>>();
+    List<Operator<? extends Serializable>> childOperators = op
+        .getChildOperators();
+
+    for (Operator<? extends Serializable> child : childOperators) {
+      if (child instanceof FileSinkOperator)
+        return;
+    }
+
+    List<String> childColLists = cppCtx.genColLists((Operator<? extends Serializable>)op);
+    
+    RowResolver joinRR = cppCtx.getOpToParseCtxMap().get(op).getRR();
+    RowResolver newJoinRR = new RowResolver();
+    ArrayList<String> outputCols = new ArrayList<String>();
+    Vector<ColumnInfo> rs = new Vector<ColumnInfo>();
+    Map<String, exprNodeDesc> newColExprMap = new HashMap<String, exprNodeDesc>();
+
+    for (int i = 0; i < conf.getOutputColumnNames().size(); i++) {
+      String internalName = conf.getOutputColumnNames().get(i);
+      exprNodeDesc desc = columnExprMap.get(internalName);
+      Byte tag = conf.getReversedExprs().get(internalName);
+      if (!childColLists.contains(internalName)) {
+        int index = conf.getExprs().get(tag).indexOf(desc);
+        if (index < 0)
+          continue;
+        conf.getExprs().get(tag).remove(desc);
+        if (retainMap != null)
+          retainMap.get(tag).remove(index);
+      } else {
+        List<String> prunedRSList = prunedColLists.get(tag);
+        if (prunedRSList == null) {
+          prunedRSList = new ArrayList<String>();
+          prunedColLists.put(tag, prunedRSList);
+        }
+        prunedRSList = Utilities.mergeUniqElems(prunedRSList, desc.getCols());
+        outputCols.add(internalName);
+        newColExprMap.put(internalName, desc);
+      }
+    }
+    
+    if (mapJoin) {
+      // regenerate the valueTableDesc
+      List<tableDesc> valueTableDescs = new ArrayList<tableDesc>();
+      for (int pos = 0; pos < op.getParentOperators().size(); pos++) {
+        List<exprNodeDesc> valueCols = conf.getExprs()
+            .get(new Byte((byte) pos));
+        StringBuilder keyOrder = new StringBuilder();
+        for (int i = 0; i < valueCols.size(); i++) {
+          keyOrder.append("+");
+        }
+
+        tableDesc valueTableDesc = PlanUtils
+            .getLazySimpleSerDeTableDesc(PlanUtils
+                .getFieldSchemasFromColumnList(valueCols, "mapjoinvalue"));
+
+        valueTableDescs.add(valueTableDesc);
+      }
+      ((mapJoinDesc) conf).setValueTblDescs(valueTableDescs);
+
+      Set<Map.Entry<Byte, List<exprNodeDesc>>> exprs = ((mapJoinDesc) conf)
+          .getKeys().entrySet();
+      Iterator<Map.Entry<Byte, List<exprNodeDesc>>> iters = exprs.iterator();
+      while (iters.hasNext()) {
+        Map.Entry<Byte, List<exprNodeDesc>> entry = iters.next();
+        List<exprNodeDesc> lists = entry.getValue();
+        for (int j = 0; j < lists.size(); j++) {
+          exprNodeDesc desc = lists.get(j);
+          Byte tag = entry.getKey();
+          List<String> cols = prunedColLists.get(tag);
+          cols = Utilities.mergeUniqElems(cols, desc.getCols());
+          prunedColLists.put(tag, cols);
+        }
+      }
+
+    }
+
+    for (Operator<? extends Serializable> child : childOperators) {
+      if (child instanceof ReduceSinkOperator) {
+        boolean[] flags = getPruneReduceSinkOpRetainFlags(childColLists,
+            (ReduceSinkOperator) child);
+        pruneReduceSinkOperator(flags, (ReduceSinkOperator) child, cppCtx);
+      }
+    }
+
+    for (int i = 0; i < childColLists.size(); i++) {
+      String internalName = childColLists.get(i);
+      String[] nm = joinRR.reverseLookup(internalName);
+      ColumnInfo col = joinRR.get(nm[0], nm[1]);
+      newJoinRR.put(nm[0], nm[1], col);
+      rs.add(col);
+    }
+
+    op.setColumnExprMap(newColExprMap);
+    conf.setOutputColumnNames(outputCols);
+    op.getSchema().setSignature(rs);
+    cppCtx.getOpToParseCtxMap().get(op).setRR(newJoinRR);
+    cppCtx.getJoinPrunedColLists().put(op, prunedColLists);
+  }
+
+  /**
+   * The Factory method to get ColumnJoinProc class.
+   * 
+   * @return ColumnPrunerJoinProc
+   */
+  public static ColumnPrunerMapJoinProc getMapJoinProc() {
+    return new ColumnPrunerMapJoinProc();
+  }
+  
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java Thu Jun 25 05:50:50 2009
@@ -112,7 +112,7 @@
     List<Operator<? extends Serializable>> parentOps = op.getParentOperators();
     List<Operator<? extends Serializable>> newParentOps = new ArrayList<Operator<? extends Serializable>>();
     List<Operator<? extends Serializable>> oldReduceSinkParentOps = new ArrayList<Operator<? extends Serializable>>();
-    
+    Map<String, exprNodeDesc> colExprMap = new HashMap<String, exprNodeDesc>();
     // found a source which is not to be stored in memory
     if (leftSrc != null) {
       //      assert mapJoinPos == 0;
@@ -166,15 +166,17 @@
         {
           String field = fNamesIter.next();
           ColumnInfo valueInfo = inputRS.get(key, field);
-          values.add(new exprNodeColumnDesc(valueInfo.getType(), valueInfo.getInternalName()));
           ColumnInfo oldValueInfo = oldOutputRS.get(key, field);
-          String col = field;
-          if(oldValueInfo != null)
-            col = oldValueInfo.getInternalName();
-          if (outputRS.get(key, col) == null) {
-            outputColumnNames.add(col);
-            outputRS.put(key, col, new ColumnInfo(col, 
+          if(oldValueInfo == null)
+            continue;
+          String outputCol = oldValueInfo.getInternalName();
+          if (outputRS.get(key, field) == null) {
+            outputColumnNames.add(outputCol);
+            exprNodeDesc colDesc = new exprNodeColumnDesc(valueInfo.getType(), valueInfo.getInternalName());
+            values.add(colDesc);
+            outputRS.put(key, field, new ColumnInfo(outputCol, 
                 valueInfo.getType()));
+            colExprMap.put(outputCol, colDesc);
           }
         }
       }
@@ -238,6 +240,9 @@
       new mapJoinDesc(keyExprMap, keyTableDesc, valueExprMap, valueTableDescs, outputColumnNames, mapJoinPos, joinCondns),
       new RowSchema(outputRS.getColumnInfos()), newPar), outputRS);
     
+    mapJoinOp.getConf().setReversedExprs(op.getConf().getReversedExprs());
+    mapJoinOp.setColumnExprMap(colExprMap);
+    
     // change the children of the original join operator to point to the map join operator
     List<Operator<? extends Serializable>> childOps = op.getChildOperators();
     for (Operator<? extends Serializable> childOp : childOps) 
@@ -252,15 +257,40 @@
     genSelectPlan(pctx, mapJoinOp);
   }
 
-  private void genSelectPlan(ParseContext pctx, Operator<? extends Serializable> input) {
+  private void genSelectPlan(ParseContext pctx, MapJoinOperator input) throws SemanticException {
     List<Operator<? extends Serializable>> childOps = input.getChildOperators();
     input.setChildOperators(null);
 
     // create a dummy select - This select is needed by the walker to split the mapJoin later on
   	RowResolver inputRR = pctx.getOpParseCtx().get(input).getRR();
+  	
+  	ArrayList<exprNodeDesc> exprs = new ArrayList<exprNodeDesc>();
+  	ArrayList<String> outputs = new ArrayList<String>();
+    List<String> outputCols = input.getConf().getOutputColumnNames();
+    RowResolver outputRS = new RowResolver();
+    
+    Map<String, exprNodeDesc> colExprMap = new HashMap<String, exprNodeDesc>();
+    
+    for (int i = 0; i < outputCols.size(); i++) {
+      String internalName = outputCols.get(i);
+      String[] nm = inputRR.reverseLookup(internalName);
+      ColumnInfo valueInfo = inputRR.get(nm[0], nm[1]);
+      exprNodeDesc colDesc = new exprNodeColumnDesc(valueInfo.getType(),
+          valueInfo.getInternalName());
+      exprs.add(colDesc);
+      outputs.add(internalName);
+      outputRS .put(nm[0], nm[1], new ColumnInfo(internalName, 
+          valueInfo.getType()));
+      colExprMap.put(internalName, colDesc);
+    }
+  	
+  	selectDesc select = new selectDesc(exprs, outputs, false);
+  	
     SelectOperator sel = 
       (SelectOperator)putOpInsertMap(OperatorFactory.getAndMakeChild(
-                       new selectDesc(true), new RowSchema(inputRR.getColumnInfos()), input), inputRR);
+          select, new RowSchema(inputRR.getColumnInfos()), input), inputRR);
+    
+    sel.setColumnExprMap(colExprMap);
     
     // Insert the select operator in between. 
     sel.setChildOperators(childOps);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Thu Jun 25 05:50:50 2009
@@ -2625,6 +2625,7 @@
     int pos = 0;
     int outputPos = 0;
 
+    Map<String, Byte> reversedExprs = new HashMap<String, Byte>(); 
     HashMap<Byte, List<exprNodeDesc>> exprMap = new HashMap<Byte, List<exprNodeDesc>>();
     Map<String, exprNodeDesc> colExprMap = new HashMap<String, exprNodeDesc>();
     HashMap<Integer, Set<String>> posToAliasMap = new HashMap<Integer, Set<String>>();
@@ -2660,6 +2661,7 @@
             colExprMap.put(colName, keyDesc.get(keyDesc.size() - 1));
             outputRS.put(key, field, new ColumnInfo(colName, 
                                                     valueInfo.getType()));
+            reversedExprs.put(colName, tag);
           }
         }
       }
@@ -2674,7 +2676,9 @@
       joinCondns[i] = new org.apache.hadoop.hive.ql.plan.joinCond(condn);
     }
 
-    JoinOperator joinOp = (JoinOperator) OperatorFactory.getAndMakeChild(new joinDesc(exprMap, outputColumnNames, joinCondns),
+    joinDesc desc = new joinDesc(exprMap, outputColumnNames, joinCondns);
+    desc.setReversedExprs(reversedExprs);
+    JoinOperator joinOp = (JoinOperator) OperatorFactory.getAndMakeChild(desc,
                                     new RowSchema(outputRS.getColumnInfos()), rightOps);
     joinOp.setColumnExprMap(colExprMap);
     joinOp.setPosToAliasMap(posToAliasMap);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java Thu Jun 25 05:50:50 2009
@@ -22,9 +22,13 @@
 
 import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
 import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
+import java.util.Map.Entry;
 
 /**
  * Join operator Descriptor implementation.
@@ -41,8 +45,12 @@
   // alias to key mapping
   private Map<Byte, List<exprNodeDesc>> exprs;
   
+  //used for create joinOutputObjectInspector
   protected java.util.ArrayList<java.lang.String> outputColumnNames;
   
+  // key:column output name, value:tag
+  transient private Map<String, Byte> reversedExprs;
+  
   // No outer join involved
   protected boolean noOuterJoin;
 
@@ -58,23 +66,25 @@
   }
   
   public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs, ArrayList<String> outputColumnNames) {
-    this.exprs = exprs;
-    this.outputColumnNames = outputColumnNames;
-    this.noOuterJoin = true;
-    this.conds = null;
+    this(exprs, outputColumnNames, true, null);
   }
 
   public joinDesc(final Map<Byte, List<exprNodeDesc>> exprs, ArrayList<String> outputColumnNames, final joinCond[] conds) {
-    this.exprs = exprs;
-    this.outputColumnNames = outputColumnNames;
-    this.noOuterJoin = false;
-    this.conds = conds;
+    this(exprs, outputColumnNames, false, conds);
   }
   
   public Map<Byte, List<exprNodeDesc>> getExprs() {
     return this.exprs;
   }
+  
+  public Map<String, Byte> getReversedExprs() {
+    return reversedExprs;
+  }
 
+  public void setReversedExprs(Map<String, Byte> reversed_Exprs) {
+    this.reversedExprs = reversed_Exprs;
+  }
+  
   @explain(displayName="condition expressions")
   public Map<Byte, String> getExprsStringMap() {
     if (getExprs() == null) {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/mapJoinDesc.java Thu Jun 25 05:50:50 2009
@@ -23,8 +23,12 @@
 import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
 
 import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
+import java.util.Map.Entry;
 
 /**
  * Map Join operator Descriptor implementation.
@@ -40,6 +44,8 @@
   
   private int posBigTable;
   
+  private Map<Byte, List<Integer>> retainList;
+  
   public mapJoinDesc() { }
 
   public mapJoinDesc(final Map<Byte, List<exprNodeDesc>> keys, 
@@ -54,8 +60,31 @@
     this.keyTblDesc  = keyTblDesc;
     this.valueTblDescs = valueTblDescs;
     this.posBigTable = posBigTable;
+    initRetainExprList();
   }
 
+  private void initRetainExprList() {
+    retainList = new HashMap<Byte, List<Integer>>();
+    Set<Entry<Byte, List<exprNodeDesc>>> set = super.getExprs().entrySet();
+    Iterator<Entry<Byte, List<exprNodeDesc>>> setIter = set.iterator();
+    while (setIter.hasNext()) {
+      Entry<Byte, List<exprNodeDesc>> current = setIter.next();
+      List<Integer> list = new ArrayList<Integer>();
+      for (int i = 0; i < current.getValue().size(); i++) {
+        list.add(i);
+      }
+      retainList.put(current.getKey(), list);
+    }
+  }
+  
+  public Map<Byte, List<Integer>> getRetainList() {
+    return retainList;
+  }
+
+  public void setRetainList(Map<Byte, List<Integer>> retainList) {
+    this.retainList = retainList;
+  }
+  
   /**
    * @return the keys
    */

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out Thu Jun 25 05:50:50 2009
@@ -56,7 +56,7 @@
 
 query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/469446419/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1095159989/10000
 10	val_10
 query: EXPLAIN
 SELECT * FROM SRC x  where x.key = 20 CLUSTER BY key
@@ -116,7 +116,7 @@
 
 query: SELECT * FROM SRC x where x.key = 20 CLUSTER BY key
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/918784205/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1519767132/10000
 20	val_20
 query: EXPLAIN
 SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
@@ -176,7 +176,7 @@
 
 query: SELECT x.* FROM SRC x where x.key = 20 CLUSTER BY key
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/787341560/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1645256115/10000
 20	val_20
 query: EXPLAIN
 SELECT x.*  FROM SRC x where x.key = 20 CLUSTER BY x.key
@@ -236,7 +236,7 @@
 
 query: SELECT x.*  FROM SRC x where x.key = 20 CLUSTER BY x.key
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/2112333625/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/97254899/10000
 20	val_20
 query: EXPLAIN
 SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
@@ -296,7 +296,7 @@
 
 query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY key
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/82082573/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/519369730/10000
 20	val_20
 query: EXPLAIN
 SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
@@ -356,7 +356,7 @@
 
 query: SELECT x.key, x.value as v1 FROM SRC x where x.key = 20 CLUSTER BY x.key
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/585460645/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1772518510/10000
 20	val_20
 query: EXPLAIN
 SELECT x.key, x.value as v1  FROM SRC x where x.key = 20 CLUSTER BY v1
@@ -416,7 +416,7 @@
 
 query: SELECT x.key, x.value as v1  FROM SRC x where x.key = 20 CLUSTER BY v1
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1403624942/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1378918493/10000
 20	val_20
 query: EXPLAIN
 SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
@@ -478,7 +478,7 @@
 
 query: SELECT y.* from (SELECT x.* FROM SRC x CLUSTER BY x.key) y where y.key = 20
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/595719915/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1355587729/10000
 20	val_20
 query: EXPLAIN 
 SELECT x.key, x.value as v1, y.key  FROM SRC x JOIN SRC y ON (x.key = y.key)  where x.key = 20 CLUSTER BY v1
@@ -507,8 +507,6 @@
               value expressions:
                     expr: key
                     type: string
-                    expr: value
-                    type: string
         x 
             Filter Operator
               predicate:
@@ -534,7 +532,7 @@
                Inner Join 0 to 1
           condition expressions:
             0 {VALUE._col0} {VALUE._col1}
-            1 {VALUE._col0} {VALUE._col1}
+            1 {VALUE._col0}
           Filter Operator
             predicate:
                 expr: (UDFToDouble(_col0) = UDFToDouble(20))
@@ -558,7 +556,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/pchakka/workspace/oshive/build/ql/tmp/654289407/10002 
+        file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/347887659/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col1
@@ -591,7 +589,7 @@
 
 query: SELECT x.key, x.value as v1, y.key  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1865686895/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1452481387/10000
 20	val_20	20
 query: EXPLAIN 
 SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
@@ -673,7 +671,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/pchakka/workspace/oshive/build/ql/tmp/782657504/10002 
+        file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1377636554/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col1
@@ -708,7 +706,7 @@
 
 query: SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1662517786/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/270617627/10000
 20	val_20	20	val_20
 query: EXPLAIN
 SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
@@ -790,7 +788,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1094038570/10002 
+        file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1638142206/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -825,7 +823,7 @@
 
 query: SELECT x.key, x.value as v1, y.*  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY x.key
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1889196452/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1510688507/10000
 20	val_20	20	val_20
 query: EXPLAIN
 SELECT x.key, x.value as v1, y.key as yk  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
@@ -854,8 +852,6 @@
               value expressions:
                     expr: key
                     type: string
-                    expr: value
-                    type: string
         x 
             Filter Operator
               predicate:
@@ -881,7 +877,7 @@
                Inner Join 0 to 1
           condition expressions:
             0 {VALUE._col0} {VALUE._col1}
-            1 {VALUE._col0} {VALUE._col1}
+            1 {VALUE._col0}
           Filter Operator
             predicate:
                 expr: (UDFToDouble(_col0) = UDFToDouble(20))
@@ -905,7 +901,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1162478443/10002 
+        file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/221814948/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -938,7 +934,7 @@
 
 query: SELECT x.key, x.value as v1, y.key as yk  FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY key
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/681246043/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1735292170/10000
 20	val_20	20
 query: EXPLAIN
 SELECT unioninput.*
@@ -1053,7 +1049,7 @@
 ) unioninput
 CLUSTER BY unioninput.key
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/571624743/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/432509193/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join1.q.out?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join1.q.out Thu Jun 25 05:50:50 2009
@@ -24,8 +24,6 @@
                     type: string
               tag: 1
               value expressions:
-                    expr: key
-                    type: string
                     expr: value
                     type: string
         src1 
@@ -41,15 +39,13 @@
               value expressions:
                     expr: key
                     type: string
-                    expr: value
-                    type: string
       Reduce Operator Tree:
         Join Operator
           condition map:
                Inner Join 0 to 1
           condition expressions:
-            0 {VALUE._col0} {VALUE._col1}
-            1 {VALUE._col0} {VALUE._col1}
+            0 {VALUE._col0}
+            1 {VALUE._col1}
           Select Operator
             expressions:
                   expr: _col0
@@ -88,7 +84,7 @@
 Output: default/dest_j1
 query: SELECT dest_j1.* FROM dest_j1
 Input: default/dest_j1
-Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/791244140/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1850221773/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join10.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join10.q.out?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join10.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join10.q.out Thu Jun 25 05:50:50 2009
@@ -29,9 +29,6 @@
                       expr: _col0
                       type: string
                 tag: 0
-                value expressions:
-                      expr: _col0
-                      type: string
         y:src 
             Select Operator
               expressions:
@@ -58,7 +55,7 @@
           condition map:
                Inner Join 0 to 1
           condition expressions:
-            0 {VALUE._col0} {VALUE._col1}
+            0 
             1 {VALUE._col0} {VALUE._col1}
           Select Operator
             expressions:
@@ -85,7 +82,7 @@
 ON (x.key = Y.key)
 SELECT Y.*
 Input: default/src
-Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/397770096/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/576477959/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join11.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join11.q.out?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join11.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join11.q.out Thu Jun 25 05:50:50 2009
@@ -33,8 +33,6 @@
                       type: string
                 tag: 1
                 value expressions:
-                      expr: _col0
-                      type: string
                       expr: _col1
                       type: string
         src1:src 
@@ -67,8 +65,8 @@
           condition map:
                Inner Join 0 to 1
           condition expressions:
-            0 {VALUE._col0} {VALUE._col1}
-            1 {VALUE._col0} {VALUE._col1}
+            0 {VALUE._col0}
+            1 {VALUE._col1}
           Select Operator
             expressions:
                   expr: _col0
@@ -94,7 +92,7 @@
 (SELECT src.key as c3, src.value as c4 from src) src2
 ON src1.c1 = src2.c3 AND src1.c1 < 100
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1092092252/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/774077443/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join12.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join12.q.out?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join12.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join12.q.out Thu Jun 25 05:50:50 2009
@@ -36,8 +36,6 @@
                       type: string
                 tag: 1
                 value expressions:
-                      expr: _col0
-                      type: string
                       expr: _col1
                       type: string
         src1:src 
@@ -87,18 +85,15 @@
                           expr: _col0
                           type: string
                     tag: 2
-                    value expressions:
-                          expr: _col0
-                          type: string
       Reduce Operator Tree:
         Join Operator
           condition map:
                Inner Join 0 to 1
                Inner Join 0 to 2
           condition expressions:
-            0 {VALUE._col0} {VALUE._col1}
-            1 {VALUE._col0} {VALUE._col1}
-            2 {VALUE._col0} {VALUE._col1}
+            0 {VALUE._col0}
+            1 {VALUE._col1}
+            2 
           Select Operator
             expressions:
                   expr: _col0
@@ -127,7 +122,7 @@
 (SELECT src.key as c5, src.value as c6 from src) src3
 ON src1.c1 = src3.c5 AND src3.c5 < 80
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/2007793008/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1217946262/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join13.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join13.q.out?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join13.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join13.q.out Thu Jun 25 05:50:50 2009
@@ -71,7 +71,7 @@
           condition map:
                Inner Join 0 to 1
           condition expressions:
-            0 {VALUE._col0} {VALUE._col1}
+            0 {VALUE._col0}
             1 {VALUE._col0} {VALUE._col1}
           File Output Operator
             compressed: false
@@ -95,14 +95,10 @@
                     type: double
               tag: 0
               value expressions:
-                    expr: _col2
-                    type: string
                     expr: _col3
                     type: string
                     expr: _col0
                     type: string
-                    expr: _col1
-                    type: string
         src3:src 
             Filter Operator
               predicate:
@@ -125,16 +121,13 @@
                           expr: UDFToDouble(_col0)
                           type: double
                     tag: 1
-                    value expressions:
-                          expr: _col0
-                          type: string
       Reduce Operator Tree:
         Join Operator
           condition map:
                Inner Join 0 to 1
           condition expressions:
-            0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3}
-            1 {VALUE._col0} {VALUE._col1}
+            0 {VALUE._col1} {VALUE._col2}
+            1 
           Select Operator
             expressions:
                   expr: _col2
@@ -163,7 +156,7 @@
 (SELECT src.key as c5, src.value as c6 from src) src3
 ON src1.c1 + src2.c3 = src3.c5 AND src3.c5 < 200
 Input: default/src
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1162005483/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/372904562/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join14.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join14.q.out?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join14.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join14.q.out Thu Jun 25 05:50:50 2009
@@ -32,14 +32,8 @@
                         type: string
                   tag: 1
                   value expressions:
-                        expr: key
-                        type: string
                         expr: value
                         type: string
-                        expr: ds
-                        type: string
-                        expr: hr
-                        type: string
         src 
             Filter Operator
               predicate:
@@ -61,15 +55,13 @@
                   value expressions:
                         expr: key
                         type: string
-                        expr: value
-                        type: string
       Reduce Operator Tree:
         Join Operator
           condition map:
                Inner Join 0 to 1
           condition expressions:
-            0 {VALUE._col0} {VALUE._col1}
-            1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3}
+            0 {VALUE._col0}
+            1 {VALUE._col1}
           Select Operator
             expressions:
                   expr: _col0
@@ -110,7 +102,7 @@
 Output: default/dest1
 query: select dest1.* from dest1
 Input: default/dest1
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/285954832/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/500537057/10000
 103	val_103
 103	val_103
 103	val_103

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join16.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join16.q.out?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join16.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join16.q.out Thu Jun 25 05:50:50 2009
@@ -45,8 +45,6 @@
                       value expressions:
                             expr: _col0
                             type: string
-                            expr: _col1
-                            type: string
         tab 
             Filter Operator
               predicate:
@@ -66,8 +64,6 @@
                       type: string
                 tag: 1
                 value expressions:
-                      expr: key
-                      type: string
                       expr: value
                       type: string
       Reduce Operator Tree:
@@ -75,8 +71,8 @@
           condition map:
                Inner Join 0 to 1
           condition expressions:
-            0 {VALUE._col0} {VALUE._col1}
-            1 {VALUE._col0} {VALUE._col1}
+            0 {VALUE._col0}
+            1 {VALUE._col1}
           Filter Operator
             predicate:
                 expr: (UDFToDouble(_col3) < UDFToDouble(200))

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join19.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join19.q.out?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join19.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join19.q.out Thu Jun 25 05:50:50 2009
@@ -93,8 +93,6 @@
                           type: string
                     tag: 1
                     value expressions:
-                          expr: _col0
-                          type: string
                           expr: _col1
                           type: string
         t33:t3 
@@ -124,8 +122,6 @@
                     value expressions:
                           expr: _col0
                           type: string
-                          expr: _col1
-                          type: string
         t11:t1 
             Filter Operator
               predicate:
@@ -158,8 +154,8 @@
                Inner Join 0 to 2
           condition expressions:
             0 {VALUE._col0}
-            1 {VALUE._col0} {VALUE._col1}
-            2 {VALUE._col0} {VALUE._col1}
+            1 {VALUE._col1}
+            2 {VALUE._col0}
           File Output Operator
             compressed: false
             GlobalTableId: 0
@@ -184,12 +180,8 @@
               value expressions:
                     expr: _col3
                     type: string
-                    expr: _col4
-                    type: string
                     expr: _col0
                     type: string
-                    expr: _col1
-                    type: string
                     expr: _col2
                     type: string
         t55:t5 
@@ -217,8 +209,6 @@
                           type: string
                     tag: 2
                     value expressions:
-                          expr: _col0
-                          type: string
                           expr: _col1
                           type: string
         t44:t4 
@@ -243,18 +233,15 @@
                           expr: _col0
                           type: string
                     tag: 1
-                    value expressions:
-                          expr: _col0
-                          type: string
       Reduce Operator Tree:
         Join Operator
           condition map:
                Inner Join 0 to 1
                Inner Join 1 to 2
           condition expressions:
-            0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4}
-            1 {VALUE._col0}
-            2 {VALUE._col0} {VALUE._col1}
+            0 {VALUE._col0} {VALUE._col2} {VALUE._col4}
+            1 
+            2 {VALUE._col1}
           File Output Operator
             compressed: false
             GlobalTableId: 0
@@ -291,8 +278,6 @@
                           type: string
                     tag: 1
                     value expressions:
-                          expr: _col0
-                          type: string
                           expr: _col1
                           type: string
         $INTNAME 
@@ -308,18 +293,10 @@
               value expressions:
                     expr: _col0
                     type: string
-                    expr: _col1
-                    type: string
-                    expr: _col5
-                    type: string
                     expr: _col2
                     type: string
-                    expr: _col3
-                    type: string
                     expr: _col4
                     type: string
-                    expr: _col6
-                    type: string
                     expr: _col7
                     type: string
       Reduce Operator Tree:
@@ -327,8 +304,8 @@
           condition map:
                Inner Join 0 to 1
           condition expressions:
-            0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7}
-            1 {VALUE._col0} {VALUE._col1}
+            0 {VALUE._col0} {VALUE._col3} {VALUE._col5} {VALUE._col7}
+            1 {VALUE._col1}
           Select Operator
             expressions:
                   expr: _col3

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join2.q.out?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join2.q.out Thu Jun 25 05:50:50 2009
@@ -27,8 +27,6 @@
               value expressions:
                     expr: key
                     type: string
-                    expr: value
-                    type: string
         src1 
             Reduce Output Operator
               key expressions:
@@ -42,15 +40,13 @@
               value expressions:
                     expr: key
                     type: string
-                    expr: value
-                    type: string
       Reduce Operator Tree:
         Join Operator
           condition map:
                Inner Join 0 to 1
           condition expressions:
-            0 {VALUE._col0} {VALUE._col1}
-            1 {VALUE._col0} {VALUE._col1}
+            0 {VALUE._col0}
+            1 {VALUE._col0}
           File Output Operator
             compressed: false
             GlobalTableId: 0
@@ -73,14 +69,8 @@
                     type: double
               tag: 0
               value expressions:
-                    expr: _col2
-                    type: string
-                    expr: _col3
-                    type: string
                     expr: _col0
                     type: string
-                    expr: _col1
-                    type: string
         src3 
             Reduce Output Operator
               key expressions:
@@ -92,8 +82,6 @@
                     type: double
               tag: 1
               value expressions:
-                    expr: key
-                    type: string
                     expr: value
                     type: string
       Reduce Operator Tree:
@@ -101,8 +89,8 @@
           condition map:
                Inner Join 0 to 1
           condition expressions:
-            0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3}
-            1 {VALUE._col0} {VALUE._col1}
+            0 {VALUE._col2}
+            1 {VALUE._col1}
           Select Operator
             expressions:
                   expr: _col2
@@ -141,7 +129,7 @@
 Output: default/dest_j2
 query: SELECT dest_j2.* FROM dest_j2
 Input: default/dest_j2
-Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/1218468210/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/47427829/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join22.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join22.q.out?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join22.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join22.q.out Thu Jun 25 05:50:50 2009
@@ -22,11 +22,6 @@
                     expr: key
                     type: string
               tag: 1
-              value expressions:
-                    expr: key
-                    type: string
-                    expr: value
-                    type: string
         src5:src3:src1 
             Reduce Output Operator
               key expressions:
@@ -48,7 +43,7 @@
                Inner Join 0 to 1
           condition expressions:
             0 {VALUE._col0} {VALUE._col1}
-            1 {VALUE._col0} {VALUE._col1}
+            1 
           Select Operator
             expressions:
                   expr: _col0
@@ -77,8 +72,6 @@
                     type: string
               tag: 1
               value expressions:
-                    expr: _col2
-                    type: string
                     expr: _col3
                     type: string
         src5:src4 
@@ -91,18 +84,13 @@
                     expr: key
                     type: string
               tag: 0
-              value expressions:
-                    expr: key
-                    type: string
-                    expr: value
-                    type: string
       Reduce Operator Tree:
         Join Operator
           condition map:
                Inner Join 0 to 1
           condition expressions:
-            0 {VALUE._col0} {VALUE._col1}
-            1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3}
+            0 
+            1 {VALUE._col3}
           Select Operator
             expressions:
                   expr: _col5

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join25.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join25.q.out?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join25.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join25.q.out Thu Jun 25 05:50:50 2009
@@ -23,7 +23,7 @@
                    Inner Join 0 to 1
               condition expressions:
                 0 {key} {value}
-                1 {key} {value}
+                1 {value}
               keys:
                 0 
                 1 
@@ -47,7 +47,7 @@
                        Inner Join 0 to 1
                   condition expressions:
                     0 {key} {value}
-                    1 {key} {value}
+                    1 {value}
                   keys:
                     0 
                     1 
@@ -62,8 +62,15 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/njain/hive4/hive4/build/ql/tmp/379733965/10002 
+        file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1076139727/10002 
           Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col1
+                  type: string
+                  expr: _col3
+                  type: string
             Select Operator
               expressions:
                     expr: _col0
@@ -95,10 +102,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/njain/hive4/hive4/build/ql/tmp/757142377/10000
+                destination: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/554961035/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/njain/hive4/hive4/build/ql/tmp/379733965/10003 
+              file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1076139727/10003 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -142,7 +149,7 @@
 Output: default/dest_j1
 query: select * from dest_j1 x order by x.key
 Input: default/dest_j1
-Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/1000064941/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/2016361816/10000
 66	val_66	val_66
 98	val_98	val_98
 98	val_98	val_98

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join26.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join26.q.out?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join26.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join26.q.out Thu Jun 25 05:50:50 2009
@@ -35,9 +35,9 @@
                          Inner Join 0 to 1
                          Inner Join 0 to 2
                     condition expressions:
-                      0 {key} {value}
-                      1 {key} {value}
-                      2 {key} {value} {ds} {hr}
+                      0 {key}
+                      1 {value}
+                      2 {value}
                     keys:
                       0 
                       1 
@@ -46,13 +46,13 @@
                     File Output Operator
                       compressed: false
                       GlobalTableId: 0
-                      directory: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1173758888/10002
+                      directory: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1938408572/10002
                       table:
                           input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                           properties:
-                            columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7
-                            columns.types string,string,string,string,string,string,string,string
+                            columns _col0,_col3,_col5
+                            columns.types string,string,string
       Local Work:
         Map Reduce Local Work
           Alias -> Map Local Tables:
@@ -69,9 +69,9 @@
                        Inner Join 0 to 1
                        Inner Join 0 to 2
                   condition expressions:
-                    0 {key} {value}
-                    1 {key} {value}
-                    2 {key} {value} {ds} {hr}
+                    0 {key}
+                    1 {value}
+                    2 {value}
                   keys:
                     0 
                     1 
@@ -80,22 +80,22 @@
                   File Output Operator
                     compressed: false
                     GlobalTableId: 0
-                    directory: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1173758888/10002
+                    directory: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1938408572/10002
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         properties:
-                          columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7
-                          columns.types string,string,string,string,string,string,string,string
+                          columns _col0,_col3,_col5
+                          columns.types string,string,string
             x 
                 Common Join Operator
                   condition map:
                        Inner Join 0 to 1
                        Inner Join 0 to 2
                   condition expressions:
-                    0 {key} {value}
-                    1 {key} {value}
-                    2 {key} {value} {ds} {hr}
+                    0 {key}
+                    1 {value}
+                    2 {value}
                   keys:
                     0 
                     1 
@@ -104,18 +104,18 @@
                   File Output Operator
                     compressed: false
                     GlobalTableId: 0
-                    directory: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1173758888/10002
+                    directory: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1938408572/10002
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         properties:
-                          columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7
-                          columns.types string,string,string,string,string,string,string,string
+                          columns _col0,_col3,_col5
+                          columns.types string,string,string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
       Path -> Partition:
-        file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -134,15 +134,22 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/srcpart
+                location file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
 
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1173758888/10002 
+        file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1938408572/10002 
           Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col3
+                  type: string
+                  expr: _col5
+                  type: string
             Select Operator
               expressions:
                     expr: _col0
@@ -154,7 +161,7 @@
               File Output Operator
                 compressed: false
                 GlobalTableId: 1
-                directory: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1173758888/10003
+                directory: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1938408572/10003
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -168,21 +175,21 @@
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       file.inputformat org.apache.hadoop.mapred.TextInputFormat
                       file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      location file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/dest_j1
+                      location file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/dest_j1
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest_j1
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1173758888/10002 
+        file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1938408572/10002 
       Path -> Partition:
-        file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1173758888/10002 
+        file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1938408572/10002 
           Partition
           
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
               properties:
-                columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7
-                columns.types string,string,string,string,string,string,string,string
+                columns _col0,_col3,_col5
+                columns.types string,string,string
 
   Stage: Stage-5
     Conditional Operator
@@ -190,11 +197,11 @@
           Move Operator
             files:
                 hdfs directory: true
-                source: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1173758888/10003
-                destination: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1120006262/10000
+                source: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1938408572/10003
+                destination: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/934580575/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1173758888/10003 
+              file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1938408572/10003 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -210,9 +217,9 @@
                           type: string
             Needs Tagging: false
             Path -> Alias:
-              file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1173758888/10003 
+              file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1938408572/10003 
             Path -> Partition:
-              file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1173758888/10003 
+              file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1938408572/10003 
                 Partition
                 
                     input format: org.apache.hadoop.mapred.TextInputFormat
@@ -227,7 +234,7 @@
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       file.inputformat org.apache.hadoop.mapred.TextInputFormat
                       file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      location file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/dest_j1
+                      location file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/dest_j1
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest_j1
             Reduce Operator Tree:
@@ -235,7 +242,7 @@
                 File Output Operator
                   compressed: false
                   GlobalTableId: 0
-                  directory: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1120006262/10000
+                  directory: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/934580575/10000
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -249,7 +256,7 @@
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         file.inputformat org.apache.hadoop.mapred.TextInputFormat
                         file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        location file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/dest_j1
+                        location file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/dest_j1
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: dest_j1
 
@@ -257,7 +264,7 @@
     Move Operator
       tables:
           replace: true
-          source: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1120006262/10000
+          source: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/934580575/10000
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -271,10 +278,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/pchakka/workspace/oshive/build/ql/test/data/warehouse/dest_j1
+                location file:/Users/char/Documents/workspace/Hive-460/build/ql/test/data/warehouse/dest_j1
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dest_j1
-          tmp directory: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1120006262/10001
+          tmp directory: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/934580575/10001
 
 
 query: INSERT OVERWRITE TABLE dest_j1
@@ -287,7 +294,7 @@
 Output: default/dest_j1
 query: select * from dest_j1 x order by x.key
 Input: default/dest_j1
-Output: file:/data/users/pchakka/workspace/oshive/build/ql/tmp/1885363957/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1666620544/10000
 128	val_128	val_128
 128	val_128	val_128
 128	val_128	val_128

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join27.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join27.q.out?rev=788269&r1=788268&r2=788269&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join27.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join27.q.out Thu Jun 25 05:50:50 2009
@@ -23,7 +23,7 @@
                    Inner Join 0 to 1
               condition expressions:
                 0 {key} {value}
-                1 {key} {value}
+                1 {value}
               keys:
                 0 
                 1 
@@ -47,7 +47,7 @@
                        Inner Join 0 to 1
                   condition expressions:
                     0 {key} {value}
-                    1 {key} {value}
+                    1 {value}
                   keys:
                     0 
                     1 
@@ -62,8 +62,15 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/njain/hive4/hive4/build/ql/tmp/2020334716/10002 
+        file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1103571809/10002 
           Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col1
+                  type: string
+                  expr: _col3
+                  type: string
             Select Operator
               expressions:
                     expr: _col0
@@ -95,10 +102,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/njain/hive4/hive4/build/ql/tmp/1935107021/10000
+                destination: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1381867998/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/njain/hive4/hive4/build/ql/tmp/2020334716/10003 
+              file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1103571809/10003 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -142,7 +149,7 @@
 Output: default/dest_j1
 query: select * from dest_j1 x order by x.key, x.value
 Input: default/dest_j1
-Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/744630343/10000
+Output: file:/Users/char/Documents/workspace/Hive-460/build/ql/tmp/1375214861/10000
 NULL	val_165	val_165
 NULL	val_165	val_165
 NULL	val_193	val_193


