hadoop-hive-commits mailing list archives

From: zs...@apache.org
Subject: svn commit: r811513 [1/3] - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/ppd/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/ ql/src/test/results/compiler/plan/
Date: Fri, 04 Sep 2009 18:49:43 GMT
Author: zshao
Date: Fri Sep  4 18:49:41 2009
New Revision: 811513

URL: http://svn.apache.org/viewvc?rev=811513&view=rev
Log:
HIVE-750. New partitionpruner does not work with test mode. (Namit Jain via zshao)

Added:
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/input39.q
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input39.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample1.q.out
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample3.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample5.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample7.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/subq.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf_case.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf_when.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/union.q.xml

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=811513&r1=811512&r2=811513&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Fri Sep  4 18:49:41 2009
@@ -303,6 +303,9 @@
     HIVE-737. Support having hadoop jars in HADOOP_HOME/build for running
     hive cli. (Johan Oskarsson via athusoo)
 
+    HIVE-750. New partitionpruner does not work with test mode.
+    (Namit Jain via zshao)
+
   OPTIMIZATIONS
 
     HIVE-279. Predicate Pushdown support (Prasad Chakka via athusoo).

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java?rev=811513&r1=811512&r2=811513&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java Fri Sep  4 18:49:41 2009
@@ -93,59 +93,10 @@
         Object... nodeOutputs) throws SemanticException {
       LOG.info("Processing for " +  nd.getName() + "(" + ((Operator)nd).getIdentifier() + ")");
       OpWalkerInfo owi = (OpWalkerInfo)procCtx;
-      RowResolver inputRR = owi.getRowResolver(nd);
       TableScanOperator tsOp = (TableScanOperator)nd;
       mergeWithChildrenPred(tsOp, owi, null, null, false);
       ExprWalkerInfo pushDownPreds = owi.getPrunedPreds(tsOp);
-
-      if (pushDownPreds == null 
-          || pushDownPreds.getFinalCandidates() == null 
-          || pushDownPreds.getFinalCandidates().size() == 0) {
-        return null;
-      }
-      
-      // combine all predicates into a single expression
-      List<exprNodeDesc> preds = null;
-      exprNodeDesc condn = null; 
-      Iterator<List<exprNodeDesc>> iterator = pushDownPreds.getFinalCandidates().values().iterator();
-      while (iterator.hasNext()) {
-        preds = iterator.next();
-        int i = 0;
-        if(condn == null) {
-          condn = preds.get(0);
-          i++;
-        }
-        for(; i < preds.size(); i++) {
-          List<exprNodeDesc> children = new ArrayList<exprNodeDesc>(2);
-          children.add(condn);
-          children.add((exprNodeDesc) preds.get(i));
-          condn = new exprNodeGenericFuncDesc(
-              TypeInfoFactory.booleanTypeInfo,
-              FunctionRegistry.getGenericUDFForAnd(),
-              children
-              );
-        }
-      }
-      if(condn == null)
-        return null;
-      // add new filter op
-      List<Operator<? extends Serializable>> originalChilren = tsOp.getChildOperators();
-      tsOp.setChildOperators(null);
-      Operator<filterDesc> output = 
-        OperatorFactory.getAndMakeChild(new filterDesc(condn, false),
-                              new RowSchema(inputRR.getColumnInfos()), 
-                              tsOp);
-      output.setChildOperators(originalChilren);
-      for (Operator<? extends Serializable> ch : originalChilren) {
-        List<Operator<? extends Serializable>> parentOperators = ch.getParentOperators();
-        int pos = parentOperators.indexOf(tsOp);
-        assert pos != -1;
-        parentOperators.remove(pos);
-        parentOperators.add(pos, output); // add the new op as the old
-      }
-      OpParseContext ctx = new OpParseContext(inputRR);
-      owi.put(output, ctx);
-      return output;
+      return createFilter(tsOp, pushDownPreds, owi);
     }
 
   }
@@ -163,10 +114,13 @@
       OpWalkerInfo owi = (OpWalkerInfo)procCtx;
       Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
       exprNodeDesc predicate = (((FilterOperator)nd).getConf()).getPredicate();
-      // get pushdown predicates for this operato's predicate
+      // get pushdown predicates for this operator's predicate
       ExprWalkerInfo ewi = ExprWalkerProcFactory.extractPushdownPreds(owi, op, predicate);
       if (!ewi.isDeterministic()) {
         /* predicate is not deterministic */
+        if (op.getChildren() != null && op.getChildren().size() == 1)
+          createFilter(op, owi.getPrunedPreds((Operator<? extends Serializable>)(op.getChildren().get(0))), owi);
+
         return null;
       }
 
@@ -332,6 +286,62 @@
     }
   }
 
+  protected static Object createFilter(Operator op, ExprWalkerInfo pushDownPreds, OpWalkerInfo owi) {
+    if (pushDownPreds == null 
+        || pushDownPreds.getFinalCandidates() == null 
+        || pushDownPreds.getFinalCandidates().size() == 0) {
+      return null;
+    }
+      
+    RowResolver inputRR = owi.getRowResolver(op);
+
+    // combine all predicates into a single expression
+    List<exprNodeDesc> preds = null;
+    exprNodeDesc condn = null; 
+    Iterator<List<exprNodeDesc>> iterator = pushDownPreds.getFinalCandidates().values().iterator();
+    while (iterator.hasNext()) {
+      preds = iterator.next();
+      int i = 0;
+      if (condn == null) {
+        condn = preds.get(0);
+        i++;
+      }
+
+      for(; i < preds.size(); i++) {
+        List<exprNodeDesc> children = new ArrayList<exprNodeDesc>(2);
+        children.add(condn);
+        children.add((exprNodeDesc) preds.get(i));
+        condn = new exprNodeGenericFuncDesc(
+                                            TypeInfoFactory.booleanTypeInfo,
+                                            FunctionRegistry.getGenericUDFForAnd(),
+                                            children
+                                            );
+      }
+    }
+
+    if(condn == null)
+      return null;
+
+    // add new filter op
+    List<Operator<? extends Serializable>> originalChilren = op.getChildOperators();
+    op.setChildOperators(null);
+    Operator<filterDesc> output = 
+      OperatorFactory.getAndMakeChild(new filterDesc(condn, false),
+                                      new RowSchema(inputRR.getColumnInfos()), 
+                                      op);
+    output.setChildOperators(originalChilren);
+    for (Operator<? extends Serializable> ch : originalChilren) {
+      List<Operator<? extends Serializable>> parentOperators = ch.getParentOperators();
+      int pos = parentOperators.indexOf(op);
+      assert pos != -1;
+      parentOperators.remove(pos);
+      parentOperators.add(pos, output); // add the new op as the old
+    }
+    OpParseContext ctx = new OpParseContext(inputRR);
+    owi.put(output, ctx);
+    return output;
+  }
+  
   public static NodeProcessor getFilterProc() {
     return new FilterPPD();
   }
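
The functional change above is small: the filter-construction code that used to live inline in TableScanPPD moves into the shared createFilter helper, and FilterPPD now also calls createFilter with the pruned predicates of its single child when its own predicate is non-deterministic (the sampling filter that test mode injects), so those pushed-down predicates still end up in a FilterOperator instead of being dropped. A minimal sketch of the fold that createFilter performs, with plain strings standing in for exprNodeDesc (the Hive code itself builds exprNodeGenericFuncDesc nodes via FunctionRegistry.getGenericUDFForAnd()):

    import java.util.Arrays;
    import java.util.List;

    // Sketch only: fold all pushed-down predicate candidates into one
    // left-deep AND expression, the same shape createFilter builds before
    // wrapping it in a single FilterOperator.
    public class AndFoldSketch {

      static String foldIntoAnd(List<List<String>> candidateLists) {
        String condn = null;
        for (List<String> preds : candidateLists) {
          int i = 0;
          if (condn == null && !preds.isEmpty()) {
            condn = preds.get(0);
            i = 1;
          }
          for (; i < preds.size(); i++) {
            condn = "(" + condn + " and " + preds.get(i) + ")";
          }
        }
        return condn; // null: nothing to push down, so no filter is added
      }

      public static void main(String[] args) {
        List<List<String>> candidates = Arrays.asList(
            Arrays.asList("(ds = '1')"),
            Arrays.asList("(key is not null)"));
        System.out.println(foldIntoAnd(candidates));
        // prints: ((ds = '1') and (key is not null))
      }
    }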

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input39.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input39.q?rev=811513&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input39.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input39.q Fri Sep  4 18:49:41 2009
@@ -0,0 +1,27 @@
+drop table t1;
+drop table t2;
+
+create table t1(key string, value string) partitioned by (ds string);
+create table t2(key string, value string) partitioned by (ds string);
+
+insert overwrite table t1 partition (ds='1')
+select key, value from src;
+
+insert overwrite table t1 partition (ds='2')
+select key, value from src;
+
+insert overwrite table t2 partition (ds='1')
+select key, value from src;
+
+set hive.test.mode=true;
+set hive.mapred.mode=strict;
+
+explain
+select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1';
+
+select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1';
+
+set hive.test.mode=false;
+
+drop table t1;
+drop table t2;
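
With hive.test.mode=true the planner puts a sampling filter on the scanned tables, which is why the EXPLAIN output below shows predicates like (((hash(rand(460476415)) & 2147483647) % 32) = 0) above the pushed-down ds = '1' filters. Purely as an illustration (a plain-Java stand-in, not Hive's hash/rand implementation; the seed is just the one that happens to appear in this plan), a sketch of why such a predicate keeps roughly one row in 32:

    import java.util.Random;

    // Sketch only: approximate the selectivity of the test-mode sampling
    // predicate ((hash(rand(seed)) & 2147483647) % 32) = 0.
    public class TestModeSampleSketch {
      public static void main(String[] args) {
        Random rand = new Random(460476415L); // seed borrowed from the plan below
        int kept = 0, total = 100_000;
        for (int i = 0; i < total; i++) {
          int hash = Double.hashCode(rand.nextDouble()); // stand-in for hash(rand(seed))
          if (((hash & Integer.MAX_VALUE) % 32) == 0) {
            kept++;
          }
        }
        System.out.printf("kept %d of %d rows (about 1 in 32)%n", kept, total);
      }
    }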

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/input39.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input39.q.out?rev=811513&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input39.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input39.q.out Fri Sep  4 18:49:41 2009
@@ -0,0 +1,141 @@
+query: drop table t1
+query: drop table t2
+query: create table t1(key string, value string) partitioned by (ds string)
+query: create table t2(key string, value string) partitioned by (ds string)
+query: insert overwrite table t1 partition (ds='1')
+select key, value from src
+Input: default/src
+Output: default/t1/ds=1
+query: insert overwrite table t1 partition (ds='2')
+select key, value from src
+Input: default/src
+Output: default/t1/ds=2
+query: insert overwrite table t2 partition (ds='1')
+select key, value from src
+Input: default/src
+Output: default/t2/ds=1
+query: explain
+select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1'
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF t1) (TOK_TABREF t2) (= (. (TOK_TABLE_OR_COL t1) key) (. (TOK_TABLE_OR_COL t2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL t1) ds) '1') (= (. (TOK_TABLE_OR_COL t2) ds) '1')))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        t2 
+          TableScan
+            alias: t2
+            Filter Operator
+              predicate:
+                  expr: (((hash(rand(460476415)) & 2147483647) % 32) = 0)
+                  type: boolean
+              Filter Operator
+                predicate:
+                    expr: (ds = '1')
+                    type: boolean
+                Reduce Output Operator
+                  key expressions:
+                        expr: key
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: key
+                        type: string
+                  tag: 1
+                  value expressions:
+                        expr: ds
+                        type: string
+        t1 
+          TableScan
+            alias: t1
+            Filter Operator
+              predicate:
+                  expr: (((hash(rand(460476415)) & 2147483647) % 32) = 0)
+                  type: boolean
+              Filter Operator
+                predicate:
+                    expr: (ds = '1')
+                    type: boolean
+                Reduce Output Operator
+                  key expressions:
+                        expr: key
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: key
+                        type: string
+                  tag: 0
+                  value expressions:
+                        expr: ds
+                        type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col2}
+            1 {VALUE._col2}
+          outputColumnNames: _col2, _col5
+          Filter Operator
+            predicate:
+                expr: ((_col2 = '1') and (_col5 = '1'))
+                type: boolean
+            Select Operator
+              Group By Operator
+                aggregations:
+                      expr: count(1)
+                mode: hash
+                outputColumnNames: _col0
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        file:/data/users/njain/hive4/hive4/build/ql/tmp/159384890/10002 
+            Reduce Output Operator
+              sort order: 
+              tag: -1
+              value expressions:
+                    expr: _col0
+                    type: bigint
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: count(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: bigint
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+query: select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1'
+Input: default/t2/ds=1
+Input: default/t1/ds=1
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/104820129/10000
+18
+query: drop table t1
+query: drop table t2
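
The part of this golden file worth reading closely is the pair of stacked Filter Operators under each TableScan: first the test-mode sampling predicate, then the pushed-down ds = '1' partition predicate that createFilter now materializes. createFilter produces that second operator by splicing a new FilterOperator between an operator and its original children, replacing the old parent entry in each child's parent list at the same position. A plain-object sketch of that splice (hypothetical Node class, not Hive's Operator; the diff removes and re-inserts at the same index, which set() does here in one step):

    import java.util.ArrayList;
    import java.util.List;

    // Sketch only: splice a new node between op and its current children,
    // keeping each child's parent list in the same order.
    class Node {
      final String name;
      List<Node> children = new ArrayList<>();
      List<Node> parents = new ArrayList<>();
      Node(String name) { this.name = name; }
    }

    public class SpliceSketch {
      static Node spliceBelow(Node op, Node filter) {
        List<Node> originalChildren = op.children; // detach the old edges
        op.children = new ArrayList<>();
        op.children.add(filter);
        filter.parents.add(op);
        filter.children = originalChildren;
        for (Node ch : originalChildren) {
          int pos = ch.parents.indexOf(op);
          assert pos != -1;
          ch.parents.set(pos, filter); // the new op takes the old one's slot
        }
        return filter;
      }

      public static void main(String[] args) {
        Node ts = new Node("TS"), sel = new Node("SEL");
        ts.children.add(sel);
        sel.parents.add(ts);
        spliceBelow(ts, new Node("FIL"));
        System.out.println(ts.children.get(0).name + " / " + sel.parents.get(0).name);
        // prints: FIL / FIL
      }
    }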

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/sample1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/sample1.q.out?rev=811513&r1=811512&r2=811513&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/sample1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/sample1.q.out Fri Sep  4 18:49:41 2009
@@ -29,53 +29,58 @@
                 predicate:
                     expr: ((ds = '2008-04-08') and (hr = '11'))
                     type: boolean
-                Select Operator
-                  expressions:
-                        expr: key
-                        type: string
-                        expr: value
-                        type: string
-                        expr: ds
-                        type: string
-                        expr: hr
-                        type: string
-                  outputColumnNames: _col0, _col1, _col2, _col3
+                Filter Operator
+                  isSamplingPred: false
+                  predicate:
+                      expr: ((ds = '2008-04-08') and (hr = '11'))
+                      type: boolean
                   Select Operator
                     expressions:
-                          expr: UDFToInteger(_col0)
-                          type: int
-                          expr: _col1
+                          expr: key
                           type: string
-                          expr: _col2
+                          expr: value
                           type: string
-                          expr: _col3
+                          expr: ds
+                          type: string
+                          expr: hr
                           type: string
                     outputColumnNames: _col0, _col1, _col2, _col3
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 1
-                      directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002
-                      table:
-                          input format: org.apache.hadoop.mapred.TextInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                          properties:
-                            name dest1
-                            columns.types int:string:string:string
-                            serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
-                            serialization.format 1
-                            columns key,value,dt,hr
-                            bucket_count -1
-                            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                            file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                            file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
-                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                          name: dest1
+                    Select Operator
+                      expressions:
+                            expr: UDFToInteger(_col0)
+                            type: int
+                            expr: _col1
+                            type: string
+                            expr: _col2
+                            type: string
+                            expr: _col3
+                            type: string
+                      outputColumnNames: _col0, _col1, _col2, _col3
+                      File Output Operator
+                        compressed: false
+                        GlobalTableId: 1
+                        directory: file:/data/users/njain/hive4/hive4/build/ql/tmp/1917947212/10002
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            properties:
+                              name dest1
+                              columns.types int:string:string:string
+                              serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
+                              serialization.format 1
+                              columns key,value,dt,hr
+                              bucket_count -1
+                              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              location file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/dest1
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: dest1
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [s]
       Path -> Partition:
-        file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -94,7 +99,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
 
@@ -104,11 +109,11 @@
           Move Operator
             files:
                 hdfs directory: true
-                source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002
-                destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1104113199/10000
+                source: file:/data/users/njain/hive4/hive4/build/ql/tmp/1917947212/10002
+                destination: file:/data/users/njain/hive4/hive4/build/ql/tmp/425720684/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002 
+              file:/data/users/njain/hive4/hive4/build/ql/tmp/1917947212/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -126,9 +131,9 @@
                           type: string
             Needs Tagging: false
             Path -> Alias:
-              file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002 
+              file:/data/users/njain/hive4/hive4/build/ql/tmp/1917947212/10002 [file:/data/users/njain/hive4/hive4/build/ql/tmp/1917947212/10002]
             Path -> Partition:
-              file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002 
+              file:/data/users/njain/hive4/hive4/build/ql/tmp/1917947212/10002 
                 Partition
                 
                     input format: org.apache.hadoop.mapred.TextInputFormat
@@ -143,7 +148,7 @@
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       file.inputformat org.apache.hadoop.mapred.TextInputFormat
                       file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
+                      location file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/dest1
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
             Reduce Operator Tree:
@@ -151,7 +156,7 @@
                 File Output Operator
                   compressed: false
                   GlobalTableId: 0
-                  directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1104113199/10000
+                  directory: file:/data/users/njain/hive4/hive4/build/ql/tmp/425720684/10000
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -164,7 +169,7 @@
                         bucket_count -1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                        location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
+                        location file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/dest1
                         file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: dest1
@@ -173,7 +178,7 @@
     Move Operator
       tables:
           replace: true
-          source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1104113199/10000
+          source: file:/data/users/njain/hive4/hive4/build/ql/tmp/425720684/10000
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -187,10 +192,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
+                location file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/dest1
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dest1
-          tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1104113199/10001
+          tmp directory: file:/data/users/njain/hive4/hive4/build/ql/tmp/425720684/10001
 
 
 query: INSERT OVERWRITE TABLE dest1 SELECT s.*
@@ -200,7 +205,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/128296418/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/1412070589/10000
 238	val_238	2008-04-08	11
 86	val_86	2008-04-08	11
 311	val_311	2008-04-08	11
@@ -703,5 +708,5 @@
 97	val_97	2008-04-08	11
 query: select count(1) from srcbucket
 Input: default/srcbucket
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/822380674/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/984906761/10000
 1000

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml?rev=811513&r1=811512&r2=811513&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml Fri Sep  4 18:49:41 2009
@@ -77,7 +77,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart</string> 
+            <string>file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/srcpart</string> 
            </void> 
           </object> 
          </void> 
@@ -105,67 +105,249 @@
                 <void property="childOperators"> 
                  <object class="java.util.ArrayList"> 
                   <void method="add"> 
-                   <object id="SelectOperator0" class="org.apache.hadoop.hive.ql.exec.SelectOperator"> 
+                   <object id="FilterOperator2" class="org.apache.hadoop.hive.ql.exec.FilterOperator"> 
                     <void property="childOperators"> 
                      <object class="java.util.ArrayList"> 
                       <void method="add"> 
-                       <object class="org.apache.hadoop.hive.ql.exec.FileSinkOperator"> 
-                        <void property="conf"> 
-                         <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
-                          <void property="dirName"> 
-                           <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/384076544/10001</string> 
-                          </void> 
-                          <void property="tableInfo"> 
-                           <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
-                            <void property="deserializerClass"> 
-                             <class>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</class> 
-                            </void> 
-                            <void property="inputFileFormatClass"> 
-                             <class>org.apache.hadoop.mapred.TextInputFormat</class> 
-                            </void> 
-                            <void property="outputFileFormatClass"> 
-                             <class>org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat</class> 
-                            </void> 
-                            <void property="properties"> 
-                             <object class="java.util.Properties"> 
-                              <void method="put"> 
-                               <string>columns</string> 
-                               <string>_col0,_col1,_col2,_col3</string> 
-                              </void> 
-                              <void method="put"> 
-                               <string>serialization.format</string> 
-                               <string>1</string> 
-                              </void> 
-                              <void method="put"> 
-                               <string>columns.types</string> 
-                               <string>string:string:string:string</string> 
+                       <object id="SelectOperator0" class="org.apache.hadoop.hive.ql.exec.SelectOperator"> 
+                        <void property="childOperators"> 
+                         <object class="java.util.ArrayList"> 
+                          <void method="add"> 
+                           <object class="org.apache.hadoop.hive.ql.exec.FileSinkOperator"> 
+                            <void property="conf"> 
+                             <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
+                              <void property="dirName"> 
+                               <string>file:/data/users/njain/hive4/hive4/build/ql/tmp/1901117107/10001</string> 
+                              </void> 
+                              <void property="tableInfo"> 
+                               <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
+                                <void property="deserializerClass"> 
+                                 <class>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</class> 
+                                </void> 
+                                <void property="inputFileFormatClass"> 
+                                 <class>org.apache.hadoop.mapred.TextInputFormat</class> 
+                                </void> 
+                                <void property="outputFileFormatClass"> 
+                                 <class>org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat</class> 
+                                </void> 
+                                <void property="properties"> 
+                                 <object class="java.util.Properties"> 
+                                  <void method="put"> 
+                                   <string>columns</string> 
+                                   <string>_col0,_col1,_col2,_col3</string> 
+                                  </void> 
+                                  <void method="put"> 
+                                   <string>serialization.format</string> 
+                                   <string>1</string> 
+                                  </void> 
+                                  <void method="put"> 
+                                   <string>columns.types</string> 
+                                   <string>string:string:string:string</string> 
+                                  </void> 
+                                 </object> 
+                                </void> 
+                               </object> 
+                              </void> 
+                             </object> 
+                            </void> 
+                            <void property="counterNames"> 
+                             <object class="java.util.ArrayList"> 
+                              <void method="add"> 
+                               <string>CNTR_NAME_FS_588_NUM_INPUT_ROWS</string> 
+                              </void> 
+                              <void method="add"> 
+                               <string>CNTR_NAME_FS_588_NUM_OUTPUT_ROWS</string> 
+                              </void> 
+                              <void method="add"> 
+                               <string>CNTR_NAME_FS_588_TIME_TAKEN</string> 
                               </void> 
                              </object> 
                             </void> 
+                            <void property="operatorId"> 
+                             <string>FS_588</string> 
+                            </void> 
+                            <void property="parentOperators"> 
+                             <object class="java.util.ArrayList"> 
+                              <void method="add"> 
+                               <object idref="SelectOperator0"/> 
+                              </void> 
+                             </object> 
+                            </void> 
+                            <void property="schema"> 
+                             <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> 
+                              <void property="signature"> 
+                               <object class="java.util.Vector"> 
+                                <void method="add"> 
+                                 <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
+                                  <void property="internalName"> 
+                                   <string>_col0</string> 
+                                  </void> 
+                                  <void property="type"> 
+                                   <object id="PrimitiveTypeInfo0" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
+                                    <void property="typeName"> 
+                                     <string>string</string> 
+                                    </void> 
+                                   </object> 
+                                  </void> 
+                                 </object> 
+                                </void> 
+                                <void method="add"> 
+                                 <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
+                                  <void property="internalName"> 
+                                   <string>_col1</string> 
+                                  </void> 
+                                  <void property="type"> 
+                                   <object idref="PrimitiveTypeInfo0"/> 
+                                  </void> 
+                                 </object> 
+                                </void> 
+                                <void method="add"> 
+                                 <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
+                                  <void property="internalName"> 
+                                   <string>_col2</string> 
+                                  </void> 
+                                  <void property="type"> 
+                                   <object idref="PrimitiveTypeInfo0"/> 
+                                  </void> 
+                                 </object> 
+                                </void> 
+                                <void method="add"> 
+                                 <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
+                                  <void property="internalName"> 
+                                   <string>_col3</string> 
+                                  </void> 
+                                  <void property="type"> 
+                                   <object idref="PrimitiveTypeInfo0"/> 
+                                  </void> 
+                                 </object> 
+                                </void> 
+                               </object> 
+                              </void> 
+                             </object> 
+                            </void> 
+                           </object> 
+                          </void> 
+                         </object> 
+                        </void> 
+                        <void property="columnExprMap"> 
+                         <object class="java.util.HashMap"> 
+                          <void method="put"> 
+                           <string>_col3</string> 
+                           <object id="exprNodeColumnDesc0" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
+                            <void property="column"> 
+                             <string>hr</string> 
+                            </void> 
+                            <void property="tabAlias"> 
+                             <string>s</string> 
+                            </void> 
+                            <void property="typeInfo"> 
+                             <object idref="PrimitiveTypeInfo0"/> 
+                            </void> 
+                           </object> 
+                          </void> 
+                          <void method="put"> 
+                           <string>_col2</string> 
+                           <object id="exprNodeColumnDesc1" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
+                            <void property="column"> 
+                             <string>ds</string> 
+                            </void> 
+                            <void property="tabAlias"> 
+                             <string>s</string> 
+                            </void> 
+                            <void property="typeInfo"> 
+                             <object idref="PrimitiveTypeInfo0"/> 
+                            </void> 
+                           </object> 
+                          </void> 
+                          <void method="put"> 
+                           <string>_col1</string> 
+                           <object id="exprNodeColumnDesc2" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
+                            <void property="column"> 
+                             <string>value</string> 
+                            </void> 
+                            <void property="tabAlias"> 
+                             <string>s</string> 
+                            </void> 
+                            <void property="typeInfo"> 
+                             <object idref="PrimitiveTypeInfo0"/> 
+                            </void> 
+                           </object> 
+                          </void> 
+                          <void method="put"> 
+                           <string>_col0</string> 
+                           <object id="exprNodeColumnDesc3" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
+                            <void property="column"> 
+                             <string>key</string> 
+                            </void> 
+                            <void property="tabAlias"> 
+                             <string>s</string> 
+                            </void> 
+                            <void property="typeInfo"> 
+                             <object idref="PrimitiveTypeInfo0"/> 
+                            </void> 
                            </object> 
                           </void> 
                          </object> 
                         </void> 
+                        <void property="conf"> 
+                         <object class="org.apache.hadoop.hive.ql.plan.selectDesc"> 
+                          <void property="colList"> 
+                           <object class="java.util.ArrayList"> 
+                            <void method="add"> 
+                             <object idref="exprNodeColumnDesc3"/> 
+                            </void> 
+                            <void method="add"> 
+                             <object idref="exprNodeColumnDesc2"/> 
+                            </void> 
+                            <void method="add"> 
+                             <object idref="exprNodeColumnDesc1"/> 
+                            </void> 
+                            <void method="add"> 
+                             <object idref="exprNodeColumnDesc0"/> 
+                            </void> 
+                           </object> 
+                          </void> 
+                          <void property="outputColumnNames"> 
+                           <object class="java.util.ArrayList"> 
+                            <void method="add"> 
+                             <string>_col0</string> 
+                            </void> 
+                            <void method="add"> 
+                             <string>_col1</string> 
+                            </void> 
+                            <void method="add"> 
+                             <string>_col2</string> 
+                            </void> 
+                            <void method="add"> 
+                             <string>_col3</string> 
+                            </void> 
+                           </object> 
+                          </void> 
+                          <void property="selectStar"> 
+                           <boolean>true</boolean> 
+                          </void> 
+                         </object> 
+                        </void> 
                         <void property="counterNames"> 
                          <object class="java.util.ArrayList"> 
                           <void method="add"> 
-                           <string>CNTR_NAME_FS_588_NUM_INPUT_ROWS</string> 
+                           <string>CNTR_NAME_SEL_587_NUM_INPUT_ROWS</string> 
                           </void> 
                           <void method="add"> 
-                           <string>CNTR_NAME_FS_588_NUM_OUTPUT_ROWS</string> 
+                           <string>CNTR_NAME_SEL_587_NUM_OUTPUT_ROWS</string> 
                           </void> 
                           <void method="add"> 
-                           <string>CNTR_NAME_FS_588_TIME_TAKEN</string> 
+                           <string>CNTR_NAME_SEL_587_TIME_TAKEN</string> 
                           </void> 
                          </object> 
                         </void> 
                         <void property="operatorId"> 
-                         <string>FS_588</string> 
+                         <string>SEL_587</string> 
                         </void> 
                         <void property="parentOperators"> 
                          <object class="java.util.ArrayList"> 
                           <void method="add"> 
-                           <object idref="SelectOperator0"/> 
+                           <object idref="FilterOperator2"/> 
                           </void> 
                          </object> 
                         </void> 
@@ -179,11 +361,7 @@
                                <string>_col0</string> 
                               </void> 
                               <void property="type"> 
-                               <object id="PrimitiveTypeInfo0" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
-                                <void property="typeName"> 
-                                 <string>string</string> 
-                                </void> 
-                               </object> 
+                               <object idref="PrimitiveTypeInfo0"/> 
                               </void> 
                              </object> 
                             </void> 
@@ -225,120 +403,147 @@
                       </void> 
                      </object> 
                     </void> 
-                    <void property="columnExprMap"> 
-                     <object class="java.util.HashMap"> 
-                      <void method="put"> 
-                       <string>_col3</string> 
-                       <object id="exprNodeColumnDesc0" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
-                        <void property="column"> 
-                         <string>hr</string> 
-                        </void> 
-                        <void property="tabAlias"> 
-                         <string>s</string> 
-                        </void> 
-                        <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo0"/> 
-                        </void> 
-                       </object> 
-                      </void> 
-                      <void method="put"> 
-                       <string>_col2</string> 
-                       <object id="exprNodeColumnDesc1" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
-                        <void property="column"> 
-                         <string>ds</string> 
-                        </void> 
-                        <void property="tabAlias"> 
-                         <string>s</string> 
-                        </void> 
-                        <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo0"/> 
-                        </void> 
-                       </object> 
-                      </void> 
-                      <void method="put"> 
-                       <string>_col1</string> 
-                       <object id="exprNodeColumnDesc2" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
-                        <void property="column"> 
-                         <string>value</string> 
-                        </void> 
-                        <void property="tabAlias"> 
-                         <string>s</string> 
-                        </void> 
-                        <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo0"/> 
-                        </void> 
-                       </object> 
-                      </void> 
-                      <void method="put"> 
-                       <string>_col0</string> 
-                       <object id="exprNodeColumnDesc3" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
-                        <void property="column"> 
-                         <string>key</string> 
-                        </void> 
-                        <void property="tabAlias"> 
-                         <string>s</string> 
-                        </void> 
-                        <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo0"/> 
-                        </void> 
-                       </object> 
-                      </void> 
-                     </object> 
-                    </void> 
                     <void property="conf"> 
-                     <object class="org.apache.hadoop.hive.ql.plan.selectDesc"> 
-                      <void property="colList"> 
-                       <object class="java.util.ArrayList"> 
-                        <void method="add"> 
-                         <object idref="exprNodeColumnDesc3"/> 
-                        </void> 
-                        <void method="add"> 
-                         <object idref="exprNodeColumnDesc2"/> 
-                        </void> 
-                        <void method="add"> 
-                         <object idref="exprNodeColumnDesc1"/> 
-                        </void> 
-                        <void method="add"> 
-                         <object idref="exprNodeColumnDesc0"/> 
-                        </void> 
-                       </object> 
-                      </void> 
-                      <void property="outputColumnNames"> 
-                       <object class="java.util.ArrayList"> 
-                        <void method="add"> 
-                         <string>_col0</string> 
-                        </void> 
-                        <void method="add"> 
-                         <string>_col1</string> 
+                     <object class="org.apache.hadoop.hive.ql.plan.filterDesc"> 
+                      <void property="predicate"> 
+                       <object class="org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc"> 
+                        <void property="childExprs"> 
+                         <object class="java.util.ArrayList"> 
+                          <void method="add"> 
+                           <object class="org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc"> 
+                            <void property="childExprs"> 
+                             <object class="java.util.ArrayList"> 
+                              <void method="add"> 
+                               <object class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
+                                <void property="column"> 
+                                 <string>ds</string> 
+                                </void> 
+                                <void property="tabAlias"> 
+                                 <string>s</string> 
+                                </void> 
+                                <void property="typeInfo"> 
+                                 <object idref="PrimitiveTypeInfo0"/> 
+                                </void> 
+                               </object> 
+                              </void> 
+                              <void method="add"> 
+                               <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc"> 
+                                <void property="typeInfo"> 
+                                 <object idref="PrimitiveTypeInfo0"/> 
+                                </void> 
+                                <void property="value"> 
+                                 <string>2008-04-08</string> 
+                                </void> 
+                               </object> 
+                              </void> 
+                             </object> 
+                            </void> 
+                            <void property="genericUDF"> 
+                             <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge"> 
+                              <void property="operator"> 
+                               <boolean>true</boolean> 
+                              </void> 
+                              <void property="udfClass"> 
+                               <class>org.apache.hadoop.hive.ql.udf.UDFOPEqual</class> 
+                              </void> 
+                              <void property="udfName"> 
+                               <string>=</string> 
+                              </void> 
+                             </object> 
+                            </void> 
+                            <void property="typeInfo"> 
+                             <object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
+                              <void property="typeName"> 
+                               <string>boolean</string> 
+                              </void> 
+                             </object> 
+                            </void> 
+                           </object> 
+                          </void> 
+                          <void method="add"> 
+                           <object class="org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc"> 
+                            <void property="childExprs"> 
+                             <object class="java.util.ArrayList"> 
+                              <void method="add"> 
+                               <object class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
+                                <void property="column"> 
+                                 <string>hr</string> 
+                                </void> 
+                                <void property="tabAlias"> 
+                                 <string>s</string> 
+                                </void> 
+                                <void property="typeInfo"> 
+                                 <object idref="PrimitiveTypeInfo0"/> 
+                                </void> 
+                               </object> 
+                              </void> 
+                              <void method="add"> 
+                               <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc"> 
+                                <void property="typeInfo"> 
+                                 <object idref="PrimitiveTypeInfo0"/> 
+                                </void> 
+                                <void property="value"> 
+                                 <string>11</string> 
+                                </void> 
+                               </object> 
+                              </void> 
+                             </object> 
+                            </void> 
+                            <void property="genericUDF"> 
+                             <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge"> 
+                              <void property="operator"> 
+                               <boolean>true</boolean> 
+                              </void> 
+                              <void property="udfClass"> 
+                               <class>org.apache.hadoop.hive.ql.udf.UDFOPEqual</class> 
+                              </void> 
+                              <void property="udfName"> 
+                               <string>=</string> 
+                              </void> 
+                             </object> 
+                            </void> 
+                            <void property="typeInfo"> 
+                             <object idref="PrimitiveTypeInfo1"/> 
+                            </void> 
+                           </object> 
+                          </void> 
+                         </object> 
                         </void> 
-                        <void method="add"> 
-                         <string>_col2</string> 
+                        <void property="genericUDF"> 
+                         <object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge"> 
+                          <void property="operator"> 
+                           <boolean>true</boolean> 
+                          </void> 
+                          <void property="udfClass"> 
+                           <class>org.apache.hadoop.hive.ql.udf.UDFOPAnd</class> 
+                          </void> 
+                          <void property="udfName"> 
+                           <string>and</string> 
+                          </void> 
+                         </object> 
                         </void> 
-                        <void method="add"> 
-                         <string>_col3</string> 
+                        <void property="typeInfo"> 
+                         <object idref="PrimitiveTypeInfo1"/> 
                         </void> 
                        </object> 
                       </void> 
-                      <void property="selectStar"> 
-                       <boolean>true</boolean> 
-                      </void> 
                      </object> 
                     </void> 
                     <void property="counterNames"> 
                      <object class="java.util.ArrayList"> 
                       <void method="add"> 
-                       <string>CNTR_NAME_SEL_587_NUM_INPUT_ROWS</string> 
+                       <string>CNTR_NAME_FIL_586_NUM_INPUT_ROWS</string> 
                       </void> 
                       <void method="add"> 
-                       <string>CNTR_NAME_SEL_587_NUM_OUTPUT_ROWS</string> 
+                       <string>CNTR_NAME_FIL_586_NUM_OUTPUT_ROWS</string> 
                       </void> 
                       <void method="add"> 
-                       <string>CNTR_NAME_SEL_587_TIME_TAKEN</string> 
+                       <string>CNTR_NAME_FIL_586_TIME_TAKEN</string> 
                       </void> 
                      </object> 
                     </void> 
                     <void property="operatorId"> 
-                     <string>SEL_587</string> 
+                     <string>FIL_586</string> 
                     </void> 
                     <void property="parentOperators"> 
                      <object class="java.util.ArrayList"> 
@@ -350,11 +555,11 @@
                     <void property="schema"> 
                      <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> 
                       <void property="signature"> 
-                       <object class="java.util.Vector"> 
+                       <object id="Vector0" class="java.util.Vector"> 
                         <void method="add"> 
                          <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                           <void property="internalName"> 
-                           <string>_col0</string> 
+                           <string>key</string> 
                           </void> 
                           <void property="type"> 
                            <object idref="PrimitiveTypeInfo0"/> 
@@ -364,7 +569,7 @@
                         <void method="add"> 
                          <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                           <void property="internalName"> 
-                           <string>_col1</string> 
+                           <string>value</string> 
                           </void> 
                           <void property="type"> 
                            <object idref="PrimitiveTypeInfo0"/> 
@@ -374,7 +579,7 @@
                         <void method="add"> 
                          <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                           <void property="internalName"> 
-                           <string>_col2</string> 
+                           <string>ds</string> 
                           </void> 
                           <void property="type"> 
                            <object idref="PrimitiveTypeInfo0"/> 
@@ -384,7 +589,7 @@
                         <void method="add"> 
                          <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                           <void property="internalName"> 
-                           <string>_col3</string> 
+                           <string>hr</string> 
                           </void> 
                           <void property="type"> 
                            <object idref="PrimitiveTypeInfo0"/> 
@@ -448,11 +653,7 @@
                          </object> 
                         </void> 
                         <void property="typeInfo"> 
-                         <object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
-                          <void property="typeName"> 
-                           <string>boolean</string> 
-                          </void> 
-                         </object> 
+                         <object idref="PrimitiveTypeInfo1"/> 
                         </void> 
                        </object> 
                       </void> 
@@ -528,18 +729,18 @@
                 <void property="counterNames"> 
                  <object class="java.util.ArrayList"> 
                   <void method="add"> 
-                   <string>CNTR_NAME_FIL_586_NUM_INPUT_ROWS</string> 
+                   <string>CNTR_NAME_FIL_589_NUM_INPUT_ROWS</string> 
                   </void> 
                   <void method="add"> 
-                   <string>CNTR_NAME_FIL_586_NUM_OUTPUT_ROWS</string> 
+                   <string>CNTR_NAME_FIL_589_NUM_OUTPUT_ROWS</string> 
                   </void> 
                   <void method="add"> 
-                   <string>CNTR_NAME_FIL_586_TIME_TAKEN</string> 
+                   <string>CNTR_NAME_FIL_589_TIME_TAKEN</string> 
                   </void> 
                  </object> 
                 </void> 
                 <void property="operatorId"> 
-                 <string>FIL_586</string> 
+                 <string>FIL_589</string> 
                 </void> 
                 <void property="parentOperators"> 
                  <object class="java.util.ArrayList"> 
@@ -551,48 +752,7 @@
                 <void property="schema"> 
                  <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> 
                   <void property="signature"> 
-                   <object id="Vector0" class="java.util.Vector"> 
-                    <void method="add"> 
-                     <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
-                      <void property="internalName"> 
-                       <string>key</string> 
-                      </void> 
-                      <void property="type"> 
-                       <object idref="PrimitiveTypeInfo0"/> 
-                      </void> 
-                     </object> 
-                    </void> 
-                    <void method="add"> 
-                     <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
-                      <void property="internalName"> 
-                       <string>value</string> 
-                      </void> 
-                      <void property="type"> 
-                       <object idref="PrimitiveTypeInfo0"/> 
-                      </void> 
-                     </object> 
-                    </void> 
-                    <void method="add"> 
-                     <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
-                      <void property="internalName"> 
-                       <string>ds</string> 
-                      </void> 
-                      <void property="type"> 
-                       <object idref="PrimitiveTypeInfo0"/> 
-                      </void> 
-                     </object> 
-                    </void> 
-                    <void method="add"> 
-                     <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
-                      <void property="internalName"> 
-                       <string>hr</string> 
-                      </void> 
-                      <void property="type"> 
-                       <object idref="PrimitiveTypeInfo0"/> 
-                      </void> 
-                     </object> 
-                    </void> 
-                   </object> 
+                   <object idref="Vector0"/> 
                   </void> 
                  </object> 
                 </void> 
@@ -831,7 +991,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11</string> 
+       <string>file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>s</string> 
@@ -843,7 +1003,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11</string> 
+       <string>file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object idref="LinkedHashMap0"/> 

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml?rev=811513&r1=811512&r2=811513&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml Fri Sep  4 18:49:41 2009
@@ -30,7 +30,7 @@
                <boolean>true</boolean> 
               </void> 
               <void property="sourceDir"> 
-               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1164084646/10000</string> 
+               <string>file:/data/users/njain/hive4/hive4/build/ql/tmp/1191919557/10000</string> 
               </void> 
               <void property="table"> 
                <object id="tableDesc0" class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -83,7 +83,7 @@
                   </void> 
                   <void method="put"> 
                    <string>location</string> 
-                   <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/dest1</string> 
+                   <string>file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/dest1</string> 
                   </void> 
                  </object> 
                 </void> 
@@ -93,7 +93,7 @@
                </object> 
               </void> 
               <void property="tmpDir"> 
-               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1164084646/10001</string> 
+               <string>file:/data/users/njain/hive4/hive4/build/ql/tmp/1191919557/10001</string> 
               </void> 
              </object> 
             </void> 
@@ -121,10 +121,10 @@
                <boolean>true</boolean> 
               </void> 
               <void property="sourceDir"> 
-               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1002712615/10002</string> 
+               <string>file:/data/users/njain/hive4/hive4/build/ql/tmp/167250082/10002</string> 
               </void> 
               <void property="targetDir"> 
-               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1164084646/10000</string> 
+               <string>file:/data/users/njain/hive4/hive4/build/ql/tmp/1191919557/10000</string> 
               </void> 
              </object> 
             </void> 
@@ -142,7 +142,7 @@
             <void property="aliasToWork"> 
              <object class="java.util.LinkedHashMap"> 
               <void method="put"> 
-               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1002712615/10002</string> 
+               <string>file:/data/users/njain/hive4/hive4/build/ql/tmp/167250082/10002</string> 
                <object id="TableScanOperator0" class="org.apache.hadoop.hive.ql.exec.TableScanOperator"> 
                 <void property="childOperators"> 
                  <object class="java.util.ArrayList"> 
@@ -297,18 +297,18 @@
                     <void property="counterNames"> 
                      <object class="java.util.ArrayList"> 
                       <void method="add"> 
-                       <string>CNTR_NAME_RS_600_NUM_INPUT_ROWS</string> 
+                       <string>CNTR_NAME_RS_602_NUM_INPUT_ROWS</string> 
                       </void> 
                       <void method="add"> 
-                       <string>CNTR_NAME_RS_600_NUM_OUTPUT_ROWS</string> 
+                       <string>CNTR_NAME_RS_602_NUM_OUTPUT_ROWS</string> 
                       </void> 
                       <void method="add"> 
-                       <string>CNTR_NAME_RS_600_TIME_TAKEN</string> 
+                       <string>CNTR_NAME_RS_602_TIME_TAKEN</string> 
                       </void> 
                      </object> 
                     </void> 
                     <void property="operatorId"> 
-                     <string>RS_600</string> 
+                     <string>RS_602</string> 
                     </void> 
                     <void property="parentOperators"> 
                      <object class="java.util.ArrayList"> 
@@ -352,18 +352,18 @@
                 <void property="counterNames"> 
                  <object class="java.util.ArrayList"> 
                   <void method="add"> 
-                   <string>CNTR_NAME_TS_599_NUM_INPUT_ROWS</string> 
+                   <string>CNTR_NAME_TS_601_NUM_INPUT_ROWS</string> 
                   </void> 
                   <void method="add"> 
-                   <string>CNTR_NAME_TS_599_NUM_OUTPUT_ROWS</string> 
+                   <string>CNTR_NAME_TS_601_NUM_OUTPUT_ROWS</string> 
                   </void> 
                   <void method="add"> 
-                   <string>CNTR_NAME_TS_599_TIME_TAKEN</string> 
+                   <string>CNTR_NAME_TS_601_TIME_TAKEN</string> 
                   </void> 
                  </object> 
                 </void> 
                 <void property="operatorId"> 
-                 <string>TS_599</string> 
+                 <string>TS_601</string> 
                 </void> 
                 <void property="schema"> 
                  <object idref="RowSchema0"/> 
@@ -381,10 +381,10 @@
             <void property="pathToAliases"> 
              <object class="java.util.LinkedHashMap"> 
               <void method="put"> 
-               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1002712615/10002</string> 
+               <string>file:/data/users/njain/hive4/hive4/build/ql/tmp/167250082/10002</string> 
                <object class="java.util.ArrayList"> 
                 <void method="add"> 
-                 <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1002712615/10002</string> 
+                 <string>file:/data/users/njain/hive4/hive4/build/ql/tmp/167250082/10002</string> 
                 </void> 
                </object> 
               </void> 
@@ -393,7 +393,7 @@
             <void property="pathToPartitionInfo"> 
              <object class="java.util.LinkedHashMap"> 
               <void method="put"> 
-               <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1002712615/10002</string> 
+               <string>file:/data/users/njain/hive4/hive4/build/ql/tmp/167250082/10002</string> 
                <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
                 <void property="tableDesc"> 
                  <object idref="tableDesc0"/> 
@@ -411,7 +411,7 @@
                   <void property="conf"> 
                    <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
                     <void property="dirName"> 
-                     <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1164084646/10000</string> 
+                     <string>file:/data/users/njain/hive4/hive4/build/ql/tmp/1191919557/10000</string> 
                     </void> 
                     <void property="tableInfo"> 
                      <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -460,7 +460,7 @@
                         </void> 
                         <void method="put"> 
                          <string>location</string> 
-                         <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/dest1</string> 
+                         <string>file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/dest1</string> 
                         </void> 
                         <void method="put"> 
                          <string>file.outputformat</string> 
@@ -478,18 +478,18 @@
                   <void property="counterNames"> 
                    <object class="java.util.ArrayList"> 
                     <void method="add"> 
-                     <string>CNTR_NAME_FS_602_NUM_INPUT_ROWS</string> 
+                     <string>CNTR_NAME_FS_604_NUM_INPUT_ROWS</string> 
                     </void> 
                     <void method="add"> 
-                     <string>CNTR_NAME_FS_602_NUM_OUTPUT_ROWS</string> 
+                     <string>CNTR_NAME_FS_604_NUM_OUTPUT_ROWS</string> 
                     </void> 
                     <void method="add"> 
-                     <string>CNTR_NAME_FS_602_TIME_TAKEN</string> 
+                     <string>CNTR_NAME_FS_604_TIME_TAKEN</string> 
                     </void> 
                    </object> 
                   </void> 
                   <void property="operatorId"> 
-                   <string>FS_602</string> 
+                   <string>FS_604</string> 
                   </void> 
                   <void property="parentOperators"> 
                    <object class="java.util.ArrayList"> 
@@ -525,18 +525,18 @@
               <void property="counterNames"> 
                <object class="java.util.ArrayList"> 
                 <void method="add"> 
-                 <string>CNTR_NAME_OP_601_NUM_INPUT_ROWS</string> 
+                 <string>CNTR_NAME_OP_603_NUM_INPUT_ROWS</string> 
                 </void> 
                 <void method="add"> 
-                 <string>CNTR_NAME_OP_601_NUM_OUTPUT_ROWS</string> 
+                 <string>CNTR_NAME_OP_603_NUM_OUTPUT_ROWS</string> 
                 </void> 
                 <void method="add"> 
-                 <string>CNTR_NAME_OP_601_TIME_TAKEN</string> 
+                 <string>CNTR_NAME_OP_603_TIME_TAKEN</string> 
                 </void> 
                </object> 
               </void> 
               <void property="operatorId"> 
-               <string>OP_601</string> 
+               <string>OP_603</string> 
               </void> 
               <void property="schema"> 
                <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> 
@@ -598,7 +598,7 @@
       <void property="resolverCtx"> 
        <object class="org.apache.hadoop.hive.ql.plan.ConditionalResolverMergeFiles$ConditionalResolverMergeFilesCtx"> 
         <void property="dir"> 
-         <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1002712615/10002</string> 
+         <string>file:/data/users/njain/hive4/hive4/build/ql/tmp/167250082/10002</string> 
         </void> 
         <void property="listTasks"> 
          <object idref="ArrayList0"/> 
@@ -690,7 +690,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcbucket</string> 
+            <string>file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/srcbucket</string> 
            </void> 
           </object> 
          </void> 
@@ -729,7 +729,7 @@
                            <int>1</int> 
                           </void> 
                           <void property="dirName"> 
-                           <string>file:/data/users/njain/hive3/hive3/build/ql/tmp/1002712615/10002</string> 
+                           <string>file:/data/users/njain/hive4/hive4/build/ql/tmp/167250082/10002</string> 
                           </void> 
                           <void property="tableInfo"> 
                            <object idref="tableDesc0"/> 
@@ -739,18 +739,18 @@
                         <void property="counterNames"> 
                          <object class="java.util.ArrayList"> 
                           <void method="add"> 
-                           <string>CNTR_NAME_FS_597_NUM_INPUT_ROWS</string> 
+                           <string>CNTR_NAME_FS_599_NUM_INPUT_ROWS</string> 
                           </void> 
                           <void method="add"> 
-                           <string>CNTR_NAME_FS_597_NUM_OUTPUT_ROWS</string> 
+                           <string>CNTR_NAME_FS_599_NUM_OUTPUT_ROWS</string> 
                           </void> 
                           <void method="add"> 
-                           <string>CNTR_NAME_FS_597_TIME_TAKEN</string> 
+                           <string>CNTR_NAME_FS_599_TIME_TAKEN</string> 
                           </void> 
                          </object> 
                         </void> 
                         <void property="operatorId"> 
-                         <string>FS_597</string> 
+                         <string>FS_599</string> 
                         </void> 
                         <void property="parentOperators"> 
                          <object class="java.util.ArrayList"> 
@@ -828,18 +828,18 @@
                     <void property="counterNames"> 
                      <object class="java.util.ArrayList"> 
                       <void method="add"> 
-                       <string>CNTR_NAME_SEL_596_NUM_INPUT_ROWS</string> 
+                       <string>CNTR_NAME_SEL_598_NUM_INPUT_ROWS</string> 
                       </void> 
                       <void method="add"> 
-                       <string>CNTR_NAME_SEL_596_NUM_OUTPUT_ROWS</string> 
+                       <string>CNTR_NAME_SEL_598_NUM_OUTPUT_ROWS</string> 
                       </void> 
                       <void method="add"> 
-                       <string>CNTR_NAME_SEL_596_TIME_TAKEN</string> 
+                       <string>CNTR_NAME_SEL_598_TIME_TAKEN</string> 
                       </void> 
                      </object> 
                     </void> 
                     <void property="operatorId"> 
-                     <string>SEL_596</string> 
+                     <string>SEL_598</string> 
                     </void> 
                     <void property="parentOperators"> 
                      <object class="java.util.ArrayList"> 
@@ -1023,18 +1023,18 @@
                 <void property="counterNames"> 
                  <object class="java.util.ArrayList"> 
                   <void method="add"> 
-                   <string>CNTR_NAME_FIL_595_NUM_INPUT_ROWS</string> 
+                   <string>CNTR_NAME_FIL_597_NUM_INPUT_ROWS</string> 
                   </void> 
                   <void method="add"> 
-                   <string>CNTR_NAME_FIL_595_NUM_OUTPUT_ROWS</string> 
+                   <string>CNTR_NAME_FIL_597_NUM_OUTPUT_ROWS</string> 
                   </void> 
                   <void method="add"> 
-                   <string>CNTR_NAME_FIL_595_TIME_TAKEN</string> 
+                   <string>CNTR_NAME_FIL_597_TIME_TAKEN</string> 
                   </void> 
                  </object> 
                 </void> 
                 <void property="operatorId"> 
-                 <string>FIL_595</string> 
+                 <string>FIL_597</string> 
                 </void> 
                 <void property="parentOperators"> 
                  <object class="java.util.ArrayList"> 
@@ -1183,18 +1183,18 @@
             <void property="counterNames"> 
              <object class="java.util.ArrayList"> 
               <void method="add"> 
-               <string>CNTR_NAME_FIL_598_NUM_INPUT_ROWS</string> 
+               <string>CNTR_NAME_FIL_600_NUM_INPUT_ROWS</string> 
               </void> 
               <void method="add"> 
-               <string>CNTR_NAME_FIL_598_NUM_OUTPUT_ROWS</string> 
+               <string>CNTR_NAME_FIL_600_NUM_OUTPUT_ROWS</string> 
               </void> 
               <void method="add"> 
-               <string>CNTR_NAME_FIL_598_TIME_TAKEN</string> 
+               <string>CNTR_NAME_FIL_600_TIME_TAKEN</string> 
               </void> 
              </object> 
             </void> 
             <void property="operatorId"> 
-             <string>FIL_598</string> 
+             <string>FIL_600</string> 
             </void> 
             <void property="parentOperators"> 
              <object class="java.util.ArrayList"> 
@@ -1245,13 +1245,13 @@
         <void property="counterNames"> 
          <object class="java.util.ArrayList"> 
           <void method="add"> 
-           <string>CNTR_NAME_TS_594_NUM_INPUT_ROWS</string> 
+           <string>CNTR_NAME_TS_596_NUM_INPUT_ROWS</string> 
           </void> 
           <void method="add"> 
-           <string>CNTR_NAME_TS_594_NUM_OUTPUT_ROWS</string> 
+           <string>CNTR_NAME_TS_596_NUM_OUTPUT_ROWS</string> 
           </void> 
           <void method="add"> 
-           <string>CNTR_NAME_TS_594_TIME_TAKEN</string> 
+           <string>CNTR_NAME_TS_596_TIME_TAKEN</string> 
           </void> 
          </object> 
         </void> 
@@ -1266,7 +1266,7 @@
          </object> 
         </void> 
         <void property="operatorId"> 
-         <string>TS_594</string> 
+         <string>TS_596</string> 
         </void> 
         <void property="schema"> 
          <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> 
@@ -1282,7 +1282,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string> 
+       <string>file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>s</string> 
@@ -1294,7 +1294,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/njain/hive3/hive3/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string> 
+       <string>file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object idref="LinkedHashMap0"/> 
