hadoop-hive-commits mailing list archives

From: zs...@apache.org
Subject: svn commit: r811580 - in /hadoop/hive/branches/branch-0.4: ./ ql/src/java/org/apache/hadoop/hive/ql/ppd/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/ ql/src/test/results/compiler/plan/
Date: Sat, 05 Sep 2009 00:46:20 GMT
Author: zshao
Date: Sat Sep  5 00:46:20 2009
New Revision: 811580

URL: http://svn.apache.org/viewvc?rev=811580&view=rev
Log:
HIVE-750. New partitionpruner does not work with test mode. (Namit Jain via zshao)

Added:
    hadoop/hive/branches/branch-0.4/ql/src/test/queries/clientpositive/input39.q
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input39.q.out
Modified:
    hadoop/hive/branches/branch-0.4/CHANGES.txt
    hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
    hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/sample1.q.out
    hadoop/hive/branches/branch-0.4/ql/src/test/results/compiler/plan/sample1.q.xml

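Editor's note, for context: the patch below factors the inline filter-construction code out of TableScanPPD into a shared createFilter helper in OpProcFactory.java and also calls it from FilterPPD when a predicate is non-deterministic (as the rand()-based sampling predicate injected by hive.test.mode is), so predicates already collected from the child are still materialized as a Filter operator. The helper's first step folds all candidate predicates into one left-deep AND expression. Below is a minimal, self-contained sketch of that folding step only, with plain strings standing in for Hive's exprNodeDesc/exprNodeFuncDesc objects; foldIntoAnd is an illustrative name, not part of the patch.

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

// Illustrative only: plain strings stand in for Hive's exprNodeDesc trees.
public class AndFoldSketch {

    // Fold every pushdown-candidate predicate list into one left-deep AND
    // expression, mirroring the loop in OpProcFactory.createFilter; returns
    // null when there is nothing to push down.
    static String foldIntoAnd(Collection<List<String>> candidates) {
        String condn = null;
        for (List<String> preds : candidates) {
            for (String pred : preds) {
                condn = (condn == null) ? pred : "(" + condn + " AND " + pred + ")";
            }
        }
        return condn;
    }

    public static void main(String[] args) {
        Collection<List<String>> candidates = Arrays.asList(
                Arrays.asList("ds = '1'"),
                Arrays.asList("key is not null", "value <> ''"));
        // Prints: ((ds = '1' AND key is not null) AND value <> '')
        System.out.println(foldIntoAnd(candidates));
    }
}
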
Modified: hadoop/hive/branches/branch-0.4/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/CHANGES.txt?rev=811580&r1=811579&r2=811580&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/CHANGES.txt (original)
+++ hadoop/hive/branches/branch-0.4/CHANGES.txt Sat Sep  5 00:46:20 2009
@@ -536,12 +536,16 @@
 
     HIVE-790. Bug in union and script. (Ning Zhang via namit)
 
-    HIVE-755. Driver NullPointerException when calling getResults without first compiling
-    (Eric Hwang via namit)
+    HIVE-755. Driver NullPointerException when calling getResults without first
+    compiling (Eric Hwang via namit)
 
     HIVE-752. Bug in shimLoader for HWI server (Edward Capriolo via namit)
 
-    HIVE-812. ant package should work with all versions of hadoop (Zheng Shao via namit)
+    HIVE-812. ant package should work with all versions of hadoop.
+    (Zheng Shao via namit)
+
+    HIVE-750. New partitionpruner does not work with test mode.
+    (Namit Jain via zshao)
 
 Release 0.3.1 - Unreleased
 

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java?rev=811580&r1=811579&r2=811580&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java Sat Sep  5 00:46:20 2009
@@ -97,60 +97,8 @@
       TableScanOperator tsOp = (TableScanOperator)nd;
       mergeWithChildrenPred(tsOp, owi, null, null, false);
       ExprWalkerInfo pushDownPreds = owi.getPrunedPreds(tsOp);
-
-      if (pushDownPreds == null 
-          || pushDownPreds.getFinalCandidates() == null 
-          || pushDownPreds.getFinalCandidates().size() == 0) {
-        return null;
-      }
-      
-      // combine all predicates into a single expression
-      List<exprNodeDesc> preds = null;
-      exprNodeDesc condn = null; 
-      Iterator<List<exprNodeDesc>> iterator = pushDownPreds.getFinalCandidates().values().iterator();
-      while (iterator.hasNext()) {
-        preds = iterator.next();
-        int i = 0;
-        if(condn == null) {
-          condn = preds.get(0);
-          i++;
-        }
-        for(; i < preds.size(); i++) {
-          List<exprNodeDesc> children = new ArrayList<exprNodeDesc>(2);
-          children.add(condn);
-          children.add((exprNodeDesc) preds.get(i));
-          condn = new exprNodeFuncDesc(
-              "AND",
-              TypeInfoFactory.booleanTypeInfo,
-              FunctionRegistry.getUDFClass("AND"),
-              FunctionRegistry.getUDFMethod("AND",
-                  TypeInfoFactory.booleanTypeInfo,
-                  TypeInfoFactory.booleanTypeInfo),
-              children);
-        }
-      }
-      if(condn == null)
-        return null;
-      // add new filter op
-      List<Operator<? extends Serializable>> originalChilren = tsOp.getChildOperators();
-      tsOp.setChildOperators(null);
-      Operator<filterDesc> output = 
-        OperatorFactory.getAndMakeChild(new filterDesc(condn, false),
-                              new RowSchema(inputRR.getColumnInfos()), 
-                              tsOp);
-      output.setChildOperators(originalChilren);
-      for (Operator<? extends Serializable> ch : originalChilren) {
-        List<Operator<? extends Serializable>> parentOperators = ch.getParentOperators();
-        int pos = parentOperators.indexOf(tsOp);
-        assert pos != -1;
-        parentOperators.remove(pos);
-        parentOperators.add(pos, output); // add the new op as the old
-      }
-      OpParseContext ctx = new OpParseContext(inputRR);
-      owi.put(output, ctx);
-      return output;
+      return createFilter(tsOp, pushDownPreds, owi);
     }
-
   }
 
   /**
@@ -166,10 +114,13 @@
       OpWalkerInfo owi = (OpWalkerInfo)procCtx;
       Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
       exprNodeDesc predicate = (((FilterOperator)nd).getConf()).getPredicate();
-      // get pushdown predicates for this operato's predicate
+      // get pushdown predicates for this operator's predicate
       ExprWalkerInfo ewi = ExprWalkerProcFactory.extractPushdownPreds(owi, op, predicate);
       if (!ewi.isDeterministic()) {
         /* predicate is not deterministic */
+        if (op.getChildren() != null && op.getChildren().size() == 1)
+          createFilter(op, owi.getPrunedPreds((Operator<? extends Serializable>)(op.getChildren().get(0))), owi);
+
         return null;
       }
 
@@ -335,6 +286,65 @@
     }
   }
 
+  protected static Object createFilter(Operator op, ExprWalkerInfo pushDownPreds, OpWalkerInfo owi) {
+    if (pushDownPreds == null 
+        || pushDownPreds.getFinalCandidates() == null 
+        || pushDownPreds.getFinalCandidates().size() == 0) {
+      return null;
+    }
+      
+    RowResolver inputRR = owi.getRowResolver(op);
+
+    // combine all predicates into a single expression
+    List<exprNodeDesc> preds = null;
+    exprNodeDesc condn = null; 
+    Iterator<List<exprNodeDesc>> iterator = pushDownPreds.getFinalCandidates().values().iterator();
+    while (iterator.hasNext()) {
+      preds = iterator.next();
+      int i = 0;
+      if (condn == null) {
+        condn = preds.get(0);
+        i++;
+      }
+
+      for(; i < preds.size(); i++) {
+        List<exprNodeDesc> children = new ArrayList<exprNodeDesc>(2);
+        children.add(condn);
+        children.add((exprNodeDesc) preds.get(i));
+        condn = new exprNodeFuncDesc(
+                                     "AND",
+                                     TypeInfoFactory.booleanTypeInfo,
+                                     FunctionRegistry.getUDFClass("AND"),
+                                     FunctionRegistry.getUDFMethod("AND",
+                                                                   TypeInfoFactory.booleanTypeInfo,
+                                                                   TypeInfoFactory.booleanTypeInfo),
+                                     children);
+      }
+    }
+
+    if(condn == null)
+      return null;
+
+    // add new filter op
+    List<Operator<? extends Serializable>> originalChilren = op.getChildOperators();
+    op.setChildOperators(null);
+    Operator<filterDesc> output = 
+      OperatorFactory.getAndMakeChild(new filterDesc(condn, false),
+                                      new RowSchema(inputRR.getColumnInfos()), 
+                                      op);
+    output.setChildOperators(originalChilren);
+    for (Operator<? extends Serializable> ch : originalChilren) {
+      List<Operator<? extends Serializable>> parentOperators = ch.getParentOperators();
+      int pos = parentOperators.indexOf(op);
+      assert pos != -1;
+      parentOperators.remove(pos);
+      parentOperators.add(pos, output); // add the new op as the old
+    }
+    OpParseContext ctx = new OpParseContext(inputRR);
+    owi.put(output, ctx);
+    return output;
+  }
+  
   public static NodeProcessor getFilterProc() {
     return new FilterPPD();
   }

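Editor's note: the other half of createFilter above splices the new FilterOperator between the operator and its original children by rewriting both the child and parent lists. That re-wiring is generic tree surgery; the following is a self-contained sketch of it under simplified assumptions, using a hypothetical Node class in place of Hive's Operator hierarchy.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Hypothetical stand-in for Hive's Operator<? extends Serializable>.
class Node {
    final String name;
    List<Node> children = new ArrayList<>();
    List<Node> parents = new ArrayList<>();
    Node(String name) { this.name = name; }
}

public class SpliceSketch {

    // Insert `filter` between `op` and its current children: the same
    // re-wiring createFilter performs via setChildOperators and the
    // children's getParentOperators lists.
    static void spliceBelow(Node op, Node filter) {
        List<Node> originalChildren = op.children;
        op.children = new ArrayList<>(Arrays.asList(filter));
        filter.parents = new ArrayList<>(Arrays.asList(op));
        filter.children = originalChildren;
        for (Node ch : originalChildren) {
            int pos = ch.parents.indexOf(op);
            ch.parents.set(pos, filter); // replace the old parent in place, keeping its position
        }
    }

    public static void main(String[] args) {
        Node scan = new Node("TableScan");
        Node select = new Node("Select");
        scan.children.add(select);
        select.parents.add(scan);

        spliceBelow(scan, new Node("Filter"));
        System.out.println(scan.children.get(0).name);  // Filter
        System.out.println(select.parents.get(0).name); // Filter
    }
}
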
Added: hadoop/hive/branches/branch-0.4/ql/src/test/queries/clientpositive/input39.q
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/queries/clientpositive/input39.q?rev=811580&view=auto
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/queries/clientpositive/input39.q (added)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/queries/clientpositive/input39.q Sat Sep  5 00:46:20 2009
@@ -0,0 +1,27 @@
+drop table t1;
+drop table t2;
+
+create table t1(key string, value string) partitioned by (ds string);
+create table t2(key string, value string) partitioned by (ds string);
+
+insert overwrite table t1 partition (ds='1')
+select key, value from src;
+
+insert overwrite table t1 partition (ds='2')
+select key, value from src;
+
+insert overwrite table t2 partition (ds='1')
+select key, value from src;
+
+set hive.test.mode=true;
+set hive.mapred.mode=strict;
+
+explain
+select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1';
+
+select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1';
+
+set hive.test.mode=false;
+
+drop table t1;
+drop table t2;

Added: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input39.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input39.q.out?rev=811580&view=auto
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input39.q.out (added)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/input39.q.out Sat Sep  5 00:46:20 2009
@@ -0,0 +1,141 @@
+query: drop table t1
+query: drop table t2
+query: create table t1(key string, value string) partitioned by (ds string)
+query: create table t2(key string, value string) partitioned by (ds string)
+query: insert overwrite table t1 partition (ds='1')
+select key, value from src
+Input: default/src
+Output: default/t1/ds=1
+query: insert overwrite table t1 partition (ds='2')
+select key, value from src
+Input: default/src
+Output: default/t1/ds=2
+query: insert overwrite table t2 partition (ds='1')
+select key, value from src
+Input: default/src
+Output: default/t2/ds=1
+query: explain
+select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1'
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF t1) (TOK_TABREF t2) (= (. (TOK_TABLE_OR_COL t1) key) (. (TOK_TABLE_OR_COL t2) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (and (= (. (TOK_TABLE_OR_COL t1) ds) '1') (= (. (TOK_TABLE_OR_COL t2) ds) '1')))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        t2 
+          TableScan
+            alias: t2
+            Filter Operator
+              predicate:
+                  expr: (((hash(rand(UDFToLong(460476415))) & 2147483647) % 32) = 0)
+                  type: boolean
+              Filter Operator
+                predicate:
+                    expr: (ds = '1')
+                    type: boolean
+                Reduce Output Operator
+                  key expressions:
+                        expr: key
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: key
+                        type: string
+                  tag: 1
+                  value expressions:
+                        expr: ds
+                        type: string
+        t1 
+          TableScan
+            alias: t1
+            Filter Operator
+              predicate:
+                  expr: (((hash(rand(UDFToLong(460476415))) & 2147483647) % 32) = 0)
+                  type: boolean
+              Filter Operator
+                predicate:
+                    expr: (ds = '1')
+                    type: boolean
+                Reduce Output Operator
+                  key expressions:
+                        expr: key
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: key
+                        type: string
+                  tag: 0
+                  value expressions:
+                        expr: ds
+                        type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE._col2}
+            1 {VALUE._col2}
+          outputColumnNames: _col2, _col5
+          Filter Operator
+            predicate:
+                expr: ((_col2 = '1') and (_col5 = '1'))
+                type: boolean
+            Select Operator
+              Group By Operator
+                aggregations:
+                      expr: count(1)
+                mode: hash
+                outputColumnNames: _col0
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        file:/data/users/njain/hive_commit4/hive_commit4/build/ql/tmp/42098664/10002 
+            Reduce Output Operator
+              sort order: 
+              tag: -1
+              value expressions:
+                    expr: _col0
+                    type: bigint
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: count(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: bigint
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+query: select count(1) from t1 join t2 on t1.key=t2.key where t1.ds='1' and t2.ds='1'
+Input: default/t2/ds=1
+Input: default/t1/ds=1
+Output: file:/data/users/njain/hive_commit4/hive_commit4/build/ql/tmp/536554986/10000
+18
+query: drop table t1
+query: drop table t2

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/sample1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/sample1.q.out?rev=811580&r1=811579&r2=811580&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/sample1.q.out (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/clientpositive/sample1.q.out Sat Sep  5 00:46:20 2009
@@ -29,53 +29,58 @@
                 predicate:
                     expr: ((ds = '2008-04-08') and (hr = '11'))
                     type: boolean
-                Select Operator
-                  expressions:
-                        expr: key
-                        type: string
-                        expr: value
-                        type: string
-                        expr: ds
-                        type: string
-                        expr: hr
-                        type: string
-                  outputColumnNames: _col0, _col1, _col2, _col3
+                Filter Operator
+                  isSamplingPred: false
+                  predicate:
+                      expr: ((ds = '2008-04-08') and (hr = '11'))
+                      type: boolean
                   Select Operator
                     expressions:
-                          expr: UDFToInteger(_col0)
-                          type: int
-                          expr: _col1
+                          expr: key
                           type: string
-                          expr: _col2
+                          expr: value
                           type: string
-                          expr: _col3
+                          expr: ds
+                          type: string
+                          expr: hr
                           type: string
                     outputColumnNames: _col0, _col1, _col2, _col3
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 1
-                      directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002
-                      table:
-                          input format: org.apache.hadoop.mapred.TextInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                          properties:
-                            name dest1
-                            columns.types int:string:string:string
-                            serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
-                            serialization.format 1
-                            columns key,value,dt,hr
-                            bucket_count -1
-                            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                            file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                            file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
-                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                          name: dest1
+                    Select Operator
+                      expressions:
+                            expr: UDFToInteger(_col0)
+                            type: int
+                            expr: _col1
+                            type: string
+                            expr: _col2
+                            type: string
+                            expr: _col3
+                            type: string
+                      outputColumnNames: _col0, _col1, _col2, _col3
+                      File Output Operator
+                        compressed: false
+                        GlobalTableId: 1
+                        directory: file:/data/users/njain/hive4/hive4/build/ql/tmp/1917947212/10002
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            properties:
+                              name dest1
+                              columns.types int:string:string:string
+                              serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
+                              serialization.format 1
+                              columns key,value,dt,hr
+                              bucket_count -1
+                              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                              location file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/dest1
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: dest1
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [s]
       Path -> Partition:
-        file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -94,7 +99,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
 
@@ -104,11 +109,11 @@
           Move Operator
             files:
                 hdfs directory: true
-                source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002
-                destination: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1104113199/10000
+                source: file:/data/users/njain/hive4/hive4/build/ql/tmp/1917947212/10002
+                destination: file:/data/users/njain/hive4/hive4/build/ql/tmp/425720684/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002 
+              file:/data/users/njain/hive4/hive4/build/ql/tmp/1917947212/10002 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -126,9 +131,9 @@
                           type: string
             Needs Tagging: false
             Path -> Alias:
-              file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002 
+              file:/data/users/njain/hive4/hive4/build/ql/tmp/1917947212/10002 [file:/data/users/njain/hive4/hive4/build/ql/tmp/1917947212/10002]
             Path -> Partition:
-              file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/212207906/10002 
+              file:/data/users/njain/hive4/hive4/build/ql/tmp/1917947212/10002 
                 Partition
                 
                     input format: org.apache.hadoop.mapred.TextInputFormat
@@ -143,7 +148,7 @@
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       file.inputformat org.apache.hadoop.mapred.TextInputFormat
                       file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
+                      location file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/dest1
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                     name: dest1
             Reduce Operator Tree:
@@ -151,7 +156,7 @@
                 File Output Operator
                   compressed: false
                   GlobalTableId: 0
-                  directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1104113199/10000
+                  directory: file:/data/users/njain/hive4/hive4/build/ql/tmp/425720684/10000
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -164,7 +169,7 @@
                         bucket_count -1
                         serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                         file.inputformat org.apache.hadoop.mapred.TextInputFormat
-                        location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
+                        location file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/dest1
                         file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: dest1
@@ -173,7 +178,7 @@
     Move Operator
       tables:
           replace: true
-          source: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1104113199/10000
+          source: file:/data/users/njain/hive4/hive4/build/ql/tmp/425720684/10000
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -187,10 +192,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/dest1
+                location file:/data/users/njain/hive4/hive4/build/ql/test/data/warehouse/dest1
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: dest1
-          tmp directory: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/1104113199/10001
+          tmp directory: file:/data/users/njain/hive4/hive4/build/ql/tmp/425720684/10001
 
 
 query: INSERT OVERWRITE TABLE dest1 SELECT s.*
@@ -200,7 +205,7 @@
 Output: default/dest1
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/128296418/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/1412070589/10000
 238	val_238	2008-04-08	11
 86	val_86	2008-04-08	11
 311	val_311	2008-04-08	11
@@ -703,5 +708,5 @@
 97	val_97	2008-04-08	11
 query: select count(1) from srcbucket
 Input: default/srcbucket
-Output: file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/822380674/10000
+Output: file:/data/users/njain/hive4/hive4/build/ql/tmp/984906761/10000
 1000

Modified: hadoop/hive/branches/branch-0.4/ql/src/test/results/compiler/plan/sample1.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/test/results/compiler/plan/sample1.q.xml?rev=811580&r1=811579&r2=811580&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/test/results/compiler/plan/sample1.q.xml (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/test/results/compiler/plan/sample1.q.xml Sat Sep  5 00:46:20 2009
@@ -77,7 +77,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart</string> 
+            <string>file:/data/users/njain/hive_commit4/hive_commit4/build/ql/test/data/warehouse/srcpart</string> 
            </void> 
           </object> 
          </void> 
@@ -105,40 +105,107 @@
                 <void property="childOperators"> 
                  <object class="java.util.ArrayList"> 
                   <void method="add"> 
-                   <object id="SelectOperator0" class="org.apache.hadoop.hive.ql.exec.SelectOperator"> 
+                   <object id="FilterOperator2" class="org.apache.hadoop.hive.ql.exec.FilterOperator"> 
                     <void property="childOperators"> 
                      <object class="java.util.ArrayList"> 
                       <void method="add"> 
-                       <object class="org.apache.hadoop.hive.ql.exec.FileSinkOperator"> 
-                        <void property="conf"> 
-                         <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
-                          <void property="dirName"> 
-                           <string>file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/tmp/422992274/10001</string> 
-                          </void> 
-                          <void property="tableInfo"> 
-                           <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
-                            <void property="deserializerClass"> 
-                             <class>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</class> 
-                            </void> 
-                            <void property="inputFileFormatClass"> 
-                             <class>org.apache.hadoop.mapred.TextInputFormat</class> 
-                            </void> 
-                            <void property="outputFileFormatClass"> 
-                             <class>org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat</class> 
-                            </void> 
-                            <void property="properties"> 
-                             <object class="java.util.Properties"> 
-                              <void method="put"> 
-                               <string>columns</string> 
-                               <string>_col0,_col1,_col2,_col3</string> 
-                              </void> 
-                              <void method="put"> 
-                               <string>serialization.format</string> 
-                               <string>1</string> 
-                              </void> 
-                              <void method="put"> 
-                               <string>columns.types</string> 
-                               <string>string:string:string:string</string> 
+                       <object id="SelectOperator0" class="org.apache.hadoop.hive.ql.exec.SelectOperator"> 
+                        <void property="childOperators"> 
+                         <object class="java.util.ArrayList"> 
+                          <void method="add"> 
+                           <object class="org.apache.hadoop.hive.ql.exec.FileSinkOperator"> 
+                            <void property="conf"> 
+                             <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
+                              <void property="dirName"> 
+                               <string>file:/data/users/njain/hive_commit4/hive_commit4/build/ql/tmp/812476419/10001</string> 
+                              </void> 
+                              <void property="tableInfo"> 
+                               <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
+                                <void property="deserializerClass"> 
+                                 <class>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</class> 
+                                </void> 
+                                <void property="inputFileFormatClass"> 
+                                 <class>org.apache.hadoop.mapred.TextInputFormat</class> 
+                                </void> 
+                                <void property="outputFileFormatClass"> 
+                                 <class>org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat</class> 
+                                </void> 
+                                <void property="properties"> 
+                                 <object class="java.util.Properties"> 
+                                  <void method="put"> 
+                                   <string>columns</string> 
+                                   <string>_col0,_col1,_col2,_col3</string> 
+                                  </void> 
+                                  <void method="put"> 
+                                   <string>serialization.format</string> 
+                                   <string>1</string> 
+                                  </void> 
+                                  <void method="put"> 
+                                   <string>columns.types</string> 
+                                   <string>string:string:string:string</string> 
+                                  </void> 
+                                 </object> 
+                                </void> 
+                               </object> 
+                              </void> 
+                             </object> 
+                            </void> 
+                            <void property="parentOperators"> 
+                             <object class="java.util.ArrayList"> 
+                              <void method="add"> 
+                               <object idref="SelectOperator0"/> 
+                              </void> 
+                             </object> 
+                            </void> 
+                            <void property="schema"> 
+                             <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> 
+                              <void property="signature"> 
+                               <object class="java.util.Vector"> 
+                                <void method="add"> 
+                                 <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
+                                  <void property="internalName"> 
+                                   <string>_col0</string> 
+                                  </void> 
+                                  <void property="type"> 
+                                   <object id="PrimitiveTypeInfo0" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
+                                    <void property="typeName"> 
+                                     <string>string</string> 
+                                    </void> 
+                                   </object> 
+                                  </void> 
+                                 </object> 
+                                </void> 
+                                <void method="add"> 
+                                 <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
+                                  <void property="internalName"> 
+                                   <string>_col1</string> 
+                                  </void> 
+                                  <void property="type"> 
+                                   <object idref="PrimitiveTypeInfo0"/> 
+                                  </void> 
+                                 </object> 
+                                </void> 
+                                <void method="add"> 
+                                 <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
+                                  <void property="internalName"> 
+                                   <string>_col2</string> 
+                                  </void> 
+                                  <void property="type"> 
+                                   <object idref="PrimitiveTypeInfo0"/> 
+                                  </void> 
+                                 </object> 
+                                </void> 
+                                <void method="add"> 
+                                 <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
+                                  <void property="internalName"> 
+                                   <string>_col3</string> 
+                                  </void> 
+                                  <void property="type"> 
+                                   <object idref="PrimitiveTypeInfo0"/> 
+                                  </void> 
+                                 </object> 
+                                </void> 
+                               </object> 
                               </void> 
                              </object> 
                             </void> 
@@ -146,10 +213,109 @@
                           </void> 
                          </object> 
                         </void> 
+                        <void property="columnExprMap"> 
+                         <object class="java.util.HashMap"> 
+                          <void method="put"> 
+                           <string>_col3</string> 
+                           <object id="exprNodeColumnDesc0" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
+                            <void property="column"> 
+                             <string>hr</string> 
+                            </void> 
+                            <void property="tabAlias"> 
+                             <string>s</string> 
+                            </void> 
+                            <void property="typeInfo"> 
+                             <object idref="PrimitiveTypeInfo0"/> 
+                            </void> 
+                           </object> 
+                          </void> 
+                          <void method="put"> 
+                           <string>_col2</string> 
+                           <object id="exprNodeColumnDesc1" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
+                            <void property="column"> 
+                             <string>ds</string> 
+                            </void> 
+                            <void property="tabAlias"> 
+                             <string>s</string> 
+                            </void> 
+                            <void property="typeInfo"> 
+                             <object idref="PrimitiveTypeInfo0"/> 
+                            </void> 
+                           </object> 
+                          </void> 
+                          <void method="put"> 
+                           <string>_col1</string> 
+                           <object id="exprNodeColumnDesc2" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
+                            <void property="column"> 
+                             <string>value</string> 
+                            </void> 
+                            <void property="tabAlias"> 
+                             <string>s</string> 
+                            </void> 
+                            <void property="typeInfo"> 
+                             <object idref="PrimitiveTypeInfo0"/> 
+                            </void> 
+                           </object> 
+                          </void> 
+                          <void method="put"> 
+                           <string>_col0</string> 
+                           <object id="exprNodeColumnDesc3" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
+                            <void property="column"> 
+                             <string>key</string> 
+                            </void> 
+                            <void property="tabAlias"> 
+                             <string>s</string> 
+                            </void> 
+                            <void property="typeInfo"> 
+                             <object idref="PrimitiveTypeInfo0"/> 
+                            </void> 
+                           </object> 
+                          </void> 
+                         </object> 
+                        </void> 
+                        <void property="conf"> 
+                         <object class="org.apache.hadoop.hive.ql.plan.selectDesc"> 
+                          <void property="colList"> 
+                           <object class="java.util.ArrayList"> 
+                            <void method="add"> 
+                             <object idref="exprNodeColumnDesc3"/> 
+                            </void> 
+                            <void method="add"> 
+                             <object idref="exprNodeColumnDesc2"/> 
+                            </void> 
+                            <void method="add"> 
+                             <object idref="exprNodeColumnDesc1"/> 
+                            </void> 
+                            <void method="add"> 
+                             <object idref="exprNodeColumnDesc0"/> 
+                            </void> 
+                           </object> 
+                          </void> 
+                          <void property="outputColumnNames"> 
+                           <object class="java.util.ArrayList"> 
+                            <void method="add"> 
+                             <string>_col0</string> 
+                            </void> 
+                            <void method="add"> 
+                             <string>_col1</string> 
+                            </void> 
+                            <void method="add"> 
+                             <string>_col2</string> 
+                            </void> 
+                            <void method="add"> 
+                             <string>_col3</string> 
+                            </void> 
+                           </object> 
+                          </void> 
+                          <void property="selectStar"> 
+                           <boolean>true</boolean> 
+                          </void> 
+                         </object> 
+                        </void> 
                         <void property="parentOperators"> 
                          <object class="java.util.ArrayList"> 
                           <void method="add"> 
-                           <object idref="SelectOperator0"/> 
+                           <object idref="FilterOperator2"/> 
                           </void> 
                          </object> 
                         </void> 
@@ -163,11 +329,7 @@
                                <string>_col0</string> 
                               </void> 
                               <void property="type"> 
-                               <object id="PrimitiveTypeInfo0" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
-                                <void property="typeName"> 
-                                 <string>string</string> 
-                                </void> 
-                               </object> 
+                               <object idref="PrimitiveTypeInfo0"/> 
                               </void> 
                              </object> 
                             </void> 
@@ -209,103 +371,148 @@
                       </void> 
                      </object> 
                     </void> 
-                    <void property="columnExprMap"> 
-                     <object class="java.util.HashMap"> 
-                      <void method="put"> 
-                       <string>_col3</string> 
-                       <object id="exprNodeColumnDesc0" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
-                        <void property="column"> 
-                         <string>hr</string> 
-                        </void> 
-                        <void property="tabAlias"> 
-                         <string>s</string> 
-                        </void> 
-                        <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo0"/> 
-                        </void> 
-                       </object> 
-                      </void> 
-                      <void method="put"> 
-                       <string>_col2</string> 
-                       <object id="exprNodeColumnDesc1" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
-                        <void property="column"> 
-                         <string>ds</string> 
-                        </void> 
-                        <void property="tabAlias"> 
-                         <string>s</string> 
-                        </void> 
-                        <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo0"/> 
-                        </void> 
-                       </object> 
-                      </void> 
-                      <void method="put"> 
-                       <string>_col1</string> 
-                       <object id="exprNodeColumnDesc2" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
-                        <void property="column"> 
-                         <string>value</string> 
-                        </void> 
-                        <void property="tabAlias"> 
-                         <string>s</string> 
-                        </void> 
-                        <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo0"/> 
-                        </void> 
-                       </object> 
-                      </void> 
-                      <void method="put"> 
-                       <string>_col0</string> 
-                       <object id="exprNodeColumnDesc3" class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
-                        <void property="column"> 
-                         <string>key</string> 
-                        </void> 
-                        <void property="tabAlias"> 
-                         <string>s</string> 
-                        </void> 
-                        <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo0"/> 
-                        </void> 
-                       </object> 
-                      </void> 
-                     </object> 
-                    </void> 
                     <void property="conf"> 
-                     <object class="org.apache.hadoop.hive.ql.plan.selectDesc"> 
-                      <void property="colList"> 
-                       <object class="java.util.ArrayList"> 
-                        <void method="add"> 
-                         <object idref="exprNodeColumnDesc3"/> 
-                        </void> 
-                        <void method="add"> 
-                         <object idref="exprNodeColumnDesc2"/> 
-                        </void> 
-                        <void method="add"> 
-                         <object idref="exprNodeColumnDesc1"/> 
-                        </void> 
-                        <void method="add"> 
-                         <object idref="exprNodeColumnDesc0"/> 
+                     <object class="org.apache.hadoop.hive.ql.plan.filterDesc"> 
+                      <void property="predicate"> 
+                       <object class="org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc"> 
+                        <void property="UDFClass"> 
+                         <class>org.apache.hadoop.hive.ql.udf.UDFOPAnd</class> 
                         </void> 
-                       </object> 
-                      </void> 
-                      <void property="outputColumnNames"> 
-                       <object class="java.util.ArrayList"> 
-                        <void method="add"> 
-                         <string>_col0</string> 
+                        <void property="UDFMethod"> 
+                         <object id="Method0" class="org.apache.hadoop.hive.ql.udf.UDFOPAnd" method="getMethod"> 
+                          <string>evaluate</string> 
+                          <array class="java.lang.Class" length="2"> 
+                           <void index="0"> 
+                            <class>org.apache.hadoop.io.BooleanWritable</class> 
+                           </void> 
+                           <void index="1"> 
+                            <class>org.apache.hadoop.io.BooleanWritable</class> 
+                           </void> 
+                          </array> 
+                         </object> 
                         </void> 
-                        <void method="add"> 
-                         <string>_col1</string> 
+                        <void property="childExprs"> 
+                         <object class="java.util.ArrayList"> 
+                          <void method="add"> 
+                           <object class="org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc"> 
+                            <void property="UDFClass"> 
+                             <class>org.apache.hadoop.hive.ql.udf.UDFOPEqual</class> 
+                            </void> 
+                            <void property="UDFMethod"> 
+                             <object id="Method1" class="org.apache.hadoop.hive.ql.udf.UDFOPEqual" method="getMethod"> 
+                              <string>evaluate</string> 
+                              <array class="java.lang.Class" length="2"> 
+                               <void index="0"> 
+                                <class>org.apache.hadoop.io.Text</class> 
+                               </void> 
+                               <void index="1"> 
+                                <class>org.apache.hadoop.io.Text</class> 
+                               </void> 
+                              </array> 
+                             </object> 
+                            </void> 
+                            <void property="childExprs"> 
+                             <object class="java.util.ArrayList"> 
+                              <void method="add"> 
+                               <object class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
+                                <void property="column"> 
+                                 <string>ds</string> 
+                                </void> 
+                                <void property="tabAlias"> 
+                                 <string>s</string> 
+                                </void> 
+                                <void property="typeInfo"> 
+                                 <object idref="PrimitiveTypeInfo0"/> 
+                                </void> 
+                               </object> 
+                              </void> 
+                              <void method="add"> 
+                               <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc"> 
+                                <void property="typeInfo"> 
+                                 <object idref="PrimitiveTypeInfo0"/> 
+                                </void> 
+                                <void property="value"> 
+                                 <string>2008-04-08</string> 
+                                </void> 
+                               </object> 
+                              </void> 
+                             </object> 
+                            </void> 
+                            <void property="methodName"> 
+                             <string>=</string> 
+                            </void> 
+                            <void property="typeInfo"> 
+                             <object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
+                              <void property="typeName"> 
+                               <string>boolean</string> 
+                              </void> 
+                             </object> 
+                            </void> 
+                           </object> 
+                          </void> 
+                          <void method="add"> 
+                           <object class="org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc"> 
+                            <void property="UDFClass"> 
+                             <class>org.apache.hadoop.hive.ql.udf.UDFOPEqual</class> 
+                            </void> 
+                            <void property="UDFMethod"> 
+                             <object id="Method2" class="org.apache.hadoop.hive.ql.udf.UDFOPEqual" method="getMethod"> 
+                              <string>evaluate</string> 
+                              <array class="java.lang.Class" length="2"> 
+                               <void index="0"> 
+                                <class>org.apache.hadoop.io.Text</class> 
+                               </void> 
+                               <void index="1"> 
+                                <class>org.apache.hadoop.io.Text</class> 
+                               </void> 
+                              </array> 
+                             </object> 
+                            </void> 
+                            <void property="childExprs"> 
+                             <object class="java.util.ArrayList"> 
+                              <void method="add"> 
+                               <object class="org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc"> 
+                                <void property="column"> 
+                                 <string>hr</string> 
+                                </void> 
+                                <void property="tabAlias"> 
+                                 <string>s</string> 
+                                </void> 
+                                <void property="typeInfo"> 
+                                 <object idref="PrimitiveTypeInfo0"/> 
+                                </void> 
+                               </object> 
+                              </void> 
+                              <void method="add"> 
+                               <object class="org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc"> 
+                                <void property="typeInfo"> 
+                                 <object idref="PrimitiveTypeInfo0"/> 
+                                </void> 
+                                <void property="value"> 
+                                 <string>11</string> 
+                                </void> 
+                               </object> 
+                              </void> 
+                             </object> 
+                            </void> 
+                            <void property="methodName"> 
+                             <string>=</string> 
+                            </void> 
+                            <void property="typeInfo"> 
+                             <object idref="PrimitiveTypeInfo1"/> 
+                            </void> 
+                           </object> 
+                          </void> 
+                         </object> 
                         </void> 
-                        <void method="add"> 
-                         <string>_col2</string> 
+                        <void property="methodName"> 
+                         <string>and</string> 
                         </void> 
-                        <void method="add"> 
-                         <string>_col3</string> 
+                        <void property="typeInfo"> 
+                         <object idref="PrimitiveTypeInfo1"/> 
                         </void> 
                        </object> 
                       </void> 
-                      <void property="selectStar"> 
-                       <boolean>true</boolean> 
-                      </void> 
                      </object> 
                     </void> 
                     <void property="parentOperators"> 
@@ -318,11 +525,11 @@
                     <void property="schema"> 
                      <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> 
                       <void property="signature"> 
-                       <object class="java.util.Vector"> 
+                       <object id="Vector0" class="java.util.Vector"> 
                         <void method="add"> 
                          <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                           <void property="internalName"> 
-                           <string>_col0</string> 
+                           <string>key</string> 
                           </void> 
                           <void property="type"> 
                            <object idref="PrimitiveTypeInfo0"/> 
@@ -332,7 +539,7 @@
                         <void method="add"> 
                          <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                           <void property="internalName"> 
-                           <string>_col1</string> 
+                           <string>value</string> 
                           </void> 
                           <void property="type"> 
                            <object idref="PrimitiveTypeInfo0"/> 
@@ -342,7 +549,7 @@
                         <void method="add"> 
                          <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                           <void property="internalName"> 
-                           <string>_col2</string> 
+                           <string>ds</string> 
                           </void> 
                           <void property="type"> 
                            <object idref="PrimitiveTypeInfo0"/> 
@@ -352,7 +559,7 @@
                         <void method="add"> 
                          <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                           <void property="internalName"> 
-                           <string>_col3</string> 
+                           <string>hr</string> 
                           </void> 
                           <void property="type"> 
                            <object idref="PrimitiveTypeInfo0"/> 
@@ -375,17 +582,7 @@
                      <class>org.apache.hadoop.hive.ql.udf.UDFOPAnd</class> 
                     </void> 
                     <void property="UDFMethod"> 
-                     <object class="org.apache.hadoop.hive.ql.udf.UDFOPAnd" method="getMethod"> 
-                      <string>evaluate</string> 
-                      <array class="java.lang.Class" length="2"> 
-                       <void index="0"> 
-                        <class>org.apache.hadoop.io.BooleanWritable</class> 
-                       </void> 
-                       <void index="1"> 
-                        <class>org.apache.hadoop.io.BooleanWritable</class> 
-                       </void> 
-                      </array> 
-                     </object> 
+                     <object idref="Method0"/> 
                     </void> 
                     <void property="childExprs"> 
                      <object class="java.util.ArrayList"> 
@@ -395,17 +592,7 @@
                          <class>org.apache.hadoop.hive.ql.udf.UDFOPEqual</class> 
                         </void> 
                         <void property="UDFMethod"> 
-                         <object class="org.apache.hadoop.hive.ql.udf.UDFOPEqual" method="getMethod"> 
-                          <string>evaluate</string> 
-                          <array class="java.lang.Class" length="2"> 
-                           <void index="0"> 
-                            <class>org.apache.hadoop.io.Text</class> 
-                           </void> 
-                           <void index="1"> 
-                            <class>org.apache.hadoop.io.Text</class> 
-                           </void> 
-                          </array> 
-                         </object> 
+                         <object idref="Method1"/> 
                         </void> 
                         <void property="childExprs"> 
                          <object class="java.util.ArrayList"> 
@@ -438,11 +625,7 @@
                          <string>=</string> 
                         </void> 
                         <void property="typeInfo"> 
-                         <object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
-                          <void property="typeName"> 
-                           <string>boolean</string> 
-                          </void> 
-                         </object> 
+                         <object idref="PrimitiveTypeInfo1"/> 
                         </void> 
                        </object> 
                       </void> 
@@ -452,17 +635,7 @@
                          <class>org.apache.hadoop.hive.ql.udf.UDFOPEqual</class> 
                         </void> 
                         <void property="UDFMethod"> 
-                         <object class="org.apache.hadoop.hive.ql.udf.UDFOPEqual" method="getMethod"> 
-                          <string>evaluate</string> 
-                          <array class="java.lang.Class" length="2"> 
-                           <void index="0"> 
-                            <class>org.apache.hadoop.io.Text</class> 
-                           </void> 
-                           <void index="1"> 
-                            <class>org.apache.hadoop.io.Text</class> 
-                           </void> 
-                          </array> 
-                         </object> 
+                         <object idref="Method2"/> 
                         </void> 
                         <void property="childExprs"> 
                          <object class="java.util.ArrayList"> 
@@ -521,48 +694,7 @@
                 <void property="schema"> 
                  <object class="org.apache.hadoop.hive.ql.exec.RowSchema"> 
                   <void property="signature"> 
-                   <object id="Vector0" class="java.util.Vector"> 
-                    <void method="add"> 
-                     <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
-                      <void property="internalName"> 
-                       <string>key</string> 
-                      </void> 
-                      <void property="type"> 
-                       <object idref="PrimitiveTypeInfo0"/> 
-                      </void> 
-                     </object> 
-                    </void> 
-                    <void method="add"> 
-                     <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
-                      <void property="internalName"> 
-                       <string>value</string> 
-                      </void> 
-                      <void property="type"> 
-                       <object idref="PrimitiveTypeInfo0"/> 
-                      </void> 
-                     </object> 
-                    </void> 
-                    <void method="add"> 
-                     <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
-                      <void property="internalName"> 
-                       <string>ds</string> 
-                      </void> 
-                      <void property="type"> 
-                       <object idref="PrimitiveTypeInfo0"/> 
-                      </void> 
-                     </object> 
-                    </void> 
-                    <void method="add"> 
-                     <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
-                      <void property="internalName"> 
-                       <string>hr</string> 
-                      </void> 
-                      <void property="type"> 
-                       <object idref="PrimitiveTypeInfo0"/> 
-                      </void> 
-                     </object> 
-                    </void> 
-                   </object> 
+                   <object idref="Vector0"/> 
                   </void> 
                  </object> 
                 </void> 
@@ -789,7 +921,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11</string> 
+       <string>file:/data/users/njain/hive_commit4/hive_commit4/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>s</string> 
@@ -801,7 +933,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/athusoo/commits/hive_trunk_ws1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11</string> 
+       <string>file:/data/users/njain/hive_commit4/hive_commit4/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object idref="LinkedHashMap0"/> 


