hadoop-hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From zs...@apache.org
Subject svn commit: r797643 [1/3] - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/apache/hadoop/hive/ql/plan/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpos...
Date Fri, 24 Jul 2009 20:46:53 GMT
Author: zshao
Date: Fri Jul 24 20:46:52 2009
New Revision: 797643

URL: http://svn.apache.org/viewvc?rev=797643&view=rev
Log:
HIVE-673. Bug in handling of null partitions. (Namit Jain via zshao)

Added:
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/nullgroup5.q
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup5.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input23.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part9.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/regexp_extract.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/sample9.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/subq.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union.q.out
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby5.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input3.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input8.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_part1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join5.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join7.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/subq.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf_case.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf_when.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/union.q.xml

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Fri Jul 24 20:46:52 2009
@@ -414,6 +414,9 @@
     HIVE-670. dump output column names in explain 
     (Zheng Shao via namit)
 
+    HIVE-673. Bug in handling of null partitions.
+    (Namit Jain via zshao)
+
 Release 0.3.1 - Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java Fri Jul 24 20:46:52 2009
@@ -643,82 +643,117 @@
     return w.getReducer() != null;
   }
 
-  private void addInputPaths(JobConf job, mapredWork work, String hiveScratchDir) throws Exception {
-    int numEmptyPaths = 0;
+  private boolean isEmptyPath(JobConf job, String path) throws Exception {
+    Path dirPath = new Path(path);
+    FileSystem inpFs = dirPath.getFileSystem(job);
+
+    if (inpFs.exists(dirPath)) {
+      FileStatus[] fStats = inpFs.listStatus(dirPath);
+      if (fStats.length > 0)
+        return false;
+    }
+    return true;
+  }
+
+  /**
+   * Handle an empty/null path for a given alias
+   */
+  private int addInputPath(String path, JobConf job, mapredWork work, String hiveScratchDir, int numEmptyPaths, boolean isEmptyPath,
+                           String alias) 
+    throws Exception {
+    // either the directory does not exist or it is empty
+    assert path == null || isEmptyPath;
+
+    // The input file does not exist; replace it with an empty file
+    Class<? extends HiveOutputFormat> outFileFormat = null;
+ 
+    if (isEmptyPath) 
+      outFileFormat = work.getPathToPartitionInfo().get(path).getTableDesc().getOutputFileFormatClass();
+    else
+      outFileFormat = (Class<? extends HiveOutputFormat>)(HiveSequenceFileOutputFormat.class);
     
-    // If the query references non-existent partitions
-    if (work.getPathToAliases().isEmpty() &&
-        !work.getAliasToWork().isEmpty()) {
-      String oneAlias = (String)work.getAliasToWork().keySet().toArray()[0];
-      
-      Class<? extends HiveOutputFormat> outFileFormat = (Class<? extends HiveOutputFormat>)
-          job.getClassByName("org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat");
-      
-      String newFile = hiveScratchDir + File.separator + (++numEmptyPaths);
-      Path newPath = new Path(newFile);
-      LOG.info("Changed input file to " + newPath.toString());
-      
-      // add a dummy work
-      Map<String, ArrayList<String>> pathToAliases = work.getPathToAliases();
+    String newFile = hiveScratchDir + File.separator + (++numEmptyPaths);
+    Path newPath = new Path(newFile);
+    LOG.info("Changed input file to " + newPath.toString());
+    
+    // toggle the work
+    LinkedHashMap<String, ArrayList<String>> pathToAliases = work.getPathToAliases();
+    if (isEmptyPath) {
+      assert path != null;
+      pathToAliases.put(newPath.toUri().toString(), pathToAliases.get(path));
+      pathToAliases.remove(path);
+    }
+    else {
+      assert path == null;
       ArrayList<String> newList = new ArrayList<String>();
-      newList.add(oneAlias);
-      pathToAliases.put(newPath.toString(), newList);
-      
-      Map<String,partitionDesc> pathToPartitionInfo = work.getPathToPartitionInfo();
-      partitionDesc pDesc = work.getAliasToPartnInfo().get(oneAlias);
-      pathToPartitionInfo.put(newPath.toString(), pDesc);
-      
-      RecordWriter recWriter = outFileFormat.newInstance().getHiveRecordWriter(job, newPath, Text.class, false, new Properties(), null);
-      recWriter.close(false);
-      FileInputFormat.addInputPaths(job, newPath.toString());
+      newList.add(alias);
+      pathToAliases.put(newPath.toUri().toString(), newList);
+    }
+
+    work.setPathToAliases(pathToAliases);
+    
+    LinkedHashMap<String,partitionDesc> pathToPartitionInfo = work.getPathToPartitionInfo();
+    if (isEmptyPath) {
+      pathToPartitionInfo.put(newPath.toUri().toString(), pathToPartitionInfo.get(path));
+      pathToPartitionInfo.remove(path);
     }
     else {
-      List<String> emptyPaths = new ArrayList<String>();
+      partitionDesc pDesc = work.getAliasToPartnInfo().get(alias).clone();
+      Class<? extends InputFormat>      inputFormat = SequenceFileInputFormat.class;
+      pDesc.getTableDesc().setInputFileFormatClass(inputFormat);
+      pathToPartitionInfo.put(newPath.toUri().toString(), pDesc);
+    }
+    work.setPathToPartitionInfo(pathToPartitionInfo);
+    
+    String onefile = newPath.toString();
+    RecordWriter recWriter = outFileFormat.newInstance().getHiveRecordWriter(job, newPath, Text.class, false, new Properties(), null);
+    recWriter.close(false);
+    FileInputFormat.addInputPaths(job, onefile);
+    return numEmptyPaths;
+  }
+
+  private void addInputPaths(JobConf job, mapredWork work, String hiveScratchDir) throws Exception {
+    int numEmptyPaths = 0;
 
+    List<String> pathsProcessed = new ArrayList<String>();
+
+    // AliasToWork contains all the aliases
+    for (String oneAlias : work.getAliasToWork().keySet()) {
+      LOG.info("Processing alias " + oneAlias);
+      List<String> emptyPaths     = new ArrayList<String>();
+
+      // The alias may not have any path 
+      String path = null;
       for (String onefile : work.getPathToAliases().keySet()) {
-        LOG.info("Adding input file " + onefile);
-        
-        // If the input file does not exist, replace it by a empty file
-        Path dirPath = new Path(onefile);
-        FileSystem inpFs = dirPath.getFileSystem(job);
-        boolean emptyInput = true;
-        
-        if (inpFs.exists(dirPath)) {
-          FileStatus[] fStats = inpFs.listStatus(dirPath);
-          if (fStats.length > 0)
-            emptyInput = false;
+        List<String> aliases = work.getPathToAliases().get(onefile);
+        if (aliases.contains(oneAlias)) {
+          path = onefile;
+      
+          // Multiple aliases can point to the same path - it should be processed only once
+          if (pathsProcessed.contains(path))
+            continue;
+          pathsProcessed.add(path);
+
+          LOG.info("Adding input file " + path);
+
+          if (!isEmptyPath(job, path))
+            FileInputFormat.addInputPaths(job, path);
+          else
+            emptyPaths.add(path);
         }
-        
-        if (emptyInput)
-          emptyPaths.add(onefile);
-        else
-          FileInputFormat.addInputPaths(job, onefile);
       }
 
-      for (String emptyFile : emptyPaths) {
-        Class<? extends HiveOutputFormat> outFileFormat = work.getPathToPartitionInfo().get(emptyFile).getTableDesc().getOutputFileFormatClass();
-        
-        String newFile = hiveScratchDir + File.separator + (++numEmptyPaths);
-        Path newPath = new Path(newFile);
-        LOG.info("Changed input file to " + newPath.toString());
-        
-        // toggle the work
-        LinkedHashMap<String, ArrayList<String>> pathToAliases = work.getPathToAliases();
-        pathToAliases.put(newPath.toUri().toString(), pathToAliases.get(emptyFile));
-        pathToAliases.remove(emptyFile);
-        work.setPathToAliases(pathToAliases);
-        
-        LinkedHashMap<String,partitionDesc> pathToPartitionInfo = work.getPathToPartitionInfo();
-        pathToPartitionInfo.put(newPath.toUri().toString(), pathToPartitionInfo.get(emptyFile));
-        pathToPartitionInfo.remove(emptyFile);
-        work.setPathToPartitionInfo(pathToPartitionInfo);
-        
-        String onefile = newPath.toString();
-        RecordWriter recWriter = outFileFormat.newInstance().getHiveRecordWriter(job, newPath, Text.class, false, new Properties(), null);
-        recWriter.close(false);
-        FileInputFormat.addInputPaths(job, onefile);
-      }
-    }
+      // Create an empty file if the directory is empty
+      for (String emptyPath : emptyPaths)
+        numEmptyPaths = addInputPath(emptyPath, job, work, hiveScratchDir, numEmptyPaths, true, oneAlias);
 
+      // If the query references non-existent partitions
+      // We need to add an empty file; it is not acceptable to change the operator tree
+      // Consider the query:
+      //  select * from (select count(1) from T union all select count(1) from T2) x;
+      // If T is empty and T2 contains 100 rows, the user expects: 0, 100 (2 rows)
+      if (path == null)
+        numEmptyPaths = addInputPath(null, job, work, hiveScratchDir, numEmptyPaths, false, oneAlias);
+    }
   }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri Jul 24 20:46:52 2009
@@ -2495,10 +2495,7 @@
           }
           
           first = false;
-          if (nm[0] == null) 
-            cols = cols.concat(nm[1]);
-          else
-            cols = cols.concat(nm[0] + "." + nm[1]);
+          cols = cols.concat(colInfo.getInternalName());
           colTypes = colTypes.concat(colInfo.getType().getTypeName());
         }
 
@@ -2523,6 +2520,7 @@
 
     input = genConversionSelectOperator(dest, qb, input, table_desc);
     inputRR = opParseCtx.get(input).getRR();
+
     Vector<ColumnInfo> vecCol = new Vector<ColumnInfo>();
 
     try {
@@ -2547,6 +2545,7 @@
     LOG.debug("Created FileSink Plan for clause: " + dest + "dest_path: "
         + dest_path + " row schema: "
         + inputRR.toString());
+    
     return output;
   }
 
@@ -4223,7 +4222,7 @@
     // Do any sample pruning
     genSamplePruners(qb);
     LOG.info("Completed sample pruning");
-
+    
     // At this point we have the complete operator tree
     // from which we want to find the reduce operator
     genMapRedTasks(qb);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/partitionDesc.java Fri Jul 24 20:46:52 2009
@@ -21,7 +21,7 @@
 import java.io.Serializable;
 
 @explain(displayName="Partition")
-public class partitionDesc implements Serializable {
+public class partitionDesc implements Serializable, Cloneable {
   private static final long serialVersionUID = 1L;
   private tableDesc table;
   private java.util.LinkedHashMap<String, String> partSpec;
@@ -48,4 +48,8 @@
   public void setPartSpec(final java.util.LinkedHashMap<String, String> partSpec) {
     this.partSpec=partSpec;
   }
+  
+  public partitionDesc clone() throws CloneNotSupportedException {
+    return (partitionDesc)super.clone();
+  }
 }

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/nullgroup5.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/nullgroup5.q?rev=797643&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/nullgroup5.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/nullgroup5.q Fri Jul 24 20:46:52 2009
@@ -0,0 +1,26 @@
+DROP TABLE tstparttbl;
+CREATE TABLE tstparttbl(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE;
+LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2009-04-09');
+
+DROP TABLE tstparttbl2;
+CREATE TABLE tstparttbl2(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE;
+LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl2 PARTITION (ds='2009-04-09');
+
+explain
+select u.* from
+(
+  select key, value from tstparttbl x where x.ds='2009-04-05'
+    union all  
+  select key, value from tstparttbl2 y where y.ds='2009-04-09'
+)u;
+
+select u.* from
+(
+  select key, value from tstparttbl x where x.ds='2009-04-05'
+    union all  
+  select key, value from tstparttbl2 y where y.ds='2009-04-09'
+)u;
+
+
+DROP TABLE tstparttbl;
+DROP TABLE tstparttbl2;

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out Fri Jul 24 20:46:52 2009
@@ -122,10 +122,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/320546580/10000
+                destination: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/2103139823/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1614807957/10007 
+              file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/210340643/10007 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -164,10 +164,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/320546580/10002
+                destination: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/2103139823/10002
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1614807957/10008 
+              file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/210340643/10008 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -206,10 +206,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/320546580/10004
+                destination: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/2103139823/10004
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1614807957/10009 
+              file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/210340643/10009 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -249,10 +249,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/320546580/10006
+                destination: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/2103139823/10006
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1614807957/10010 
+              file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/210340643/10010 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -260,7 +260,7 @@
                           type: double
                     tag: -1
                     value expressions:
-                          expr: src.value
+                          expr: _col0
                           type: string
             Reduce Operator Tree:
               Extract
@@ -290,7 +290,7 @@
 Output: ../build/ql/test/data/warehouse/dest4.out
 query: SELECT dest1.* FROM dest1
 Input: default/dest1
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1615004704/10000
+Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1733324289/10000
 86	val_86
 27	val_27
 98	val_98
@@ -377,7 +377,7 @@
 97	val_97
 query: SELECT dest2.* FROM dest2
 Input: default/dest2
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/603803318/10000
+Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1779971956/10000
 165	val_165
 193	val_193
 150	val_150
@@ -485,7 +485,7 @@
 169	val_169
 query: SELECT dest3.* FROM dest3
 Input: default/dest3/ds=2008-04-08/hr=12
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1267970623/10000
+Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1888647376/10000
 238	2008-04-08	12
 255	2008-04-08	12
 278	2008-04-08	12

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input23.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input23.q.out?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input23.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input23.q.out Fri Jul 24 20:46:52 2009
@@ -47,9 +47,9 @@
                       type: string
       Needs Tagging: true
       Path -> Alias:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
       Path -> Partition:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -68,7 +68,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
       Reduce Operator Tree:
@@ -106,12 +106,12 @@
                 File Output Operator
                   compressed: false
                   GlobalTableId: 0
-                  directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1563860214/10001
+                  directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1797615685/10001
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
-                        columns a.key,a.value,a.ds,a.hr,b.key,b.value,b.ds,b.hr
+                        columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7
                         serialization.format 1
                         columns.types string:string:string:string:string:string:string:string
 
@@ -122,4 +122,4 @@
 
 query: select * from srcpart a join srcpart b where a.ds = '2008-04-08' and a.hr = '11' and b.ds = '2008-04-08' and b.hr = '14' limit 5
 Input: default/srcpart/ds=2008-04-08/hr=11
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1754995949/10000
+Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1293686104/10000

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part7.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part7.q.out?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part7.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part7.q.out Fri Jul 24 20:46:52 2009
@@ -112,10 +112,10 @@
                               type: string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
       Path -> Partition:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -134,10 +134,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             partition values:
               ds 2008-04-08
@@ -156,7 +156,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
       Reduce Operator Tree:
@@ -164,12 +164,12 @@
           File Output Operator
             compressed: false
             GlobalTableId: 0
-            directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/766848066/10001
+            directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/526431721/10001
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 properties:
-                  columns a.key,a.value,a.ds,a.hr
+                  columns _col0,_col1,_col2,_col3
                   serialization.format 1
                   columns.types string:string:string:string
 
@@ -186,7 +186,7 @@
 SORT BY A.key
 Input: default/srcpart/ds=2008-04-08/hr=11
 Input: default/srcpart/ds=2008-04-08/hr=12
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_2/build/ql/tmp/614766513/10000
+Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/891014726/10000
 0	val_0	2008-04-08	11
 0	val_0	2008-04-08	11
 0	val_0	2008-04-08	11

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part9.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part9.q.out?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part9.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part9.q.out Fri Jul 24 20:46:52 2009
@@ -34,20 +34,20 @@
                   File Output Operator
                     compressed: false
                     GlobalTableId: 0
-                    directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1659335848/10001
+                    directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/741456423/10001
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
-                          columns x.key,x.value,x.ds,x.hr
+                          columns _col0,_col1,_col2,_col3
                           serialization.format 1
                           columns.types string:string:string:string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
       Path -> Partition:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -66,10 +66,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             partition values:
               ds 2008-04-08
@@ -88,7 +88,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
 
@@ -100,7 +100,7 @@
 query: SELECT x.* FROM SRCPART x WHERE key IS NOT NULL AND ds = '2008-04-08'
 Input: default/srcpart/ds=2008-04-08/hr=11
 Input: default/srcpart/ds=2008-04-08/hr=12
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/460431348/10000
+Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/675875634/10000
 238	val_238	2008-04-08	11
 86	val_86	2008-04-08	11
 311	val_311	2008-04-08	11

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup5.q.out?rev=797643&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup5.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup5.q.out Fri Jul 24 20:46:52 2009
@@ -0,0 +1,596 @@
+query: DROP TABLE tstparttbl
+query: CREATE TABLE tstparttbl(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl PARTITION (ds='2009-04-09')
+query: DROP TABLE tstparttbl2
+query: CREATE TABLE tstparttbl2(KEY STRING, VALUE STRING) PARTITIONED BY(ds string) STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE tstparttbl2 PARTITION (ds='2009-04-09')
+query: explain
+select u.* from
+(
+  select key, value from tstparttbl x where x.ds='2009-04-05'
+    union all  
+  select key, value from tstparttbl2 y where y.ds='2009-04-09'
+)u
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF tstparttbl x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) ds) '2009-04-05')))) (TOK_QUERY (TOK_FROM (TOK_TABREF tstparttbl2 y)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL y) ds) '2009-04-09'))))) u)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF u)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        null-subquery1:u-subquery1:x 
+            Filter Operator
+              predicate:
+                  expr: (ds = '2009-04-05')
+                  type: boolean
+              Filter Operator
+                predicate:
+                    expr: (ds = '2009-04-05')
+                    type: boolean
+                Select Operator
+                  expressions:
+                        expr: key
+                        type: string
+                        expr: value
+                        type: string
+                  Union
+                    Select Operator
+                      expressions:
+                            expr: _col0
+                            type: string
+                            expr: _col1
+                            type: string
+                      File Output Operator
+                        compressed: false
+                        GlobalTableId: 0
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+        null-subquery2:u-subquery2:y 
+            Filter Operator
+              predicate:
+                  expr: (ds = '2009-04-09')
+                  type: boolean
+              Filter Operator
+                predicate:
+                    expr: (ds = '2009-04-09')
+                    type: boolean
+                Select Operator
+                  expressions:
+                        expr: key
+                        type: string
+                        expr: value
+                        type: string
+                  Union
+                    Select Operator
+                      expressions:
+                            expr: _col0
+                            type: string
+                            expr: _col1
+                            type: string
+                      File Output Operator
+                        compressed: false
+                        GlobalTableId: 0
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+query: select u.* from
+(
+  select key, value from tstparttbl x where x.ds='2009-04-05'
+    union all  
+  select key, value from tstparttbl2 y where y.ds='2009-04-09'
+)u
+Input: default/tstparttbl2/ds=2009-04-09
+Output: file:/data/users/njain/deploy/hive1/tools/ahive1-trunk-apache-hive/build/ql/tmp/910140009/10000
+238	val_238
+86	val_86
+311	val_311
+27	val_27
+165	val_165
+409	val_409
+255	val_255
+278	val_278
+98	val_98
+484	val_484
+265	val_265
+193	val_193
+401	val_401
+150	val_150
+273	val_273
+224	val_224
+369	val_369
+66	val_66
+128	val_128
+213	val_213
+146	val_146
+406	val_406
+429	val_429
+374	val_374
+152	val_152
+469	val_469
+145	val_145
+495	val_495
+37	val_37
+327	val_327
+281	val_281
+277	val_277
+209	val_209
+15	val_15
+82	val_82
+403	val_403
+166	val_166
+417	val_417
+430	val_430
+252	val_252
+292	val_292
+219	val_219
+287	val_287
+153	val_153
+193	val_193
+338	val_338
+446	val_446
+459	val_459
+394	val_394
+237	val_237
+482	val_482
+174	val_174
+413	val_413
+494	val_494
+207	val_207
+199	val_199
+466	val_466
+208	val_208
+174	val_174
+399	val_399
+396	val_396
+247	val_247
+417	val_417
+489	val_489
+162	val_162
+377	val_377
+397	val_397
+309	val_309
+365	val_365
+266	val_266
+439	val_439
+342	val_342
+367	val_367
+325	val_325
+167	val_167
+195	val_195
+475	val_475
+17	val_17
+113	val_113
+155	val_155
+203	val_203
+339	val_339
+0	val_0
+455	val_455
+128	val_128
+311	val_311
+316	val_316
+57	val_57
+302	val_302
+205	val_205
+149	val_149
+438	val_438
+345	val_345
+129	val_129
+170	val_170
+20	val_20
+489	val_489
+157	val_157
+378	val_378
+221	val_221
+92	val_92
+111	val_111
+47	val_47
+72	val_72
+4	val_4
+280	val_280
+35	val_35
+427	val_427
+277	val_277
+208	val_208
+356	val_356
+399	val_399
+169	val_169
+382	val_382
+498	val_498
+125	val_125
+386	val_386
+437	val_437
+469	val_469
+192	val_192
+286	val_286
+187	val_187
+176	val_176
+54	val_54
+459	val_459
+51	val_51
+138	val_138
+103	val_103
+239	val_239
+213	val_213
+216	val_216
+430	val_430
+278	val_278
+176	val_176
+289	val_289
+221	val_221
+65	val_65
+318	val_318
+332	val_332
+311	val_311
+275	val_275
+137	val_137
+241	val_241
+83	val_83
+333	val_333
+180	val_180
+284	val_284
+12	val_12
+230	val_230
+181	val_181
+67	val_67
+260	val_260
+404	val_404
+384	val_384
+489	val_489
+353	val_353
+373	val_373
+272	val_272
+138	val_138
+217	val_217
+84	val_84
+348	val_348
+466	val_466
+58	val_58
+8	val_8
+411	val_411
+230	val_230
+208	val_208
+348	val_348
+24	val_24
+463	val_463
+431	val_431
+179	val_179
+172	val_172
+42	val_42
+129	val_129
+158	val_158
+119	val_119
+496	val_496
+0	val_0
+322	val_322
+197	val_197
+468	val_468
+393	val_393
+454	val_454
+100	val_100
+298	val_298
+199	val_199
+191	val_191
+418	val_418
+96	val_96
+26	val_26
+165	val_165
+327	val_327
+230	val_230
+205	val_205
+120	val_120
+131	val_131
+51	val_51
+404	val_404
+43	val_43
+436	val_436
+156	val_156
+469	val_469
+468	val_468
+308	val_308
+95	val_95
+196	val_196
+288	val_288
+481	val_481
+457	val_457
+98	val_98
+282	val_282
+197	val_197
+187	val_187
+318	val_318
+318	val_318
+409	val_409
+470	val_470
+137	val_137
+369	val_369
+316	val_316
+169	val_169
+413	val_413
+85	val_85
+77	val_77
+0	val_0
+490	val_490
+87	val_87
+364	val_364
+179	val_179
+118	val_118
+134	val_134
+395	val_395
+282	val_282
+138	val_138
+238	val_238
+419	val_419
+15	val_15
+118	val_118
+72	val_72
+90	val_90
+307	val_307
+19	val_19
+435	val_435
+10	val_10
+277	val_277
+273	val_273
+306	val_306
+224	val_224
+309	val_309
+389	val_389
+327	val_327
+242	val_242
+369	val_369
+392	val_392
+272	val_272
+331	val_331
+401	val_401
+242	val_242
+452	val_452
+177	val_177
+226	val_226
+5	val_5
+497	val_497
+402	val_402
+396	val_396
+317	val_317
+395	val_395
+58	val_58
+35	val_35
+336	val_336
+95	val_95
+11	val_11
+168	val_168
+34	val_34
+229	val_229
+233	val_233
+143	val_143
+472	val_472
+322	val_322
+498	val_498
+160	val_160
+195	val_195
+42	val_42
+321	val_321
+430	val_430
+119	val_119
+489	val_489
+458	val_458
+78	val_78
+76	val_76
+41	val_41
+223	val_223
+492	val_492
+149	val_149
+449	val_449
+218	val_218
+228	val_228
+138	val_138
+453	val_453
+30	val_30
+209	val_209
+64	val_64
+468	val_468
+76	val_76
+74	val_74
+342	val_342
+69	val_69
+230	val_230
+33	val_33
+368	val_368
+103	val_103
+296	val_296
+113	val_113
+216	val_216
+367	val_367
+344	val_344
+167	val_167
+274	val_274
+219	val_219
+239	val_239
+485	val_485
+116	val_116
+223	val_223
+256	val_256
+263	val_263
+70	val_70
+487	val_487
+480	val_480
+401	val_401
+288	val_288
+191	val_191
+5	val_5
+244	val_244
+438	val_438
+128	val_128
+467	val_467
+432	val_432
+202	val_202
+316	val_316
+229	val_229
+469	val_469
+463	val_463
+280	val_280
+2	val_2
+35	val_35
+283	val_283
+331	val_331
+235	val_235
+80	val_80
+44	val_44
+193	val_193
+321	val_321
+335	val_335
+104	val_104
+466	val_466
+366	val_366
+175	val_175
+403	val_403
+483	val_483
+53	val_53
+105	val_105
+257	val_257
+406	val_406
+409	val_409
+190	val_190
+406	val_406
+401	val_401
+114	val_114
+258	val_258
+90	val_90
+203	val_203
+262	val_262
+348	val_348
+424	val_424
+12	val_12
+396	val_396
+201	val_201
+217	val_217
+164	val_164
+431	val_431
+454	val_454
+478	val_478
+298	val_298
+125	val_125
+431	val_431
+164	val_164
+424	val_424
+187	val_187
+382	val_382
+5	val_5
+70	val_70
+397	val_397
+480	val_480
+291	val_291
+24	val_24
+351	val_351
+255	val_255
+104	val_104
+70	val_70
+163	val_163
+438	val_438
+119	val_119
+414	val_414
+200	val_200
+491	val_491
+237	val_237
+439	val_439
+360	val_360
+248	val_248
+479	val_479
+305	val_305
+417	val_417
+199	val_199
+444	val_444
+120	val_120
+429	val_429
+169	val_169
+443	val_443
+323	val_323
+325	val_325
+277	val_277
+230	val_230
+478	val_478
+178	val_178
+468	val_468
+310	val_310
+317	val_317
+333	val_333
+493	val_493
+460	val_460
+207	val_207
+249	val_249
+265	val_265
+480	val_480
+83	val_83
+136	val_136
+353	val_353
+172	val_172
+214	val_214
+462	val_462
+233	val_233
+406	val_406
+133	val_133
+175	val_175
+189	val_189
+454	val_454
+375	val_375
+401	val_401
+421	val_421
+407	val_407
+384	val_384
+256	val_256
+26	val_26
+134	val_134
+67	val_67
+384	val_384
+379	val_379
+18	val_18
+462	val_462
+492	val_492
+100	val_100
+298	val_298
+9	val_9
+341	val_341
+498	val_498
+146	val_146
+458	val_458
+362	val_362
+186	val_186
+285	val_285
+348	val_348
+167	val_167
+18	val_18
+273	val_273
+183	val_183
+281	val_281
+344	val_344
+97	val_97
+469	val_469
+315	val_315
+84	val_84
+28	val_28
+37	val_37
+448	val_448
+152	val_152
+348	val_348
+307	val_307
+194	val_194
+414	val_414
+477	val_477
+222	val_222
+126	val_126
+90	val_90
+169	val_169
+403	val_403
+400	val_400
+200	val_200
+97	val_97
+query: DROP TABLE tstparttbl
+query: DROP TABLE tstparttbl2

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out Fri Jul 24 20:46:52 2009
@@ -26,19 +26,19 @@
                 File Output Operator
                   compressed: false
                   GlobalTableId: 0
-                  directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/828496123/10001
+                  directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/1509492939/10001
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
-                        columns src.key,src.value
+                        columns _col0,_col1
                         serialization.format 1
                         columns.types string:string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src 
       Path -> Partition:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src 
           Partition
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -53,7 +53,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
 
@@ -64,7 +64,7 @@
 
 query: select * from src where rand(1) < 0.1
 Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1400098628/10000
+Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/1647273851/10000
 409	val_409
 429	val_429
 209	val_209

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out Fri Jul 24 20:46:52 2009
@@ -31,19 +31,19 @@
                 File Output Operator
                   compressed: false
                   GlobalTableId: 0
-                  directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/702780213/10001
+                  directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1921119763/10001
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
-                        columns a.key,a.value,a.ds,a.hr
+                        columns _col0,_col1,_col2,_col3
                         serialization.format 1
                         columns.types string:string:string:string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
       Path -> Partition:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             partition values:
               ds 2008-04-08
@@ -62,7 +62,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
 
@@ -73,7 +73,7 @@
 
 query: select a.* from srcpart a where rand(1) < 0.1 and a.ds = '2008-04-08' and not(key > 50 or key < 10) and a.hr like '%2'
 Input: default/srcpart/ds=2008-04-08/hr=12
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/271992236/10000
+Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/353265726/10000
 42	val_42	2008-04-08	12
 44	val_44	2008-04-08	12
 26	val_26	2008-04-08	12
@@ -115,19 +115,19 @@
                   File Output Operator
                     compressed: false
                     GlobalTableId: 0
-                    directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/728265966/10001
+                    directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/140562086/10001
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
-                          columns a.key,a.value,a.ds,a.hr
+                          columns _col0,_col1,_col2,_col3
                           serialization.format 1
                           columns.types string:string:string:string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
       Path -> Partition:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             partition values:
               ds 2008-04-08
@@ -146,7 +146,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
 
@@ -157,7 +157,7 @@
 
 query: select a.* from srcpart a where a.ds = '2008-04-08' and not(key > 50 or key < 10) and a.hr like '%2'
 Input: default/srcpart/ds=2008-04-08/hr=12
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/85515715/10000
+Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/671022462/10000
 27	val_27	2008-04-08	12
 37	val_37	2008-04-08	12
 15	val_15	2008-04-08	12

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/regexp_extract.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/regexp_extract.q.out?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/regexp_extract.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/regexp_extract.q.out Fri Jul 24 20:46:52 2009
@@ -54,9 +54,9 @@
                         type: string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src 
       Path -> Partition:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src 
           Partition
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -71,7 +71,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
       Reduce Operator Tree:
@@ -90,12 +90,12 @@
               File Output Operator
                 compressed: false
                 GlobalTableId: 0
-                directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1541207901/10001
+                directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/692408203/10001
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      columns tmap.key,_c1
+                      columns _col0,_col1
                       serialization.format 1
                       columns.types string:string
 
@@ -112,7 +112,7 @@
 ) tmap
 SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)',1) WHERE tmap.key < 100
 Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1141997690/10000
+Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/136730416/10000
 0	0	3
 0	0	3
 0	0	3
@@ -253,9 +253,9 @@
                         type: string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src 
       Path -> Partition:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src 
           Partition
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -270,7 +270,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/src
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/src
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: src
       Reduce Operator Tree:
@@ -289,12 +289,12 @@
               File Output Operator
                 compressed: false
                 GlobalTableId: 0
-                directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1608248618/10001
+                directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/1786530675/10001
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      columns tmap.key,_c1
+                      columns _col0,_col1
                       serialization.format 1
                       columns.types string:string
 
@@ -311,7 +311,7 @@
 ) tmap
 SELECT tmap.key, regexp_extract(tmap.value, 'val_(\\d+\\t\\d+)') WHERE tmap.key < 100
 Input: default/src
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/949594245/10000
+Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/1351305244/10000
 0	0	3
 0	0	3
 0	0	3

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/sample8.q.out Fri Jul 24 20:46:52 2009
@@ -58,12 +58,12 @@
                         type: string
       Needs Tagging: true
       Path -> Alias:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 
       Path -> Partition:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 
           Partition
             partition values:
               ds 2008-04-08
@@ -82,10 +82,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 
           Partition
             partition values:
               ds 2008-04-08
@@ -104,10 +104,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=11 
           Partition
             partition values:
               ds 2008-04-09
@@ -126,10 +126,10 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart/ds=2008-04-09/hr=12 
           Partition
             partition values:
               ds 2008-04-09
@@ -148,7 +148,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/test/data/warehouse/srcpart
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcpart
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcpart
       Reduce Operator Tree:
@@ -177,7 +177,7 @@
               File Output Operator
                 compressed: false
                 GlobalTableId: 0
-                directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1675856567/10002
+                directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1558603137/10002
                 table:
                     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -189,7 +189,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1675856567/10002 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1558603137/10002 
             Reduce Output Operator
               key expressions:
                     expr: _col0
@@ -214,9 +214,9 @@
                     type: string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1675856567/10002 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1558603137/10002 
       Path -> Partition:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1675856567/10002 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1558603137/10002 
           Partition
           
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -230,12 +230,12 @@
           File Output Operator
             compressed: false
             GlobalTableId: 0
-            directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1675856567/10001
+            directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1558603137/10001
             table:
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                 properties:
-                  columns s.key,s.value,s.ds,s.hr
+                  columns _col0,_col1,_col2,_col3
                   serialization.format 1
                   columns.types string:string:string:string
 
@@ -254,7 +254,7 @@
 Input: default/srcpart/ds=2008-04-08/hr=12
 Input: default/srcpart/ds=2008-04-09/hr=11
 Input: default/srcpart/ds=2008-04-09/hr=12
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/1560234654/10000
+Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1151844475/10000
 0	val_0
 0	val_0
 0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/sample9.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/sample9.q.out?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/sample9.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/sample9.q.out Fri Jul 24 20:46:52 2009
@@ -38,19 +38,19 @@
                     File Output Operator
                       compressed: false
                       GlobalTableId: 0
-                      directory: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/613132925/10001
+                      directory: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/1998091453/10001
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           properties:
-                            columns s.key,s.value
+                            columns _col0,_col1
                             serialization.format 1
                             columns.types int:string
       Needs Tagging: false
       Path -> Alias:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcbucket 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcbucket 
       Path -> Partition:
-        file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcbucket 
+        file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcbucket 
           Partition
           
               input format: org.apache.hadoop.mapred.TextInputFormat
@@ -66,7 +66,7 @@
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 file.inputformat org.apache.hadoop.mapred.TextInputFormat
                 file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                location file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/test/data/warehouse/srcbucket
+                location file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/test/data/warehouse/srcbucket
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: srcbucket
 
@@ -78,7 +78,7 @@
 query: SELECT s.*
 FROM (SELECT a.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) a) s
 Input: default/srcbucket
-Output: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/792049932/10000
+Output: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/974126813/10000
 474	val_475
 62	val_63
 468	val_469

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/subq.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/subq.q.out?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/subq.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/subq.q.out Fri Jul 24 20:46:52 2009
@@ -51,10 +51,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/1937455581/10000
+                destination: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/177397376/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_1/build/ql/tmp/23337865/10001 
+              file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_2/build/ql/tmp/1486120786/10001 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -62,9 +62,9 @@
                           type: double
                     tag: -1
                     value expressions:
-                          expr: unioninput.key
+                          expr: _col0
                           type: string
-                          expr: unioninput.value
+                          expr: _col1
                           type: string
             Reduce Operator Tree:
               Extract

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/union.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/union.q.out?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/union.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/union.q.out Fri Jul 24 20:46:52 2009
@@ -86,10 +86,10 @@
           Move Operator
             files:
                 hdfs directory: true
-                destination: file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/246006693/10000
+                destination: file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/516261269/10000
           Map Reduce
             Alias -> Map Operator Tree:
-              file:/data/users/zshao/tools/670-trunk-apache-hive/.ptest_0/build/ql/tmp/248467415/10001 
+              file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_1/build/ql/tmp/1480105708/10001 
                   Reduce Output Operator
                     sort order: 
                     Map-reduce partition columns:
@@ -97,9 +97,9 @@
                           type: double
                     tag: -1
                     value expressions:
-                          expr: unioninput.key
+                          expr: _col0
                           type: string
-                          expr: unioninput.value
+                          expr: _col1
                           type: string
             Reduce Operator Tree:
               Extract

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml Fri Jul 24 20:46:52 2009
@@ -64,7 +64,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src</string> 
+            <string>file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src</string> 
            </void> 
           </object> 
          </void> 
@@ -100,7 +100,7 @@
                         <void property="conf"> 
                          <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
                           <void property="dirName"> 
-                           <string>file:/data/users/emil/hive1/hive1/build/ql/tmp/1971318934/10001</string> 
+                           <string>file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/811031924/10001</string> 
                           </void> 
                           <void property="tableInfo"> 
                            <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -117,7 +117,7 @@
                              <object class="java.util.Properties"> 
                               <void method="put"> 
                                <string>columns</string> 
-                               <string>_c0,_c1,_c2,_c3,_c4,_c5,_c6</string> 
+                               <string>_col0,_col1,_col2,_col3,_col4,_col5,_col6</string> 
                               </void> 
                               <void method="put"> 
                                <string>serialization.format</string> 
@@ -147,7 +147,7 @@
                             <void method="add"> 
                              <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                               <void property="internalName"> 
-                               <string>_c0</string> 
+                               <string>_col0</string> 
                               </void> 
                               <void property="type"> 
                                <object id="PrimitiveTypeInfo0" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
@@ -161,7 +161,7 @@
                             <void method="add"> 
                              <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                               <void property="internalName"> 
-                               <string>_c1</string> 
+                               <string>_col1</string> 
                               </void> 
                               <void property="type"> 
                                <object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
@@ -175,7 +175,7 @@
                             <void method="add"> 
                              <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                               <void property="internalName"> 
-                               <string>_c2</string> 
+                               <string>_col2</string> 
                               </void> 
                               <void property="type"> 
                                <object idref="PrimitiveTypeInfo1"/> 
@@ -185,7 +185,7 @@
                             <void method="add"> 
                              <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                               <void property="internalName"> 
-                               <string>_c3</string> 
+                               <string>_col3</string> 
                               </void> 
                               <void property="type"> 
                                <object idref="PrimitiveTypeInfo1"/> 
@@ -195,7 +195,7 @@
                             <void method="add"> 
                              <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                               <void property="internalName"> 
-                               <string>_c4</string> 
+                               <string>_col4</string> 
                               </void> 
                               <void property="type"> 
                                <object idref="PrimitiveTypeInfo0"/> 
@@ -205,7 +205,7 @@
                             <void method="add"> 
                              <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                               <void property="internalName"> 
-                               <string>_c5</string> 
+                               <string>_col5</string> 
                               </void> 
                               <void property="type"> 
                                <object id="PrimitiveTypeInfo2" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
@@ -219,7 +219,7 @@
                             <void method="add"> 
                              <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                               <void property="internalName"> 
-                               <string>_c6</string> 
+                               <string>_col6</string> 
                               </void> 
                               <void property="type"> 
                                <object idref="PrimitiveTypeInfo0"/> 
@@ -1067,7 +1067,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src</string> 
+       <string>file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>src</string> 
@@ -1079,7 +1079,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src</string> 
+       <string>file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object idref="LinkedHashMap0"/> 

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml Fri Jul 24 20:46:52 2009
@@ -64,7 +64,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src</string> 
+            <string>file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src</string> 
            </void> 
           </object> 
          </void> 
@@ -789,7 +789,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src</string> 
+       <string>file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>src</string> 
@@ -801,7 +801,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src</string> 
+       <string>file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object idref="LinkedHashMap0"/> 
@@ -842,7 +842,7 @@
               <void property="conf"> 
                <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
                 <void property="dirName"> 
-                 <string>file:/data/users/emil/hive1/hive1/build/ql/tmp/1745071752/10001</string> 
+                 <string>file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/1735787192/10001</string> 
                 </void> 
                 <void property="tableInfo"> 
                  <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -859,7 +859,7 @@
                    <object class="java.util.Properties"> 
                     <void method="put"> 
                      <string>columns</string> 
-                     <string>_c0,_c1,_c2</string> 
+                     <string>_col0,_col1,_col2</string> 
                     </void> 
                     <void method="put"> 
                      <string>serialization.format</string> 
@@ -889,7 +889,7 @@
                   <void method="add"> 
                    <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                     <void property="internalName"> 
-                     <string>_c0</string> 
+                     <string>_col0</string> 
                     </void> 
                     <void property="type"> 
                      <object idref="PrimitiveTypeInfo0"/> 
@@ -899,7 +899,7 @@
                   <void method="add"> 
                    <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                     <void property="internalName"> 
-                     <string>_c1</string> 
+                     <string>_col1</string> 
                     </void> 
                     <void property="type"> 
                      <object idref="PrimitiveTypeInfo1"/> 
@@ -909,7 +909,7 @@
                   <void method="add"> 
                    <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                     <void property="internalName"> 
-                     <string>_c2</string> 
+                     <string>_col2</string> 
                     </void> 
                     <void property="type"> 
                      <object idref="PrimitiveTypeInfo0"/> 

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml?rev=797643&r1=797642&r2=797643&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml Fri Jul 24 20:46:52 2009
@@ -64,7 +64,7 @@
            </void> 
            <void method="put"> 
             <string>location</string> 
-            <string>file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src</string> 
+            <string>file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src</string> 
            </void> 
           </object> 
          </void> 
@@ -979,7 +979,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src</string> 
+       <string>file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>src</string> 
@@ -991,7 +991,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/emil/hive1/hive1/build/ql/test/data/warehouse/src</string> 
+       <string>file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/test/data/warehouse/src</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object idref="LinkedHashMap0"/> 
@@ -1032,7 +1032,7 @@
               <void property="conf"> 
                <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
                 <void property="dirName"> 
-                 <string>file:/data/users/emil/hive1/hive1/build/ql/tmp/743587804/10001</string> 
+                 <string>file:/data/users/zshao/tools/namit-trunk-apache-hive/.ptest_3/build/ql/tmp/523008270/10001</string> 
                 </void> 
                 <void property="tableInfo"> 
                  <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -1049,7 +1049,7 @@
                    <object class="java.util.Properties"> 
                     <void method="put"> 
                      <string>columns</string> 
-                     <string>_c0,_c1,_c2,_c3,_c4</string> 
+                     <string>_col0,_col1,_col2,_col3,_col4</string> 
                     </void> 
                     <void method="put"> 
                      <string>serialization.format</string> 
@@ -1079,7 +1079,7 @@
                   <void method="add"> 
                    <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                     <void property="internalName"> 
-                     <string>_c0</string> 
+                     <string>_col0</string> 
                     </void> 
                     <void property="type"> 
                      <object idref="PrimitiveTypeInfo1"/> 
@@ -1089,7 +1089,7 @@
                   <void method="add"> 
                    <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                     <void property="internalName"> 
-                     <string>_c1</string> 
+                     <string>_col1</string> 
                     </void> 
                     <void property="type"> 
                      <object idref="PrimitiveTypeInfo1"/> 
@@ -1099,7 +1099,7 @@
                   <void method="add"> 
                    <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                     <void property="internalName"> 
-                     <string>_c2</string> 
+                     <string>_col2</string> 
                     </void> 
                     <void property="type"> 
                      <object idref="PrimitiveTypeInfo1"/> 
@@ -1109,7 +1109,7 @@
                   <void method="add"> 
                    <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                     <void property="internalName"> 
-                     <string>_c3</string> 
+                     <string>_col3</string> 
                     </void> 
                     <void property="type"> 
                      <object idref="PrimitiveTypeInfo0"/> 
@@ -1119,7 +1119,7 @@
                   <void method="add"> 
                    <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                     <void property="internalName"> 
-                     <string>_c4</string> 
+                     <string>_col4</string> 
                     </void> 
                     <void property="type"> 
                      <object idref="PrimitiveTypeInfo0"/> 



Mime
View raw message