hive-commits mailing list archives

From nzh...@apache.org
Subject svn commit: r940362 - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/
Date Mon, 03 May 2010 04:57:01 GMT
Author: nzhang
Date: Mon May  3 04:57:01 2010
New Revision: 940362

URL: http://svn.apache.org/viewvc?rev=940362&view=rev
Log:
HIVE-1331. select * does not work if different partitions contain different formats. (Namit
Jain via Ning Zhang)

Added:
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/partition_wise_fileformat2.q
    hadoop/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat2.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/partition_wise_fileformat.q
    hadoop/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat.q.out

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=940362&r1=940361&r2=940362&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Mon May  3 04:57:01 2010
@@ -403,6 +403,10 @@ Trunk -  Unreleased
     HIVE-1330. Fatal error check omitted for reducer-side operators
     (Ning Zhang via namit)
 
+    HIVE-1331. select * does not work if different partitions contain
+    different formats
+    (Namit Jain via Ning Zhang)
+
 Release 0.5.0 -  Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java?rev=940362&r1=940361&r2=940362&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java Mon May  3 04:57:01 2010
@@ -266,12 +266,16 @@ public class FetchOperator implements Se
       job.set("mapred.input.dir", org.apache.hadoop.util.StringUtils
           .escapeString(currPath.toString()));
 
-      TableDesc tmp = currTbl;
-      if (tmp == null) {
-        tmp = currPart.getTableDesc();
+      PartitionDesc tmp;
+      if (currTbl == null) {
+        tmp = currPart;
       }
+      else {
+        tmp = new PartitionDesc(currTbl, null);
+      }
+
       inputFormat = getInputFormatFromCache(tmp.getInputFileFormatClass(), job);
-      Utilities.copyTableJobPropertiesToConf(tmp, job);
+      Utilities.copyTableJobPropertiesToConf(tmp.getTableDesc(), job);
       inputSplits = inputFormat.getSplits(job, 1);
       splitNum = 0;
       serde = tmp.getDeserializerClass().newInstance();
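For readers tracing the FetchOperator change above: the fix switches from a table-level TableDesc to a per-partition PartitionDesc when deciding which InputFormat and deserializer to instantiate, so a partitioned table whose partitions were written in different file formats (text, RCFile, SequenceFile) is fetched correctly. Below is a minimal, self-contained Java sketch of that resolution order; TableDescSketch and PartitionDescSketch are hypothetical stand-ins for illustration only, not Hive's real org.apache.hadoop.hive.ql.plan classes.

// Illustrative only: simplified stand-ins for Hive's TableDesc/PartitionDesc.
// The point is the per-partition resolution order the patch above introduces.
final class TableDescSketch {
    final String inputFormatClass;          // table-level default format
    TableDescSketch(String inputFormatClass) { this.inputFormatClass = inputFormatClass; }
}

final class PartitionDescSketch {
    final TableDescSketch table;
    final String inputFormatClass;          // may differ from the table-level format
    PartitionDescSketch(TableDescSketch table, String inputFormatClass) {
        this.table = table;
        // fall back to the table's format when the partition does not override it
        this.inputFormatClass = (inputFormatClass != null) ? inputFormatClass : table.inputFormatClass;
    }
}

public class FetchFormatSketch {
    /** Pick the format the way the patched FetchOperator does: use the
     *  partition's descriptor when a partition is being fetched, and only
     *  wrap the table descriptor for an unpartitioned table. */
    static String resolveInputFormat(TableDescSketch currTbl, PartitionDescSketch currPart) {
        PartitionDescSketch tmp =
            (currTbl == null) ? currPart : new PartitionDescSketch(currTbl, null);
        return tmp.inputFormatClass;
    }

    public static void main(String[] args) {
        TableDescSketch tbl = new TableDescSketch("SequenceFileInputFormat");
        PartitionDescSketch dt100 = new PartitionDescSketch(tbl, "TextInputFormat");
        PartitionDescSketch dt101 = new PartitionDescSketch(tbl, "RCFileInputFormat");
        // Each partition keeps the format it was written with, instead of the
        // table-level format that the old TableDesc-based code would have used.
        System.out.println(resolveInputFormat(null, dt100)); // TextInputFormat
        System.out.println(resolveInputFormat(null, dt101)); // RCFileInputFormat
        System.out.println(resolveInputFormat(tbl, null));   // SequenceFileInputFormat
    }
}

Run as written, the sketch prints TextInputFormat, RCFileInputFormat, and SequenceFileInputFormat, mirroring what the new partition_wise_fileformat2.q test exercises with "select *" across the three partitions.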

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/partition_wise_fileformat.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/partition_wise_fileformat.q?rev=940362&r1=940361&r2=940362&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/partition_wise_fileformat.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/partition_wise_fileformat.q Mon May  3 04:57:01 2010
@@ -1,3 +1,5 @@
+drop table partition_test_partitioned;
+
 create table partition_test_partitioned(key string, value string) partitioned by (dt string);
 
 insert overwrite table partition_test_partitioned partition(dt=100) select * from src1;
@@ -26,4 +28,5 @@ select key from partition_test_partition
 select key from partition_test_partitioned where dt=102;
 select key from partition_test_partitioned;
 
-select key from partition_test_partitioned where dt >=100 and dt <= 102;
\ No newline at end of file
+select key from partition_test_partitioned where dt >=100 and dt <= 102;
+drop table partition_test_partitioned;

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/partition_wise_fileformat2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/partition_wise_fileformat2.q?rev=940362&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/partition_wise_fileformat2.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/partition_wise_fileformat2.q Mon May  3 04:57:01 2010
@@ -0,0 +1,12 @@
+drop table partition_test_partitioned;
+
+create table partition_test_partitioned(key string, value string) partitioned by (dt string);
+
+insert overwrite table partition_test_partitioned partition(dt=100) select * from src1;
+alter table partition_test_partitioned set fileformat rcfile;
+insert overwrite table partition_test_partitioned partition(dt=101) select * from src1;
+alter table partition_test_partitioned set fileformat Sequencefile;
+insert overwrite table partition_test_partitioned partition(dt=102) select * from src1;
+
+select * from partition_test_partitioned where dt >=100 and dt <= 102;
+drop table partition_test_partitioned;

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat.q.out?rev=940362&r1=940361&r2=940362&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat.q.out Mon May  3 04:57:01 2010
@@ -1,3 +1,7 @@
+PREHOOK: query: drop table partition_test_partitioned
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table partition_test_partitioned
+POSTHOOK: type: DROPTABLE
 PREHOOK: query: create table partition_test_partitioned(key string, value string) partitioned
by (dt string)
 PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table partition_test_partitioned(key string, value string) partitioned
by (dt string)
@@ -20,8 +24,8 @@ POSTHOOK: type: SHOW_TABLESTATUS
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 tableName:partition_test_partitioned
-owner:athusoo
-location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/partition_test_partitioned
+owner:njain
+location:file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/partition_test_partitioned
 inputformat:org.apache.hadoop.mapred.TextInputFormat
 outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 columns:struct columns { string key, string value}
@@ -31,8 +35,8 @@ totalNumberFiles:1
 totalFileSize:216
 maxFileSize:216
 minFileSize:216
-lastAccessTime:unknown
-lastUpdateTime:1270517071000
+lastAccessTime:0
+lastUpdateTime:1272566036000
 
 PREHOOK: query: show table extended like partition_test_partitioned partition(dt=100)
 PREHOOK: type: SHOW_TABLESTATUS
@@ -41,8 +45,8 @@ POSTHOOK: type: SHOW_TABLESTATUS
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 tableName:partition_test_partitioned
-owner:athusoo
-location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/partition_test_partitioned/dt=100
+owner:njain
+location:file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/partition_test_partitioned/dt=100
 inputformat:org.apache.hadoop.mapred.TextInputFormat
 outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 columns:struct columns { string key, string value}
@@ -52,17 +56,17 @@ totalNumberFiles:1
 totalFileSize:216
 maxFileSize:216
 minFileSize:216
-lastAccessTime:unknown
-lastUpdateTime:1270517071000
+lastAccessTime:0
+lastUpdateTime:1272566036000
 
 PREHOOK: query: select key from partition_test_partitioned where dt=100
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_test_partitioned@dt=100
-PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-24-31_667_6978568801664500169/10000
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-33-56_574_6496604876539248967/10000
 POSTHOOK: query: select key from partition_test_partitioned where dt=100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_test_partitioned@dt=100
-POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-24-31_667_6978568801664500169/10000
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-33-56_574_6496604876539248967/10000
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 238
@@ -93,11 +97,11 @@ POSTHOOK: Lineage: partition_test_partit
 PREHOOK: query: select key from partition_test_partitioned
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_test_partitioned@dt=100
-PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-24-35_921_2508903442352936143/10000
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-00_180_3082218449482865639/10000
 POSTHOOK: query: select key from partition_test_partitioned
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_test_partitioned@dt=100
-POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-24-35_921_2508903442352936143/10000
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-00_180_3082218449482865639/10000
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 238
@@ -154,8 +158,8 @@ POSTHOOK: Lineage: partition_test_partit
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 tableName:partition_test_partitioned
-owner:athusoo
-location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/partition_test_partitioned
+owner:njain
+location:file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/partition_test_partitioned
 inputformat:org.apache.hadoop.hive.ql.io.RCFileInputFormat
 outputformat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat
 columns:struct columns { string key, string value}
@@ -165,8 +169,8 @@ totalNumberFiles:2
 totalFileSize:586
 maxFileSize:370
 minFileSize:216
-lastAccessTime:unknown
-lastUpdateTime:1270517083000
+lastAccessTime:0
+lastUpdateTime:1272566047000
 
 PREHOOK: query: show table extended like partition_test_partitioned partition(dt=100)
 PREHOOK: type: SHOW_TABLESTATUS
@@ -177,8 +181,8 @@ POSTHOOK: Lineage: partition_test_partit
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 tableName:partition_test_partitioned
-owner:athusoo
-location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/partition_test_partitioned/dt=100
+owner:njain
+location:file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/partition_test_partitioned/dt=100
 inputformat:org.apache.hadoop.mapred.TextInputFormat
 outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 columns:struct columns { string key, string value}
@@ -188,8 +192,8 @@ totalNumberFiles:1
 totalFileSize:216
 maxFileSize:216
 minFileSize:216
-lastAccessTime:unknown
-lastUpdateTime:1270517083000
+lastAccessTime:0
+lastUpdateTime:1272566047000
 
 PREHOOK: query: show table extended like partition_test_partitioned partition(dt=101)
 PREHOOK: type: SHOW_TABLESTATUS
@@ -200,8 +204,8 @@ POSTHOOK: Lineage: partition_test_partit
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 tableName:partition_test_partitioned
-owner:athusoo
-location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/partition_test_partitioned/dt=101
+owner:njain
+location:file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/partition_test_partitioned/dt=101
 inputformat:org.apache.hadoop.hive.ql.io.RCFileInputFormat
 outputformat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat
 columns:struct columns { string key, string value}
@@ -211,17 +215,17 @@ totalNumberFiles:1
 totalFileSize:370
 maxFileSize:370
 minFileSize:370
-lastAccessTime:unknown
-lastUpdateTime:1270517083000
+lastAccessTime:0
+lastUpdateTime:1272566047000
 
 PREHOOK: query: select key from partition_test_partitioned where dt=100
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_test_partitioned@dt=100
-PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-24-43_962_384006880402914566/10000
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-08_064_4581555677801824990/10000
 POSTHOOK: query: select key from partition_test_partitioned where dt=100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_test_partitioned@dt=100
-POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-24-43_962_384006880402914566/10000
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-08_064_4581555677801824990/10000
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
@@ -254,11 +258,11 @@ POSTHOOK: Lineage: partition_test_partit
 PREHOOK: query: select key from partition_test_partitioned where dt=101
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_test_partitioned@dt=101
-PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-24-47_928_3970487296691026768/10000
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-11_564_6764711767035125433/10000
 POSTHOOK: query: select key from partition_test_partitioned where dt=101
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_test_partitioned@dt=101
-POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-24-47_928_3970487296691026768/10000
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-11_564_6764711767035125433/10000
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
@@ -292,12 +296,12 @@ PREHOOK: query: select key from partitio
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_test_partitioned@dt=100
 PREHOOK: Input: default@partition_test_partitioned@dt=101
-PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-24-51_953_3203198324412338554/10000
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-15_008_8542909916045321913/10000
 POSTHOOK: query: select key from partition_test_partitioned
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_test_partitioned@dt=100
 POSTHOOK: Input: default@partition_test_partitioned@dt=101
-POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-24-51_953_3203198324412338554/10000
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-15_008_8542909916045321913/10000
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
@@ -387,8 +391,8 @@ POSTHOOK: Lineage: partition_test_partit
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 tableName:partition_test_partitioned
-owner:athusoo
-location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/partition_test_partitioned
+owner:njain
+location:file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/partition_test_partitioned
 inputformat:org.apache.hadoop.mapred.SequenceFileInputFormat
 outputformat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 columns:struct columns { string key, string value}
@@ -398,8 +402,8 @@ totalNumberFiles:3
 totalFileSize:1474
 maxFileSize:888
 minFileSize:216
-lastAccessTime:unknown
-lastUpdateTime:1270517101000
+lastAccessTime:0
+lastUpdateTime:1272566061000
 
 PREHOOK: query: show table extended like partition_test_partitioned partition(dt=100)
 PREHOOK: type: SHOW_TABLESTATUS
@@ -412,8 +416,8 @@ POSTHOOK: Lineage: partition_test_partit
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 tableName:partition_test_partitioned
-owner:athusoo
-location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/partition_test_partitioned/dt=100
+owner:njain
+location:file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/partition_test_partitioned/dt=100
 inputformat:org.apache.hadoop.mapred.TextInputFormat
 outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 columns:struct columns { string key, string value}
@@ -423,8 +427,8 @@ totalNumberFiles:1
 totalFileSize:216
 maxFileSize:216
 minFileSize:216
-lastAccessTime:unknown
-lastUpdateTime:1270517101000
+lastAccessTime:0
+lastUpdateTime:1272566061000
 
 PREHOOK: query: show table extended like partition_test_partitioned partition(dt=101)
 PREHOOK: type: SHOW_TABLESTATUS
@@ -437,8 +441,8 @@ POSTHOOK: Lineage: partition_test_partit
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 tableName:partition_test_partitioned
-owner:athusoo
-location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/partition_test_partitioned/dt=101
+owner:njain
+location:file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/partition_test_partitioned/dt=101
 inputformat:org.apache.hadoop.hive.ql.io.RCFileInputFormat
 outputformat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat
 columns:struct columns { string key, string value}
@@ -448,8 +452,8 @@ totalNumberFiles:1
 totalFileSize:370
 maxFileSize:370
 minFileSize:370
-lastAccessTime:unknown
-lastUpdateTime:1270517101000
+lastAccessTime:0
+lastUpdateTime:1272566061000
 
 PREHOOK: query: show table extended like partition_test_partitioned partition(dt=102)
 PREHOOK: type: SHOW_TABLESTATUS
@@ -462,8 +466,8 @@ POSTHOOK: Lineage: partition_test_partit
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 tableName:partition_test_partitioned
-owner:athusoo
-location:file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/test/data/warehouse/partition_test_partitioned/dt=102
+owner:njain
+location:file:/data/users/njain/hive1/hive1/build/ql/test/data/warehouse/partition_test_partitioned/dt=102
 inputformat:org.apache.hadoop.mapred.SequenceFileInputFormat
 outputformat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 columns:struct columns { string key, string value}
@@ -473,17 +477,17 @@ totalNumberFiles:1
 totalFileSize:888
 maxFileSize:888
 minFileSize:888
-lastAccessTime:unknown
-lastUpdateTime:1270517101000
+lastAccessTime:0
+lastUpdateTime:1272566061000
 
 PREHOOK: query: select key from partition_test_partitioned where dt=100
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_test_partitioned@dt=100
-PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-25-01_385_4034333690110354601/10000
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-22_555_533033657582480400/10000
 POSTHOOK: query: select key from partition_test_partitioned where dt=100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_test_partitioned@dt=100
-POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-25-01_385_4034333690110354601/10000
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-22_555_533033657582480400/10000
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
@@ -518,11 +522,11 @@ POSTHOOK: Lineage: partition_test_partit
 PREHOOK: query: select key from partition_test_partitioned where dt=101
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_test_partitioned@dt=101
-PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-25-05_471_5258259201295738909/10000
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-26_057_6588328457143833363/10000
 POSTHOOK: query: select key from partition_test_partitioned where dt=101
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_test_partitioned@dt=101
-POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-25-05_471_5258259201295738909/10000
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-26_057_6588328457143833363/10000
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
@@ -557,11 +561,11 @@ POSTHOOK: Lineage: partition_test_partit
 PREHOOK: query: select key from partition_test_partitioned where dt=102
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_test_partitioned@dt=102
-PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-25-09_919_4226420140921881636/10000
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-29_653_6126813740681060035/10000
 POSTHOOK: query: select key from partition_test_partitioned where dt=102
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_test_partitioned@dt=102
-POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-25-09_919_4226420140921881636/10000
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-29_653_6126813740681060035/10000
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
@@ -598,13 +602,13 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_test_partitioned@dt=100
 PREHOOK: Input: default@partition_test_partitioned@dt=101
 PREHOOK: Input: default@partition_test_partitioned@dt=102
-PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-25-14_183_5603938236610328309/10000
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-33_143_8176565926876989902/10000
 POSTHOOK: query: select key from partition_test_partitioned
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_test_partitioned@dt=100
 POSTHOOK: Input: default@partition_test_partitioned@dt=101
 POSTHOOK: Input: default@partition_test_partitioned@dt=102
-POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-25-14_183_5603938236610328309/10000
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-33_143_8176565926876989902/10000
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
@@ -691,13 +695,13 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_test_partitioned@dt=100
 PREHOOK: Input: default@partition_test_partitioned@dt=101
 PREHOOK: Input: default@partition_test_partitioned@dt=102
-PREHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-25-18_228_7207430461807867307/10000
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-36_758_6312401358649427279/10000
 POSTHOOK: query: select key from partition_test_partitioned where dt >=100 and dt <= 102
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@partition_test_partitioned@dt=100
 POSTHOOK: Input: default@partition_test_partitioned@dt=101
 POSTHOOK: Input: default@partition_test_partitioned@dt=102
-POSTHOOK: Output: file:/data/users/athusoo/apache_workspaces/hive_trunk_ws1/.ptest_1/build/ql/scratchdir/hive_2010-04-05_18-25-18_228_7207430461807867307/10000
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-34-36_758_6312401358649427279/10000
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
@@ -779,3 +783,14 @@ POSTHOOK: Lineage: partition_test_partit
 
 
 
+PREHOOK: query: drop table partition_test_partitioned
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table partition_test_partitioned
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@partition_test_partitioned
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat2.q.out?rev=940362&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat2.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat2.q.out Mon May  3 04:57:01 2010
@@ -0,0 +1,167 @@
+PREHOOK: query: drop table partition_test_partitioned
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table partition_test_partitioned
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table partition_test_partitioned(key string, value string) partitioned
by (dt string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table partition_test_partitioned(key string, value string) partitioned
by (dt string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@partition_test_partitioned
+PREHOOK: query: insert overwrite table partition_test_partitioned partition(dt=100) select
* from src1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@partition_test_partitioned@dt=100
+POSTHOOK: query: insert overwrite table partition_test_partitioned partition(dt=100) select
* from src1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@partition_test_partitioned@dt=100
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+PREHOOK: query: alter table partition_test_partitioned set fileformat rcfile
+PREHOOK: type: null
+POSTHOOK: query: alter table partition_test_partitioned set fileformat rcfile
+POSTHOOK: type: null
+POSTHOOK: Input: default@partition_test_partitioned
+POSTHOOK: Output: default@partition_test_partitioned
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+PREHOOK: query: insert overwrite table partition_test_partitioned partition(dt=101) select
* from src1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@partition_test_partitioned@dt=101
+POSTHOOK: query: insert overwrite table partition_test_partitioned partition(dt=101) select
* from src1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@partition_test_partitioned@dt=101
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+PREHOOK: query: alter table partition_test_partitioned set fileformat Sequencefile
+PREHOOK: type: null
+POSTHOOK: query: alter table partition_test_partitioned set fileformat Sequencefile
+POSTHOOK: type: null
+POSTHOOK: Input: default@partition_test_partitioned
+POSTHOOK: Output: default@partition_test_partitioned
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+PREHOOK: query: insert overwrite table partition_test_partitioned partition(dt=102) select
* from src1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@partition_test_partitioned@dt=102
+POSTHOOK: query: insert overwrite table partition_test_partitioned partition(dt=102) select
* from src1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@partition_test_partitioned@dt=102
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+PREHOOK: query: select * from partition_test_partitioned where dt >=100 and dt <= 102
+PREHOOK: type: QUERY
+PREHOOK: Input: default@partition_test_partitioned@dt=100
+PREHOOK: Input: default@partition_test_partitioned@dt=101
+PREHOOK: Input: default@partition_test_partitioned@dt=102
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-29-41_458_4880493720639558189/10000
+POSTHOOK: query: select * from partition_test_partitioned where dt >=100 and dt <= 102
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@partition_test_partitioned@dt=100
+POSTHOOK: Input: default@partition_test_partitioned@dt=101
+POSTHOOK: Input: default@partition_test_partitioned@dt=102
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-04-29_11-29-41_458_4880493720639558189/10000
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+238	val_238	100
+		100
+311	val_311	100
+	val_27	100
+	val_165	100
+	val_409	100
+255	val_255	100
+278	val_278	100
+98	val_98	100
+	val_484	100
+	val_265	100
+	val_193	100
+401	val_401	100
+150	val_150	100
+273	val_273	100
+224		100
+369		100
+66	val_66	100
+128		100
+213	val_213	100
+146	val_146	100
+406	val_406	100
+		100
+		100
+		100
+238	val_238	101
+		101
+311	val_311	101
+	val_27	101
+	val_165	101
+	val_409	101
+255	val_255	101
+278	val_278	101
+98	val_98	101
+	val_484	101
+	val_265	101
+	val_193	101
+401	val_401	101
+150	val_150	101
+273	val_273	101
+224		101
+369		101
+66	val_66	101
+128		101
+213	val_213	101
+146	val_146	101
+406	val_406	101
+		101
+		101
+		101
+238	val_238	102
+		102
+311	val_311	102
+	val_27	102
+	val_165	102
+	val_409	102
+255	val_255	102
+278	val_278	102
+98	val_98	102
+	val_484	102
+	val_265	102
+	val_193	102
+401	val_401	102
+150	val_150	102
+273	val_273	102
+224		102
+369		102
+66	val_66	102
+128		102
+213	val_213	102
+146	val_146	102
+406	val_406	102
+		102
+		102
+		102
+PREHOOK: query: drop table partition_test_partitioned
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table partition_test_partitioned
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@partition_test_partitioned
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=100).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=101).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: partition_test_partitioned PARTITION(dt=102).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]


