hadoop-hive-commits mailing list archives

From: na...@apache.org
Subject: svn commit: r996387 - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/test/queries/clientnegative/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientnegative/ ql/src/test/results/clientpositive/
Date: Sun, 12 Sep 2010 21:00:24 GMT
Author: namit
Date: Sun Sep 12 21:00:24 2010
New Revision: 996387

URL: http://svn.apache.org/viewvc?rev=996387&view=rev
Log:
HIVE-1630. Bug in NO_DROP
(Siying Dong via namit)

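For readers skimming the diff: the protect-mode semantics this patch settles on can be
summarized with a few HiveQL statements. This is an illustrative sketch using a hypothetical
table name t; the actual regression tests changed below use tbl2 and tbl_protectmode_no_drop.

    create table t (col string) partitioned by (p string);
    alter table t add partition (p='p1');
    alter table t add partition (p='p2');

    -- table-level NO_DROP protects the table itself ...
    alter table t enable no_drop;
    drop table t;                              -- still fails: table is protected
    -- ... but, with this fix, it no longer blocks dropping individual partitions
    alter table t drop partition (p='p1');     -- now succeeds

    -- a partition explicitly marked NO_DROP is still protected
    alter table t partition (p='p2') enable no_drop;
    alter table t drop partition (p='p2');     -- fails: partition is protected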

Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/protectmode_part_no_drop.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/protectmode.q
    hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/protectmode.q.out

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=996387&r1=996386&r2=996387&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Sun Sep 12 21:00:24 2010
@@ -244,6 +244,9 @@ Trunk -  Unreleased
     HIVE-1622. Use map-only tak to merge if available
     (Ning Zhang via namit)
 
+    HIVE-1630. Bug in NO_DROP
+    (Siying Dong via namit)
+
   TESTS
 
     HIVE-1464. improve  test query performance

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=996387&r1=996386&r2=996387&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Sun Sep 12 21:00:24 2010
@@ -2151,10 +2151,6 @@ public class DDLTask extends Task<DDLWor
     Table tbl = null;
     try {
       tbl = db.getTable(dropTbl.getTableName());
-      if (!tbl.canDrop()) {
-        throw new HiveException("Table " + tbl.getTableName() +
-            " is protected from being dropped");
-      }
     } catch (InvalidTableException e) {
       // drop table is idempotent
     }
@@ -2172,6 +2168,11 @@ public class DDLTask extends Task<DDLWor
     }
 
     if (dropTbl.getPartSpecs() == null) {
+      if (tbl != null && !tbl.canDrop()) {
+        throw new HiveException("Table " + tbl.getTableName() +
+            " is protected from being dropped");
+      }
+
       // We should check that all the partitions of the table can be dropped
       if (tbl != null && tbl.isPartitioned()) {
         List<Partition> listPartitions = db.getPartitions(tbl);

Modified: hadoop/hive/trunk/ql/src/test/queries/clientnegative/protectmode_part_no_drop.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/protectmode_part_no_drop.q?rev=996387&r1=996386&r2=996387&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/protectmode_part_no_drop.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/protectmode_part_no_drop.q Sun Sep 12 21:00:24 2010
@@ -1,4 +1,4 @@
--- protect mode: syntax to change protect mode works and queries are not blocked if a table or partition is not in protect mode
+-- protect mode: syntax to change protect mode works and queries to drop partitions are blocked if it is marked no drop
 
 drop table tbl_protectmode_no_drop;
 
@@ -7,4 +7,4 @@ alter table tbl_protectmode_no_drop add 
 alter table tbl_protectmode_no_drop partition (p='p1') enable no_drop;
 desc extended tbl_protectmode_no_drop partition (p='p1');
 
-drop table tbl_protectmode_no_drop;
\ No newline at end of file
+alter table tbl_protectmode_no_drop drop partition (p='p1');

Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/protectmode.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/protectmode.q?rev=996387&r1=996386&r2=996387&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/protectmode.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/protectmode.q Sun Sep 12 21:00:24 2010
@@ -16,6 +16,7 @@ select col from tbl1;
 create table tbl2  (col string) partitioned by (p string);
 alter table tbl2 add partition (p='p1');
 alter table tbl2 add partition (p='p2');
+alter table tbl2 add partition (p='p3');
 alter table tbl2 drop partition (p='not_exist');
 
 select * from tbl2 where p='p1';
@@ -29,6 +30,7 @@ desc extended tbl2;
 
 alter table tbl2 enable no_drop;
 desc extended tbl2;
+alter table tbl2 drop partition (p='p3');
 
 alter table tbl2 disable offline;
 desc extended tbl2;

Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out?rev=996387&r1=996386&r2=996387&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/protectmode_part_no_drop.q.out Sun Sep 12 21:00:24 2010
@@ -1,8 +1,8 @@
-PREHOOK: query: -- protect mode: syntax to change protect mode works and queries are not blocked if a table or partition is not in protect mode
+PREHOOK: query: -- protect mode: syntax to change protect mode works and queries to drop partitions are blocked if it is marked no drop
 
 drop table tbl_protectmode_no_drop
 PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- protect mode: syntax to change protect mode works and queries are not blocked if a table or partition is not in protect mode
+POSTHOOK: query: -- protect mode: syntax to change protect mode works and queries to drop partitions are blocked if it is marked no drop
 
 drop table tbl_protectmode_no_drop
 POSTHOOK: type: DROPTABLE
@@ -36,10 +36,10 @@ c1	string	
 c2	string	
 p	string	
 	 	 
-Detailed Partition Information	Partition(values:[p1], dbName:default, tableName:tbl_protectmode_no_drop,
createTime:1282025356, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:c1, type:string,
comment:null), FieldSchema(name:c2, type:string, comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl_protectmode_no_drop/p=p1,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{last_modified_by=njain,
last_modified_time=1282025356, PROTECT_MODE=NO_DROP, transient_lastDdlTime=1282025356})	
-PREHOOK: query: drop table tbl_protectmode_no_drop
-PREHOOK: type: DROPTABLE
+Detailed Partition Information	Partition(values:[p1], dbName:default, tableName:tbl_protectmode_no_drop,
createTime:1284168328, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:c1, type:string,
comment:null), FieldSchema(name:c2, type:string, comment:null)], location:pfile:/data/users/sdong/www/trunk/VENDOR.hive/trunk/build/ql/test/data/warehouse/tbl_protectmode_no_drop/p=p1,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{last_modified_by=sdong,
last_modified_time=1284168328, PROTECT_MODE=NO_DROP, transient_lastDdlTime=1284168328})	
+PREHOOK: query: alter table tbl_protectmode_no_drop drop partition (p='p1')
+PREHOOK: type: ALTERTABLE_DROPPARTS
 PREHOOK: Input: default@tbl_protectmode_no_drop
-PREHOOK: Output: default@tbl_protectmode_no_drop
-FAILED: Error in metadata: Table tbl_protectmode_no_drop Partitionp=p1 is protected from being dropped
+PREHOOK: Output: default@tbl_protectmode_no_drop@p=p1
+FAILED: Error in metadata: Table tbl_protectmode_no_drop Partition p=p1 is protected from being dropped
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/protectmode.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/protectmode.q.out?rev=996387&r1=996386&r2=996387&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/protectmode.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/protectmode.q.out Sun Sep 12 21:00:24 2010
@@ -18,19 +18,19 @@ POSTHOOK: Output: default@tbl1
 PREHOOK: query: select * from tbl1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl1
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-47_882_345822340790651676/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-01_521_1064444079313286790/-mr-10000
 POSTHOOK: query: select * from tbl1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl1
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-47_882_345822340790651676/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-01_521_1064444079313286790/-mr-10000
 PREHOOK: query: select col from tbl1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl1
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-48_026_4569994021296856299/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-01_868_8633104453520411771/-mr-10000
 POSTHOOK: query: select col from tbl1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl1
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-48_026_4569994021296856299/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-01_868_8633104453520411771/-mr-10000
 PREHOOK: query: alter table tbl1 enable offline
 PREHOOK: type: ALTERTABLE_PROTECTMODE
 PREHOOK: Input: default@tbl1
@@ -45,7 +45,7 @@ POSTHOOK: query: desc extended tbl1
 POSTHOOK: type: DESCTABLE
 col	string	
 	 	 
-Detailed Table Information	Table(tableName:tbl1, dbName:default, owner:njain, createTime:1282030307,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string,
comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl1,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[],
parameters:{last_modified_by=njain, last_modified_time=1282030311, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1282030311},
viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:tbl1, dbName:default, owner:njain, createTime:1284324961,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string,
comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl1,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[],
parameters:{last_modified_by=njain, last_modified_time=1284324965, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1284324965},
viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: alter table tbl1 disable offline
 PREHOOK: type: ALTERTABLE_PROTECTMODE
 PREHOOK: Input: default@tbl1
@@ -60,23 +60,23 @@ POSTHOOK: query: desc extended tbl1
 POSTHOOK: type: DESCTABLE
 col	string	
 	 	 
-Detailed Table Information	Table(tableName:tbl1, dbName:default, owner:njain, createTime:1282030307,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string,
comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl1,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[],
parameters:{last_modified_by=njain, last_modified_time=1282030311, transient_lastDdlTime=1282030311},
viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:tbl1, dbName:default, owner:njain, createTime:1284324961,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string,
comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl1,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[],
parameters:{last_modified_by=njain, last_modified_time=1284324965, transient_lastDdlTime=1284324965},
viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
 PREHOOK: query: select * from tbl1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl1
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-51_568_4690377861294328852/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-05_819_8890858681292563426/-mr-10000
 POSTHOOK: query: select * from tbl1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl1
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-51_568_4690377861294328852/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-05_819_8890858681292563426/-mr-10000
 PREHOOK: query: select col from tbl1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl1
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-51_719_2669398894679440279/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-05_969_128056030143576927/-mr-10000
 POSTHOOK: query: select col from tbl1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl1
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-51_719_2669398894679440279/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-05_969_128056030143576927/-mr-10000
 PREHOOK: query: create table tbl2  (col string) partitioned by (p string)
 PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table tbl2  (col string) partitioned by (p string)
@@ -96,6 +96,13 @@ POSTHOOK: query: alter table tbl2 add pa
 POSTHOOK: type: ALTERTABLE_ADDPARTS
 POSTHOOK: Input: default@tbl2
 POSTHOOK: Output: default@tbl2@p=p2
+PREHOOK: query: alter table tbl2 add partition (p='p3')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@tbl2
+POSTHOOK: query: alter table tbl2 add partition (p='p3')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@tbl2
+POSTHOOK: Output: default@tbl2@p=p3
 PREHOOK: query: alter table tbl2 drop partition (p='not_exist')
 PREHOOK: type: ALTERTABLE_DROPPARTS
 PREHOOK: Input: default@tbl2
@@ -105,19 +112,19 @@ POSTHOOK: Input: default@tbl2
 PREHOOK: query: select * from tbl2 where p='p1'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl2@p=p1
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-55_288_5009122664008890670/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-10_110_1357711074681566433/-mr-10000
 POSTHOOK: query: select * from tbl2 where p='p1'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl2@p=p1
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-55_288_5009122664008890670/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-10_110_1357711074681566433/-mr-10000
 PREHOOK: query: select * from tbl2 where p='p2'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl2@p=p2
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-55_468_198651221231416854/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-10_401_4217982769570687966/-mr-10000
 POSTHOOK: query: select * from tbl2 where p='p2'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl2@p=p2
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-55_468_198651221231416854/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-10_401_4217982769570687966/-mr-10000
 PREHOOK: query: alter table tbl2 partition (p='p1') enable offline
 PREHOOK: type: ALTERPARTITION_PROTECTMODE
 PREHOOK: Input: default@tbl2
@@ -135,7 +142,7 @@ POSTHOOK: type: DESCTABLE
 col	string	
 p	string	
 	 	 
-Detailed Partition Information	Partition(values:[p1], dbName:default, tableName:tbl2, createTime:1282030315,
lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)],
location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl2/p=p1,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{last_modified_by=njain,
last_modified_time=1282030315, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1282030315})	
+Detailed Partition Information	Partition(values:[p1], dbName:default, tableName:tbl2, createTime:1284324969,
lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)],
location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl2/p=p1,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{last_modified_by=njain,
last_modified_time=1284324970, PROTECT_MODE=OFFLINE, transient_lastDdlTime=1284324970})	
 PREHOOK: query: alter table tbl2 enable offline
 PREHOOK: type: ALTERTABLE_PROTECTMODE
 PREHOOK: Input: default@tbl2
@@ -151,7 +158,7 @@ POSTHOOK: type: DESCTABLE
 col	string	
 p	string	
 	 	 
-Detailed Table Information	Table(tableName:tbl2, dbName:default, owner:njain, createTime:1282030314,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string,
comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl2,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p,
type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1282030315,
PROTECT_MODE=OFFLINE, transient_lastDdlTime=1282030315}, viewOriginalText:null, viewExpandedText:null,
tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:tbl2, dbName:default, owner:njain, createTime:1284324969,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string,
comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl2,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p,
type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1284324971,
PROTECT_MODE=OFFLINE, transient_lastDdlTime=1284324971}, viewOriginalText:null, viewExpandedText:null,
tableType:MANAGED_TABLE)	
 PREHOOK: query: alter table tbl2 enable no_drop
 PREHOOK: type: ALTERTABLE_PROTECTMODE
 PREHOOK: Input: default@tbl2
@@ -167,7 +174,15 @@ POSTHOOK: type: DESCTABLE
 col	string	
 p	string	
 	 	 
-Detailed Table Information	Table(tableName:tbl2, dbName:default, owner:njain, createTime:1282030314,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string,
comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl2,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p,
type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1282030316,
PROTECT_MODE=OFFLINE,NO_DROP, transient_lastDdlTime=1282030316}, viewOriginalText:null, viewExpandedText:null,
tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:tbl2, dbName:default, owner:njain, createTime:1284324969,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string,
comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl2,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p,
type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1284324971,
PROTECT_MODE=OFFLINE,NO_DROP, transient_lastDdlTime=1284324971}, viewOriginalText:null, viewExpandedText:null,
tableType:MANAGED_TABLE)	
+PREHOOK: query: alter table tbl2 drop partition (p='p3')
+PREHOOK: type: ALTERTABLE_DROPPARTS
+PREHOOK: Input: default@tbl2
+PREHOOK: Output: default@tbl2@p=p3
+POSTHOOK: query: alter table tbl2 drop partition (p='p3')
+POSTHOOK: type: ALTERTABLE_DROPPARTS
+POSTHOOK: Input: default@tbl2
+POSTHOOK: Output: default@tbl2@p=p3
 PREHOOK: query: alter table tbl2 disable offline
 PREHOOK: type: ALTERTABLE_PROTECTMODE
 PREHOOK: Input: default@tbl2
@@ -183,7 +198,7 @@ POSTHOOK: type: DESCTABLE
 col	string	
 p	string	
 	 	 
-Detailed Table Information	Table(tableName:tbl2, dbName:default, owner:njain, createTime:1282030314,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string,
comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl2,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p,
type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1282030316,
PROTECT_MODE=NO_DROP, transient_lastDdlTime=1282030316}, viewOriginalText:null, viewExpandedText:null,
tableType:MANAGED_TABLE)	
+Detailed Table Information	Table(tableName:tbl2, dbName:default, owner:njain, createTime:1284324969,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string,
comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl2,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p,
type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1284324971,
PROTECT_MODE=NO_DROP, transient_lastDdlTime=1284324971}, viewOriginalText:null, viewExpandedText:null,
tableType:MANAGED_TABLE)	
 PREHOOK: query: alter table tbl2 disable no_drop
 PREHOOK: type: ALTERTABLE_PROTECTMODE
 PREHOOK: Input: default@tbl2
@@ -199,23 +214,23 @@ POSTHOOK: type: DESCTABLE
 col	string	
 p	string	
 	 	 
-Detailed Table Information	Table(tableName:tbl2, dbName:default, owner:njain, createTime:1282030314,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string,
comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl2,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p,
type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1282030316,
transient_lastDdlTime=1282030316}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)

+Detailed Table Information	Table(tableName:tbl2, dbName:default, owner:njain, createTime:1284324969,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string,
comment:null)], location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl2,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[FieldSchema(name:p,
type:string, comment:null)], parameters:{last_modified_by=njain, last_modified_time=1284324972,
transient_lastDdlTime=1284324972}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)

 PREHOOK: query: select * from tbl2 where p='p2'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl2@p=p2
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-56_684_3044267555024428489/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-12_162_5488456688805727075/-mr-10000
 POSTHOOK: query: select * from tbl2 where p='p2'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl2@p=p2
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-56_684_3044267555024428489/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-12_162_5488456688805727075/-mr-10000
 PREHOOK: query: select col from tbl2 where p='p2'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl2@p=p2
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-56_865_5659313889717699465/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-12_384_4452747557336253545/-mr-10000
 POSTHOOK: query: select col from tbl2 where p='p2'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl2@p=p2
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-31-56_865_5659313889717699465/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-12_384_4452747557336253545/-mr-10000
 PREHOOK: query: alter table tbl2 partition (p='p1') disable offline
 PREHOOK: type: ALTERPARTITION_PROTECTMODE
 PREHOOK: Input: default@tbl2
@@ -233,23 +248,23 @@ POSTHOOK: type: DESCTABLE
 col	string	
 p	string	
 	 	 
-Detailed Partition Information	Partition(values:[p1], dbName:default, tableName:tbl2, createTime:1282030315,
lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)],
location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl2/p=p1,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{last_modified_by=njain,
last_modified_time=1282030320, transient_lastDdlTime=1282030320})	
+Detailed Partition Information	Partition(values:[p1], dbName:default, tableName:tbl2, createTime:1284324969,
lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:col, type:string, comment:null)],
location:pfile:/data/users/njain/hive_commit2/hive_commit2/build/ql/test/data/warehouse/tbl2/p=p1,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{last_modified_by=njain,
last_modified_time=1284324975, transient_lastDdlTime=1284324975})	
 PREHOOK: query: select * from tbl2 where p='p1'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl2@p=p1
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-32-01_137_2604041819153026498/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-15_878_9044575727084946986/-mr-10000
 POSTHOOK: query: select * from tbl2 where p='p1'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl2@p=p1
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-32-01_137_2604041819153026498/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-15_878_9044575727084946986/-mr-10000
 PREHOOK: query: select col from tbl2 where p='p1'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl2@p=p1
-PREHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-32-01_326_194698586482359849/-mr-10000
+PREHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-16_089_5658301458151509169/-mr-10000
 POSTHOOK: query: select col from tbl2 where p='p1'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tbl2@p=p1
-POSTHOOK: Output: file:/tmp/njain/hive_2010-08-17_00-32-01_326_194698586482359849/-mr-10000
+POSTHOOK: Output: file:/tmp/njain/hive_2010-09-12_13-56-16_089_5658301458151509169/-mr-10000
 PREHOOK: query: insert overwrite table tbl1 select col from tbl2 where p='p1'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tbl2@p=p1


