hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From heyongqi...@apache.org
Subject svn commit: r1181197 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/exec/ java/org/apache/hadoop/hive/ql/parse/ java/org/apache/hadoop/hive/ql/plan/ test/queries/clientpositive/ test/results/clientpositive/
Date Mon, 10 Oct 2011 20:11:24 GMT
Author: heyongqiang
Date: Mon Oct 10 20:11:23 2011
New Revision: 1181197

URL: http://svn.apache.org/viewvc?rev=1181197&view=rev
Log:
HIVE-2484: Enable ALTER TABLE SET SERDE to work on partition level (Xiao Li via He Yongqiang)

Added:
    hive/trunk/ql/src/test/queries/clientpositive/alter_table_serde.q
    hive/trunk/ql/src/test/results/clientpositive/alter_table_serde.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1181197&r1=1181196&r2=1181197&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Mon Oct 10 20:11:23 2011
@@ -2801,16 +2801,31 @@ public class DDLTask extends Task<DDLWor
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDPROPS) {
       tbl.getTTable().getParameters().putAll(alterTbl.getProps());
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDSERDEPROPS) {
-      tbl.getTTable().getSd().getSerdeInfo().getParameters().putAll(
-          alterTbl.getProps());
-    } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDSERDE) {
-      tbl.setSerializationLib(alterTbl.getSerdeName());
-      if ((alterTbl.getProps() != null) && (alterTbl.getProps().size() > 0)) {
+      if (part != null) {
+        part.getTPartition().getSd().getSerdeInfo().getParameters().putAll(
+            alterTbl.getProps());
+      } else {
         tbl.getTTable().getSd().getSerdeInfo().getParameters().putAll(
             alterTbl.getProps());
       }
-      tbl.setFields(Hive.getFieldsFromDeserializer(tbl.getTableName(), tbl
-          .getDeserializer()));
+    } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDSERDE) {
+      String serdeName = alterTbl.getSerdeName();
+      if (part != null) {
+        part.getTPartition().getSd().getSerdeInfo().setSerializationLib(serdeName);
+        if ((alterTbl.getProps() != null) && (alterTbl.getProps().size() > 0)) {
+          part.getTPartition().getSd().getSerdeInfo().getParameters().putAll(
+              alterTbl.getProps());
+        }
+        part.getTPartition().getSd().setCols(part.getTPartition().getSd().getCols());
+      } else {
+        tbl.setSerializationLib(alterTbl.getSerdeName());
+        if ((alterTbl.getProps() != null) && (alterTbl.getProps().size() > 0)) {
+          tbl.getTTable().getSd().getSerdeInfo().getParameters().putAll(
+              alterTbl.getProps());
+        }
+        tbl.setFields(Hive.getFieldsFromDeserializer(tbl.getTableName(), tbl.
+              getDeserializer()));
+      }
     } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDFILEFORMAT) {
       if(part != null) {
         part.getTPartition().getSd().setInputFormat(alterTbl.getInputFormat());

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1181197&r1=1181196&r2=1181197&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Mon Oct 10 20:11:23 2011
@@ -195,6 +195,10 @@ public class DDLSemanticAnalyzer extends
         analyzeAlterTableLocation(ast, tableName, partSpec);
      } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ALTERPARTS_MERGEFILES) {
         analyzeAlterTablePartMergeFiles(tablePart, ast, tableName, partSpec);
+      } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERIALIZER) {
+        analyzeAlterTableSerde(ast, tableName, partSpec);
+      } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES) {
+        analyzeAlterTableSerdeProps(ast, tableName, partSpec);
       }
       break;
     }
@@ -294,12 +298,6 @@ public class DDLSemanticAnalyzer extends
     case HiveParser.TOK_ALTERTABLE_PROPERTIES:
       analyzeAlterTableProps(ast, false);
       break;
-    case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
-      analyzeAlterTableSerdeProps(ast);
-      break;
-    case HiveParser.TOK_ALTERTABLE_SERIALIZER:
-      analyzeAlterTableSerde(ast);
-      break;
     case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
       analyzeAlterTableClusterSort(ast);
       break;
@@ -945,52 +943,38 @@ public class DDLSemanticAnalyzer extends
         alterTblDesc), conf));
   }
 
-  private void analyzeAlterTableSerdeProps(ASTNode ast)
+  private void analyzeAlterTableSerdeProps(ASTNode ast, String tableName,
+      HashMap<String, String> partSpec)
       throws SemanticException {
-    String tableName = getUnescapedName((ASTNode)ast.getChild(0));
-    HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(1))
+    HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(0))
         .getChild(0));
     AlterTableDesc alterTblDesc = new AlterTableDesc(
         AlterTableTypes.ADDSERDEPROPS);
     alterTblDesc.setProps(mapProp);
     alterTblDesc.setOldName(tableName);
+    alterTblDesc.setPartSpec(partSpec);
 
-    try {
-      Table tab = db.getTable(db.getCurrentDatabase(), tableName, false);
-      if (tab != null) {
-        inputs.add(new ReadEntity(tab));
-        outputs.add(new WriteEntity(tab));
-      }
-    } catch (HiveException e) {
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
-    }
-
+    addInputsOutputsAlterTable(tableName, partSpec);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }
 
-  private void analyzeAlterTableSerde(ASTNode ast) throws SemanticException {
-    String tableName = getUnescapedName((ASTNode)ast.getChild(0));
-    String serdeName = unescapeSQLString(ast.getChild(1).getText());
+  private void analyzeAlterTableSerde(ASTNode ast, String tableName,
+      HashMap<String, String> partSpec)
+      throws SemanticException {
+
+    String serdeName = unescapeSQLString(ast.getChild(0).getText());
     AlterTableDesc alterTblDesc = new AlterTableDesc(AlterTableTypes.ADDSERDE);
-    if (ast.getChildCount() > 2) {
-      HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(2))
+    if (ast.getChildCount() > 1) {
+      HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(1))
           .getChild(0));
       alterTblDesc.setProps(mapProp);
     }
     alterTblDesc.setOldName(tableName);
     alterTblDesc.setSerdeName(serdeName);
+    alterTblDesc.setPartSpec(partSpec);
 
-    try {
-      Table tab = db.getTable(db.getCurrentDatabase(), tableName, false);
-      if (tab != null) {
-        inputs.add(new ReadEntity(tab));
-        outputs.add(new WriteEntity(tab));
-      }
-    } catch (HiveException e) {
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
-    }
-
+    addInputsOutputsAlterTable(tableName, partSpec);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), conf));
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=1181197&r1=1181196&r2=1181197&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Mon Oct 10 20:11:23 2011
@@ -572,7 +572,6 @@ alterTableStatementSuffix
     | alterStatementSuffixArchive
     | alterStatementSuffixUnArchive
     | alterStatementSuffixProperties
-    | alterStatementSuffixSerdeProperties
     | alterTblPartitionStatement
     | alterStatementSuffixClusterbySortby
     ;
@@ -705,10 +704,10 @@ alterViewSuffixProperties
 alterStatementSuffixSerdeProperties
 @init { msgs.push("alter serdes statement"); }
 @after { msgs.pop(); }
-    : name=Identifier KW_SET KW_SERDE serdeName=StringLiteral (KW_WITH KW_SERDEPROPERTIES tableProperties)?
-    -> ^(TOK_ALTERTABLE_SERIALIZER $name $serdeName tableProperties?)
-    | name=Identifier KW_SET KW_SERDEPROPERTIES tableProperties
-    -> ^(TOK_ALTERTABLE_SERDEPROPERTIES $name tableProperties)
+    : KW_SET KW_SERDE serdeName=StringLiteral (KW_WITH KW_SERDEPROPERTIES tableProperties)?
+    -> ^(TOK_ALTERTABLE_SERIALIZER $serdeName tableProperties?)
+    | KW_SET KW_SERDEPROPERTIES tableProperties
+    -> ^(TOK_ALTERTABLE_SERDEPROPERTIES tableProperties)
     ;
 
 tablePartitionPrefix
@@ -732,6 +731,7 @@ alterTblPartitionStatementSuffix
   | alterStatementSuffixLocation
   | alterStatementSuffixProtectMode
   | alterStatementSuffixMergeFiles
+  | alterStatementSuffixSerdeProperties
   ;
 
 alterStatementSuffixFileFormat

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1181197&r1=1181196&r2=1181197&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Mon Oct 10 20:11:23 2011
@@ -56,8 +56,6 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_ALTERTABLE_ARCHIVE, HiveOperation.ALTERTABLE_ARCHIVE);
     commandType.put(HiveParser.TOK_ALTERTABLE_UNARCHIVE, HiveOperation.ALTERTABLE_UNARCHIVE);
     commandType.put(HiveParser.TOK_ALTERTABLE_PROPERTIES, HiveOperation.ALTERTABLE_PROPERTIES);
-    commandType.put(HiveParser.TOK_ALTERTABLE_SERIALIZER, HiveOperation.ALTERTABLE_SERIALIZER);
-    commandType.put(HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES, HiveOperation.ALTERTABLE_SERDEPROPERTIES);
     commandType.put(HiveParser.TOK_ALTERTABLE_CLUSTER_SORT, HiveOperation.ALTERTABLE_CLUSTER_SORT);
     commandType.put(HiveParser.TOK_SHOWDATABASES, HiveOperation.SHOWDATABASES);
     commandType.put(HiveParser.TOK_SHOWTABLES, HiveOperation.SHOWTABLES);
@@ -104,8 +102,14 @@ public final class SemanticAnalyzerFacto
         new HiveOperation[] { HiveOperation.ALTERTABLE_LOCATION,
             HiveOperation.ALTERPARTITION_LOCATION });
     tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_ALTERPARTS_MERGEFILES,
-            new HiveOperation[] {HiveOperation.ALTERTABLE_MERGEFILES,
+        new HiveOperation[] {HiveOperation.ALTERTABLE_MERGEFILES,
             HiveOperation.ALTERPARTITION_MERGEFILES });
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_SERIALIZER,
+        new HiveOperation[] {HiveOperation.ALTERTABLE_SERIALIZER,
+            HiveOperation.ALTERPARTITION_SERIALIZER });
+    tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES,
+        new HiveOperation[] {HiveOperation.ALTERTABLE_SERDEPROPERTIES,
+            HiveOperation.ALTERPARTITION_SERDEPROPERTIES });
   }
 
   public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1181197&r1=1181196&r2=1181197&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java Mon Oct 10 20:11:23 2011
@@ -44,7 +44,9 @@ public enum HiveOperation {
   ALTERTABLE_UNARCHIVE("ALTERTABLE_UNARCHIVE", new Privilege[]{Privilege.ALTER_DATA}, null),
   ALTERTABLE_PROPERTIES("ALTERTABLE_PROPERTIES", new Privilege[]{Privilege.ALTER_METADATA},
null),
   ALTERTABLE_SERIALIZER("ALTERTABLE_SERIALIZER", new Privilege[]{Privilege.ALTER_METADATA},
null),
+  ALTERPARTITION_SERIALIZER("ALTERPARTITION_SERIALIZER", new Privilege[]{Privilege.ALTER_METADATA}, null),
   ALTERTABLE_SERDEPROPERTIES("ALTERTABLE_SERDEPROPERTIES", new Privilege[]{Privilege.ALTER_METADATA},
null),
+  ALTERPARTITION_SERDEPROPERTIES("ALTERPARTITION_SERDEPROPERTIES", new Privilege[]{Privilege.ALTER_METADATA}, null),
   ALTERTABLE_CLUSTER_SORT("ALTERTABLE_CLUSTER_SORT", new Privilege[]{Privilege.ALTER_METADATA},
null),
   SHOWDATABASES("SHOWDATABASES", new Privilege[]{Privilege.SHOW_DATABASE}, null),
   SHOWTABLES("SHOWTABLES", null, null),

Added: hive/trunk/ql/src/test/queries/clientpositive/alter_table_serde.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/alter_table_serde.q?rev=1181197&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/alter_table_serde.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/alter_table_serde.q Mon Oct 10 20:11:23 2011
@@ -0,0 +1,33 @@
+-- test table
+create table test_table (id int, query string, name string);
+describe extended test_table;
+
+alter table test_table set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe';
+describe extended test_table;
+
+alter table test_table set serdeproperties ('field.delim' = ',');
+describe extended test_table;
+
+drop table test_table;
+
+--- test partitioned table
+create table test_table (id int, query string, name string) partitioned by (dt string);
+
+alter table test_table add partition (dt = '2011');
+describe extended test_table partition (dt='2011');
+
+alter table test_table set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe';
+describe extended test_table partition (dt='2011');
+
+alter table test_table set serdeproperties ('field.delim' = ',');
+describe extended test_table partition (dt='2011');
+
+-- test partitions
+
+alter table test_table partition(dt='2011') set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe';
+describe extended test_table partition (dt='2011');
+
+alter table test_table partition(dt='2011') set serdeproperties ('field.delim' = ',');
+describe extended test_table partition (dt='2011');
+
+drop table test_table

Added: hive/trunk/ql/src/test/results/clientpositive/alter_table_serde.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/alter_table_serde.q.out?rev=1181197&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/alter_table_serde.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/alter_table_serde.q.out Mon Oct 10 20:11:23 2011
@@ -0,0 +1,168 @@
+PREHOOK: query: -- test table
+create table test_table (id int, query string, name string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- test table
+create table test_table (id int, query string, name string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@test_table
+PREHOOK: query: describe extended test_table
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended test_table
+POSTHOOK: type: DESCTABLE
+id	int	
+query	string	
+name	string	
+	 	 
+Detailed Table Information	Table(tableName:test_table, dbName:default, owner:xiaol, createTime:1317799666,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:null),
FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name, type:string, comment:null)],
location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table, inputFormat:org.apache.hadoop.mapred.TextInputFormat,
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false,
numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[],
parameters:{transient_lastDdlTime=1317799666}, viewOriginalText:null, viewExpandedText:null,
tableType:MANAGED_TABLE)	
+PREHOOK: query: alter table test_table set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: default@test_table
+PREHOOK: Output: default@test_table
+POSTHOOK: query: alter table test_table set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@test_table
+POSTHOOK: Output: default@test_table
+PREHOOK: query: describe extended test_table
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended test_table
+POSTHOOK: type: DESCTABLE
+id	int	from deserializer
+query	string	from deserializer
+name	string	from deserializer
+	 	 
+Detailed Table Information	Table(tableName:test_table, dbName:default, owner:xiaol, createTime:1317799666,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:from
deserializer), FieldSchema(name:query, type:string, comment:from deserializer), FieldSchema(name:name,
type:string, comment:from deserializer)], location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[],
parameters:{last_modified_by=xiaol, last_modified_time=1317799666, transient_lastDdlTime=1317799666},
viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: alter table test_table set serdeproperties ('field.delim' = ',')
+PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
+PREHOOK: Input: default@test_table
+PREHOOK: Output: default@test_table
+POSTHOOK: query: alter table test_table set serdeproperties ('field.delim' = ',')
+POSTHOOK: type: ALTERTABLE_SERDEPROPERTIES
+POSTHOOK: Input: default@test_table
+POSTHOOK: Output: default@test_table
+PREHOOK: query: describe extended test_table
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended test_table
+POSTHOOK: type: DESCTABLE
+id	int	from deserializer
+query	string	from deserializer
+name	string	from deserializer
+	 	 
+Detailed Table Information	Table(tableName:test_table, dbName:default, owner:xiaol, createTime:1317799666,
lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:from
deserializer), FieldSchema(name:query, type:string, comment:from deserializer), FieldSchema(name:name,
type:string, comment:from deserializer)], location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe,
parameters:{serialization.format=1, field.delim=,}), bucketCols:[], sortCols:[], parameters:{}),
partitionKeys:[], parameters:{last_modified_by=xiaol, last_modified_time=1317799666, transient_lastDdlTime=1317799666},
viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)
 	
+PREHOOK: query: drop table test_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@test_table
+PREHOOK: Output: default@test_table
+POSTHOOK: query: drop table test_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@test_table
+POSTHOOK: Output: default@test_table
+PREHOOK: query: --- test partitioned table
+create table test_table (id int, query string, name string) partitioned by (dt string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: --- test partitioned table
+create table test_table (id int, query string, name string) partitioned by (dt string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@test_table
+PREHOOK: query: alter table test_table add partition (dt = '2011')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@test_table
+POSTHOOK: query: alter table test_table add partition (dt = '2011')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@test_table
+POSTHOOK: Output: default@test_table@dt=2011
+PREHOOK: query: describe extended test_table partition (dt='2011')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended test_table partition (dt='2011')
+POSTHOOK: type: DESCTABLE
+id	int	
+query	string	
+name	string	
+dt	string	
+	 	 
+Detailed Partition Information	Partition(values:[2011], dbName:default, tableName:test_table,
createTime:1317799667, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int,
comment:null), FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name,
type:string, comment:null)], location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table/dt=2011,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1317799667})

+PREHOOK: query: alter table test_table set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+PREHOOK: type: ALTERTABLE_SERIALIZER
+PREHOOK: Input: default@test_table
+PREHOOK: Output: default@test_table
+POSTHOOK: query: alter table test_table set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+POSTHOOK: type: ALTERTABLE_SERIALIZER
+POSTHOOK: Input: default@test_table
+POSTHOOK: Output: default@test_table
+PREHOOK: query: describe extended test_table partition (dt='2011')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended test_table partition (dt='2011')
+POSTHOOK: type: DESCTABLE
+id	int	from deserializer
+query	string	from deserializer
+name	string	from deserializer
+dt	string	
+	 	 
+Detailed Partition Information	Partition(values:[2011], dbName:default, tableName:test_table,
createTime:1317799667, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int,
comment:null), FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name,
type:string, comment:null)], location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table/dt=2011,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1317799667})

+PREHOOK: query: alter table test_table set serdeproperties ('field.delim' = ',')
+PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
+PREHOOK: Input: default@test_table
+PREHOOK: Output: default@test_table
+POSTHOOK: query: alter table test_table set serdeproperties ('field.delim' = ',')
+POSTHOOK: type: ALTERTABLE_SERDEPROPERTIES
+POSTHOOK: Input: default@test_table
+POSTHOOK: Output: default@test_table
+PREHOOK: query: describe extended test_table partition (dt='2011')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended test_table partition (dt='2011')
+POSTHOOK: type: DESCTABLE
+id	int	from deserializer
+query	string	from deserializer
+name	string	from deserializer
+dt	string	
+	 	 
+Detailed Partition Information	Partition(values:[2011], dbName:default, tableName:test_table,
createTime:1317799667, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int,
comment:null), FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name,
type:string, comment:null)], location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table/dt=2011,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{transient_lastDdlTime=1317799667})

+PREHOOK: query: -- test partitions
+
+alter table test_table partition(dt='2011') set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+PREHOOK: type: ALTERPARTITION_SERIALIZER
+PREHOOK: Input: default@test_table
+PREHOOK: Output: default@test_table@dt=2011
+POSTHOOK: query: -- test partitions
+
+alter table test_table partition(dt='2011') set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+POSTHOOK: type: ALTERPARTITION_SERIALIZER
+POSTHOOK: Input: default@test_table
+POSTHOOK: Input: default@test_table@dt=2011
+POSTHOOK: Output: default@test_table@dt=2011
+PREHOOK: query: describe extended test_table partition (dt='2011')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended test_table partition (dt='2011')
+POSTHOOK: type: DESCTABLE
+id	int	from deserializer
+query	string	from deserializer
+name	string	from deserializer
+dt	string	
+	 	 
+Detailed Partition Information	Partition(values:[2011], dbName:default, tableName:test_table,
createTime:1317799667, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int,
comment:null), FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name,
type:string, comment:null)], location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table/dt=2011,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe,
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), parameters:{last_modified_by=xiaol,
last_modified_time=1317799667, transient_lastDdlTime=1317799667})	
+PREHOOK: query: alter table test_table partition(dt='2011') set serdeproperties ('field.delim'
= ',')
+PREHOOK: type: ALTERPARTITION_SERDEPROPERTIES
+PREHOOK: Input: default@test_table
+PREHOOK: Output: default@test_table@dt=2011
+POSTHOOK: query: alter table test_table partition(dt='2011') set serdeproperties ('field.delim'
= ',')
+POSTHOOK: type: ALTERPARTITION_SERDEPROPERTIES
+POSTHOOK: Input: default@test_table
+POSTHOOK: Input: default@test_table@dt=2011
+POSTHOOK: Output: default@test_table@dt=2011
+PREHOOK: query: describe extended test_table partition (dt='2011')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe extended test_table partition (dt='2011')
+POSTHOOK: type: DESCTABLE
+id	int	from deserializer
+query	string	from deserializer
+name	string	from deserializer
+dt	string	
+	 	 
+Detailed Partition Information	Partition(values:[2011], dbName:default, tableName:test_table,
createTime:1317799667, lastAccessTime:0, sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int,
comment:null), FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name,
type:string, comment:null)], location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table/dt=2011,
inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat,
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe,
parameters:{serialization.format=1, field.delim=,}), bucketCols:[], sortCols:[], parameters:{}),
parameters:{last_modified_by=xiaol, last_modified_time=1317799668, transient_lastDdlTime=1317799668})

+PREHOOK: query: drop table test_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@test_table
+PREHOOK: Output: default@test_table
+POSTHOOK: query: drop table test_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@test_table
+POSTHOOK: Output: default@test_table



Mime
View raw message