hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From the...@apache.org
Subject svn commit: r1527029 - in /hive/branches/branch-0.12: common/src/java/org/apache/hadoop/hive/conf/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test/queries/clientnegative/ ql/src/test/queries/clientpositiv...
Date Fri, 27 Sep 2013 18:54:29 GMT
Author: thejas
Date: Fri Sep 27 18:54:28 2013
New Revision: 1527029

URL: http://svn.apache.org/r1527029
Log:
HIVE-5297 Hive does not honor type for partition columns (Vikram Dixit via Harish Butani)

Added:
    hive/branches/branch-0.12/ql/src/test/queries/clientnegative/illegal_partition_type.q
    hive/branches/branch-0.12/ql/src/test/queries/clientnegative/illegal_partition_type2.q
    hive/branches/branch-0.12/ql/src/test/queries/clientpositive/partition_type_check.q
    hive/branches/branch-0.12/ql/src/test/results/clientnegative/illegal_partition_type.q.out
    hive/branches/branch-0.12/ql/src/test/results/clientnegative/illegal_partition_type2.q.out
    hive/branches/branch-0.12/ql/src/test/results/clientpositive/partition_type_check.q.out
Modified:
    hive/branches/branch-0.12/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
    hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
    hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hive/branches/branch-0.12/ql/src/test/queries/clientpositive/alter_partition_coltype.q
    hive/branches/branch-0.12/ql/src/test/results/clientnegative/alter_table_add_partition.q.out
    hive/branches/branch-0.12/ql/src/test/results/clientnegative/alter_view_failure5.q.out
    hive/branches/branch-0.12/ql/src/test/results/clientpositive/alter_partition_coltype.q.out

Modified: hive/branches/branch-0.12/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1527029&r1=1527028&r2=1527029&view=diff
==============================================================================
--- hive/branches/branch-0.12/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/branch-0.12/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Fri Sep 27 18:54:28 2013
@@ -811,6 +811,8 @@ public class HiveConf extends Configurat
 
     // Whether to show the unquoted partition names in query results.
     HIVE_DECODE_PARTITION_NAME("hive.decode.partition.name", false),
+
+    HIVE_TYPE_CHECK_ON_INSERT("hive.typecheck.on.insert", true),
     ;
 
     public final String varname;

Modified: hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java?rev=1527029&r1=1527028&r2=1527029&view=diff
==============================================================================
--- hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (original)
+++ hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java Fri Sep 27 18:54:28 2013
@@ -362,6 +362,7 @@ public enum ErrorMsg {
   UNSUPPORTED_ALTER_TBL_OP(10245, "{0} alter table options is not supported"),
   INVALID_BIGTABLE_MAPJOIN(10246, "{0} table chosen for streaming is not valid", true),
   MISSING_OVER_CLAUSE(10247, "Missing over clause for function : "),
+  PARTITION_SPEC_TYPE_MISMATCH(10248, "Cannot add partition column {0} of type {1} as it cannot be converted to type {2}", true),
 
   SCRIPT_INIT_ERROR(20000, "Unable to initialize custom script."),
  SCRIPT_IO_ERROR(20001, "An error occurred while reading or writing to your custom script. "

Modified: hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1527029&r1=1527028&r2=1527029&view=diff
==============================================================================
--- hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
(original)
+++ hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
Fri Sep 27 18:54:28 2013
@@ -29,6 +29,7 @@ import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 
 import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
@@ -40,6 +41,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryProperties;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -59,10 +61,15 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -304,6 +311,13 @@ public abstract class BaseSemanticAnalyz
     rootTasks = new ArrayList<Task<? extends Serializable>>();
   }
 
+  public static String stripIdentifierQuotes(String val) {
+    if ((val.charAt(0) == '`' && val.charAt(val.length() - 1) == '`')) {
+      val = val.substring(1, val.length() - 1);
+    }
+    return val;
+  }
+
   public static String stripQuotes(String val) {
     return PlanUtils.stripQuotes(val);
   }
@@ -580,7 +594,7 @@ public abstract class BaseSemanticAnalyz
         // child 2 is the optional comment of the column
         if (child.getChildCount() == 3) {
           col.setComment(unescapeSQLString(child.getChild(2).getText()));
-        }        
+        }
       }
       colList.add(col);
     }
@@ -748,7 +762,7 @@ public abstract class BaseSemanticAnalyz
         }
 
        // check if the columns specified in the partition() clause are actually partition columns
-        Utilities.validatePartSpec(tableHandle, partSpec);
+        validatePartSpec(tableHandle, partSpec, ast, conf);
 
         // check if the partition spec is valid
         if (numDynParts > 0) {
@@ -1115,4 +1129,79 @@ public abstract class BaseSemanticAnalyz
     return storedAsDirs;
   }
 
+  private static void getPartExprNodeDesc(ASTNode astNode,
+      Map<ASTNode, ExprNodeDesc> astExprNodeMap)
+          throws SemanticException, HiveException {
+
+    if ((astNode == null) || (astNode.getChildren() == null) ||
+        (astNode.getChildren().size() <= 1)) {
+      return;
+    }
+
+    TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
+    for (Node childNode : astNode.getChildren()) {
+      ASTNode childASTNode = (ASTNode)childNode;
+
+      if (childASTNode.getType() != HiveParser.TOK_PARTVAL) {
+        getPartExprNodeDesc(childASTNode, astExprNodeMap);
+      } else {
+        if (childASTNode.getChildren().size() <= 1) {
+          throw new HiveException("This is dynamic partitioning");
+        }
+
+        ASTNode partValASTChild = (ASTNode)childASTNode.getChildren().get(1);
+        astExprNodeMap.put((ASTNode)childASTNode.getChildren().get(0),
+            TypeCheckProcFactory.genExprNode(partValASTChild, typeCheckCtx).get(partValASTChild));
+      }
+    }
+  }
+
+  public static void validatePartSpec(Table tbl,
+      Map<String, String> partSpec, ASTNode astNode, HiveConf conf) throws SemanticException {
+
+    Map<ASTNode, ExprNodeDesc> astExprNodeMap = new HashMap<ASTNode, ExprNodeDesc>();
+
+    Utilities.validatePartSpec(tbl, partSpec);
+
+    if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_TYPE_CHECK_ON_INSERT)) {
+      try {
+        getPartExprNodeDesc(astNode, astExprNodeMap);
+      } catch (HiveException e) {
+        return;
+      }
+      List<FieldSchema> parts = tbl.getPartitionKeys();
+      Map<String, String> partCols = new HashMap<String, String>(parts.size());
+      for (FieldSchema col : parts) {
+        partCols.put(col.getName(), col.getType().toLowerCase());
+      }
+      for (Entry<ASTNode, ExprNodeDesc> astExprNodePair : astExprNodeMap.entrySet()) {
+
+        String astKeyName = astExprNodePair.getKey().toString().toLowerCase();
+        if (astExprNodePair.getKey().getType() == HiveParser.Identifier) {
+          astKeyName = stripIdentifierQuotes(astKeyName);
+        }
+        String colType = partCols.get(astKeyName);
+        ObjectInspector inputOI = astExprNodePair.getValue().getWritableObjectInspector();
+
+        TypeInfo expectedType =
+            TypeInfoUtils.getTypeInfoFromTypeString(colType);
+        ObjectInspector outputOI =
+            TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(expectedType);
+        Object value = null;
+        try {
+          value =
+              ExprNodeEvaluatorFactory.get(astExprNodePair.getValue()).
+              evaluate(partSpec.get(astKeyName));
+        } catch (HiveException e) {
+          throw new SemanticException(e);
+        }
+        Object convertedValue =
+          ObjectInspectorConverters.getConverter(inputOI, outputOI).convert(value);
+        if (convertedValue == null) {
+          throw new SemanticException(ErrorMsg.PARTITION_SPEC_TYPE_MISMATCH.format(astKeyName,
+              inputOI.getTypeName(), outputOI.getTypeName()));
+        }
+      }
+    }
+  }
 }

Modified: hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1527029&r1=1527028&r2=1527029&view=diff
==============================================================================
--- hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
(original)
+++ hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
Fri Sep 27 18:54:28 2013
@@ -2614,6 +2614,7 @@ public class DDLSemanticAnalyzer extends
           currentLocation = null;
         }
         currentPart = getPartSpec(child);
+        validatePartSpec(tab, currentPart, (ASTNode)child, conf);
         break;
       case HiveParser.TOK_PARTITIONLOCATION:
         // if location specified, set in partition

Modified: hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java?rev=1527029&r1=1527028&r2=1527029&view=diff
==============================================================================
--- hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
(original)
+++ hive/branches/branch-0.12/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
Fri Sep 27 18:54:28 2013
@@ -110,9 +110,14 @@ public final class TypeCheckProcFactory 
     // build the exprNodeFuncDesc with recursively built children.
     ASTNode expr = (ASTNode) nd;
     TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
+
     RowResolver input = ctx.getInputRR();
     ExprNodeDesc desc = null;
 
+    if ((ctx == null) || (input == null)) {
+      return null;
+    }
+
     // If the current subExpression is pre-calculated, as in Group-By etc.
     ColumnInfo colInfo = input.getExpression(expr);
     if (colInfo != null) {

Added: hive/branches/branch-0.12/ql/src/test/queries/clientnegative/illegal_partition_type.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/test/queries/clientnegative/illegal_partition_type.q?rev=1527029&view=auto
==============================================================================
--- hive/branches/branch-0.12/ql/src/test/queries/clientnegative/illegal_partition_type.q
(added)
+++ hive/branches/branch-0.12/ql/src/test/queries/clientnegative/illegal_partition_type.q
Fri Sep 27 18:54:28 2013
@@ -0,0 +1,7 @@
+-- begin part(string, int) pass(string, string)
+CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day int) ROW FORMAT DELIMITED
FIELDS TERMINATED BY ',' ;
+LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June',
day='second');
+
+select * from tab1;
+drop table tab1;
+

Added: hive/branches/branch-0.12/ql/src/test/queries/clientnegative/illegal_partition_type2.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/test/queries/clientnegative/illegal_partition_type2.q?rev=1527029&view=auto
==============================================================================
--- hive/branches/branch-0.12/ql/src/test/queries/clientnegative/illegal_partition_type2.q
(added)
+++ hive/branches/branch-0.12/ql/src/test/queries/clientnegative/illegal_partition_type2.q
Fri Sep 27 18:54:28 2013
@@ -0,0 +1,3 @@
+create table tab1 (id1 int, id2 string) PARTITIONED BY(month string,day int) row format delimited
fields terminated by ',';
+alter table tab1 add partition (month='June', day='second');
+drop table tab1;

Modified: hive/branches/branch-0.12/ql/src/test/queries/clientpositive/alter_partition_coltype.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/test/queries/clientpositive/alter_partition_coltype.q?rev=1527029&r1=1527028&r2=1527029&view=diff
==============================================================================
--- hive/branches/branch-0.12/ql/src/test/queries/clientpositive/alter_partition_coltype.q
(original)
+++ hive/branches/branch-0.12/ql/src/test/queries/clientpositive/alter_partition_coltype.q
Fri Sep 27 18:54:28 2013
@@ -24,6 +24,8 @@ select count(*) from alter_coltype where
 -- alter partition key column data type for ts column.
 alter table alter_coltype partition column (ts double);
 
+alter table alter_coltype partition column (dt string);
+
 -- load a new partition using new data type.
 insert overwrite table alter_coltype partition(dt='100x', ts=3.0) select * from src1;
 

Added: hive/branches/branch-0.12/ql/src/test/queries/clientpositive/partition_type_check.q
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/test/queries/clientpositive/partition_type_check.q?rev=1527029&view=auto
==============================================================================
--- hive/branches/branch-0.12/ql/src/test/queries/clientpositive/partition_type_check.q (added)
+++ hive/branches/branch-0.12/ql/src/test/queries/clientpositive/partition_type_check.q Fri
Sep 27 18:54:28 2013
@@ -0,0 +1,24 @@
+set hive.typecheck.on.insert = true;
+
+-- begin part(string, string) pass(string, int)
+CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day string) stored as
textfile;
+LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June',
day=2);
+
+select * from tab1;
+drop table tab1;
+
+-- begin part(string, int) pass(string, string)
+CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day int) stored as textfile;
+LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June',
day='2');
+
+select * from tab1;
+drop table tab1;
+
+-- begin part(string, date) pass(string, date)
+create table tab1 (id1 int, id2 string) PARTITIONED BY(month string,day date) stored as textfile;
+alter table tab1 add partition (month='June', day='2008-01-01');
+LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June',
day='2008-01-01');
+
+select id1, id2, day from tab1 where day='2008-01-01';
+drop table tab1;
+

Modified: hive/branches/branch-0.12/ql/src/test/results/clientnegative/alter_table_add_partition.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/test/results/clientnegative/alter_table_add_partition.q.out?rev=1527029&r1=1527028&r2=1527029&view=diff
==============================================================================
--- hive/branches/branch-0.12/ql/src/test/results/clientnegative/alter_table_add_partition.q.out
(original)
+++ hive/branches/branch-0.12/ql/src/test/results/clientnegative/alter_table_add_partition.q.out
Fri Sep 27 18:54:28 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table mp (a int) partitioned by (b int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@mp
-FAILED: SemanticException [Error 10214]: Invalid partition spec specified table is partitioned
but partition spec is not specified or does not fully match table partitioning: {b=1, c=1}
+FAILED: SemanticException [Error 10098]: Non-Partition column appears in the partition specification:
 c

Modified: hive/branches/branch-0.12/ql/src/test/results/clientnegative/alter_view_failure5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/test/results/clientnegative/alter_view_failure5.q.out?rev=1527029&r1=1527028&r2=1527029&view=diff
==============================================================================
--- hive/branches/branch-0.12/ql/src/test/results/clientnegative/alter_view_failure5.q.out
(original)
+++ hive/branches/branch-0.12/ql/src/test/results/clientnegative/alter_view_failure5.q.out
Fri Sep 27 18:54:28 2013
@@ -13,4 +13,4 @@ AS 
 SELECT * FROM src
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Output: default@xxx6
-FAILED: SemanticException [Error 10214]: Invalid partition spec specified value not found
in table's partition spec: {v=val_86}
+FAILED: SemanticException [Error 10098]: Non-Partition column appears in the partition specification:
 v

Added: hive/branches/branch-0.12/ql/src/test/results/clientnegative/illegal_partition_type.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/test/results/clientnegative/illegal_partition_type.q.out?rev=1527029&view=auto
==============================================================================
--- hive/branches/branch-0.12/ql/src/test/results/clientnegative/illegal_partition_type.q.out
(added)
+++ hive/branches/branch-0.12/ql/src/test/results/clientnegative/illegal_partition_type.q.out
Fri Sep 27 18:54:28 2013
@@ -0,0 +1,8 @@
+PREHOOK: query: -- begin part(string, int) pass(string, string)
+CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day int) ROW FORMAT DELIMITED
FIELDS TERMINATED BY ','
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- begin part(string, int) pass(string, string)
+CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day int) ROW FORMAT DELIMITED
FIELDS TERMINATED BY ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tab1
+FAILED: SemanticException [Error 10248]: Cannot add partition column day of type string as
it cannot be converted to type int

Added: hive/branches/branch-0.12/ql/src/test/results/clientnegative/illegal_partition_type2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/test/results/clientnegative/illegal_partition_type2.q.out?rev=1527029&view=auto
==============================================================================
--- hive/branches/branch-0.12/ql/src/test/results/clientnegative/illegal_partition_type2.q.out
(added)
+++ hive/branches/branch-0.12/ql/src/test/results/clientnegative/illegal_partition_type2.q.out
Fri Sep 27 18:54:28 2013
@@ -0,0 +1,6 @@
+PREHOOK: query: create table tab1 (id1 int, id2 string) PARTITIONED BY(month string,day int)
row format delimited fields terminated by ','
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tab1 (id1 int, id2 string) PARTITIONED BY(month string,day
int) row format delimited fields terminated by ','
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tab1
+FAILED: SemanticException [Error 10248]: Cannot add partition column day of type string as
it cannot be converted to type int

Modified: hive/branches/branch-0.12/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/test/results/clientpositive/alter_partition_coltype.q.out?rev=1527029&r1=1527028&r2=1527029&view=diff
==============================================================================
--- hive/branches/branch-0.12/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
(original)
+++ hive/branches/branch-0.12/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
Fri Sep 27 18:54:28 2013
@@ -240,6 +240,17 @@ POSTHOOK: Lineage: alter_coltype PARTITI
 POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
 POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+PREHOOK: query: alter table alter_coltype partition column (dt string)
+PREHOOK: type: null
+PREHOOK: Input: default@alter_coltype
+POSTHOOK: query: alter table alter_coltype partition column (dt string)
+POSTHOOK: type: null
+POSTHOOK: Input: default@alter_coltype
+POSTHOOK: Output: default@alter_coltype
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=10,ts=3.0).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).key SIMPLE [(src1)src1.FieldSchema(name:key,
type:string, comment:default), ]
+POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 PREHOOK: query: -- load a new partition using new data type.
 insert overwrite table alter_coltype partition(dt='100x', ts=3.0) select * from src1
 PREHOOK: type: QUERY
@@ -1229,13 +1240,13 @@ POSTHOOK: Lineage: alter_coltype PARTITI
 POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 key                 	string              	None                
 value               	string              	None                
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 PREHOOK: query: desc alter_coltype partition (dt='100x', ts='6:30pm')
 PREHOOK: type: DESCTABLE
@@ -1249,13 +1260,13 @@ POSTHOOK: Lineage: alter_coltype PARTITI
 POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 key                 	string              	None                
 value               	string              	None                
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 PREHOOK: query: desc alter_coltype partition (dt='100x', ts=3.0)
 PREHOOK: type: DESCTABLE
@@ -1269,13 +1280,13 @@ POSTHOOK: Lineage: alter_coltype PARTITI
 POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 key                 	string              	None                
 value               	string              	None                
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 PREHOOK: query: desc alter_coltype partition (dt=10, ts=3.0)
 PREHOOK: type: DESCTABLE
@@ -1289,13 +1300,13 @@ POSTHOOK: Lineage: alter_coltype PARTITI
 POSTHOOK: Lineage: alter_coltype PARTITION(dt=100x,ts=6:30pm).value SIMPLE [(src1)src1.FieldSchema(name:value,
type:string, comment:default), ]
 key                 	string              	None                
 value               	string              	None                
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
-dt                  	int                 	None                
+dt                  	string              	None                
 ts                  	double              	None                
 PREHOOK: query: drop table alter_coltype
 PREHOOK: type: DROPTABLE

Added: hive/branches/branch-0.12/ql/src/test/results/clientpositive/partition_type_check.q.out
URL: http://svn.apache.org/viewvc/hive/branches/branch-0.12/ql/src/test/results/clientpositive/partition_type_check.q.out?rev=1527029&view=auto
==============================================================================
--- hive/branches/branch-0.12/ql/src/test/results/clientpositive/partition_type_check.q.out
(added)
+++ hive/branches/branch-0.12/ql/src/test/results/clientpositive/partition_type_check.q.out
Fri Sep 27 18:54:28 2013
@@ -0,0 +1,120 @@
+PREHOOK: query: -- begin part(string, string) pass(string, int)
+CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day string) stored as
textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- begin part(string, string) pass(string, int)
+CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day string) stored as
textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tab1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June',
day=2)
+PREHOOK: type: LOAD
+PREHOOK: Output: default@tab1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1
PARTITION(month='June', day=2)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@tab1
+POSTHOOK: Output: default@tab1@month=June/day=2
+PREHOOK: query: select * from tab1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab1
+PREHOOK: Input: default@tab1@month=June/day=2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tab1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab1
+POSTHOOK: Input: default@tab1@month=June/day=2
+#### A masked pattern was here ####
+1	11	June	2
+2	12	June	2
+3	13	June	2
+7	17	June	2
+8	18	June	2
+8	28	June	2
+PREHOOK: query: drop table tab1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tab1
+PREHOOK: Output: default@tab1
+POSTHOOK: query: drop table tab1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tab1
+POSTHOOK: Output: default@tab1
+PREHOOK: query: -- begin part(string, int) pass(string, string)
+CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day int) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- begin part(string, int) pass(string, string)
+CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day int) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tab1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June',
day='2')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@tab1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1
PARTITION(month='June', day='2')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@tab1
+POSTHOOK: Output: default@tab1@month=June/day=2
+PREHOOK: query: select * from tab1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab1
+PREHOOK: Input: default@tab1@month=June/day=2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from tab1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab1
+POSTHOOK: Input: default@tab1@month=June/day=2
+#### A masked pattern was here ####
+1	11	June	2
+2	12	June	2
+3	13	June	2
+7	17	June	2
+8	18	June	2
+8	28	June	2
+PREHOOK: query: drop table tab1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tab1
+PREHOOK: Output: default@tab1
+POSTHOOK: query: drop table tab1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tab1
+POSTHOOK: Output: default@tab1
+PREHOOK: query: -- begin part(string, date) pass(string, date)
+create table tab1 (id1 int, id2 string) PARTITIONED BY(month string,day date) stored as textfile
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- begin part(string, date) pass(string, date)
+create table tab1 (id1 int, id2 string) PARTITIONED BY(month string,day date) stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tab1
+PREHOOK: query: alter table tab1 add partition (month='June', day='2008-01-01')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@tab1
+POSTHOOK: query: alter table tab1 add partition (month='June', day='2008-01-01')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@tab1
+POSTHOOK: Output: default@tab1@month=June/day=2008-01-01
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June',
day='2008-01-01')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@tab1@month=June/day=2008-01-01
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1
PARTITION(month='June', day='2008-01-01')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@tab1@month=June/day=2008-01-01
+PREHOOK: query: select id1, id2, day from tab1 where day='2008-01-01'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tab1
+PREHOOK: Input: default@tab1@month=June/day=2008-01-01
+#### A masked pattern was here ####
+POSTHOOK: query: select id1, id2, day from tab1 where day='2008-01-01'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tab1
+POSTHOOK: Input: default@tab1@month=June/day=2008-01-01
+#### A masked pattern was here ####
+1	11	2008-01-01
+2	12	2008-01-01
+3	13	2008-01-01
+7	17	2008-01-01
+8	18	2008-01-01
+8	28	2008-01-01
+PREHOOK: query: drop table tab1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tab1
+PREHOOK: Output: default@tab1
+POSTHOOK: query: drop table tab1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tab1
+POSTHOOK: Output: default@tab1



Mime
View raw message