hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From the...@apache.org
Subject svn commit: r1524930 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/parse/ java/org/apache/hadoop/hive/ql/plan/ test/org/apache/hadoop/hive/ql/ test/queries/clientpositive/ test/results/clientpositive/
Date Fri, 20 Sep 2013 08:02:35 GMT
Author: thejas
Date: Fri Sep 20 08:02:35 2013
New Revision: 1524930

URL: http://svn.apache.org/r1524930
Log:
HIVE-5122: Add partition for multiple partition ignores locations for non-first partitions
(Navis via Thejas Nair)

Added:
    hive/trunk/ql/src/test/queries/clientpositive/add_part_multiple.q
    hive/trunk/ql/src/test/results/clientpositive/add_part_multiple.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
    hive/trunk/ql/src/test/results/clientpositive/create_view_partitioned.q.out

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1524930&r1=1524929&r2=1524930&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Fri Sep 20 08:02:35 2013
@@ -38,7 +38,6 @@ import java.util.Set;
 
 import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
-import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
@@ -130,7 +129,6 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeParams;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -2585,41 +2583,37 @@ public class DDLSemanticAnalyzer extends
   private void analyzeAlterTableAddParts(CommonTree ast, boolean expectView)
       throws SemanticException {
 
+    // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists? alterStatementSuffixAddPartitionsElement+)
     String tblName = getUnescapedName((ASTNode)ast.getChild(0));
+    boolean ifNotExists = ast.getChild(1).getType() == HiveParser.TOK_IFNOTEXISTS;
+
     Table tab = getTable(tblName, true);
     boolean isView = tab.isView();
     validateAlterTableType(tab, AlterTableTypes.ADDPARTITION, expectView);
     inputs.add(new ReadEntity(tab));
 
-    // partition name to value
-    List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
-    addTablePartsOutputs(tblName, partSpecs);
+    List<AddPartitionDesc> partitionDescs = new ArrayList<AddPartitionDesc>();
 
-    Iterator<Map<String, String>> partIter = partSpecs.iterator();
+    int numCh = ast.getChildCount();
+    int start = ifNotExists ? 2 : 1;
 
     String currentLocation = null;
     Map<String, String> currentPart = null;
-    boolean ifNotExists = false;
-    List<AddPartitionDesc> partitionDescs = new ArrayList<AddPartitionDesc>();
-
-    int numCh = ast.getChildCount();
-    for (int num = 1; num < numCh; num++) {
-      CommonTree child = (CommonTree) ast.getChild(num);
+    for (int num = start; num < numCh; num++) {
+      ASTNode child = (ASTNode) ast.getChild(num);
       switch (child.getToken().getType()) {
-      case HiveParser.TOK_IFNOTEXISTS:
-        ifNotExists = true;
-        break;
       case HiveParser.TOK_PARTSPEC:
         if (currentPart != null) {
-          validatePartitionValues(currentPart);
-          AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
-              SessionState.get().getCurrentDatabase(), tblName, currentPart,
+          Partition partition = getPartitionForOutput(tab, currentPart);
+          if (partition == null || !ifNotExists) {
+            AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
+              tab.getDbName(), tblName, currentPart,
               currentLocation, ifNotExists, expectView);
-          partitionDescs.add(addPartitionDesc);
+            partitionDescs.add(addPartitionDesc);
+          }
+          currentLocation = null;
         }
-        // create new partition, set values
-        currentLocation = null;
-        currentPart = partIter.next();
+        currentPart = getPartSpec(child);
         break;
       case HiveParser.TOK_PARTITIONLOCATION:
         // if location specified, set in partition
@@ -2632,11 +2626,18 @@ public class DDLSemanticAnalyzer extends
 
     // add the last one
     if (currentPart != null) {
-      validatePartitionValues(currentPart);
-      AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
-          SessionState.get().getCurrentDatabase(), tblName, currentPart,
+      Partition partition = getPartitionForOutput(tab, currentPart);
+      if (partition == null || !ifNotExists) {
+        AddPartitionDesc addPartitionDesc = new AddPartitionDesc(
+          tab.getDbName(), tblName, currentPart,
           currentLocation, ifNotExists, expectView);
-      partitionDescs.add(addPartitionDesc);
+        partitionDescs.add(addPartitionDesc);
+      }
+    }
+
+    if (partitionDescs.isEmpty()) {
+      // nothing to do
+      return;
     }
 
     for (AddPartitionDesc addPartitionDesc : partitionDescs) {
@@ -2696,6 +2697,21 @@ public class DDLSemanticAnalyzer extends
     }
   }
 
+  private Partition getPartitionForOutput(Table tab, Map<String, String> currentPart)
+    throws SemanticException {
+    validatePartitionValues(currentPart);
+    try {
+      Partition partition = db.getPartition(tab, currentPart, false);
+      if (partition != null) {
+        outputs.add(new WriteEntity(partition));
+      }
+      return partition;
+    } catch (HiveException e) {
+      LOG.warn("wrong partition spec " + currentPart);
+    }
+    return null;
+  }
+
   /**
    * Rewrite the metadata for one or more partitions in a table. Useful when
    * an external process modifies files on HDFS and you want the pre/post

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1524930&r1=1524929&r2=1524930&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Fri Sep 20 08:02:35 2013
@@ -952,8 +952,12 @@ alterStatementChangeColPosition
 alterStatementSuffixAddPartitions
 @init { msgs.push("add partition statement"); }
 @after { msgs.pop(); }
-    : identifier KW_ADD ifNotExists? partitionSpec partitionLocation? (partitionSpec partitionLocation?)*
-    -> ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists? (partitionSpec partitionLocation?)+)
+    : identifier KW_ADD ifNotExists? alterStatementSuffixAddPartitionsElement+
+    -> ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists? alterStatementSuffixAddPartitionsElement+)
+    ;
+
+alterStatementSuffixAddPartitionsElement
+    : partitionSpec partitionLocation?
     ;
 
 alterStatementSuffixTouch

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java?rev=1524930&r1=1524929&r2=1524930&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AddPartitionDesc.java Fri Sep 20 08:02:35 2013
@@ -131,6 +131,7 @@ public class AddPartitionDesc extends DD
   /**
    * @return location of partition in relation to table
    */
+  @Explain(displayName = "Location")
   public String getLocation() {
     return location;
   }
@@ -150,6 +151,11 @@ public class AddPartitionDesc extends DD
     return partSpec;
   }
 
+  @Explain(displayName = "Spec")
+  public String getPartSpecString() {
+    return partSpec.toString();
+  }
+
   /**
    * @param partSpec
    *          partition specification

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java?rev=1524930&r1=1524929&r2=1524930&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java Fri Sep 20 08:02:35 2013
@@ -802,6 +802,7 @@ public class DDLWork implements Serializ
   /**
    * @return information about the partitions we want to add.
    */
+  @Explain(displayName = "Add Partition Operator")
   public AddPartitionDesc getAddPartitionDesc() {
     return addPartitionDesc;
   }

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1524930&r1=1524929&r2=1524930&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Fri Sep 20 08:02:35 2013
@@ -873,6 +873,15 @@ public class QTestUtil {
     }
   }
 
+  private final Pattern[] xmlPlanMask = toPattern(new String[] {
+      "<java version=\".*\" class=\"java.beans.XMLDecoder\">",
+      "<string>.*/tmp/.*</string>",
+      "<string>file:.*</string>",
+      "<string>pfile:.*</string>",
+      "<string>[0-9]{10}</string>",
+      "<string>/.*/warehouse/.*</string>"
+  });
+
   public int checkPlan(String tname, List<Task<? extends Serializable>> tasks)
throws Exception {
 
     if (tasks == null) {
@@ -892,17 +901,8 @@ public class QTestUtil {
         Utilities.serializePlan(plan, ofs, conf);
       }
 
-      String[] patterns = new String[] {
-          "<java version=\".*\" class=\"java.beans.XMLDecoder\">",
-          "<string>.*/tmp/.*</string>",
-          "<string>file:.*</string>",
-          "<string>pfile:.*</string>",
-          "<string>[0-9]{10}</string>",
-          "<string>/.*/warehouse/.*</string>"
-      };
-
       fixXml4JDK7(outf.getPath());
-      maskPatterns(patterns, outf.getPath());
+      maskPatterns(xmlPlanMask, outf.getPath());
 
       int exitVal = executeDiffCommand(outf.getPath(), planFile, true, false);
 
@@ -1041,13 +1041,21 @@ public class QTestUtil {
    * Get the value of the element in input. (Note: the returned value has no quotes.)
    */
   private static String getElementValue(String line, String name) {
-    assert(line.indexOf("<" + name + ">") != -1);
+    assert(line.contains("<" + name + ">"));
     int start = line.indexOf("<" + name + ">") + name.length() + 2;
     int end = line.indexOf("</" + name + ">");
     return line.substring(start, end);
   }
 
-  private void maskPatterns(String[] patterns, String fname) throws Exception {
+  private Pattern[] toPattern(String[] patternStrs) {
+    Pattern[] patterns = new Pattern[patternStrs.length];
+    for (int i = 0; i < patternStrs.length; i++) {
+      patterns[i] = Pattern.compile(patternStrs[i]);
+    }
+    return patterns;
+  }
+
+  private void maskPatterns(Pattern[] patterns, String fname) throws Exception {
     String maskPattern = "#### A masked pattern was here ####";
 
     String line;
@@ -1068,8 +1076,8 @@ public class QTestUtil {
 
     boolean lastWasMasked = false;
     while (null != (line = in.readLine())) {
-      for (String pattern : patterns) {
-        line = line.replaceAll(pattern, maskPattern);
+      for (Pattern pattern : patterns) {
+        line = pattern.matcher(line).replaceAll(maskPattern);
       }
 
       if (line.equals(maskPattern)) {
@@ -1090,47 +1098,46 @@ public class QTestUtil {
     out.close();
   }
 
+  private final Pattern[] planMask = toPattern(new String[] {
+      ".*file:.*",
+      ".*pfile:.*",
+      ".*hdfs:.*",
+      ".*/tmp/.*",
+      ".*invalidscheme:.*",
+      ".*lastUpdateTime.*",
+      ".*lastAccessTime.*",
+      ".*lastModifiedTime.*",
+      ".*[Oo]wner.*",
+      ".*CreateTime.*",
+      ".*LastAccessTime.*",
+      ".*Location.*",
+      ".*LOCATION '.*",
+      ".*transient_lastDdlTime.*",
+      ".*last_modified_.*",
+      ".*at org.*",
+      ".*at sun.*",
+      ".*at java.*",
+      ".*at junit.*",
+      ".*Caused by:.*",
+      ".*LOCK_QUERYID:.*",
+      ".*LOCK_TIME:.*",
+      ".*grantTime.*",
+      ".*[.][.][.] [0-9]* more.*",
+      ".*job_[0-9_]*.*",
+      ".*job_local[0-9_]*.*",
+      ".*USING 'java -cp.*",
+      "^Deleted.*",
+  });
+
   public int checkCliDriverResults(String tname) throws Exception {
     String[] cmdArray;
-    String[] patterns;
     assert(qMap.containsKey(tname));
 
     String outFileName = outPath(outDir, tname + ".out");
 
-    patterns = new String[] {
-        ".*file:.*",
-        ".*pfile:.*",
-        ".*hdfs:.*",
-        ".*/tmp/.*",
-        ".*invalidscheme:.*",
-        ".*lastUpdateTime.*",
-        ".*lastAccessTime.*",
-        ".*lastModifiedTime.*",
-        ".*[Oo]wner.*",
-        ".*CreateTime.*",
-        ".*LastAccessTime.*",
-        ".*Location.*",
-        ".*LOCATION '.*",
-        ".*transient_lastDdlTime.*",
-        ".*last_modified_.*",
-        ".*at org.*",
-        ".*at sun.*",
-        ".*at java.*",
-        ".*at junit.*",
-        ".*Caused by:.*",
-        ".*LOCK_QUERYID:.*",
-        ".*LOCK_TIME:.*",
-        ".*grantTime.*",
-        ".*[.][.][.] [0-9]* more.*",
-        ".*job_[0-9_]*.*",
-        ".*job_local[0-9_]*.*",
-        ".*USING 'java -cp.*",
-        "^Deleted.*",
-    };
-
     File f = new File(logDir, tname + ".out");
 
-    maskPatterns(patterns, f.getPath());
+    maskPatterns(planMask, f.getPath());
     int exitVal = executeDiffCommand(f.getPath(),
                                      outFileName, false,
                                      qSortSet.contains(tname));

Added: hive/trunk/ql/src/test/queries/clientpositive/add_part_multiple.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/add_part_multiple.q?rev=1524930&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/add_part_multiple.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/add_part_multiple.q Fri Sep 20 08:02:35 2013
@@ -0,0 +1,24 @@
+-- HIVE-5122 locations for 2nd, 3rd... partition are ignored
+
+CREATE TABLE add_part_test (key STRING, value STRING) PARTITIONED BY (ds STRING);
+
+explain
+ALTER TABLE add_part_test ADD IF NOT EXISTS
+PARTITION (ds='2010-01-01') location 'A'
+PARTITION (ds='2010-02-01') location 'B'
+PARTITION (ds='2010-03-01')
+PARTITION (ds='2010-04-01') location 'C';
+
+ALTER TABLE add_part_test ADD IF NOT EXISTS
+PARTITION (ds='2010-01-01') location 'A'
+PARTITION (ds='2010-02-01') location 'B'
+PARTITION (ds='2010-03-01')
+PARTITION (ds='2010-04-01') location 'C';
+
+from src TABLESAMPLE (1 ROWS)
+insert into table add_part_test PARTITION (ds='2010-01-01') select 100,100
+insert into table add_part_test PARTITION (ds='2010-02-01') select 200,200
+insert into table add_part_test PARTITION (ds='2010-03-01') select 400,300
+insert into table add_part_test PARTITION (ds='2010-04-01') select 500,400;
+
+select * from add_part_test;

Added: hive/trunk/ql/src/test/results/clientpositive/add_part_multiple.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/add_part_multiple.q.out?rev=1524930&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/add_part_multiple.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/add_part_multiple.q.out Fri Sep 20 08:02:35 2013
@@ -0,0 +1,129 @@
+PREHOOK: query: -- HIVE-5122 locations for 2nd, 3rd... partition are ignored
+
+CREATE TABLE add_part_test (key STRING, value STRING) PARTITIONED BY (ds STRING)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- HIVE-5122 locations for 2nd, 3rd... partition are ignored
+
+CREATE TABLE add_part_test (key STRING, value STRING) PARTITIONED BY (ds STRING)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@add_part_test
+PREHOOK: query: explain
+ALTER TABLE add_part_test ADD IF NOT EXISTS
+PARTITION (ds='2010-01-01') location 'A'
+PARTITION (ds='2010-02-01') location 'B'
+PARTITION (ds='2010-03-01')
+PARTITION (ds='2010-04-01') location 'C'
+PREHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: query: explain
+ALTER TABLE add_part_test ADD IF NOT EXISTS
+PARTITION (ds='2010-01-01') location 'A'
+PARTITION (ds='2010-02-01') location 'B'
+PARTITION (ds='2010-03-01')
+PARTITION (ds='2010-04-01') location 'C'
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+ABSTRACT SYNTAX TREE:
+#### A masked pattern was here ####
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 is a root stage
+  Stage-2 is a root stage
+  Stage-3 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+      Add Partition Operator:
+#### A masked pattern was here ####
+          Spec: {ds=2010-01-01}
+
+  Stage: Stage-1
+      Add Partition Operator:
+#### A masked pattern was here ####
+          Spec: {ds=2010-02-01}
+
+  Stage: Stage-2
+      Add Partition Operator:
+          Spec: {ds=2010-03-01}
+
+  Stage: Stage-3
+      Add Partition Operator:
+#### A masked pattern was here ####
+          Spec: {ds=2010-04-01}
+
+
+PREHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS
+PARTITION (ds='2010-01-01') location 'A'
+PARTITION (ds='2010-02-01') location 'B'
+PARTITION (ds='2010-03-01')
+PARTITION (ds='2010-04-01') location 'C'
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@add_part_test
+POSTHOOK: query: ALTER TABLE add_part_test ADD IF NOT EXISTS
+PARTITION (ds='2010-01-01') location 'A'
+PARTITION (ds='2010-02-01') location 'B'
+PARTITION (ds='2010-03-01')
+PARTITION (ds='2010-04-01') location 'C'
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@add_part_test
+POSTHOOK: Output: default@add_part_test@ds=2010-01-01
+POSTHOOK: Output: default@add_part_test@ds=2010-02-01
+POSTHOOK: Output: default@add_part_test@ds=2010-03-01
+POSTHOOK: Output: default@add_part_test@ds=2010-04-01
+PREHOOK: query: from src TABLESAMPLE (1 ROWS)
+insert into table add_part_test PARTITION (ds='2010-01-01') select 100,100
+insert into table add_part_test PARTITION (ds='2010-02-01') select 200,200
+insert into table add_part_test PARTITION (ds='2010-03-01') select 400,300
+insert into table add_part_test PARTITION (ds='2010-04-01') select 500,400
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@add_part_test@ds=2010-01-01
+PREHOOK: Output: default@add_part_test@ds=2010-02-01
+PREHOOK: Output: default@add_part_test@ds=2010-03-01
+PREHOOK: Output: default@add_part_test@ds=2010-04-01
+POSTHOOK: query: from src TABLESAMPLE (1 ROWS)
+insert into table add_part_test PARTITION (ds='2010-01-01') select 100,100
+insert into table add_part_test PARTITION (ds='2010-02-01') select 200,200
+insert into table add_part_test PARTITION (ds='2010-03-01') select 400,300
+insert into table add_part_test PARTITION (ds='2010-04-01') select 500,400
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@add_part_test@ds=2010-01-01
+POSTHOOK: Output: default@add_part_test@ds=2010-02-01
+POSTHOOK: Output: default@add_part_test@ds=2010-03-01
+POSTHOOK: Output: default@add_part_test@ds=2010-04-01
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-01-01).key SIMPLE []
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-01-01).value SIMPLE []
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-02-01).key SIMPLE []
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-02-01).value SIMPLE []
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-03-01).key SIMPLE []
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-03-01).value SIMPLE []
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-04-01).key SIMPLE []
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-04-01).value SIMPLE []
+PREHOOK: query: select * from add_part_test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@add_part_test
+PREHOOK: Input: default@add_part_test@ds=2010-01-01
+PREHOOK: Input: default@add_part_test@ds=2010-02-01
+PREHOOK: Input: default@add_part_test@ds=2010-03-01
+PREHOOK: Input: default@add_part_test@ds=2010-04-01
+#### A masked pattern was here ####
+POSTHOOK: query: select * from add_part_test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@add_part_test
+POSTHOOK: Input: default@add_part_test@ds=2010-01-01
+POSTHOOK: Input: default@add_part_test@ds=2010-02-01
+POSTHOOK: Input: default@add_part_test@ds=2010-03-01
+POSTHOOK: Input: default@add_part_test@ds=2010-04-01
+#### A masked pattern was here ####
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-01-01).key SIMPLE []
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-01-01).value SIMPLE []
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-02-01).key SIMPLE []
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-02-01).value SIMPLE []
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-03-01).key SIMPLE []
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-03-01).value SIMPLE []
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-04-01).key SIMPLE []
+POSTHOOK: Lineage: add_part_test PARTITION(ds=2010-04-01).value SIMPLE []
+100	100	2010-01-01
+200	200	2010-02-01
+400	300	2010-03-01
+500	400	2010-04-01

Modified: hive/trunk/ql/src/test/results/clientpositive/create_view_partitioned.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/create_view_partitioned.q.out?rev=1524930&r1=1524929&r2=1524930&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/create_view_partitioned.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/create_view_partitioned.q.out Fri Sep 20 08:02:35 2013
@@ -133,14 +133,12 @@ PREHOOK: query: -- should work since we 
 ALTER VIEW vp1
 ADD IF NOT EXISTS PARTITION (value='val_xyz')
 PREHOOK: type: ALTERTABLE_ADDPARTS
-PREHOOK: Input: default@src
 PREHOOK: Input: default@vp1
 PREHOOK: Output: default@vp1@value=val_xyz
 POSTHOOK: query: -- should work since we use IF NOT EXISTS
 ALTER VIEW vp1
 ADD IF NOT EXISTS PARTITION (value='val_xyz')
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-POSTHOOK: Input: default@src
 POSTHOOK: Input: default@vp1
 POSTHOOK: Output: default@vp1@value=val_xyz
 PREHOOK: query: SHOW PARTITIONS vp1



Mime
View raw message