hive-commits mailing list archives

From: na...@apache.org
Subject: svn commit: r1427681 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/ java/org/apache/hadoop/hive/ql/exec/ java/org/apache/hadoop/hive/ql/parse/ java/org/apache/hadoop/hive/ql/plan/ test/queries/clientnegative/ test/queries/clientpositive/ test...
Date: Wed, 02 Jan 2013 07:02:36 GMT
Author: namit
Date: Wed Jan  2 07:02:35 2013
New Revision: 1427681

URL: http://svn.apache.org/viewvc?rev=1427681&view=rev
Log:
HIVE-446 Implement TRUNCATE
(Navis via namit)


Added:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TruncateTableDesc.java
    hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure1.q
    hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure2.q
    hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure3.q
    hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure4.q
    hive/trunk/ql/src/test/queries/clientpositive/truncate_table.q
    hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure1.q.out
    hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure2.q.out
    hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure3.q.out
    hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure4.q.out
    hive/trunk/ql/src/test/results/clientpositive/truncate_table.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java?rev=1427681&r1=1427680&r2=1427681&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java Wed Jan  2 07:02:35 2013
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql;
 
+import java.text.MessageFormat;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.regex.Matcher;
@@ -244,8 +245,11 @@ public enum ErrorMsg {
   INVALID_JDO_FILTER_EXPRESSION(10043, "Invalid expression for JDO filter"),
 
   SHOW_CREATETABLE_INDEX(10144, "SHOW CREATE TABLE does not support tables of type INDEX_TABLE."),
+  ALTER_BUCKETNUM_NONBUCKETIZED_TBL(10145, "Table is not bucketized."),
 
-  ALTER_BUCKETNUM_NONBUCKETIZED_TBL(10145, "Table is not bucketized"),
+  TRUNCATE_FOR_NON_MANAGED_TABLE(10146, "Cannot truncate non-managed table {0}.", true),
+  TRUNCATE_FOR_NON_NATIVE_TABLE(10147, "Cannot truncate non-native table {0}.", true),
+  PARTSPEC_FOR_NON_PARTITIONED_TABLE(10148, "Partition spec for non partitioned table {0}.", true),
 
   LOAD_INTO_STORED_AS_DIR(10195, "A stored-as-directories table cannot be used as target for LOAD"),
   ALTER_TBL_STOREDASDIR_NOT_SKEWED(10196, "This operation is only valid on skewed table."),
@@ -353,21 +357,27 @@ public enum ErrorMsg {
   private int errorCode;
   private String mesg;
   private String sqlState;
+  private MessageFormat format;
 
   private static final char SPACE = ' ';
   private static final Pattern ERROR_MESSAGE_PATTERN = Pattern.compile(".*Line [0-9]+:[0-9]+ (.*)");
   private static final Pattern ERROR_CODE_PATTERN =
     Pattern.compile("HiveException:\\s+\\[Error ([0-9]+)\\]: (.*)");
   private static Map<String, ErrorMsg> mesgToErrorMsgMap = new HashMap<String, ErrorMsg>();
+  private static Map<Pattern, ErrorMsg> formatToErrorMsgMap = new HashMap<Pattern, ErrorMsg>();
   private static int minMesgLength = -1;
 
   static {
     for (ErrorMsg errorMsg : values()) {
-      mesgToErrorMsgMap.put(errorMsg.getMsg().trim(), errorMsg);
-
-      int length = errorMsg.getMsg().trim().length();
-      if (minMesgLength == -1 || length < minMesgLength) {
-        minMesgLength = length;
+      if (errorMsg.format != null) {
+        String pattern = errorMsg.mesg.replaceAll("\\{.*\\}", ".*");
+        formatToErrorMsgMap.put(Pattern.compile("^" + pattern + "$"), errorMsg);
+      } else {
+        mesgToErrorMsgMap.put(errorMsg.getMsg().trim(), errorMsg);
+        int length = errorMsg.getMsg().trim().length();
+        if (minMesgLength == -1 || length < minMesgLength) {
+          minMesgLength = length;
+        }
       }
     }
   }
@@ -388,6 +398,12 @@ public enum ErrorMsg {
       return errorMsg;
     }
 
+    for (Map.Entry<Pattern, ErrorMsg> entry : formatToErrorMsgMap.entrySet()) {
+      if (entry.getKey().matcher(mesg).matches()) {
+        return entry.getValue();
+      }
+    }
+
     // if not see if the mesg follows type of format, which is typically the
     // case:
     // line 1:14 Table not found table_name
@@ -449,14 +465,23 @@ public enum ErrorMsg {
   }
 
   private ErrorMsg(int errorCode, String mesg) {
+    this(errorCode, mesg, "42000", false);
+  }
+
+  private ErrorMsg(int errorCode, String mesg, boolean format) {
     // 42000 is the generic SQLState for syntax error.
-    this(errorCode, mesg, "42000");
+    this(errorCode, mesg, "42000", format);
   }
 
   private ErrorMsg(int errorCode, String mesg, String sqlState) {
+    this(errorCode, mesg, sqlState, false);
+  }
+
+  private ErrorMsg(int errorCode, String mesg, String sqlState, boolean format) {
     this.errorCode = errorCode;
     this.mesg = mesg;
     this.sqlState = sqlState;
+    this.format = format ? new MessageFormat(mesg) : null;
   }
 
   private static int getLine(ASTNode tree) {
@@ -538,6 +563,15 @@ public enum ErrorMsg {
     return mesg + " " + reason;
   }
 
+  public String format(String reason) {
+    return format(new String[]{reason});
+  }
+
+  public String format(String... reasons) {
+    assert format != null;
+    return format.format(reasons);
+  }
+
   public String getErrorCodedMsg() {
     return "[Error " + errorCode + "]: " + mesg;
   }
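
For readers of the hunk above: the new boolean constructor flag turns a message into a java.text.MessageFormat, and the static initializer converts each parameterized message into an anchored regex so that a rendered error string can be matched back to its ErrorMsg. A minimal standalone sketch of that round trip, with a hypothetical class name and not part of the patch:

    import java.text.MessageFormat;
    import java.util.regex.Pattern;

    public class FormatSketch {
      public static void main(String[] args) {
        String template = "Cannot truncate non-managed table {0}.";

        // Forward: render the template, as ErrorMsg.format(String...) does.
        String rendered = new MessageFormat(template).format(new String[]{"external1"});
        System.out.println(rendered); // Cannot truncate non-managed table external1.

        // Reverse: rebuild the template as a regex, as the static initializer
        // does when populating formatToErrorMsgMap, and match the rendered text.
        Pattern p = Pattern.compile("^" + template.replaceAll("\\{.*\\}", ".*") + "$");
        System.out.println(p.matcher(rendered).matches()); // true
      }
    }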

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1427681&r1=1427680&r2=1427681&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Wed Jan  2 07:02:35 2013
@@ -148,6 +148,7 @@ import org.apache.hadoop.hive.ql.plan.Sh
 import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowTblPropertiesDesc;
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
+import org.apache.hadoop.hive.ql.plan.TruncateTableDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
@@ -412,10 +413,15 @@ public class DDLTask extends Task<DDLWor
       }
 
       AlterTablePartMergeFilesDesc mergeFilesDesc = work.getMergeFilesDesc();
-      if(mergeFilesDesc != null) {
+      if (mergeFilesDesc != null) {
         return mergeFiles(db, mergeFilesDesc);
       }
 
+      TruncateTableDesc truncateTableDesc = work.getTruncateTblDesc();
+      if (truncateTableDesc != null) {
+        return truncateTable(db, truncateTableDesc);
+      }
+
     } catch (InvalidTableException e) {
       formatter.consoleError(console, "Table " + e.getTableName() + " does not exist",
                              formatter.MISSING);
@@ -3886,6 +3892,49 @@ public class DDLTask extends Task<DDLWor
     return 0;
   }
 
+  private int truncateTable(Hive db, TruncateTableDesc truncateTableDesc) throws HiveException {
+    String tableName = truncateTableDesc.getTableName();
+    Map<String, String> partSpec = truncateTableDesc.getPartSpec();
+
+    Table table = db.getTable(tableName, true);
+
+    FsShell fshell = new FsShell(conf);
+    try {
+      for (Path location : getLocations(db, table, partSpec)) {
+        fshell.run(new String[]{"-rmr", location.toString()});
+        location.getFileSystem(conf).mkdirs(location);
+      }
+    } catch (Exception e) {
+      throw new HiveException(e);
+    } finally {
+      try {
+        fshell.close();
+      } catch (IOException e) {
+        // ignore
+      }
+    }
+    return 0;
+  }
+
+  private List<Path> getLocations(Hive db, Table table, Map<String, String> partSpec)
+      throws HiveException {
+    List<Path> locations = new ArrayList<Path>();
+    if (partSpec == null) {
+      if (table.isPartitioned()) {
+        for (Partition partition : db.getPartitions(table)) {
+          locations.add(partition.getPartitionPath());
+        }
+      } else {
+        locations.add(table.getPath());
+      }
+    } else {
+      for (Partition partition : db.getPartitionsByNames(table, partSpec)) {
+        locations.add(partition.getPartitionPath());
+      }
+    }
+    return locations;
+  }
+
   private int setGenericTableAttributes(Table tbl) {
     try {
       tbl.setOwner(conf.getUser());
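
The truncateTable implementation above removes each table or partition location with FsShell "-rmr" and then recreates the empty directory, so the metadata keeps pointing at a valid path. A minimal sketch of the same two-step semantics using the plain FileSystem API (hypothetical helper, not part of the patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class TruncateLocationSketch {
      static void truncateLocation(Configuration conf, Path location) throws Exception {
        FileSystem fs = location.getFileSystem(conf);
        fs.delete(location, true); // recursive delete, the equivalent of "-rmr"
        fs.mkdirs(location);       // recreate the directory, leaving it empty
      }
    }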

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1427681&r1=1427680&r2=1427681&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Wed Jan  2 07:02:35 2013
@@ -43,6 +43,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Index;
@@ -116,6 +117,7 @@ import org.apache.hadoop.hive.ql.plan.Sh
 import org.apache.hadoop.hive.ql.plan.StatsWork;
 import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.plan.TruncateTableDesc;
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry;
@@ -221,6 +223,9 @@ public class DDLSemanticAnalyzer extends
     case HiveParser.TOK_DROPTABLE:
       analyzeDropTable(ast, false);
       break;
+    case HiveParser.TOK_TRUNCATETABLE:
+      analyzeTruncateTable(ast);
+      break;
     case HiveParser.TOK_CREATEINDEX:
       analyzeCreateIndex(ast);
       break;
@@ -533,18 +538,14 @@ public class DDLSemanticAnalyzer extends
       }
     }
 
-    try {
-      if (subject.getTable()) {
-        Table tbl = db.getTable(subject.getObject());
-        if (subject.getPartSpec() != null) {
-          Partition part = db.getPartition(tbl, subject.getPartSpec(), false);
-          outputs.add(new WriteEntity(part));
-        } else {
-          outputs.add(new WriteEntity(tbl));
-        }
+    if (subject.getTable()) {
+      Table tbl = getTable(subject.getObject(), true);
+      if (subject.getPartSpec() != null) {
+        Partition part = getPartition(tbl, subject.getPartSpec(), true);
+        outputs.add(new WriteEntity(part));
+      } else {
+        outputs.add(new WriteEntity(tbl));
       }
-    } catch (HiveException e) {
-      throw new SemanticException(e);
     }
 
     return subject;
@@ -729,8 +730,9 @@ public class DDLSemanticAnalyzer extends
     // configured not to fail silently
     boolean throwException =
         !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
-    Table tab = getTable(db.getCurrentDatabase(), tableName, throwException);
+    Table tab = getTable(tableName, throwException);
     if (tab != null) {
+      inputs.add(new ReadEntity(tab));
       outputs.add(new WriteEntity(tab));
     }
 
@@ -740,6 +742,53 @@ public class DDLSemanticAnalyzer extends
         dropTblDesc), conf));
   }
 
+  private void analyzeTruncateTable(ASTNode ast) throws SemanticException {
+    ASTNode root = (ASTNode) ast.getChild(0); // TOK_TABLE_PARTITION
+    String tableName = getUnescapedName((ASTNode) root.getChild(0));
+
+    Table table = getTable(tableName, true);
+    if (table.getTableType() != TableType.MANAGED_TABLE) {
+      throw new SemanticException(ErrorMsg.TRUNCATE_FOR_NON_MANAGED_TABLE.format(tableName));
+    }
+    if (table.isNonNative()) {
+      throw new SemanticException(ErrorMsg.TRUNCATE_FOR_NON_NATIVE_TABLE.format(tableName)); //TODO
+    }
+    if (!table.isPartitioned() && root.getChildCount() > 1) {
+      throw new SemanticException(ErrorMsg.PARTSPEC_FOR_NON_PARTITIONED_TABLE.format(tableName));
+    }
+    Map<String, String> partSpec = getPartSpec((ASTNode) root.getChild(1));
+    if (partSpec == null) {
+      if (!table.isPartitioned()) {
+        outputs.add(new WriteEntity(table));
+      } else {
+        for (Partition partition : getPartitions(table, null, false)) {
+          outputs.add(new WriteEntity(partition));
+        }
+      }
+    } else {
+      if (isFullSpec(table, partSpec)) {
+        Partition partition = getPartition(table, partSpec, true);
+        outputs.add(new WriteEntity(partition));
+      } else {
+        for (Partition partition : getPartitions(table, partSpec, false)) {
+          outputs.add(new WriteEntity(partition));
+        }
+      }
+    }
+
+    TruncateTableDesc truncateTblDesc = new TruncateTableDesc(tableName, partSpec);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), truncateTblDesc), conf));
+  }
+
+  private boolean isFullSpec(Table table, Map<String, String> partSpec) {
+    for (FieldSchema partCol : table.getPartCols()) {
+      if (partSpec.get(partCol.getName()) == null) {
+        return false;
+      }
+    }
+    return true;
+  }
+
   private void analyzeCreateIndex(ASTNode ast) throws SemanticException {
     String indexName = unescapeIdentifier(ast.getChild(0).getText());
     String typeName = unescapeSQLString(ast.getChild(1).getText());
@@ -890,9 +939,9 @@ public class DDLSemanticAnalyzer extends
     try {
       String dbName = db.getCurrentDatabase();
       Index index = db.getIndex(dbName, baseTableName, indexName);
-      Table indexTbl = db.getTable(dbName, index.getIndexTableName());
+      Table indexTbl = getTable(index.getIndexTableName());
       String baseTblName = index.getOrigTableName();
-      Table baseTbl = db.getTable(dbName, baseTblName);
+      Table baseTbl = getTable(baseTblName);
 
       String handlerCls = index.getIndexHandlerClass();
       HiveIndexHandler handler = HiveUtils.getIndexHandler(conf, handlerCls);
@@ -1103,37 +1152,26 @@ public class DDLSemanticAnalyzer extends
         alterTblDesc), conf));
   }
 
-  private void addInputsOutputsAlterTable(String tableName, HashMap<String, String> partSpec)
+  private void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec)
       throws SemanticException {
     addInputsOutputsAlterTable(tableName, partSpec, null);
   }
 
-  private void addInputsOutputsAlterTable(String tableName, HashMap<String, String> partSpec,
+  private void addInputsOutputsAlterTable(String tableName, Map<String, String> partSpec,
       AlterTableDesc desc) throws SemanticException {
-    Table tab = getTable(db.getCurrentDatabase(), tableName, true);
-    if ((partSpec == null) || (partSpec.isEmpty())) {
+    Table tab = getTable(tableName, true);
+    if (partSpec == null || partSpec.isEmpty()) {
+      inputs.add(new ReadEntity(tab));
       outputs.add(new WriteEntity(tab));
     }
     else {
-      List<Partition> allPartitions = null;
-      try {
-        if (desc == null || desc.getOp() != AlterTableDesc.AlterTableTypes.ALTERPROTECTMODE) {
-          Partition part = db.getPartition(tab, partSpec, false);
-          allPartitions = new ArrayList<Partition>(1);
-          allPartitions.add(part);
-        }
-        else {
-          allPartitions = db.getPartitions(tab, partSpec);
-          if (allPartitions == null || allPartitions.size() == 0) {
-            throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()));
-          }
-        }
-      } catch (HiveException e) {
-        throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
+      inputs.add(new ReadEntity(tab));
+      if (desc == null || desc.getOp() != AlterTableDesc.AlterTableTypes.ALTERPROTECTMODE) {
+        Partition part = getPartition(tab, partSpec, true);
+        outputs.add(new WriteEntity(part));
       }
-
-      if (allPartitions != null) {
-        for (Partition part : allPartitions) {
+      else {
+        for (Partition part : getPartitions(tab, partSpec, true)) {
           outputs.add(new WriteEntity(part));
         }
       }
@@ -1235,7 +1273,7 @@ public class DDLSemanticAnalyzer extends
     ListBucketingCtx lbCtx = null;
 
     try {
-      tblObj = db.getTable(tableName);
+      tblObj = getTable(tableName);
 
       List<String> bucketCols = null;
       Class<? extends InputFormat> inputFormatClass = null;
@@ -1256,7 +1294,7 @@ public class DDLSemanticAnalyzer extends
           throw new SemanticException("source table " + tableName
               + " is partitioned but no partition desc found.");
         } else {
-          Partition part = db.getPartition(tblObj, partSpec, false);
+          Partition part = getPartition(tblObj, partSpec, false);
           if (part == null) {
             throw new SemanticException("source table " + tableName
                 + " is partitioned but partition not found.");
@@ -1352,7 +1390,7 @@ public class DDLSemanticAnalyzer extends
   private void analyzeAlterTableClusterSort(ASTNode ast)
       throws SemanticException {
     String tableName = getUnescapedName((ASTNode)ast.getChild(0));
-    Table tab = getTable(db.getCurrentDatabase(), tableName, true);
+    Table tab = getTable(tableName, true);
 
     inputs.add(new ReadEntity(tab));
     outputs.add(new WriteEntity(tab));
@@ -1660,24 +1698,9 @@ public class DDLSemanticAnalyzer extends
 
   private void validateTable(String tableName, Map<String, String> partSpec)
       throws SemanticException {
-    Table tab = null;
-    try {
-      tab = db.getTable(tableName);
-    } catch (HiveException e) {
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName), e);
-    }
-
+    Table tab = getTable(tableName);
     if (partSpec != null) {
-      Partition part = null;
-      try {
-        part = db.getPartition(tab, partSpec, false);
-      } catch (HiveException e) {
-        throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()), e);
-      }
-
-      if (part == null) {
-        throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.toString()));
-      }
+      getPartition(tab, partSpec, true);
     }
   }
 
@@ -1749,11 +1772,18 @@ public class DDLSemanticAnalyzer extends
 
   private static HashMap<String, String> getPartSpec(ASTNode partspec)
       throws SemanticException {
+    if (partspec == null) {
+      return null;
+    }
     HashMap<String, String> partSpec = new LinkedHashMap<String, String>();
     for (int i = 0; i < partspec.getChildCount(); ++i) {
       ASTNode partspec_val = (ASTNode) partspec.getChild(i);
-      String val = stripQuotes(partspec_val.getChild(1).getText());
-      partSpec.put(partspec_val.getChild(0).getText().toLowerCase(), val);
+      String key = partspec_val.getChild(0).getText();
+      String val = null;
+      if (partspec_val.getChildCount() > 1) {
+        val = stripQuotes(partspec_val.getChild(1).getText());
+      }
+      partSpec.put(key.toLowerCase(), val);
     }
     return partSpec;
   }
@@ -1781,18 +1811,13 @@ public class DDLSemanticAnalyzer extends
     ShowCreateTableDesc showCreateTblDesc;
     String tableName = getUnescapedName((ASTNode)ast.getChild(0));
     showCreateTblDesc = new ShowCreateTableDesc(tableName, ctx.getResFile().toString());
-    try {
-      Table tab = db.getTable(tableName, true);
-      if (tab.getTableType() == org.apache.hadoop.hive.metastore.TableType.INDEX_TABLE) {
-        throw new SemanticException(ErrorMsg.SHOW_CREATETABLE_INDEX.getMsg(tableName
-            + " has table type INDEX_TABLE"));
-      }
-      inputs.add(new ReadEntity(tab));
-    } catch (SemanticException e) {
-      throw e;
-    } catch (HiveException e) {
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
+
+    Table tab = getTable(tableName);
+    if (tab.getTableType() == org.apache.hadoop.hive.metastore.TableType.INDEX_TABLE) {
+      throw new SemanticException(ErrorMsg.SHOW_CREATETABLE_INDEX.getMsg(tableName
+          + " has table type INDEX_TABLE"));
     }
+    inputs.add(new ReadEntity(tab));
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         showCreateTblDesc), conf));
     setFetchTask(createFetchTask(showCreateTblDesc.getSchema()));
@@ -1864,6 +1889,8 @@ public class DDLSemanticAnalyzer extends
     }
 
     Table tab = getTable(dbName, tableName, true);
+    inputs.add(new ReadEntity(tab));
+
     showColumnsDesc = new ShowColumnsDesc(ctx.getResFile(), dbName, tableName);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         showColumnsDesc), conf));
@@ -2134,12 +2161,8 @@ public class DDLSemanticAnalyzer extends
     String newColName = ast.getChild(2).getText();
 
     /* Validate the operation of renaming a column name. */
-    Table tab = null;
-    try {
-      tab = db.getTable(tblName);
-    } catch (HiveException e) {
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName), e);
-    }
+    Table tab = getTable(tblName);
+
     SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
     if ((null != skewInfo)
         && (null != skewInfo.getSkewedColNames())
@@ -2163,8 +2186,9 @@ public class DDLSemanticAnalyzer extends
     if (newPartSpec == null) {
       throw new SemanticException("RENAME PARTITION Missing Destination" + ast);
     }
-    Table tab = getTable(db.getCurrentDatabase(), tblName, true);
+    Table tab = getTable(tblName, true);
     validateAlterTableType(tab, AlterTableTypes.RENAMEPARTITION);
+    inputs.add(new ReadEntity(tab));
 
     List<Map<String, String>> partSpecs = new ArrayList<Map<String, String>>();
     partSpecs.add(oldPartSpec);
@@ -2178,11 +2202,12 @@ public class DDLSemanticAnalyzer extends
 
   private void analyzeAlterTableBucketNum(ASTNode ast, String tblName,
       HashMap<String, String> partSpec) throws SemanticException {
-    Table tab = getTable(db.getCurrentDatabase(), tblName, true);
+    Table tab = getTable(tblName, true);
     if (tab.getBucketCols() == null || tab.getBucketCols().isEmpty()) {
       throw new SemanticException(ErrorMsg.ALTER_BUCKETNUM_NONBUCKETIZED_TBL.getMsg());
     }
     validateAlterTableType(tab, AlterTableTypes.ALTERBUCKETNUM);
+    inputs.add(new ReadEntity(tab));
 
     int bucketNum = Integer.parseInt(ast.getChild(0).getText());
     AlterTableDesc alterBucketNum = new AlterTableDesc(tblName, partSpec, bucketNum);
@@ -2209,8 +2234,9 @@ public class DDLSemanticAnalyzer extends
     String tblName = getUnescapedName((ASTNode) ast.getChild(0));
     // get table metadata
     List<PartitionSpec> partSpecs = getFullPartitionSpecs(ast);
-    Table tab = getTable(db.getCurrentDatabase(), tblName, true);
+    Table tab = getTable(tblName, true);
     validateAlterTableType(tab, AlterTableTypes.DROPPARTITION, expectView);
+    inputs.add(new ReadEntity(tab));
 
     // Find out if all partition columns are strings. This is needed for JDO
     boolean stringPartitionColumns = true;
@@ -2266,9 +2292,10 @@ public class DDLSemanticAnalyzer extends
       throws SemanticException {
 
     String tblName = getUnescapedName((ASTNode)ast.getChild(0));
-    Table tab = getTable(db.getCurrentDatabase(), tblName, true);
+    Table tab = getTable(tblName, true);
     boolean isView = tab.isView();
     validateAlterTableType(tab, AlterTableTypes.ADDPARTITION, expectView);
+    inputs.add(new ReadEntity(tab));
 
     // partition name to value
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
@@ -2389,8 +2416,9 @@ public class DDLSemanticAnalyzer extends
       throws SemanticException {
 
     String tblName = getUnescapedName((ASTNode)ast.getChild(0));
-    Table tab = getTable(db.getCurrentDatabase(), tblName, true);
+    Table tab = getTable(tblName, true);
     validateAlterTableType(tab, AlterTableTypes.TOUCH);
+    inputs.add(new ReadEntity(tab));
 
     // partition name to value
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
@@ -2425,9 +2453,10 @@ public class DDLSemanticAnalyzer extends
     // partition name to value
     List<Map<String, String>> partSpecs = getPartitionSpecs(ast);
 
-    Table tab = getTable(db.getCurrentDatabase(), tblName, true);
+    Table tab = getTable(tblName, true);
     addTablePartsOutputs(tblName, partSpecs, true);
     validateAlterTableType(tab, AlterTableTypes.ARCHIVE);
+    inputs.add(new ReadEntity(tab));
 
     if (partSpecs.size() > 1) {
       throw new SemanticException(isUnArchive ?
@@ -2495,13 +2524,7 @@ public class DDLSemanticAnalyzer extends
       Tree partspec = ast.getChild(childIndex);
       // sanity check
       if (partspec.getType() == HiveParser.TOK_PARTSPEC) {
-        Map<String, String> partSpec = new LinkedHashMap<String, String>();
-        for (int i = 0; i < partspec.getChildCount(); ++i) {
-          CommonTree partspec_val = (CommonTree) partspec.getChild(i);
-          String val = stripQuotes(partspec_val.getChild(1).getText());
-          partSpec.put(partspec_val.getChild(0).getText().toLowerCase(), val);
-        }
-        partSpecs.add(partSpec);
+        partSpecs.add(getPartSpec((ASTNode) partspec));
       }
     }
     return partSpecs;
@@ -2589,12 +2612,7 @@ public class DDLSemanticAnalyzer extends
   private void addTablePartsOutputs(String tblName, List<Map<String, String>> partSpecs,
       boolean throwIfNonExistent, boolean allowMany, ASTNode ast)
       throws SemanticException {
-    Table tab;
-    try {
-      tab = db.getTable(tblName);
-    } catch (HiveException e) {
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
-    }
+    Table tab = getTable(tblName);
 
     Iterator<Map<String, String>> i;
     int index;
@@ -2637,12 +2655,7 @@ public class DDLSemanticAnalyzer extends
   private void addTableDropPartsOutputs(String tblName, List<PartitionSpec> partSpecs,
       boolean throwIfNonExistent, boolean stringPartitionColumns)
       throws SemanticException {
-    Table tab;
-    try {
-      tab = db.getTable(tblName);
-    } catch (HiveException e) {
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
-    }
+    Table tab = getTable(tblName);
 
     Iterator<PartitionSpec> i;
     int index;
@@ -2690,13 +2703,8 @@ public class DDLSemanticAnalyzer extends
     HiveConf hiveConf = SessionState.get().getConf();
 
     String tableName = getUnescapedName((ASTNode) ast.getChild(0));
-    Table tab = null;
+    Table tab = getTable(tableName, true);
 
-    try {
-      tab = db.getTable(db.getCurrentDatabase(), tableName, true);
-    } catch (HiveException e) {
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
-    }
     inputs.add(new ReadEntity(tab));
     outputs.add(new WriteEntity(tab));
 
@@ -2947,6 +2955,14 @@ public class DDLSemanticAnalyzer extends
     }
   }
 
+  private Table getTable(String tblName) throws SemanticException {
+    return getTable(null, tblName, true);
+  }
+
+  private Table getTable(String tblName, boolean throwException) throws SemanticException {
+    return getTable(db.getCurrentDatabase(), tblName, throwException);
+  }
+
   private Table getTable(String database, String tblName, boolean throwException)
       throws SemanticException {
     try {
@@ -2955,12 +2971,40 @@ public class DDLSemanticAnalyzer extends
       if (tab == null && throwException) {
         throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
       }
-      if (tab != null) {
-        inputs.add(new ReadEntity(tab));
-      }
       return tab;
     } catch (HiveException e) {
       throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
     }
   }
+
+  private Partition getPartition(Table table, Map<String, String> partSpec, boolean throwException)
+      throws SemanticException {
+    try {
+      Partition partition = db.getPartition(table, partSpec, false);
+      if (partition == null && throwException) {
+        throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec));
+      }
+      return partition;
+    } catch (HiveException e) {
+      throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e);
+    }
+  }
+
+  private List<Partition> getPartitions(Table table, Map<String, String> partSpec,
+      boolean throwException) throws SemanticException {
+    try {
+      List<Partition> partitions = partSpec == null ? db.getPartitions(table) :
+          db.getPartitions(table, partSpec);
+      if (partitions.isEmpty() && throwException) {
+        throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec));
+      }
+      return partitions;
+    } catch (HiveException e) {
+      throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e);
+    }
+  }
+
+  private String toMessage(ErrorMsg message, Object detail) {
+    return detail == null ? message.getMsg() : message.getMsg(detail.toString());
+  }
 }
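
Two details in the hunks above interact: getPartSpec now returns an entry with a null value for a partition column that is named without a value (e.g. PARTITION (ds, hr='12') yields {ds=null, hr=12}), and isFullSpec treats any null as a partial spec, so analyzeTruncateTable enumerates every matching partition rather than resolving a single one. A minimal standalone sketch of that decision, with hypothetical names and not part of the patch:

    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class PartSpecSketch {
      static boolean isFullSpec(List<String> partCols, Map<String, String> partSpec) {
        for (String col : partCols) {
          if (partSpec.get(col) == null) {
            return false; // a column without a value makes the spec partial
          }
        }
        return true;
      }

      public static void main(String[] args) {
        Map<String, String> partial = new LinkedHashMap<String, String>();
        partial.put("ds", null); // "(ds, hr='12')" names ds but leaves it unset
        partial.put("hr", "12");
        System.out.println(isFullSpec(Arrays.asList("ds", "hr"), partial)); // false

        Map<String, String> full = new LinkedHashMap<String, String>();
        full.put("ds", "2008-04-08");
        full.put("hr", "11");
        System.out.println(isFullSpec(Arrays.asList("ds", "hr"), full)); // true
      }
    }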

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=1427681&r1=1427680&r2=1427681&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Wed Jan  2 07:02:35 2013
@@ -115,6 +115,7 @@ TOK_UNIONTYPE;
 TOK_COLTYPELIST;
 TOK_CREATEDATABASE;
 TOK_CREATETABLE;
+TOK_TRUNCATETABLE;
 TOK_CREATEINDEX;
 TOK_CREATEINDEX_INDEXTBLNAME;
 TOK_DEFERRED_REBUILDINDEX;
@@ -354,6 +355,7 @@ ddlStatement
     | dropDatabaseStatement
     | createTableStatement
     | dropTableStatement
+    | truncateTableStatement
     | alterStatement
     | descStatement
     | showStatement
@@ -508,6 +510,11 @@ createTableStatement
         )
     ;
 
+truncateTableStatement
+@init { msgs.push("truncate table statement"); }
+@after { msgs.pop(); }
+    : KW_TRUNCATE KW_TABLE tablePartitionPrefix -> ^(TOK_TRUNCATETABLE tablePartitionPrefix);
+
 createIndexStatement
 @init { msgs.push("create index statement");}
 @after {msgs.pop();}
@@ -2594,6 +2601,7 @@ KW_DIRECTORIES: 'DIRECTORIES';
 KW_FOR: 'FOR';
 KW_GROUPING: 'GROUPING';
 KW_SETS: 'SETS';
+KW_TRUNCATE: 'TRUNCATE';
 
 // Operators
 // NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work.

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1427681&r1=1427680&r2=1427681&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Wed Jan  2 07:02:35 2013
@@ -42,6 +42,7 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_DROPDATABASE, HiveOperation.DROPDATABASE);
     commandType.put(HiveParser.TOK_SWITCHDATABASE, HiveOperation.SWITCHDATABASE);
     commandType.put(HiveParser.TOK_CREATETABLE, HiveOperation.CREATETABLE);
+    commandType.put(HiveParser.TOK_TRUNCATETABLE, HiveOperation.TRUNCATETABLE);
     commandType.put(HiveParser.TOK_DROPTABLE, HiveOperation.DROPTABLE);
     commandType.put(HiveParser.TOK_DESCTABLE, HiveOperation.DESCTABLE);
     commandType.put(HiveParser.TOK_DESCFUNCTION, HiveOperation.DESCFUNCTION);
@@ -197,6 +198,7 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_SHOW_ROLE_GRANT:
       case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
       case HiveParser.TOK_ALTERTABLE_SKEWED:
+      case HiveParser.TOK_TRUNCATETABLE:
         return new DDLSemanticAnalyzer(conf);
       case HiveParser.TOK_ALTERTABLE_PARTITION:
         HiveOperation commandType = null;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java?rev=1427681&r1=1427680&r2=1427681&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java Wed Jan  2 07:02:35 2013
@@ -62,6 +62,7 @@ public class DDLWork implements Serializ
   private ShowIndexesDesc showIndexesDesc;
   private DescDatabaseDesc descDbDesc;
   private AlterDatabaseDesc alterDbDesc;
+  private TruncateTableDesc truncateTblDesc;
 
   private RoleDDLDesc roleDDLDesc;
   private GrantDesc grantDesc;
@@ -126,6 +127,12 @@ public class DDLWork implements Serializ
     this.alterDbDesc = alterDbDesc;
   }
 
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      TruncateTableDesc truncateTblDesc) {
+    this(inputs, outputs);
+    this.truncateTblDesc = truncateTblDesc;
+  }
+
   public DescDatabaseDesc getDescDatabaseDesc() {
     return descDbDesc;
   }
@@ -988,4 +995,12 @@ public class DDLWork implements Serializ
     this.needLock = needLock;
   }
 
+  @Explain(displayName = "Truncate Table Operator")
+  public TruncateTableDesc getTruncateTblDesc() {
+    return truncateTblDesc;
+  }
+
+  public void setTruncateTblDesc(TruncateTableDesc truncateTblDesc) {
+    this.truncateTblDesc = truncateTblDesc;
+  }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1427681&r1=1427680&r2=1427681&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java Wed Jan  2 07:02:35 2013
@@ -90,6 +90,7 @@ public enum HiveOperation {
   ALTERTABLE_LOCATION("ALTERTABLE_LOCATION", new Privilege[]{Privilege.ALTER_DATA}, null),
   ALTERPARTITION_LOCATION("ALTERPARTITION_LOCATION", new Privilege[]{Privilege.ALTER_DATA}, null),
   CREATETABLE("CREATETABLE", null, new Privilege[]{Privilege.CREATE}),
+  TRUNCATETABLE("TRUNCATETABLE", null, new Privilege[]{Privilege.DROP}),
   CREATETABLE_AS_SELECT("CREATETABLE_AS_SELECT", new Privilege[]{Privilege.SELECT}, new Privilege[]{Privilege.CREATE}),
   QUERY("QUERY", new Privilege[]{Privilege.SELECT}, new Privilege[]{Privilege.ALTER_DATA, Privilege.CREATE}),
   ALTERINDEX_PROPS("ALTERINDEX_PROPS",null, null),

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TruncateTableDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TruncateTableDesc.java?rev=1427681&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TruncateTableDesc.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TruncateTableDesc.java Wed Jan  2 07:02:35 2013
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.util.Map;
+
+/**
+ * Truncates a managed table or partition
+ */
+@Explain(displayName = "Truncate Table or Partition")
+public class TruncateTableDesc extends DDLDesc {
+
+  private static final long serialVersionUID = 1L;
+
+  private String tableName;
+  private Map<String, String> partSpec;
+
+  public TruncateTableDesc() {
+  }
+
+  public TruncateTableDesc(String tableName, Map<String, String> partSpec) {
+    this.tableName = tableName;
+    this.partSpec = partSpec;
+  }
+
+  @Explain(displayName = "TableName")
+  public String getTableName() {
+    return tableName;
+  }
+
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+
+  @Explain(displayName = "Partition Spec")
+  public Map<String, String> getPartSpec() {
+    return partSpec;
+  }
+
+  public void setPartSpec(Map<String, String> partSpec) {
+    this.partSpec = partSpec;
+  }
+}

Added: hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure1.q?rev=1427681&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure1.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure1.q Wed Jan  2 07:02:35 2013
@@ -0,0 +1,2 @@
+-- partition spec for non-partitioned table
+TRUNCATE TABLE src partition (ds='2008-04-08', hr='11');

Added: hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure2.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure2.q?rev=1427681&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure2.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure2.q Wed Jan  2 07:02:35 2013
@@ -0,0 +1,2 @@
+-- full partition spec for a non-existent partition
+TRUNCATE TABLE srcpart partition (ds='2012-12-17', hr='15');

Added: hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure3.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure3.q?rev=1427681&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure3.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure3.q Wed Jan  2 07:02:35 2013
@@ -0,0 +1,4 @@
+create external table external1 (a int, b int) partitioned by (ds string);
+
+-- truncate for non-managed table
+TRUNCATE TABLE external1;

Added: hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure4.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure4.q?rev=1427681&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure4.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/truncate_table_failure4.q Wed Jan  2 07:02:35 2013
@@ -0,0 +1,5 @@
+CREATE TABLE non_native(key int, value string)
+STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler';
+
+-- truncate for non-native table
+TRUNCATE TABLE non_native;

Added: hive/trunk/ql/src/test/queries/clientpositive/truncate_table.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/truncate_table.q?rev=1427681&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/truncate_table.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/truncate_table.q Wed Jan  2 07:02:35 2013
@@ -0,0 +1,35 @@
+create table src_truncate (key string, value string);
+load data local inpath '../data/files/kv1.txt' into table src_truncate;
+
+create table srcpart_truncate (key string, value string) partitioned by (ds string, hr string);
+alter table srcpart_truncate add partition (ds='2008-04-08', hr='11');        
+alter table srcpart_truncate add partition (ds='2008-04-08', hr='12');
+alter table srcpart_truncate add partition (ds='2008-04-09', hr='11');
+alter table srcpart_truncate add partition (ds='2008-04-09', hr='12');
+
+load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='11');
+load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='12');
+load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='11');
+load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='12');
+
+set hive.fetch.task.conversion=more;
+
+-- truncate non-partitioned table
+explain TRUNCATE TABLE src_truncate;
+TRUNCATE TABLE src_truncate;
+select * from src_truncate;
+
+-- truncate a partition
+explain TRUNCATE TABLE srcpart_truncate partition (ds='2008-04-08', hr='11');
+TRUNCATE TABLE srcpart_truncate partition (ds='2008-04-08', hr='11');
+select * from srcpart_truncate where ds='2008-04-08' and hr='11';
+
+-- truncate partitions with partial spec
+explain TRUNCATE TABLE srcpart_truncate partition (ds, hr='12');
+TRUNCATE TABLE srcpart_truncate partition (ds, hr='12');
+select * from srcpart_truncate where hr='12';
+
+-- truncate partitioned table
+explain TRUNCATE TABLE srcpart_truncate;
+TRUNCATE TABLE srcpart_truncate;
+select * from srcpart_truncate;

Added: hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure1.q.out?rev=1427681&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure1.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure1.q.out Wed Jan  2 07:02:35 2013
@@ -0,0 +1 @@
+FAILED: SemanticException [Error 10148]: Partition spec for non partitioned table src.

Added: hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure2.q.out?rev=1427681&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure2.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure2.q.out Wed Jan  2 07:02:35 2013
@@ -0,0 +1 @@
+FAILED: SemanticException [Error 10006]: Partition not found {ds=2012-12-17, hr=15}

Added: hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure3.q.out?rev=1427681&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure3.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure3.q.out Wed Jan  2 07:02:35 2013
@@ -0,0 +1,6 @@
+PREHOOK: query: create external table external1 (a int, b int) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create external table external1 (a int, b int) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@external1
+FAILED: SemanticException [Error 10146]: Cannot truncate non-managed table external1.

Added: hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure4.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure4.q.out?rev=1427681&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure4.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/truncate_table_failure4.q.out Wed Jan  2 07:02:35 2013
@@ -0,0 +1,8 @@
+PREHOOK: query: CREATE TABLE non_native(key int, value string)
+STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE non_native(key int, value string)
+STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@non_native
+FAILED: SemanticException [Error 10147]: Cannot truncate non-native table non_native.

Added: hive/trunk/ql/src/test/results/clientpositive/truncate_table.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/truncate_table.q.out?rev=1427681&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/truncate_table.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/truncate_table.q.out Wed Jan  2 07:02:35 2013
@@ -0,0 +1,222 @@
+PREHOOK: query: create table src_truncate (key string, value string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table src_truncate (key string, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@src_truncate
+PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table src_truncate
+PREHOOK: type: LOAD
+PREHOOK: Output: default@src_truncate
+POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table src_truncate
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@src_truncate
+PREHOOK: query: create table srcpart_truncate (key string, value string) partitioned by (ds string, hr string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table srcpart_truncate (key string, value string) partitioned by (ds string, hr string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@srcpart_truncate
+PREHOOK: query: alter table srcpart_truncate add partition (ds='2008-04-08', hr='11')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@srcpart_truncate
+POSTHOOK: query: alter table srcpart_truncate add partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@srcpart_truncate
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=11
+PREHOOK: query: alter table srcpart_truncate add partition (ds='2008-04-08', hr='12')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@srcpart_truncate
+POSTHOOK: query: alter table srcpart_truncate add partition (ds='2008-04-08', hr='12')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@srcpart_truncate
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=12
+PREHOOK: query: alter table srcpart_truncate add partition (ds='2008-04-09', hr='11')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@srcpart_truncate
+POSTHOOK: query: alter table srcpart_truncate add partition (ds='2008-04-09', hr='11')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@srcpart_truncate
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=11
+PREHOOK: query: alter table srcpart_truncate add partition (ds='2008-04-09', hr='12')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@srcpart_truncate
+POSTHOOK: query: alter table srcpart_truncate add partition (ds='2008-04-09', hr='12')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@srcpart_truncate
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
+PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='11')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=11
+POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=11
+PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='12')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=12
+POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-08', hr='12')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=12
+PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='11')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=11
+POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='11')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=11
+PREHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='12')
+PREHOOK: type: LOAD
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
+POSTHOOK: query: load data local inpath '../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='12')
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
+PREHOOK: query: -- truncate non-partitioned table
+explain TRUNCATE TABLE src_truncate
+PREHOOK: type: TRUNCATETABLE
+POSTHOOK: query: -- truncate non-partitioned table
+explain TRUNCATE TABLE src_truncate
+POSTHOOK: type: TRUNCATETABLE
+ABSTRACT SYNTAX TREE:
+  (TOK_TRUNCATETABLE (TOK_TABLE_PARTITION src_truncate))
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+      Truncate Table Operator:
+        Truncate Table or Partition
+          TableName: src_truncate
+
+
+PREHOOK: query: TRUNCATE TABLE src_truncate
+PREHOOK: type: TRUNCATETABLE
+PREHOOK: Output: default@src_truncate
+POSTHOOK: query: TRUNCATE TABLE src_truncate
+POSTHOOK: type: TRUNCATETABLE
+POSTHOOK: Output: default@src_truncate
+PREHOOK: query: select * from src_truncate
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_truncate
+#### A masked pattern was here ####
+POSTHOOK: query: select * from src_truncate
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_truncate
+#### A masked pattern was here ####
+PREHOOK: query: -- truncate a partition
+explain TRUNCATE TABLE srcpart_truncate partition (ds='2008-04-08', hr='11')
+PREHOOK: type: TRUNCATETABLE
+POSTHOOK: query: -- truncate a partition
+explain TRUNCATE TABLE srcpart_truncate partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: TRUNCATETABLE
+ABSTRACT SYNTAX TREE:
+  (TOK_TRUNCATETABLE (TOK_TABLE_PARTITION srcpart_truncate (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '11'))))
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+      Truncate Table Operator:
+        Truncate Table or Partition
+          Partition Spec:
+            ds 2008-04-08
+            hr 11
+          TableName: srcpart_truncate
+
+
+PREHOOK: query: TRUNCATE TABLE srcpart_truncate partition (ds='2008-04-08', hr='11')
+PREHOOK: type: TRUNCATETABLE
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=11
+POSTHOOK: query: TRUNCATE TABLE srcpart_truncate partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: TRUNCATETABLE
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=11
+PREHOOK: query: select * from srcpart_truncate where ds='2008-04-08' and hr='11'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select * from srcpart_truncate where ds='2008-04-08' and hr='11'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+PREHOOK: query: -- truncate partitions with partial spec
+explain TRUNCATE TABLE srcpart_truncate partition (ds, hr='12')
+PREHOOK: type: TRUNCATETABLE
+POSTHOOK: query: -- truncate partitions with partial spec
+explain TRUNCATE TABLE srcpart_truncate partition (ds, hr='12')
+POSTHOOK: type: TRUNCATETABLE
+ABSTRACT SYNTAX TREE:
+  (TOK_TRUNCATETABLE (TOK_TABLE_PARTITION srcpart_truncate (TOK_PARTSPEC (TOK_PARTVAL ds) (TOK_PARTVAL hr '12'))))
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+      Truncate Table Operator:
+        Truncate Table or Partition
+          Partition Spec:
+            ds 
+            hr 12
+          TableName: srcpart_truncate
+
+
+PREHOOK: query: TRUNCATE TABLE srcpart_truncate partition (ds, hr='12')
+PREHOOK: type: TRUNCATETABLE
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=12
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
+POSTHOOK: query: TRUNCATE TABLE srcpart_truncate partition (ds, hr='12')
+POSTHOOK: type: TRUNCATETABLE
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
+PREHOOK: query: select * from srcpart_truncate where hr='12'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select * from srcpart_truncate where hr='12'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+PREHOOK: query: -- truncate partitioned table
+explain TRUNCATE TABLE srcpart_truncate
+PREHOOK: type: TRUNCATETABLE
+POSTHOOK: query: -- truncate partitioned table
+explain TRUNCATE TABLE srcpart_truncate
+POSTHOOK: type: TRUNCATETABLE
+ABSTRACT SYNTAX TREE:
+  (TOK_TRUNCATETABLE (TOK_TABLE_PARTITION srcpart_truncate))
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+      Truncate Table Operator:
+        Truncate Table or Partition
+          TableName: srcpart_truncate
+
+
+PREHOOK: query: TRUNCATE TABLE srcpart_truncate
+PREHOOK: type: TRUNCATETABLE
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=11
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=12
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=11
+PREHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
+POSTHOOK: query: TRUNCATE TABLE srcpart_truncate
+POSTHOOK: type: TRUNCATETABLE
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=11
+POSTHOOK: Output: default@srcpart_truncate@ds=2008-04-09/hr=12
+PREHOOK: query: select * from srcpart_truncate
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select * from srcpart_truncate
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart_truncate@ds=2008-04-09/hr=12
+#### A masked pattern was here ####


