hive-commits mailing list archives

From: ser...@apache.org
Subject: svn commit: r1673969 [9/19] - in /hive/branches/llap: ./ beeline/src/java/org/apache/hive/beeline/ bin/ cli/src/java/org/apache/hadoop/hive/cli/ cli/src/test/org/apache/hadoop/hive/cli/ common/ common/src/java/org/apache/hadoop/hive/common/jsonexplain/...
Date: Wed, 15 Apr 2015 22:04:07 GMT
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceWork.java Wed Apr 15 22:04:00 2015
@@ -23,6 +23,7 @@ import java.util.HashMap;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 
 import org.apache.commons.logging.Log;
@@ -30,6 +31,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorUtils;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -162,7 +164,7 @@ public class ReduceWork extends BaseWork
     return null;
   }
 
-  @Explain(displayName = "Reduce Operator Tree")
+  @Explain(displayName = "Reduce Operator Tree", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public Operator<?> getReducer() {
     return reducer;
   }
@@ -171,7 +173,7 @@ public class ReduceWork extends BaseWork
     this.reducer = reducer;
   }
 
-  @Explain(displayName = "Needs Tagging", normalExplain = false)
+  @Explain(displayName = "Needs Tagging", explainLevels = { Level.EXTENDED })
   public boolean getNeedsTagging() {
     return needsTagging;
   }
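
[Editor's note: throughout this revision the boolean normalExplain attribute on @Explain is replaced by an explainLevels array: members that were shown in a normal EXPLAIN gain { Level.USER, Level.DEFAULT, Level.EXTENDED }, while normalExplain = false maps to { Level.EXTENDED } only. The annotation definition itself is not part of this hunk, so the following is only a minimal sketch of what the reworked Explain/Level pair presumably looks like:

    package org.apache.hadoop.hive.ql.plan;

    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;

    @Retention(RetentionPolicy.RUNTIME)
    public @interface Explain {

      // Which EXPLAIN variants render the annotated element.
      enum Level {
        USER,      // user-level explain output
        DEFAULT,   // plain EXPLAIN
        EXTENDED   // EXPLAIN EXTENDED
      }

      String displayName() default "";

      // Listing only Level.EXTENDED is the equivalent of the former
      // normalExplain = false.
      Level[] explainLevels() default { Level.DEFAULT, Level.EXTENDED };
    }
]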

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java Wed Apr 15 22:04:00 2015
@@ -20,8 +20,10 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
 import java.util.List;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-@Explain(displayName="Revoke")
+
+@Explain(displayName="Revoke", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class RevokeDesc extends DDLDesc implements Serializable, Cloneable {
 
   private static final long serialVersionUID = 1L;

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java Wed Apr 15 22:04:00 2015
@@ -21,8 +21,10 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-@Explain(displayName = "Create Role")
+
+@Explain(displayName = "Create Role", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class RoleDDLDesc extends DDLDesc implements Serializable {
 
   private static final long serialVersionUID = 1L;
@@ -108,12 +110,12 @@ public class RoleDDLDesc extends DDLDesc
     this.roleOwnerName = roleOwnerName;
   }
 
-  @Explain(displayName = "name")
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getName() {
     return name;
   }
 
-  @Explain(displayName = "role operation")
+  @Explain(displayName = "role operation", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public RoleOperation getOperation() {
     return operation;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/SMBJoinDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/SMBJoinDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/SMBJoinDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/SMBJoinDesc.java Wed Apr 15 22:04:00 2015
@@ -23,8 +23,10 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.hadoop.hive.ql.exec.DummyStoreOperator;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-@Explain(displayName = "Sorted Merge Bucket Map Join Operator")
+
+@Explain(displayName = "Sorted Merge Bucket Map Join Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class SMBJoinDesc extends MapJoinDesc implements Serializable {
 
   private static final long serialVersionUID = 1L;

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java Wed Apr 15 22:04:00 2015
@@ -20,12 +20,14 @@ package org.apache.hadoop.hive.ql.plan;
 
 import org.apache.hadoop.hive.ql.exec.RecordReader;
 import org.apache.hadoop.hive.ql.exec.RecordWriter;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * ScriptDesc.
  *
  */
-@Explain(displayName = "Transform Operator")
+@Explain(displayName = "Transform Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ScriptDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
   private String scriptCmd;
@@ -59,7 +61,7 @@ public class ScriptDesc extends Abstract
     this.scriptErrInfo = scriptErrInfo;
   }
 
-  @Explain(displayName = "command")
+  @Explain(displayName = "command", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getScriptCmd() {
     return scriptCmd;
   }
@@ -68,7 +70,7 @@ public class ScriptDesc extends Abstract
     this.scriptCmd = scriptCmd;
   }
 
-  @Explain(displayName = "output info")
+  @Explain(displayName = "output info", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public TableDesc getScriptOutputInfo() {
     return scriptOutputInfo;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java Wed Apr 15 22:04:00 2015
@@ -20,13 +20,14 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.util.ArrayList;
 import java.util.List;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 
 /**
  * SelectDesc.
  *
  */
-@Explain(displayName = "Select Operator")
+@Explain(displayName = "Select Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class SelectDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
   private List<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList;
@@ -81,7 +82,7 @@ public class SelectDesc extends Abstract
     this.colList = colList;
   }
 
-  @Explain(displayName = "outputColumnNames")
+  @Explain(displayName = "outputColumnNames", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public List<java.lang.String> getOutputColumnNames() {
     return outputColumnNames;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowColumnsDesc.java Wed Apr 15 22:04:00 2015
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 public class ShowColumnsDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
@@ -64,7 +65,7 @@ public class ShowColumnsDesc extends DDL
   /**
    * @return the tableName
    */
-  @Explain(displayName = "table name")
+  @Explain(displayName = "table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getTableName() {
     return tableName;
   }
@@ -80,7 +81,7 @@ public class ShowColumnsDesc extends DDL
   /**
    * @return the resFile
    */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
   public String getResFile() {
     return resFile;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java Wed Apr 15 22:04:00 2015
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 import java.io.Serializable;
 
@@ -41,7 +42,7 @@ public class ShowConfDesc extends DDLDes
     this.confName = confName;
   }
 
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
   public Path getResFile() {
     return resFile;
   }
@@ -50,7 +51,7 @@ public class ShowConfDesc extends DDLDes
     this.resFile = resFile;
   }
 
-  @Explain(displayName = "conf name", normalExplain = false)
+  @Explain(displayName = "conf name", explainLevels = { Level.EXTENDED })
   public String getConfName() {
     return confName;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java Wed Apr 15 22:04:00 2015
@@ -19,12 +19,14 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * ShowCreateTableDesc.
  *
  */
-@Explain(displayName = "Show Create Table")
+@Explain(displayName = "Show Create Table", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ShowCreateTableDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String resFile;
@@ -66,7 +68,7 @@ public class ShowCreateTableDesc extends
   /**
    * @return the resFile
    */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
   public String getResFile() {
     return resFile;
   }
@@ -82,7 +84,7 @@ public class ShowCreateTableDesc extends
   /**
    * @return the tableName
    */
-  @Explain(displayName = "table name")
+  @Explain(displayName = "table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getTableName() {
     return tableName;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowDatabasesDesc.java Wed Apr 15 22:04:00 2015
@@ -21,12 +21,14 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * ShowDatabasesDesc.
  *
  */
-@Explain(displayName = "Show Databases")
+@Explain(displayName = "Show Databases", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ShowDatabasesDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String pattern;
@@ -89,7 +91,7 @@ public class ShowDatabasesDesc extends D
   /**
    * @return the resFile
    */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
   public String getResFile() {
     return resFile;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java Wed Apr 15 22:04:00 2015
@@ -21,12 +21,14 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * ShowFunctionsDesc.
  *
  */
-@Explain(displayName = "Show Functions")
+@Explain(displayName = "Show Functions", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ShowFunctionsDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String pattern;
@@ -103,7 +105,7 @@ public class ShowFunctionsDesc extends D
   /**
    * @return the resFile
    */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
   public String getResFile() {
     return resFile;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowGrantDesc.java Wed Apr 15 22:04:00 2015
@@ -16,8 +16,10 @@
  * limitations under the License.
  */
 package org.apache.hadoop.hive.ql.plan;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-@Explain(displayName="show grant desc")
+
+@Explain(displayName="show grant desc", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ShowGrantDesc {
   
   private PrincipalDesc principalDesc;
@@ -48,7 +50,7 @@ public class ShowGrantDesc {
     return tabularSchema;
   }
 
-  @Explain(displayName="principal desc")
+  @Explain(displayName="principal desc", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public PrincipalDesc getPrincipalDesc() {
     return principalDesc;
   }
@@ -57,7 +59,7 @@ public class ShowGrantDesc {
     this.principalDesc = principalDesc;
   }
 
-  @Explain(displayName="object")
+  @Explain(displayName="object", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public PrivilegeObjectDesc getHiveObj() {
     return hiveObj;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowIndexesDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowIndexesDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowIndexesDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowIndexesDesc.java Wed Apr 15 22:04:00 2015
@@ -21,12 +21,14 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * ShowIndexesDesc.
  * Returns table index information per SQL syntax.
  */
-@Explain(displayName = "Show Indexes")
+@Explain(displayName = "Show Indexes", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ShowIndexesDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String tableName;

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java Wed Apr 15 22:04:00 2015
@@ -23,12 +23,14 @@ import java.util.HashMap;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * ShowLocksDesc.
  *
  */
-@Explain(displayName = "Show Locks")
+@Explain(displayName = "Show Locks", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ShowLocksDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String resFile;
@@ -105,7 +107,7 @@ public class ShowLocksDesc extends DDLDe
   /**
    * @return the tableName
    */
-  @Explain(displayName = "table")
+  @Explain(displayName = "table", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getTableName() {
     return tableName;
   }
@@ -121,7 +123,7 @@ public class ShowLocksDesc extends DDLDe
   /**
    * @return the partSpec
    */
-  @Explain(displayName = "partition")
+  @Explain(displayName = "partition", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public HashMap<String, String> getPartSpec() {
     return partSpec;
   }
@@ -137,7 +139,7 @@ public class ShowLocksDesc extends DDLDe
   /**
    * @return the resFile
    */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
   public String getResFile() {
     return resFile;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java Wed Apr 15 22:04:00 2015
@@ -22,12 +22,14 @@ import java.io.Serializable;
 import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * ShowPartitionsDesc.
  *
  */
-@Explain(displayName = "Show Partitions")
+@Explain(displayName = "Show Partitions", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ShowPartitionsDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String tabName;
@@ -71,7 +73,7 @@ public class ShowPartitionsDesc extends
   /**
    * @return the name of the table.
    */
-  @Explain(displayName = "table")
+  @Explain(displayName = "table", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getTabName() {
     return tabName;
   }
@@ -87,7 +89,7 @@ public class ShowPartitionsDesc extends
   /**
    * @return the name of the table.
    */
-  @Explain(displayName = "partSpec")
+  @Explain(displayName = "partSpec", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public Map<String, String> getPartSpec() {
     return partSpec;
   }
@@ -102,7 +104,7 @@ public class ShowPartitionsDesc extends
   /**
    * @return the results file
    */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
   public String getResFile() {
     return resFile;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java Wed Apr 15 22:04:00 2015
@@ -22,12 +22,14 @@ import java.io.Serializable;
 import java.util.HashMap;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * ShowTableStatusDesc.
  *
  */
-@Explain(displayName = "Show Table Status")
+@Explain(displayName = "Show Table Status", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ShowTableStatusDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String pattern;
@@ -108,7 +110,7 @@ public class ShowTableStatusDesc extends
     return resFile;
   }
 
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
   public String getResFileString() {
     return getResFile();
   }
@@ -124,7 +126,7 @@ public class ShowTableStatusDesc extends
   /**
    * @return the database name
    */
-  @Explain(displayName = "database")
+  @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDbName() {
     return dbName;
   }
@@ -140,7 +142,7 @@ public class ShowTableStatusDesc extends
   /**
    * @return the partSpec
    */
-  @Explain(displayName = "partition")
+  @Explain(displayName = "partition", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public HashMap<String, String> getPartSpec() {
     return partSpec;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java Wed Apr 15 22:04:00 2015
@@ -21,12 +21,14 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * ShowTablesDesc.
  *
  */
-@Explain(displayName = "Show Tables")
+@Explain(displayName = "Show Tables", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ShowTablesDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String pattern;
@@ -98,7 +100,7 @@ public class ShowTablesDesc extends DDLD
   /**
    * @return the resFile
    */
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
   public String getResFile() {
     return resFile;
   }
@@ -114,7 +116,7 @@ public class ShowTablesDesc extends DDLD
   /**
    * @return the dbName
    */
-  @Explain(displayName = "database name")
+  @Explain(displayName = "database name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDbName() {
     return dbName;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTblPropertiesDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTblPropertiesDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTblPropertiesDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTblPropertiesDesc.java Wed Apr 15 22:04:00 2015
@@ -22,12 +22,14 @@ import java.io.Serializable;
 import java.util.HashMap;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * ShowTblPropertiesDesc.
  *
  */
-@Explain(displayName = "Show Table Properties")
+@Explain(displayName = "Show Table Properties", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class ShowTblPropertiesDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   String resFile;
@@ -77,7 +79,7 @@ public class ShowTblPropertiesDesc exten
     return resFile;
   }
 
-  @Explain(displayName = "result file", normalExplain = false)
+  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
   public String getResFileString() {
     return getResFile();
   }
@@ -93,7 +95,7 @@ public class ShowTblPropertiesDesc exten
   /**
    * @return the tableName
    */
-  @Explain(displayName = "table name")
+  @Explain(displayName = "table name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getTableName() {
     return tableName;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/Statistics.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/Statistics.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/Statistics.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/Statistics.java Wed Apr 15 22:04:00 2015
@@ -22,6 +22,7 @@ import java.io.Serializable;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.stats.StatsUtils;
 
 import com.google.common.collect.Lists;
@@ -101,7 +102,7 @@ public class Statistics implements Seria
   }
 
   @Override
-  @Explain(displayName = "Statistics")
+  @Explain(displayName = "Statistics", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String toString() {
     StringBuilder sb = new StringBuilder();
     sb.append("Num rows: ");

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsNoJobWork.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsNoJobWork.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsNoJobWork.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsNoJobWork.java Wed Apr 15 22:04:00 2015
@@ -22,12 +22,14 @@ import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec;
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 
 /**
  * Client-side stats aggregator task.
  */
-@Explain(displayName = "Stats-Aggr Operator")
+@Explain(displayName = "Stats-Aggr Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class StatsNoJobWork implements Serializable {
   private static final long serialVersionUID = 1L;
 

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java Wed Apr 15 22:04:00 2015
@@ -22,12 +22,14 @@ import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * ConditionalStats.
  *
  */
-@Explain(displayName = "Stats-Aggr Operator")
+@Explain(displayName = "Stats-Aggr Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class StatsWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
@@ -90,7 +92,7 @@ public class StatsWork implements Serial
     aggKey = aggK;
   }
 
-  @Explain(displayName = "Stats Aggregation Key Prefix", normalExplain = false)
+  @Explain(displayName = "Stats Aggregation Key Prefix", explainLevels = { Level.EXTENDED })
   public String getAggKey() {
     return aggKey;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/SwitchDatabaseDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/SwitchDatabaseDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/SwitchDatabaseDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/SwitchDatabaseDesc.java Wed Apr 15 22:04:00 2015
@@ -19,12 +19,14 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * SwitchDatabaseDesc.
  *
  */
-@Explain(displayName = "Switch Database")
+@Explain(displayName = "Switch Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class SwitchDatabaseDesc extends DDLDesc implements Serializable {
 
   private static final long serialVersionUID = 1L;
@@ -39,7 +41,7 @@ public class SwitchDatabaseDesc extends
     this.databaseName = databaseName;
   }
 
-  @Explain(displayName = "name")
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDatabaseName() {
     return databaseName;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java Wed Apr 15 22:04:00 2015
@@ -28,6 +28,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
@@ -118,7 +119,7 @@ public class TableDesc implements Serial
     return properties;
   }
 
-  @Explain(displayName = "properties", normalExplain = false)
+  @Explain(displayName = "properties", explainLevels = { Level.EXTENDED })
   public Map getPropertiesExplain() {
     return HiveStringUtils.getPropertiesExplain(getProperties());
   }
@@ -131,7 +132,7 @@ public class TableDesc implements Serial
     this.jobProperties = jobProperties;
   }
 
-  @Explain(displayName = "jobProperties", normalExplain = false)
+  @Explain(displayName = "jobProperties", explainLevels = { Level.EXTENDED })
   public Map<String, String> getJobProperties() {
     return jobProperties;
   }
@@ -139,23 +140,23 @@ public class TableDesc implements Serial
   /**
    * @return the serdeClassName
    */
-  @Explain(displayName = "serde")
+  @Explain(displayName = "serde", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getSerdeClassName() {
     return properties.getProperty(serdeConstants.SERIALIZATION_LIB);
   }
 
-  @Explain(displayName = "name")
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getTableName() {
     return properties
         .getProperty(hive_metastoreConstants.META_TABLE_NAME);
   }
 
-  @Explain(displayName = "input format")
+  @Explain(displayName = "input format", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getInputFileFormatClassName() {
     return getInputFileFormatClass().getName();
   }
 
-  @Explain(displayName = "output format")
+  @Explain(displayName = "output format", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getOutputFileFormatClassName() {
     return getOutputFileFormatClass().getName();
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java Wed Apr 15 22:04:00 2015
@@ -27,13 +27,15 @@ import org.apache.hadoop.hive.ql.exec.PT
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.parse.TableSample;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * Table Scan Descriptor Currently, data is only read from a base source as part
  * of map-reduce framework. So, nothing is stored in the descriptor. But, more
  * things will be added here as table scan is invoked as part of local work.
  **/
-@Explain(displayName = "TableScan")
+@Explain(displayName = "TableScan", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class TableScanDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
 
@@ -125,7 +127,7 @@ public class TableScanDesc extends Abstr
     return new TableScanDesc(getAlias(), vcs, this.tableMetadata);
   }
 
-  @Explain(displayName = "alias")
+  @Explain(displayName = "alias", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getAlias() {
     return alias;
   }
@@ -193,7 +195,7 @@ public class TableScanDesc extends Abstr
     this.gatherStats = gatherStats;
   }
 
-  @Explain(displayName = "GatherStats", normalExplain = false)
+  @Explain(displayName = "GatherStats", explainLevels = { Level.EXTENDED })
   public boolean isGatherStats() {
     return gatherStats;
   }
@@ -218,7 +220,7 @@ public class TableScanDesc extends Abstr
     statsAggKeyPrefix = k;
   }
 
-  @Explain(displayName = "Statistics Aggregation Key Prefix", normalExplain = false)
+  @Explain(displayName = "Statistics Aggregation Key Prefix", explainLevels = { Level.EXTENDED })
   public String getStatsAggPrefix() {
     return statsAggKeyPrefix;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TezWork.java Wed Apr 15 22:04:00 2015
@@ -36,6 +36,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.plan.TezEdgeProperty.EdgeType;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * TezWork. This class encapsulates all the work objects that can be executed
@@ -44,7 +46,7 @@ import org.apache.hadoop.mapred.JobConf;
  *
  */
 @SuppressWarnings("serial")
-@Explain(displayName = "Tez")
+@Explain(displayName = "Tez", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class TezWork extends AbstractOperatorDesc {
 
   public enum VertexType {
@@ -87,7 +89,7 @@ public class TezWork extends AbstractOpe
   /**
    * getWorkMap returns a map of "vertex name" to BaseWork
    */
-  @Explain(displayName = "Vertices")
+  @Explain(displayName = "Vertices", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public Map<String, BaseWork> getWorkMap() {
     Map<String, BaseWork> result = new LinkedHashMap<String, BaseWork>();
     for (BaseWork w: getAllWork()) {
@@ -286,7 +288,7 @@ public class TezWork extends AbstractOpe
     }
   }
 
-  @Explain(displayName = "Edges")
+  @Explain(displayName = "Edges", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public Map<String, List<Dependency>> getDependencyMap() {
     Map<String, List<Dependency>> result = new LinkedHashMap<String, List<Dependency>>();
     for (Map.Entry<BaseWork, List<BaseWork>> entry: invertedWorkGraph.entrySet()) {

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TruncateTableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TruncateTableDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TruncateTableDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/TruncateTableDesc.java Wed Apr 15 22:04:00 2015
@@ -22,11 +22,13 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * Truncates managed table or partition
  */
-@Explain(displayName = "Truncate Table or Partition")
+@Explain(displayName = "Truncate Table or Partition", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class TruncateTableDesc extends DDLDesc {
 
   private static final long serialVersionUID = 1L;
@@ -46,7 +48,7 @@ public class TruncateTableDesc extends D
     this.partSpec = partSpec;
   }
 
-  @Explain(displayName = "TableName")
+  @Explain(displayName = "TableName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getTableName() {
     return tableName;
   }
@@ -55,7 +57,7 @@ public class TruncateTableDesc extends D
     this.tableName = tableName;
   }
 
-  @Explain(displayName = "Partition Spec")
+  @Explain(displayName = "Partition Spec", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public Map<String, String> getPartSpec() {
     return partSpec;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java Wed Apr 15 22:04:00 2015
@@ -19,6 +19,8 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * All member variables should have a setters and getters of the form get<member
@@ -26,7 +28,7 @@ import org.apache.hadoop.hive.ql.udf.gen
  * time.
  *
  */
-@Explain(displayName = "UDTF Operator")
+@Explain(displayName = "UDTF Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class UDTFDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
 
@@ -49,7 +51,7 @@ public class UDTFDesc extends AbstractOp
     this.genericUDTF = genericUDTF;
   }
 
-  @Explain(displayName = "function name")
+  @Explain(displayName = "function name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getUDTFName() {
     return genericUDTF.toString();
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java Wed Apr 15 22:04:00 2015
@@ -17,13 +17,15 @@
  */
 
 package org.apache.hadoop.hive.ql.plan;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 
 /**
  * unionDesc is a empty class currently. However, union has more than one input
  * (as compared with forward), and therefore, we need a separate class.
  **/
-@Explain(displayName = "Union")
+@Explain(displayName = "Union", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class UnionDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
   private transient int numInputs;

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionWork.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionWork.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionWork.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionWork.java Wed Apr 15 22:04:00 2015
@@ -25,7 +25,9 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.HashSet;
+
 import org.apache.hadoop.hive.ql.plan.BaseWork;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.UnionOperator;
 import org.apache.hadoop.mapred.JobConf;
@@ -47,8 +49,8 @@ public class UnionWork extends BaseWork
     super(name);
   }
 
-  @Explain(displayName = "Vertex")
   @Override
+  @Explain(displayName = "Vertex", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getName() {
     return super.getName();
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockDatabaseDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockDatabaseDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockDatabaseDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockDatabaseDesc.java Wed Apr 15 22:04:00 2015
@@ -19,12 +19,14 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * UnlockDatabaseDesc.
  *
  */
-@Explain(displayName = "Unlock Database")
+@Explain(displayName = "Unlock Database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class UnlockDatabaseDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 
@@ -34,7 +36,7 @@ public class UnlockDatabaseDesc extends
     this.databaseName = databaseName;
   }
 
-  @Explain(displayName = "database")
+  @Explain(displayName = "database", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDatabaseName() {
     return databaseName;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockTableDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockTableDesc.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockTableDesc.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/UnlockTableDesc.java Wed Apr 15 22:04:00 2015
@@ -22,12 +22,14 @@ import java.io.Serializable;
 import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
 
 /**
  * UnlockTableDesc.
  *
  */
-@Explain(displayName = "Unlock Table")
+@Explain(displayName = "Unlock Table", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class UnlockTableDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1L;
 

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java Wed Apr 15 22:04:00 2015
@@ -22,6 +22,7 @@ import org.apache.hadoop.hive.ql.exec.Ex
 import org.apache.hadoop.hive.ql.exec.PTFUtils;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
 public class PTFExpressionDef {
@@ -59,7 +60,7 @@ public class PTFExpressionDef {
     this.exprNode = exprNode;
   }
 
-  @Explain(displayName = "expr")
+  @Explain(displayName = "expr", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getExprNodeExplain() {
     return exprNode == null ? null : exprNode.getExprString();
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFQueryInputDef.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFQueryInputDef.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFQueryInputDef.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFQueryInputDef.java Wed Apr 15 22:04:00 2015
@@ -20,13 +20,14 @@ package org.apache.hadoop.hive.ql.plan.p
 
 import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFQueryInputType;
 import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-@Explain(displayName = "Input definition")
+@Explain(displayName = "Input definition", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class PTFQueryInputDef extends PTFInputDef {
   private String destination;
   private PTFQueryInputType type;
 
-  @Explain(displayName = "destination")
+  @Explain(displayName = "destination", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDestination() {
     return destination;
   }
@@ -43,7 +44,7 @@ public class PTFQueryInputDef extends PT
     this.type = type;
   }
 
-  @Explain(displayName = "type")
+  @Explain(displayName = "type", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getTypeExplain() {
     return type.name();
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PartitionedTableFunctionDef.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PartitionedTableFunctionDef.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PartitionedTableFunctionDef.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PartitionedTableFunctionDef.java Wed Apr 15 22:04:00 2015
@@ -23,9 +23,10 @@ import java.util.List;
 
 import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec;
 import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator;
 
-@Explain(displayName = "Partition table definition")
+@Explain(displayName = "Partition table definition", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class PartitionedTableFunctionDef extends PTFInputDef {
   private String name;
   private String resolverClassName;
@@ -40,7 +41,7 @@ public class PartitionedTableFunctionDef
   
   private transient List<String> referencedColumns;
 
-  @Explain(displayName = "name")
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getName() {
     return name;
   }
@@ -83,7 +84,7 @@ public class PartitionedTableFunctionDef
     return partition;
   }
 
-  @Explain(displayName = "partition by")
+  @Explain(displayName = "partition by", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getPartitionExplain() {
     if (partition == null || partition.getExpressions() == null) {
       return null;
@@ -110,7 +111,7 @@ public class PartitionedTableFunctionDef
     this.order = order;
   }
 
-  @Explain(displayName = "order by")
+  @Explain(displayName = "order by", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getOrderExplain() {
     if (order == null || order.getExpressions() == null) {
       return null;
@@ -144,7 +145,7 @@ public class PartitionedTableFunctionDef
     this.args = args;
   }
 
-  @Explain(displayName = "arguments")
+  @Explain(displayName = "arguments", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getArgsExplain() {
     if (args == null) {
       return null;
@@ -188,7 +189,7 @@ public class PartitionedTableFunctionDef
     this.resolverClassName = resolverClassName;
   }
 
-  @Explain(displayName = "referenced columns")
+  @Explain(displayName = "referenced columns", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public List<String> getReferencedColumns() {
     return referencedColumns;
   }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java Wed Apr 15 22:04:00 2015
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.sessio
 
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.net.URL;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.LinkedList;
@@ -58,8 +59,11 @@ public class DependencyResolver {
 
     // If HIVE_HOME is not defined or file is not found in HIVE_HOME/conf then load default ivysettings.xml from class loader
     if (ivysettingsPath == null || !(new File(ivysettingsPath).exists())) {
-      ivysettingsPath = ClassLoader.getSystemResource("ivysettings.xml").getFile();
-      _console.printInfo("ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR," + ivysettingsPath + " will be used");
+      URL ivysettingsResource = ClassLoader.getSystemResource("ivysettings.xml");
+      if (ivysettingsResource != null) {
+        ivysettingsPath = ivysettingsResource.getFile();
+        _console.printInfo("ivysettings.xml file not found in HIVE_HOME or HIVE_CONF_DIR, " + ivysettingsPath + " will be used");
+      }
     }
 
   }
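
[Editor's note: for context, ClassLoader.getSystemResource returns null when ivysettings.xml is absent from the classpath, so the pre-patch chained getFile() call could throw a NullPointerException. A minimal standalone illustration of the lookup the new guard protects (class name hypothetical):

    import java.net.URL;

    public class IvySettingsLookupDemo {
      public static void main(String[] args) {
        // Null when ivysettings.xml is not on the classpath; the old code
        // dereferenced this directly via .getFile().
        URL url = ClassLoader.getSystemResource("ivysettings.xml");
        String path = (url == null) ? null : url.getFile();
        System.out.println("ivysettings.xml resolved to: " + path);
      }
    }
]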

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java Wed Apr 15 22:04:00 2015
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.sessio
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.io.IOUtils;
 
 import java.io.*;
@@ -36,10 +37,38 @@ public class OperationLog {
 
   private final String operationName;
   private final LogFile logFile;
+  private LoggingLevel opLoggingLevel = LoggingLevel.UNKNOWN;
 
-  public OperationLog(String name, File file) throws FileNotFoundException{
+  public static enum LoggingLevel {
+    NONE, EXECUTION, PERFORMANCE, VERBOSE, UNKNOWN
+  }
+
+  public OperationLog(String name, File file, HiveConf hiveConf) throws FileNotFoundException {
     operationName = name;
     logFile = new LogFile(file);
+
+    if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
+      String logLevel = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL);
+      opLoggingLevel = getLoggingLevel(logLevel);
+    }
+  }
+
+  public static LoggingLevel getLoggingLevel (String mode) {
+    if (mode.equalsIgnoreCase("none")) {
+      return LoggingLevel.NONE;
+    } else if (mode.equalsIgnoreCase("execution")) {
+      return LoggingLevel.EXECUTION;
+    } else if (mode.equalsIgnoreCase("verbose")) {
+      return LoggingLevel.VERBOSE;
+    } else if (mode.equalsIgnoreCase("performance")) {
+      return LoggingLevel.PERFORMANCE;
+    } else {
+      return LoggingLevel.UNKNOWN;
+    }
+  }
+
+  public LoggingLevel getOpLoggingLevel() {
+    return opLoggingLevel;
   }
 
   /**

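The new constructor derives the per-operation logging level from HiveServer2 configuration at construction time. A usage sketch, assuming only the configuration keys that appear in the patch; the file path and operation name are illustrative:

    import java.io.File;
    import java.io.FileNotFoundException;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.session.OperationLog;

    public class OperationLogDemo {
      public static void main(String[] args) throws FileNotFoundException {
        HiveConf conf = new HiveConf();
        conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED, true);
        conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL, "performance");
        OperationLog opLog = new OperationLog("op-1", new File("/tmp/op-1.log"), conf);
        // Unrecognized level strings map to LoggingLevel.UNKNOWN instead of throwing.
        System.out.println(opLog.getOpLoggingLevel()); // PERFORMANCE
      }
    }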
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java Wed Apr 15 22:04:00 2015
@@ -82,7 +82,7 @@ public class JDBCStatsAggregator impleme
     Utilities.SQLCommand<Void> setQueryTimeout = new Utilities.SQLCommand<Void>() {
       @Override
       public Void run(PreparedStatement stmt) throws SQLException {
-        stmt.setQueryTimeout(timeout);
+        Utilities.setQueryTimeout(stmt, timeout);
         return null;
       }
     };

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java Wed Apr 15 22:04:00 2015
@@ -82,7 +82,7 @@ public class JDBCStatsPublisher implemen
     Utilities.SQLCommand<Void> setQueryTimeout = new Utilities.SQLCommand<Void>() {
       @Override
       public Void run(PreparedStatement stmt) throws SQLException {
-        stmt.setQueryTimeout(timeout);
+        Utilities.setQueryTimeout(stmt, timeout);
         return null;
       }
     };
@@ -279,7 +279,7 @@ public class JDBCStatsPublisher implemen
         conn = DriverManager.getConnection(connectionString);
 
         stmt = conn.createStatement();
-        stmt.setQueryTimeout(timeout);
+        Utilities.setQueryTimeout(stmt, timeout);
 
         // TODO: why is this not done using Hive db scripts?
         // Check if the table exists

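Both JDBCStatsAggregator and JDBCStatsPublisher now route through Utilities.setQueryTimeout instead of calling Statement.setQueryTimeout directly. The helper's body is not part of this diff; a plausible sketch, on the assumption that it exists to tolerate JDBC drivers that reject query timeouts:

    import java.sql.SQLException;
    import java.sql.Statement;

    // Hypothetical shape of the Utilities.setQueryTimeout helper; the real body may differ.
    public final class TimeoutHelper {
      public static void setQueryTimeout(Statement stmt, int timeout) {
        try {
          stmt.setQueryTimeout(timeout);
        } catch (SQLException err) {
          // Drivers that do not support query timeouts throw here; treat as non-fatal.
          System.err.println("Failed to set query timeout " + timeout + ": " + err.getMessage());
        }
      }
    }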
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java Wed Apr 15 22:04:00 2015
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthString;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -38,9 +39,12 @@ import org.apache.hadoop.io.Text;
  *
  */
 @Description(name = "day,dayofmonth",
-    value = "_FUNC_(date) - Returns the date of the month of date",
-    extended = "date is a string in the format of 'yyyy-MM-dd HH:mm:ss' or "
-    + "'yyyy-MM-dd'.\n"
+    value = "_FUNC_(param) - Returns the day of the month of date/timestamp, or day component of interval",
+    extended = "param can be one of:\n"
+    + "1. A string in the format of 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'.\n"
+    + "2. A date value\n"
+    + "3. A timestamp value\n"
+    + "4. A day-time interval value"
     + "Example:\n "
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  30")
 @VectorizedExpressions({VectorUDFDayOfMonthLong.class, VectorUDFDayOfMonthString.class})
@@ -98,4 +102,12 @@ public class UDFDayOfMonth extends UDF {
     return result;
   }
 
+  public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
+    if (i == null) {
+      return null;
+    }
+
+    result.set(i.getHiveIntervalDayTime().getDays());
+    return result;
+  }
 }

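The new overload above returns the days component of a day-time interval; the analogous overloads added to UDFHour, UDFMinute, and UDFSecond below work the same way for their respective fields. An illustrative call, assuming a HiveIntervalDayTime(days, hours, minutes, seconds, nanos) constructor:

    import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
    import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
    import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
    import org.apache.hadoop.io.IntWritable;

    public class DayOfMonthIntervalDemo {
      public static void main(String[] args) {
        // 30 days, 12:58:59, 0 nanos (assumed constructor argument order).
        HiveIntervalDayTime interval = new HiveIntervalDayTime(30, 12, 58, 59, 0);
        IntWritable day = new UDFDayOfMonth().evaluate(new HiveIntervalDayTimeWritable(interval));
        System.out.println(day.get()); // 30, the days component
      }
    }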
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java Wed Apr 15 22:04:00 2015
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourString;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -37,9 +38,11 @@ import org.apache.hadoop.io.Text;
  *
  */
 @Description(name = "hour",
-    value = "_FUNC_(date) - Returns the hour of date",
-    extended = "date is a string in the format of 'yyyy-MM-dd HH:mm:ss' or "
-    + "'HH:mm:ss'.\n"
+    value = "_FUNC_(param) - Returns the hour componemnt of the string/timestamp/interval",
+    extended ="param can be one of:\n"
+    + "1. A string in the format of 'yyyy-MM-dd HH:mm:ss' or 'HH:mm:ss'.\n"
+    + "2. A timestamp value\n"
+    + "3. A day-time interval value"
     + "Example:\n "
     + "  > SELECT _FUNC_('2009-07-30 12:58:59') FROM src LIMIT 1;\n"
     + "  12\n"
@@ -95,4 +98,12 @@ public class UDFHour extends UDF {
     return result;
   }
 
+  public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
+    if (i == null) {
+      return null;
+    }
+
+    result.set(i.getHiveIntervalDayTime().getHours());
+    return result;
+  }
 }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java Wed Apr 15 22:04:00 2015
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteString;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -37,9 +38,11 @@ import org.apache.hadoop.io.Text;
  *
  */
 @Description(name = "minute",
-    value = "_FUNC_(date) - Returns the minute of date",
-    extended = "date is a string in the format of 'yyyy-MM-dd HH:mm:ss' or "
-    + "'HH:mm:ss'.\n"
+    value = "_FUNC_(param) - Returns the minute component of the string/timestamp/interval",
+    extended = "param can be one of:\n"
+    + "1. A string in the format of 'yyyy-MM-dd HH:mm:ss' or 'HH:mm:ss'.\n"
+    + "2. A timestamp value\n"
+    + "3. A day-time interval value"
     + "Example:\n "
     + "  > SELECT _FUNC_('2009-07-30 12:58:59') FROM src LIMIT 1;\n"
     + "  58\n"
@@ -95,4 +98,12 @@ public class UDFMinute extends UDF {
     return result;
   }
 
+  public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
+    if (i == null) {
+      return null;
+    }
+
+    result.set(i.getHiveIntervalDayTime().getMinutes());
+    return result;
+  }
 }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java Wed Apr 15 22:04:00 2015
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthString;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -38,8 +39,13 @@ import org.apache.hadoop.io.Text;
  *
  */
 @Description(name = "month",
-    value = "_FUNC_(date) - Returns the month of date",
-    extended = "Example:\n"
+    value = "_FUNC_(param) - Returns the month component of the date/timestamp/interval",
+    extended = "param can be one of:\n"
+    + "1. A string in the format of 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'.\n"
+    + "2. A date value\n"
+    + "3. A timestamp value\n"
+    + "4. A year-month interval value"
+    + "Example:\n"
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  7")
 @VectorizedExpressions({VectorUDFMonthLong.class, VectorUDFMonthString.class})
 public class UDFMonth extends UDF {
@@ -94,4 +100,12 @@ public class UDFMonth extends UDF {
     return result;
   }
 
+  public IntWritable evaluate(HiveIntervalYearMonthWritable i) {
+    if (i == null) {
+      return null;
+    }
+
+    result.set(i.getHiveIntervalYearMonth().getMonths());
+    return result;
+  }
 }

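UDFMonth (and UDFYear below) accepts the year-month interval type instead. A sketch under the assumption of a HiveIntervalYearMonth(years, months) constructor:

    import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
    import org.apache.hadoop.hive.ql.udf.UDFMonth;
    import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
    import org.apache.hadoop.io.IntWritable;

    public class MonthIntervalDemo {
      public static void main(String[] args) {
        HiveIntervalYearMonth interval = new HiveIntervalYearMonth(2, 7); // 2 years, 7 months
        IntWritable month = new UDFMonth().evaluate(new HiveIntervalYearMonthWritable(interval));
        System.out.println(month.get()); // 7, the months component
      }
    }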
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java Wed Apr 15 22:04:00 2015
@@ -23,23 +23,29 @@ import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Date;
 
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondString;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hive.common.util.DateUtils;
 
 /**
  * UDFSecond.
  *
  */
 @Description(name = "second",
-    value = "_FUNC_(date) - Returns the second of date",
-    extended = "date is a string in the format of 'yyyy-MM-dd HH:mm:ss' or "
-    + "'HH:mm:ss'.\n"
+    value = "_FUNC_(date) - Returns the second component of the string/timestamp/interval",
+    extended = "param can be one of:\n"
+    + "1. A string in the format of 'yyyy-MM-dd HH:mm:ss' or 'HH:mm:ss'.\n"
+    + "2. A timestamp value\n"
+    + "3. A day-time interval value"
     + "Example:\n "
     + "  > SELECT _FUNC_('2009-07-30 12:58:59') FROM src LIMIT 1;\n"
     + "  59\n"
@@ -96,4 +102,13 @@ public class UDFSecond extends UDF {
     return result;
   }
 
+  public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
+    if (i == null) {
+      return null;
+    }
+
+    HiveIntervalDayTime idt = i.getHiveIntervalDayTime();
+    result.set(idt.getSeconds());
+    return result;
+  }
 }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java Wed Apr 15 22:04:00 2015
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearString;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -38,9 +39,12 @@ import org.apache.hadoop.io.Text;
  *
  */
 @Description(name = "year",
-    value = "_FUNC_(date) - Returns the year of date",
-    extended = "date is a string in the format of 'yyyy-MM-dd HH:mm:ss' or "
-    + "'yyyy-MM-dd'.\n"
+    value = "_FUNC_(param) - Returns the year component of the date/timestamp/interval",
+    extended = "param can be one of:\n"
+    + "1. A string in the format of 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'.\n"
+    + "2. A date value\n"
+    + "3. A timestamp value\n"
+    + "4. A year-month interval value"
     + "Example:\n "
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  2009")
 @VectorizedExpressions({VectorUDFYearLong.class, VectorUDFYearString.class})
@@ -98,4 +102,12 @@ public class UDFYear extends UDF {
     return result;
   }
 
+  public IntWritable evaluate(HiveIntervalYearMonthWritable i) {
+    if (i == null) {
+      return null;
+    }
+
+    result.set(i.getHiveIntervalYearMonth().getYears());
+    return result;
+  }
 }

Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java Wed Apr 15 22:04:00 2015
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -86,9 +87,11 @@ public class GenericUDAFComputeStats ext
       return new GenericUDAFBinaryStatsEvaluator();
     case DECIMAL:
       return new GenericUDAFDecimalStatsEvaluator();
+    case DATE:
+      return new GenericUDAFDateStatsEvaluator();
     default:
       throw new UDFArgumentTypeException(0,
-          "Only integer/long/timestamp/float/double/string/binary/boolean/decimal type argument " +
+          "Only integer/long/timestamp/date/float/double/string/binary/boolean/decimal type argument " +
           "is accepted but "
           + parameters[0].getTypeName() + " is passed.");
     }
@@ -1314,4 +1317,73 @@ public class GenericUDAFComputeStats ext
       ((NumericStatsAgg)agg).reset("Decimal");
     }
   }
+
+  /**
+   * GenericUDAFDateStatsEvaluator
+   * High/low value will be saved in stats DB as long value representing days since epoch.
+   */
+  public static class GenericUDAFDateStatsEvaluator
+      extends GenericUDAFNumericStatsEvaluator<DateWritable, DateObjectInspector> {
+
+    @Override
+    protected DateObjectInspector getValueObjectInspector() {
+      return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
+    }
+
+    @AggregationType(estimable = true)
+    public class DateStatsAgg extends NumericStatsAgg {
+      @Override
+      public int estimate() {
+        JavaDataModel model = JavaDataModel.get();
+        return super.estimate() + model.primitive2() * 2;
+      }
+
+      @Override
+      protected void update(Object p, PrimitiveObjectInspector inputOI) {
+        // DateWritable is mutable, DateStatsAgg needs its own copy
+        DateWritable v = new DateWritable((DateWritable) inputOI.getPrimitiveWritableObject(p));
+
+        // Update min counter if new value is less than min seen so far
+        if (min == null || v.compareTo(min) < 0) {
+          min = v;
+        }
+        // Update max counter if new value is greater than max seen so far
+        if (max == null || v.compareTo(max) > 0) {
+          max = v;
+        }
+        // Add value to NumDistinctValue Estimator
+        numDV.addToEstimator(v.getDays());
+      }
+
+      @Override
+      protected void updateMin(Object minValue, DateObjectInspector minFieldOI) {
+        if ((minValue != null) && (min == null ||
+            min.compareTo(minFieldOI.getPrimitiveWritableObject(minValue)) > 0)) {
+          // DateWritable is mutable, DateStatsAgg needs its own copy
+          min = new DateWritable(minFieldOI.getPrimitiveWritableObject(minValue));
+        }
+      }
+
+      @Override
+      protected void updateMax(Object maxValue, DateObjectInspector maxFieldOI) {
+        if ((maxValue != null) && (max == null ||
+            max.compareTo(maxFieldOI.getPrimitiveWritableObject(maxValue)) < 0)) {
+          // DateWritable is mutable, DateStatsAgg needs its own copy
+          max = new DateWritable(maxFieldOI.getPrimitiveWritableObject(maxValue));
+        }
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      AggregationBuffer result = new DateStatsAgg();
+      reset(result);
+      return result;
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      ((NumericStatsAgg)agg).reset("Date");
+    }
+  }
 }

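As the class comment notes, DATE high/low values are persisted as a long count of days since the epoch; DateWritable already stores dates that way, so DateStatsAgg can feed getDays() directly to the distinct-value estimator. A small sketch of the encoding:

    import java.sql.Date;
    import org.apache.hadoop.hive.serde2.io.DateWritable;

    public class DateEncodingDemo {
      public static void main(String[] args) {
        DateWritable d = new DateWritable(Date.valueOf("2015-04-15"));
        // DateWritable stores the date as days since the epoch; this is the value
        // persisted to the stats DB and the basis for DateStatsAgg's min/max comparisons.
        System.out.println(d.getDays());
      }
    }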
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java Wed Apr 15 22:04:00 2015
@@ -501,6 +501,10 @@ public abstract class GenericUDF impleme
       return null;
     }
     Object writableValue = converters[i].convert(obj);
+    // if the string cannot be parsed, the converter will return null
+    if (writableValue == null) {
+      return null;
+    }
     Timestamp ts = ((TimestampWritable) writableValue).getTimestamp();
     return ts;
   }

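The added null check matters because Hive's ObjectInspector converters signal an unparseable input by returning null rather than throwing. A hedged reproduction of the failure mode the guard prevents:

    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class ConverterNullDemo {
      public static void main(String[] args) {
        ObjectInspectorConverters.Converter c = ObjectInspectorConverters.getConverter(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
        // An unparseable string converts to null rather than throwing, so the
        // unconditional cast and getTimestamp() call would have thrown an NPE here.
        System.out.println(c.convert("not-a-timestamp")); // null
      }
    }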
Modified: hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java (original)
+++ hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java Wed Apr 15 22:04:00 2015
@@ -20,10 +20,14 @@ package org.apache.hadoop.hive.ql.udf.pt
 
 import java.util.AbstractList;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -60,10 +64,42 @@ import org.apache.hadoop.hive.serde2.obj
 
 @SuppressWarnings("deprecation")
 public class WindowingTableFunction extends TableFunctionEvaluator {
+  public static final Log LOG = LogFactory.getLog(WindowingTableFunction.class.getName());
+  static class WindowingFunctionInfoHelper {
+    private boolean supportsWindow;
+
+    WindowingFunctionInfoHelper() {
+    }
+
+    public WindowingFunctionInfoHelper(boolean supportsWindow) {
+      this.supportsWindow = supportsWindow;
+    }
+
+    public boolean isSupportsWindow() {
+      return supportsWindow;
+    }
+    public void setSupportsWindow(boolean supportsWindow) {
+      this.supportsWindow = supportsWindow;
+    }
+  }
 
   StreamingState streamingState;
   RankLimit rnkLimitDef;
+
+  // There is some information about the windowing functions that needs to be initialized
+  // during query compilation time, and made available during the map/reduce tasks via
+  // plan serialization.
+  Map<String, WindowingFunctionInfoHelper> windowingFunctionHelpers = null;
   
+  public Map<String, WindowingFunctionInfoHelper> getWindowingFunctionHelpers() {
+    return windowingFunctionHelpers;
+  }
+
+  public void setWindowingFunctionHelpers(
+      Map<String, WindowingFunctionInfoHelper> windowingFunctionHelpers) {
+    this.windowingFunctionHelpers = windowingFunctionHelpers;
+  }
+
   @SuppressWarnings({ "unchecked", "rawtypes" })
   @Override
   public void execute(PTFPartitionIterator<Object> pItr, PTFPartition outP) throws HiveException {
@@ -147,9 +183,8 @@ public class WindowingTableFunction exte
   private boolean streamingPossible(Configuration cfg, WindowFunctionDef wFnDef)
       throws HiveException {
     WindowFrameDef wdwFrame = wFnDef.getWindowFrame();
-    WindowFunctionInfo wFnInfo = FunctionRegistry.getWindowFunctionInfo(wFnDef
-        .getName());
 
+    WindowingFunctionInfoHelper wFnInfo = getWindowingFunctionInfoHelper(wFnDef.getName());
     if (!wFnInfo.isSupportsWindow()) {
       return true;
     }
@@ -259,6 +294,45 @@ public class WindowingTableFunction exte
     return new int[] {precedingSpan, followingSpan};
   }
 
+  private void initializeWindowingFunctionInfoHelpers() throws SemanticException {
+    // getWindowFunctionInfo() cannot be called during map/reduce tasks. So cache necessary
+    // values during query compilation, and rely on plan serialization to bring this info
+    // to the object during the map/reduce tasks.
+    if (windowingFunctionHelpers != null) {
+      return;
+    }
+
+    windowingFunctionHelpers = new HashMap<String, WindowingFunctionInfoHelper>();
+    WindowTableFunctionDef tabDef = (WindowTableFunctionDef) getTableDef();
+    for (int i = 0; i < tabDef.getWindowFunctions().size(); i++) {
+      WindowFunctionDef wFn = tabDef.getWindowFunctions().get(i);
+      GenericUDAFEvaluator fnEval = wFn.getWFnEval();
+      WindowFunctionInfo wFnInfo = FunctionRegistry.getWindowFunctionInfo(wFn.getName());
+      boolean supportsWindow = wFnInfo.isSupportsWindow();
+      windowingFunctionHelpers.put(wFn.getName(), new WindowingFunctionInfoHelper(supportsWindow));
+    }
+  }
+
+  @Override
+  protected void setOutputOI(StructObjectInspector outputOI) {
+    super.setOutputOI(outputOI);
+    // Call here because at this point the WindowTableFunctionDef has been set
+    try {
+      initializeWindowingFunctionInfoHelpers();
+    } catch (SemanticException err) {
+      throw new RuntimeException("Unexpected error while setting up windowing function", err);
+    }
+  }
+
+  private WindowingFunctionInfoHelper getWindowingFunctionInfoHelper(String fnName) {
+    WindowingFunctionInfoHelper wFnInfoHelper = windowingFunctionHelpers.get(fnName);
+    if (wFnInfoHelper == null) {
+      // Should not happen
+      throw new RuntimeException("No cached WindowingFunctionInfoHelper for " + fnName);
+    }
+    return wFnInfoHelper;
+  }
+
   @Override
   public void initializeStreaming(Configuration cfg,
       StructObjectInspector inputOI, boolean isMapSide) throws HiveException {
@@ -412,8 +486,7 @@ public class WindowingTableFunction exte
       if (fnEval instanceof ISupportStreamingModeForWindowing) {
         fnEval.terminate(streamingState.aggBuffers[i]);
 
-        WindowFunctionInfo wFnInfo = FunctionRegistry.getWindowFunctionInfo(wFn
-            .getName());
+        WindowingFunctionInfoHelper wFnInfo = getWindowingFunctionInfoHelper(wFn.getName());
         if (!wFnInfo.isSupportsWindow()) {
           numRowsRemaining = ((ISupportStreamingModeForWindowing) fnEval)
               .getRowsRemainingAfterTerminate();

Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java Wed Apr 15 22:04:00 2015
@@ -23,9 +23,11 @@ import java.util.Set;
 
 import junit.framework.Assert;
 import junit.framework.TestCase;
+import org.junit.Test;
 
-public class TestErrorMsg extends TestCase {
+public class TestErrorMsg {
 
+  @Test
   public void testUniqueErrorCode() {
     Set<Integer> numbers = new HashSet<Integer>();
     for (ErrorMsg err : ErrorMsg.values()) {
@@ -33,4 +35,15 @@ public class TestErrorMsg extends TestCa
       Assert.assertTrue("duplicated error number " + code, numbers.add(code));
     }
   }
+  @Test
+  public void testReverseMatch() {
+    testReverseMatch(ErrorMsg.OP_NOT_ALLOWED_IN_AUTOCOMMIT, "COMMIT");
+    testReverseMatch(ErrorMsg.OP_NOT_ALLOWED_IN_TXN, "ALTER TABLE", "1");
+    testReverseMatch(ErrorMsg.OP_NOT_ALLOWED_WITHOUT_TXN, "ROLLBACK");
+  }
+  private void testReverseMatch(ErrorMsg errorMsg, String... args) {
+    String parametrizedMsg = errorMsg.format(args);
+    ErrorMsg canonicalMsg = ErrorMsg.getErrorMsg(parametrizedMsg);
+    Assert.assertEquals("Didn't find expected msg", errorMsg.getErrorCode(), canonicalMsg.getErrorCode());
+  }
 }

Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestBytesBytesMultiHashMap.java Wed Apr 15 22:04:00 2015
@@ -50,10 +50,10 @@ public class TestBytesBytesMultiHashMap
     BytesBytesMultiHashMap map = new BytesBytesMultiHashMap(CAPACITY, LOAD_FACTOR, WB_SIZE);
     RandomKvSource kv = new RandomKvSource(0, 0);
     map.put(kv, -1);
-    verifyResults(map, kv.getLastKey(), kv.getLastValue());
+    verifyHashMapResult(map, kv.getLastKey(), kv.getLastValue());
     kv = new RandomKvSource(10, 100);
     map.put(kv, -1);
-    verifyResults(map, kv.getLastKey(), kv.getLastValue());
+    verifyHashMapResult(map, kv.getLastKey(), kv.getLastValue());
   }
 
   @Test
@@ -61,12 +61,12 @@ public class TestBytesBytesMultiHashMap
     BytesBytesMultiHashMap map = new BytesBytesMultiHashMap(CAPACITY, LOAD_FACTOR, WB_SIZE);
     RandomKvSource kv = new RandomKvSource(0, 100);
     map.put(kv, -1);
-    verifyResults(map, kv.getLastKey(), kv.getLastValue());
+    verifyHashMapResult(map, kv.getLastKey(), kv.getLastValue());
     FixedKeyKvSource kv2 = new FixedKeyKvSource(kv.getLastKey(), 0, 100);
     kv2.values.add(kv.getLastValue());
     for (int i = 0; i < 3; ++i) {
       map.put(kv2, -1);
-      verifyResults(map, kv2.key, kv2.values.toArray(new byte[kv2.values.size()][]));
+      verifyHashMapResult(map, kv2.key, kv2.values.toArray(new byte[kv2.values.size()][]));
     }
   }
 
@@ -80,11 +80,11 @@ public class TestBytesBytesMultiHashMap
     FixedKeyKvSource kv2 = new FixedKeyKvSource(kv.getLastKey(), 0, 100);
     map.put(kv2, -1);
     key[0] = (byte)(key[0] + 1);
-    List<WriteBuffers.ByteSegmentRef> results = new ArrayList<WriteBuffers.ByteSegmentRef>(0);
-    map.getValueRefs(key, key.length, results);
-    assertTrue(results.isEmpty());
-    map.getValueRefs(key, 0, results);
-    assertTrue(results.isEmpty());
+    BytesBytesMultiHashMap.Result hashMapResult = new BytesBytesMultiHashMap.Result();
+    map.getValueResult(key, 0, key.length, hashMapResult);
+    assertTrue(!hashMapResult.hasRows());
+    map.getValueResult(key, 0, 0, hashMapResult);
+    assertTrue(!hashMapResult.hasRows());
   }
 
   @Test
@@ -96,13 +96,12 @@ public class TestBytesBytesMultiHashMap
       map.put(kv, -1);
     }
     for (int i = 0; i < kv.keys.size(); ++i) {
-      verifyResults(map, kv.keys.get(i), kv.values.get(i));
+      verifyHashMapResult(map, kv.keys.get(i), kv.values.get(i));
     }
     assertEquals(CAPACITY, map.getCapacity());
     // Get of non-existent key should terminate..
-    List<WriteBuffers.ByteSegmentRef> results = new ArrayList<WriteBuffers.ByteSegmentRef>(0);
-    map.getValueRefs(new byte[0], 0, results);
-    assertTrue(results.isEmpty());
+    BytesBytesMultiHashMap.Result hashMapResult = new BytesBytesMultiHashMap.Result();
+    map.getValueResult(new byte[0], 0, 0, hashMapResult);
   }
 
   @Test
@@ -113,23 +112,29 @@ public class TestBytesBytesMultiHashMap
     for (int i = 0; i < 18; ++i) {
       map.put(kv, -1);
       for (int j = 0; j <= i; ++j) {
-        verifyResults(map, kv.keys.get(j), kv.values.get(j));
+        verifyHashMapResult(map, kv.keys.get(j), kv.values.get(j));
       }
     }
     assertEquals(1 << 18, map.getCapacity());
   }
 
-  private void verifyResults(BytesBytesMultiHashMap map, byte[] key, byte[]... values) {
-    List<WriteBuffers.ByteSegmentRef> results = new ArrayList<WriteBuffers.ByteSegmentRef>(0);
-    byte state = map.getValueRefs(key, key.length, results);
-    assertEquals(state, results.size());
-    assertEquals(values.length, results.size());
+  private void verifyHashMapResult(BytesBytesMultiHashMap map, byte[] key, byte[]... values) {
+    BytesBytesMultiHashMap.Result hashMapResult = new BytesBytesMultiHashMap.Result();
+    byte state = map.getValueResult(key, 0, key.length, hashMapResult);
     HashSet<ByteBuffer> hs = new HashSet<ByteBuffer>();
-    for (int i = 0; i < results.size(); ++i) {
-      WriteBuffers.ByteSegmentRef result = results.get(i);
-      map.populateValue(result);
-      hs.add(result.copy());
+    int count = 0;
+    if (hashMapResult.hasRows()) {
+      WriteBuffers.ByteSegmentRef ref = hashMapResult.first();
+      while (ref != null) {
+        count++;
+        hs.add(ref.copy());
+        ref = hashMapResult.next();
+      }
+    } else {
+      assertTrue(hashMapResult.isEof());
     }
+    assertEquals(state, count);
+    assertEquals(values.length, count);
     for (int i = 0; i < values.length; ++i) {
       assertTrue(hs.contains(ByteBuffer.wrap(values[i])));
     }

Modified: hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java?rev=1673969&r1=1673968&r2=1673969&view=diff
==============================================================================
--- hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java (original)
+++ hive/branches/llap/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java Wed Apr 15 22:04:00 2015
@@ -88,7 +88,7 @@ public class TestVectorFilterOperator {
     ExprNodeColumnDesc col1Expr = new  ExprNodeColumnDesc(Long.class, "col1", "table", false);
     List<String> columns = new ArrayList<String>();
     columns.add("col1");
-    VectorizationContext vc = new VectorizationContext(columns);
+    VectorizationContext vc = new VectorizationContext("name", columns);
     FilterDesc fdesc = new FilterDesc();
     fdesc.setPredicate(col1Expr);
     return new VectorFilterOperator(vc, fdesc);
