hive-commits mailing list archives

From br...@apache.org
Subject svn commit: r1664455 [10/30] - in /hive/branches/parquet: ./ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde...
Date Thu, 05 Mar 2015 18:51:39 GMT
Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java Thu Mar  5 18:51:32 2015
@@ -22,13 +22,16 @@ package org.apache.hadoop.hive.ql.plan;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.PTFUtils;
 import org.apache.hadoop.hive.ql.parse.LeadLagInfo;
-import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order;
-import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFQueryInputType;
+import org.apache.hadoop.hive.ql.plan.ptf.PTFInputDef;
 import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
 import org.apache.hadoop.hive.ql.plan.ptf.WindowTableFunctionDef;
+import org.apache.hadoop.hive.ql.udf.ptf.Noop;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
 
 @Explain(displayName = "PTF Operator")
 public class PTFDesc extends AbstractOperatorDesc {
@@ -62,6 +65,19 @@ public class PTFDesc extends AbstractOpe
     return funcDef == null ? null : funcDef.getStartOfChain();
   }
 
+  @Explain(displayName = "Function definitions")
+  public List<PTFInputDef> getFuncDefExplain() {
+    if (funcDef == null) {
+      return null;
+    }
+    List<PTFInputDef> inputs = new ArrayList<PTFInputDef>();
+    for (PTFInputDef current = funcDef; current != null; current = current.getInput()) {
+      inputs.add(current);
+    }
+    Collections.reverse(inputs);
+    return inputs;
+  }
+
   public LeadLagInfo getLlInfo() {
     return llInfo;
   }
@@ -70,10 +86,23 @@ public class PTFDesc extends AbstractOpe
     this.llInfo = llInfo;
   }
 
+  @Explain(displayName = "Lead/Lag information")
+  public String getLlInfoExplain() {
+    if (llInfo != null && llInfo.getLeadLagExprs() != null) {
+      return PlanUtils.getExprListString(llInfo.getLeadLagExprs());
+    }
+    return null;
+  }
+
   public boolean forWindowing() {
-    return funcDef != null && (funcDef instanceof WindowTableFunctionDef);
+    return funcDef instanceof WindowTableFunctionDef;
+  }
+
+  public boolean forNoop() {
+    return funcDef.getTFunction() instanceof Noop;
   }
 
+  @Explain(displayName = "Map-side function", displayOnlyOnTrue = true)
   public boolean isMapSide() {
     return isMapSide;
   }
@@ -89,5 +118,4 @@ public class PTFDesc extends AbstractOpe
   public void setCfg(Configuration cfg) {
     this.cfg = cfg;
   }
-
 }
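
The new getFuncDefExplain() relies on the fact that each PTFInputDef wraps the input beneath it, so walking getInput() from the function definition and reversing the result yields the chain in evaluation order, query input first, which is the order EXPLAIN should print. A self-contained sketch of that traversal, with illustrative names rather than Hive's classes:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    abstract class ChainNode {
      // null at the start of the chain (the query input)
      abstract ChainNode getInput();
    }

    final class ChainWalk {
      static List<ChainNode> evaluationOrder(ChainNode last) {
        List<ChainNode> inputs = new ArrayList<ChainNode>();
        for (ChainNode cur = last; cur != null; cur = cur.getInput()) {
          inputs.add(cur);
        }
        Collections.reverse(inputs); // query input ends up first
        return inputs;
      }
    }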

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java Thu Mar  5 18:51:32 2015
@@ -27,7 +27,7 @@ import java.util.Map;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.PTFPartition;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -288,7 +288,7 @@ public class PTFDeserializer {
     try {
       @SuppressWarnings("unchecked")
       Class<? extends TableFunctionResolver> rCls = (Class<? extends TableFunctionResolver>)
-          Class.forName(className);
+          JavaUtils.loadClass(className);
       return ReflectionUtils.newInstance(rCls, null);
     } catch (Exception e) {
       throw new HiveException(e);
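
This Class.forName-to-JavaUtils.loadClass change (repeated in several files below) routes class resolution through Hive's preferred classloader instead of the caller's defining loader, so classes from session-added jars can resolve. A simplified sketch of the idea, not Hive's exact implementation:

    final class LoaderSketch {
      // Prefer the thread context classloader, which sees session-level
      // jars; fall back to this class's own loader.
      static Class<?> loadClass(String className) throws ClassNotFoundException {
        ClassLoader loader = Thread.currentThread().getContextClassLoader();
        if (loader == null) {
          loader = LoaderSketch.class.getClassLoader();
        }
        return Class.forName(className, true, loader);
      }
    }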

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Thu Mar  5 18:51:32 2015
@@ -31,6 +31,7 @@ import java.util.Set;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
@@ -62,6 +63,7 @@ import org.apache.hadoop.hive.serde2.Met
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -143,7 +145,7 @@ public final class PlanUtils {
             serdeConstants.SERIALIZATION_LIB, localDirectoryDesc.getSerName());
       }
       if (localDirectoryDesc.getOutputFormat() != null){
-          ret.setOutputFileFormatClass(Class.forName(localDirectoryDesc.getOutputFormat()));
+        ret.setOutputFileFormatClass(JavaUtils.loadClass(localDirectoryDesc.getOutputFormat()));
       }
       if (localDirectoryDesc.getNullFormat() != null) {
         properties.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT,
@@ -287,7 +289,7 @@ public final class PlanUtils {
     tblDesc.getProperties().setProperty(serdeConstants.ESCAPE_CHAR, "\\");
     //enable extended nesting levels
     tblDesc.getProperties().setProperty(
-        LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS, "true");
+        LazySerDeParameters.SERIALIZATION_EXTEND_ADDITIONAL_NESTING_LEVELS, "true");
     return tblDesc;
   }
 
@@ -306,7 +308,7 @@ public final class PlanUtils {
 
     try {
       if (crtTblDesc.getSerName() != null) {
-        Class c = Class.forName(crtTblDesc.getSerName());
+        Class c = JavaUtils.loadClass(crtTblDesc.getSerName());
         serdeClass = c;
       }
 
@@ -355,8 +357,8 @@ public final class PlanUtils {
 
       // replace the default input & output file format with those found in
       // crtTblDesc
-      Class c1 = Class.forName(crtTblDesc.getInputFormat());
-      Class c2 = Class.forName(crtTblDesc.getOutputFormat());
+      Class c1 = JavaUtils.loadClass(crtTblDesc.getInputFormat());
+      Class c2 = JavaUtils.loadClass(crtTblDesc.getOutputFormat());
       Class<? extends InputFormat> in_class = c1;
       Class<? extends HiveOutputFormat> out_class = c2;
 
@@ -923,7 +925,7 @@ public final class PlanUtils {
     return null;
   }
 
-  public static String getExprListString(Collection<ExprNodeDesc> exprs) {
+  public static String getExprListString(Collection<? extends ExprNodeDesc> exprs) {
     StringBuffer sb = new StringBuffer();
     boolean first = true;
     for (ExprNodeDesc expr: exprs) {
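
The widened signature matters because callers may hold a list of an ExprNodeDesc subclass; the new PTFDesc.getLlInfoExplain() above, for example, passes LeadLagInfo's lead/lag expressions, which are typed as a list of a subclass. A minimal illustration of why the wildcard is needed:

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.List;

    class Base { }
    class Derived extends Base { }

    final class WildcardDemo {
      // Accepts List<Derived> as well as List<Base>; a plain
      // Collection<Base> parameter would reject List<Derived>.
      static int count(Collection<? extends Base> items) {
        return items.size();
      }

      public static void main(String[] args) {
        List<Derived> ds = new ArrayList<Derived>();
        System.out.println(count(ds)); // compiles only with the wildcard
      }
    }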

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsNoJobWork.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsNoJobWork.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsNoJobWork.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsNoJobWork.java Thu Mar  5 18:51:32 2015
@@ -20,9 +20,10 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
 
-import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.tableSpec;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec;
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
 
+
 /**
  * Client-side stats aggregator task.
  */
@@ -30,14 +31,14 @@ import org.apache.hadoop.hive.ql.parse.P
 public class StatsNoJobWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
-  private tableSpec tableSpecs;
+  private TableSpec tableSpecs;
   private boolean statsReliable;
   private PrunedPartitionList prunedPartitionList;
 
   public StatsNoJobWork() {
   }
 
-  public StatsNoJobWork(tableSpec tableSpecs) {
+  public StatsNoJobWork(TableSpec tableSpecs) {
     this.tableSpecs = tableSpecs;
   }
 
@@ -45,7 +46,7 @@ public class StatsNoJobWork implements S
     this.statsReliable = statsReliable;
   }
 
-  public tableSpec getTableSpecs() {
+  public TableSpec getTableSpecs() {
     return tableSpecs;
   }
 

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/StatsWork.java Thu Mar  5 18:51:32 2015
@@ -21,7 +21,7 @@ package org.apache.hadoop.hive.ql.plan;
 import java.io.Serializable;
 
 import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.tableSpec;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.TableSpec;
 
 /**
  * ConditionalStats.
@@ -31,7 +31,7 @@ import org.apache.hadoop.hive.ql.parse.B
 public class StatsWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
-  private tableSpec tableSpecs;         // source table spec -- for TableScanOperator
+  private TableSpec tableSpecs;         // source table spec -- for TableScanOperator
   private LoadTableDesc loadTableDesc;  // same as MoveWork.loadTableDesc -- for FileSinkOperator
   private LoadFileDesc loadFileDesc;    // same as MoveWork.loadFileDesc -- for FileSinkOperator
   private String aggKey;                // aggregation key prefix
@@ -58,7 +58,7 @@ public class StatsWork implements Serial
   public StatsWork() {
   }
 
-  public StatsWork(tableSpec tableSpecs) {
+  public StatsWork(TableSpec tableSpecs) {
     this.tableSpecs = tableSpecs;
   }
 
@@ -74,7 +74,7 @@ public class StatsWork implements Serial
     this.statsReliable = statsReliable;
   }
 
-  public tableSpec getTableSpecs() {
+  public TableSpec getTableSpecs() {
     return tableSpecs;
   }
 

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java Thu Mar  5 18:51:32 2015
@@ -26,6 +26,7 @@ import java.util.Map;
 import org.apache.hadoop.hive.ql.exec.PTFUtils;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
+import org.apache.hadoop.hive.ql.parse.TableSample;
 
 /**
  * Table Scan Descriptor Currently, data is only read from a base source as part
@@ -91,9 +92,11 @@ public class TableScanDesc extends Abstr
 
   // input file name (big) to bucket number
   private Map<String, Integer> bucketFileNameMapping;
-  
+
   private boolean isMetadataOnly = false;
 
+  private transient TableSample tableSample;
+
   private transient final Table tableMetadata;
 
 
@@ -268,4 +271,12 @@ public class TableScanDesc extends Abstr
   public Table getTableMetadata() {
     return tableMetadata;
   }
+
+  public TableSample getTableSample() {
+    return tableSample;
+  }
+
+  public void setTableSample(TableSample tableSample) {
+    this.tableSample = tableSample;
+  }
 }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/BoundaryDef.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/BoundaryDef.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/BoundaryDef.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/BoundaryDef.java Thu Mar  5 18:51:32 2015
@@ -32,4 +32,10 @@ public abstract class BoundaryDef {
   }
 
   public abstract int getAmt();
+
+  @Override
+  public String toString() {
+    return direction == null ? "" :
+        direction + "(" + (getAmt() == Integer.MAX_VALUE ? "MAX" : getAmt()) + ")";
+  }
 }
\ No newline at end of file
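
The new toString() renders a window boundary as direction(amount), printing MAX when the amount is Integer.MAX_VALUE, i.e. an unbounded boundary. A small demo of the same format; the enum here mirrors PTFInvocationSpec.Direction but is local to the sketch:

    enum Direction { PRECEDING, CURRENT, FOLLOWING }

    final class BoundaryFormatDemo {
      static String render(Direction direction, int amt) {
        return direction == null ? ""
            : direction + "(" + (amt == Integer.MAX_VALUE ? "MAX" : amt) + ")";
      }

      public static void main(String[] args) {
        System.out.println(render(Direction.PRECEDING, Integer.MAX_VALUE)); // PRECEDING(MAX)
        System.out.println(render(Direction.FOLLOWING, 2));                 // FOLLOWING(2)
      }
    }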

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFExpressionDef.java Thu Mar  5 18:51:32 2015
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.plan.p
 
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.PTFUtils;
+import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
@@ -58,6 +59,11 @@ public class PTFExpressionDef {
     this.exprNode = exprNode;
   }
 
+  @Explain(displayName = "expr")
+  public String getExprNodeExplain() {
+    return exprNode == null ? null : exprNode.getExprString();
+  }
+
   public ExprNodeEvaluator getExprEvaluator() {
     return exprEvaluator;
   }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFInputDef.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFInputDef.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFInputDef.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFInputDef.java Thu Mar  5 18:51:32 2015
@@ -19,6 +19,10 @@
 package org.apache.hadoop.hive.ql.plan.ptf;
 
 
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.ql.exec.RowSchema;
+import org.apache.hadoop.hive.ql.plan.Explain;
+
 public abstract class PTFInputDef {
   private String expressionTreeString;
   private ShapeDetails outputShape;
@@ -36,9 +40,17 @@ public abstract class PTFInputDef {
     return outputShape;
   }
 
+  @Explain(displayName = "output shape")
+  public String getOutputShapeExplain() {
+    RowSchema schema = outputShape.getRr().getRowSchema();
+    return StringUtils.join(schema.getSignature(), ", ");
+  }
+
   public void setOutputShape(ShapeDetails outputShape) {
     this.outputShape = outputShape;
   }
+
+  @Explain(displayName = "input alias")
   public String getAlias() {
     return alias;
   }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFQueryInputDef.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFQueryInputDef.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFQueryInputDef.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PTFQueryInputDef.java Thu Mar  5 18:51:32 2015
@@ -19,11 +19,14 @@
 package org.apache.hadoop.hive.ql.plan.ptf;
 
 import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFQueryInputType;
+import org.apache.hadoop.hive.ql.plan.Explain;
 
+@Explain(displayName = "Input definition")
 public class PTFQueryInputDef extends PTFInputDef {
   private String destination;
   private PTFQueryInputType type;
 
+  @Explain(displayName = "destination")
   public String getDestination() {
     return destination;
   }
@@ -40,6 +43,11 @@ public class PTFQueryInputDef extends PT
     this.type = type;
   }
 
+  @Explain(displayName = "type")
+  public String getTypeExplain() {
+    return type.name();
+  }
+
   @Override
   public PTFInputDef getInput() {
     return null;

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PartitionedTableFunctionDef.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PartitionedTableFunctionDef.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PartitionedTableFunctionDef.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/PartitionedTableFunctionDef.java Thu Mar  5 18:51:32 2015
@@ -21,8 +21,11 @@ package org.apache.hadoop.hive.ql.plan.p
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec;
+import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator;
 
+@Explain(displayName = "Partition table definition")
 public class PartitionedTableFunctionDef extends PTFInputDef {
   private String name;
   private String resolverClassName;
@@ -34,7 +37,10 @@ public class PartitionedTableFunctionDef
   private OrderDef order;
   private TableFunctionEvaluator tFunction;
   boolean transformsRawInput;
+  
+  private transient List<String> referencedColumns;
 
+  @Explain(displayName = "name")
   public String getName() {
     return name;
   }
@@ -47,6 +53,11 @@ public class PartitionedTableFunctionDef
     return rawInputShape;
   }
 
+  @Explain(displayName = "raw input shape")
+  public ShapeDetails getRawInputShapeExplain() {
+    return rawInputShape;
+  }
+
   public void setRawInputShape(ShapeDetails rawInputShape) {
     this.rawInputShape = rawInputShape;
   }
@@ -72,6 +83,21 @@ public class PartitionedTableFunctionDef
     return partition;
   }
 
+  @Explain(displayName = "partition by")
+  public String getPartitionExplain() {
+    if (partition == null || partition.getExpressions() == null) {
+      return null;
+    }
+    StringBuilder builder = new StringBuilder();
+    for (PTFExpressionDef expression : partition.getExpressions()) {
+      if (builder.length() > 0) {
+        builder.append(", ");
+      }
+      builder.append(expression.getExprNode().getExprString());
+    }
+    return builder.toString();
+  }
+
   public void setPartition(PartitionDef partition) {
     this.partition = partition;
   }
@@ -84,9 +110,28 @@ public class PartitionedTableFunctionDef
     this.order = order;
   }
 
+  @Explain(displayName = "order by")
+  public String getOrderExplain() {
+    if (order == null || order.getExpressions() == null) {
+      return null;
+    }
+    StringBuilder builder = new StringBuilder();
+    for (OrderExpressionDef expression : order.getExpressions()) {
+      if (builder.length() > 0) {
+        builder.append(", ");
+      }
+      builder.append(expression.getExprNode().getExprString());
+      if (expression.getOrder() == PTFInvocationSpec.Order.DESC) {
+        builder.append("(DESC)");
+      }
+    }
+    return builder.toString();
+  }
+
   public TableFunctionEvaluator getTFunction() {
     return tFunction;
   }
+
   public void setTFunction(TableFunctionEvaluator tFunction) {
     this.tFunction = tFunction;
   }
@@ -99,6 +144,21 @@ public class PartitionedTableFunctionDef
     this.args = args;
   }
 
+  @Explain(displayName = "arguments")
+  public String getArgsExplain() {
+    if (args == null) {
+      return null;
+    }
+    StringBuilder builder = new StringBuilder();
+    for (PTFExpressionDef expression : args) {
+      if (builder.length() > 0) {
+        builder.append(", ");
+      }
+      builder.append(expression.getExprNode().getExprString());
+    }
+    return builder.toString();
+  }
+
   public void addArg(PTFExpressionDef arg) {
     args = args == null ? new ArrayList<PTFExpressionDef>() : args;
     args.add(arg);
@@ -111,6 +171,7 @@ public class PartitionedTableFunctionDef
     return this;
   }
 
+  @Explain(displayName = "transforms raw input", displayOnlyOnTrue=true)
   public boolean isTransformsRawInput() {
     return transformsRawInput;
   }
@@ -126,4 +187,13 @@ public class PartitionedTableFunctionDef
   public void setResolverClassName(String resolverClassName) {
     this.resolverClassName = resolverClassName;
   }
+
+  @Explain(displayName = "referenced columns")
+  public List<String> getReferencedColumns() {
+    return referencedColumns;
+  }
+
+  public void setReferencedColumns(List<String> referencedColumns) {
+    this.referencedColumns = referencedColumns;
+  }
 }
\ No newline at end of file
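
getPartitionExplain(), getOrderExplain(), and getArgsExplain() above all reduce to the same comma-join over expression strings; only the order-by variant appends a (DESC) marker. A standalone sketch of that shared shape (interface and names are illustrative):

    import java.util.List;

    final class ExprJoinSketch {
      interface Expr { String getExprString(); }

      static String commaJoin(List<? extends Expr> exprs) {
        StringBuilder builder = new StringBuilder();
        for (Expr expr : exprs) {
          if (builder.length() > 0) {
            builder.append(", ");
          }
          builder.append(expr.getExprString());
        }
        return builder.toString();
      }
    }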

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowExpressionDef.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowExpressionDef.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowExpressionDef.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowExpressionDef.java Thu Mar  5 18:51:32 2015
@@ -19,15 +19,12 @@
 package org.apache.hadoop.hive.ql.plan.ptf;
 
 
-public class WindowExpressionDef extends PTFExpressionDef {
-  private String alias;
-
-  public WindowExpressionDef() {}
+import org.apache.hadoop.hive.ql.plan.Explain;
 
-  public WindowExpressionDef(PTFExpressionDef eDef) {
-    super(eDef);
-  }
+public abstract class WindowExpressionDef extends PTFExpressionDef {
+  private String alias;
 
+  @Explain(displayName = "alias")
   public String getAlias() {
     return alias;
   }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowFrameDef.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowFrameDef.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowFrameDef.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowFrameDef.java Thu Mar  5 18:51:32 2015
@@ -38,4 +38,9 @@ public class WindowFrameDef {
   public void setEnd(BoundaryDef end) {
     this.end = end;
   }
+
+  @Override
+  public String toString() {
+    return start + "~" + end;
+  }
 }
\ No newline at end of file

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowFunctionDef.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowFunctionDef.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowFunctionDef.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowFunctionDef.java Thu Mar  5 18:51:32 2015
@@ -21,8 +21,10 @@ package org.apache.hadoop.hive.ql.plan.p
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 
+@Explain(displayName = "window function definition")
 public class WindowFunctionDef extends WindowExpressionDef {
   String name;
   boolean isStar;
@@ -32,6 +34,7 @@ public class WindowFunctionDef extends W
   GenericUDAFEvaluator wFnEval;
   boolean pivotResult;
 
+  @Explain(displayName = "name")
   public String getName() {
     return name;
   }
@@ -40,6 +43,7 @@ public class WindowFunctionDef extends W
     this.name = name;
   }
 
+  @Explain(displayName = "isStar", displayOnlyOnTrue = true)
   public boolean isStar() {
     return isStar;
   }
@@ -48,6 +52,7 @@ public class WindowFunctionDef extends W
     this.isStar = isStar;
   }
 
+  @Explain(displayName = "isDistinct", displayOnlyOnTrue = true)
   public boolean isDistinct() {
     return isDistinct;
   }
@@ -69,6 +74,21 @@ public class WindowFunctionDef extends W
     args.add(arg);
   }
 
+  @Explain(displayName = "arguments")
+  public String getArgsExplain() {
+    if (args == null) {
+      return null;
+    }
+    StringBuilder builder = new StringBuilder();
+    for (PTFExpressionDef expression : args) {
+      if (builder.length() > 0) {
+        builder.append(", ");
+      }
+      builder.append(expression.getExprNode().getExprString());
+    }
+    return builder.toString();
+  }
+
   public WindowFrameDef getWindowFrame() {
     return windowFrame;
   }
@@ -77,6 +97,11 @@ public class WindowFunctionDef extends W
     this.windowFrame = windowFrame;
   }
 
+  @Explain(displayName = "window frame")
+  public String getWindowFrameExplain() {
+    return windowFrame == null ? null : windowFrame.toString();
+  }
+
   public GenericUDAFEvaluator getWFnEval() {
     return wFnEval;
   }
@@ -85,6 +110,12 @@ public class WindowFunctionDef extends W
     this.wFnEval = wFnEval;
   }
 
+  @Explain(displayName = "window function")
+  public String getWFnEvalExplain() {
+    return wFnEval == null ? null : wFnEval.getClass().getSimpleName();
+  }
+
+  @Explain(displayName = "isPivotResult", displayOnlyOnTrue = true)
   public boolean isPivotResult() {
     return pivotResult;
   }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowTableFunctionDef.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowTableFunctionDef.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowTableFunctionDef.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/WindowTableFunctionDef.java Thu Mar  5 18:51:32 2015
@@ -18,21 +18,26 @@
 
 package org.apache.hadoop.hive.ql.plan.ptf;
 
-import java.util.List;
+import org.apache.hadoop.hive.ql.plan.Explain;
 
+import java.util.List;
 
+@Explain(displayName = "Windowing table definition")
 public class WindowTableFunctionDef extends PartitionedTableFunctionDef {
   List<WindowFunctionDef> windowFunctions;
   
   int rankLimit = -1;
   int rankLimitFunction;
 
+  @Explain(displayName = "window functions")
   public List<WindowFunctionDef> getWindowFunctions() {
     return windowFunctions;
   }
+  
   public void setWindowFunctions(List<WindowFunctionDef> windowFunctions) {
     this.windowFunctions = windowFunctions;
   }
+
   public int getRankLimit() {
     return rankLimit;
   }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessControlException.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessControlException.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessControlException.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessControlException.java Thu Mar  5 18:51:32 2015
@@ -24,8 +24,7 @@ import org.apache.hadoop.hive.ql.metadat
 
 /**
  * Exception thrown by the Authorization plugin api (v2). Indicates
- * an error while performing authorization, and not a authorization being
- * denied.
+ * an authorization check denying permissions for an action.
  */
 @LimitedPrivate(value = { "Apache Argus (incubating)" })
 @Evolving

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java Thu Mar  5 18:51:32 2015
@@ -29,17 +29,15 @@ import org.apache.hadoop.classification.
 public interface HiveAuthorizationValidator {
 
   /**
-   * Check if current user has privileges to perform given operation type
-   * hiveOpType on the given input and output objects
-   *
-   * @param hiveOpType
-   * @param inputHObjs
-   * @param outputHObjs
-   * @param context
-   * @throws HiveAuthzPluginException
-   * @throws HiveAccessControlException
+   * see HiveAuthorizer.checkPrivileges
    */
   void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
       List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException;
 
+  /**
+   * see HiveAuthorizer.filterListCmdObjects
+   */
+  List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
+      HiveAuthzContext context);
+
 }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java Thu Mar  5 18:51:32 2015
@@ -154,6 +154,21 @@ public interface HiveAuthorizer {
       List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
       throws HiveAuthzPluginException, HiveAccessControlException;
 
+
+  /**
+   * Filter out any objects that should not be shown to the user, from the list of
+   * tables or databases coming from a 'show tables' or 'show databases' command
+   * @param listObjs List of all objects obtained as result of a show command
+   * @param context
+   * @return filtered list of objects that will be returned to the user invoking the command
+   * @throws HiveAuthzPluginException
+   * @throws HiveAccessControlException
+   */
+  List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
+      HiveAuthzContext context)
+          throws HiveAuthzPluginException, HiveAccessControlException;
+
+
   /**
    * @return all existing roles
    * @throws HiveAuthzPluginException
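
filterListCmdObjects gives authorization plugins a hook to hide databases and tables from SHOW DATABASES / SHOW TABLES output. A hedged, method-level sketch of a filtering implementation (assumes java.util.ArrayList is imported; isVisibleTo is a hypothetical policy check, not part of this API):

    @Override
    public List<HivePrivilegeObject> filterListCmdObjects(
        List<HivePrivilegeObject> listObjs, HiveAuthzContext context)
        throws HiveAuthzPluginException, HiveAccessControlException {
      List<HivePrivilegeObject> visible = new ArrayList<HivePrivilegeObject>();
      for (HivePrivilegeObject obj : listObjs) {
        if (isVisibleTo(obj, context)) { // hypothetical policy check
          visible.add(obj);
        }
      }
      return visible;
    }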

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java Thu Mar  5 18:51:32 2015
@@ -85,6 +85,13 @@ public class HiveAuthorizerImpl implemen
     authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs, context);
   }
 
+
+  @Override
+  public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
+      HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
+    return authValidator.filterListCmdObjects(listObjs, context);
+  }
+
   @Override
   public List<String> getAllRoles() throws HiveAuthzPluginException, HiveAccessControlException {
     return accessController.getAllRoles();

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java Thu Mar  5 18:51:32 2015
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.securi
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
@@ -372,4 +371,11 @@ public class HiveV1Authorizer implements
   @Override
   public void applyAuthorizationConfigPolicy(HiveConf hiveConf) {
   }
+
+  @Override
+  public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
+      HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
+    // do no filtering in old authorizer
+    return listObjs;
+  }
 }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java Thu Mar  5 18:51:32 2015
@@ -42,4 +42,10 @@ public class DummyHiveAuthorizationValid
     // no-op
   }
 
+  @Override
+  public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
+      HiveAuthzContext context) {
+    return listObjs;
+  }
+
 }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java Thu Mar  5 18:51:32 2015
@@ -149,4 +149,10 @@ public class SQLStdHiveAuthorizationVali
     }
   }
 
+  @Override
+  public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
+      HiveAuthzContext context) {
+    return listObjs;
+  }
+
 }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Thu Mar  5 18:51:32 2015
@@ -26,7 +26,16 @@ import java.io.PrintStream;
 import java.net.URI;
 import java.net.URLClassLoader;
 import java.sql.Timestamp;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
@@ -42,6 +51,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.ql.MapRedStats;
+import org.apache.hadoop.hive.ql.exec.Registry;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.spark.session.SparkSession;
 import org.apache.hadoop.hive.ql.exec.spark.session.SparkSessionManagerImpl;
@@ -252,6 +262,8 @@ public class SessionState {
    */
   private final Set<String> preReloadableAuxJars = new HashSet<String>();
 
+  private final Registry registry = new Registry();
+
   /**
    * CURRENT_TIMESTAMP value for query
    */
@@ -407,11 +419,33 @@ public class SessionState {
     return hdfsEncryptionShim;
   }
 
+  // SessionState is not available at execution time, and Hive.get().getConf() is not safe to call there
+  private static class SessionStates {
+    private SessionState state;
+    private HiveConf conf;
+    private void attach(SessionState state) {
+      this.state = state;
+      attach(state.getConf());
+    }
+    private void attach(HiveConf conf) {
+      this.conf = conf;
+      ClassLoader classLoader = conf.getClassLoader();
+      if (classLoader != null) {
+        Thread.currentThread().setContextClassLoader(classLoader);
+      }
+    }
+  }
+
   /**
    * Singleton Session object per thread.
    *
    **/
-  private static ThreadLocal<SessionState> tss = new ThreadLocal<SessionState>();
+  private static ThreadLocal<SessionStates> tss = new ThreadLocal<SessionStates>() {
+    @Override
+    protected SessionStates initialValue() {
+      return new SessionStates();
+    }
+  };
 
   /**
    * start a new session and set it to current session.
@@ -425,8 +459,7 @@ public class SessionState {
    * Sets the given session state in the thread local var for sessions.
    */
   public static void setCurrentSessionState(SessionState startSs) {
-    tss.set(startSs);
-    Thread.currentThread().setContextClassLoader(startSs.getConf().getClassLoader());
+    tss.get().attach(startSs);
   }
 
   public static void detachSession() {
@@ -672,7 +705,7 @@ public class SessionState {
           clsStr, authenticator, true);
 
       if (authorizer == null) {
-        // if it was null, the new authorization plugin must be specified in
+        // if it was null, the new (V2) authorization plugin must be specified in
         // config
         HiveAuthorizerFactory authorizerFactory = HiveUtils.getAuthorizerFactory(conf,
             HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
@@ -684,13 +717,14 @@ public class SessionState {
 
         authorizerV2 = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(),
             conf, authenticator, authzContextBuilder.build());
+        setAuthorizerV2Config();
 
-        authorizerV2.applyAuthorizationConfigPolicy(conf);
       }
       // create the create table grants with new config
       createTableGrants = CreateTableAutomaticGrant.create(conf);
 
     } catch (HiveException e) {
+      LOG.error("Error setting up authorization: " + e.getMessage(), e);
       throw new RuntimeException(e);
     }
 
@@ -701,6 +735,28 @@ public class SessionState {
     return;
   }
 
+  private void setAuthorizerV2Config() throws HiveException {
+    // avoid processing the same config multiple times, check marker
+    if (conf.get(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, "").equals(Boolean.TRUE.toString())) {
+      return;
+    }
+    conf.setVar(ConfVars.METASTORE_FILTER_HOOK,
+        "org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook");
+
+    authorizerV2.applyAuthorizationConfigPolicy(conf);
+    // update config in Hive thread local as well and init the metastore client
+    try {
+      Hive.get(conf).getMSC();
+    } catch (Exception e) {
+      // catch-all due to some exec time dependencies on session state
+      // that would cause ClassNotFoundException otherwise
+      throw new HiveException(e.getMessage(), e);
+    }
+
+    // set a marker that this conf has been processed.
+    conf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString());
+  }
+
   public Object getActiveAuthorizer() {
     return getAuthorizationMode() == AuthorizationMode.V1 ?
         getAuthorizer() : getAuthorizerV2();
@@ -752,11 +808,32 @@ public class SessionState {
    * get the current session.
    */
   public static SessionState get() {
-    return tss.get();
+    return tss.get().state;
+  }
+
+  public static HiveConf getSessionConf() {
+    SessionStates state = tss.get();
+    if (state.conf == null) {
+      state.attach(new HiveConf());
+    }
+    return state.conf;
+  }
+
+  public static Registry getRegistry() {
+    SessionState session = get();
+    return session != null ? session.registry : null;
+  }
+
+  public static Registry getRegistryForWrite() {
+    Registry registry = getRegistry();
+    if (registry == null) {
+      throw new RuntimeException("Function registery for session is not initialized");
+    }
+    return registry;
   }
 
   /**
-   * get hiveHitsory object which does structured logging.
+   * get hiveHistory object which does structured logging.
    *
    * @return The hive history object
    */
@@ -1051,13 +1128,13 @@ public class SessionState {
     return added.get(0);
   }
 
-  public List<String> add_resources(ResourceType t, List<String> values)
+  public List<String> add_resources(ResourceType t, Collection<String> values)
       throws RuntimeException {
     // By default don't convert to unix
     return add_resources(t, values, false);
   }
 
-  public List<String> add_resources(ResourceType t, List<String> values, boolean convertToUnix)
+  public List<String> add_resources(ResourceType t, Collection<String> values, boolean convertToUnix)
       throws RuntimeException {
     Set<String> resourceMap = getResourceMap(t);
 
@@ -1271,6 +1348,7 @@ public class SessionState {
   }
 
   public void close() throws IOException {
+    registry.clear();
     if (txnMgr != null) txnMgr.closeTxnManager();
     JavaUtils.closeClassLoadersTo(conf.getClassLoader(), parentLoader);
     File resourceDir =
@@ -1361,20 +1439,7 @@ public class SessionState {
    * any security configuration changes.
    */
   public void applyAuthorizationPolicy() throws HiveException {
-    if(!isAuthorizationModeV2()){
-      // auth v1 interface does not have this functionality
-      return;
-    }
-
-    // avoid processing the same config multiple times, check marker
-    if (conf.get(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, "").equals(Boolean.TRUE.toString())) {
-      return;
-    }
-
-    authorizerV2.applyAuthorizationConfigPolicy(conf);
-    // set a marker that this conf has been processed.
-    conf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString());
-
+    setupAuth();
   }
 
   public Map<String, Map<String, Table>> getTempTables() {
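
The SessionState rework above replaces a bare ThreadLocal<SessionState> with a ThreadLocal<SessionStates> holder whose initialValue() guarantees get() never returns null, so getSessionConf() can lazily attach a HiveConf even on threads that never set a session. A minimal standalone sketch of the pattern:

    final class ThreadStateSketch {
      static class Holder {
        String conf; // stands in for HiveConf
      }

      private static final ThreadLocal<Holder> TSS = new ThreadLocal<Holder>() {
        @Override
        protected Holder initialValue() {
          return new Holder(); // so get() is never null
        }
      };

      static String getConf() {
        Holder holder = TSS.get();
        if (holder.conf == null) {
          holder.conf = "default"; // lazy attach, like getSessionConf()
        }
        return holder.conf;
      }
    }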

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java Thu Mar  5 18:51:32 2015
@@ -32,6 +32,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -69,7 +70,7 @@ public class JDBCStatsAggregator impleme
     this.sourceTask = sourceTask;
 
     try {
-      Class.forName(driver).newInstance();
+      JavaUtils.loadClass(driver).newInstance();
     } catch (Exception e) {
       LOG.error("Error during instantiating JDBC driver " + driver + ". ", e);
       return false;

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java Thu Mar  5 18:51:32 2015
@@ -35,6 +35,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.stats.StatsPublisher;
@@ -69,7 +70,7 @@ public class JDBCStatsPublisher implemen
     String driver = HiveConf.getVar(hiveconf, HiveConf.ConfVars.HIVESTATSJDBCDRIVER);
 
     try {
-      Class.forName(driver).newInstance();
+      JavaUtils.loadClass(driver).newInstance();
     } catch (Exception e) {
       LOG.error("Error during instantiating JDBC driver " + driver + ". ", e);
       return false;
@@ -272,7 +273,7 @@ public class JDBCStatsPublisher implemen
       this.hiveconf = hconf;
       connectionString = HiveConf.getVar(hconf, HiveConf.ConfVars.HIVESTATSDBCONNECTIONSTRING);
       String driver = HiveConf.getVar(hconf, HiveConf.ConfVars.HIVESTATSJDBCDRIVER);
-      Class.forName(driver).newInstance();
+      JavaUtils.loadClass(driver).newInstance();
       synchronized(DriverManager.class) {
         DriverManager.setLoginTimeout(timeout);
         conn = DriverManager.getConnection(connectionString);
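
A note on the driver-loading idiom in these stats classes: loading a JDBC driver class runs its static initializer, which registers the driver with java.sql.DriverManager; the instance created by newInstance() is discarded (a belt-and-braces habit for pre-JDBC-4 drivers). A compact sketch under those assumptions:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;

    final class DriverLoadSketch {
      static Connection open(String driverClassName, String connectionString)
          throws ReflectiveOperationException, SQLException {
        // Loading the class runs its static initializer, which registers
        // the driver with DriverManager; the fresh instance is discarded.
        Class.forName(driverClassName).newInstance();
        return DriverManager.getConnection(connectionString);
      }
    }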

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java Thu Mar  5 18:51:32 2015
@@ -24,6 +24,7 @@ import org.apache.hadoop.fs.BlockLocatio
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.common.ValidReadTxnList;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -647,7 +648,7 @@ public class CompactorMR {
   private static <T> T instantiate(Class<T> classType, String classname) throws IOException {
     T t = null;
     try {
-      Class c = Class.forName(classname);
+      Class c = JavaUtils.loadClass(classname);
       Object o = c.newInstance();
       if (classType.isAssignableFrom(o.getClass())) {
         t = (T)o;

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java Thu Mar  5 18:51:32 2015
@@ -141,16 +141,6 @@ abstract public class AbstractGenericUDF
   
   @Override
   public String getDisplayString(String[] children) {
-    StringBuilder sb = new StringBuilder();
-    sb.append(name);
-    sb.append("(");
-    for (int i = 0; i < children.length; i++) {
-      sb.append(children[i]);
-      if (i + 1 != children.length) {
-        sb.append(",");
-      }
-    }
-    sb.append(")");
-    return sb.toString();
+    return getStandardDisplayString(name, children, ",");
   }
 }
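
This class and AbstractGenericUDFReflect (next hunk) now delegate to a shared getStandardDisplayString helper, added to GenericUDF.java later in this commit. A quick demo of the string shape it produces; the function name used here is illustrative:

    final class DisplayStringDemo {
      static String display(String name, String[] children, String delim) {
        StringBuilder sb = new StringBuilder(name).append("(");
        for (int i = 0; i < children.length; i++) {
          if (i > 0) {
            sb.append(delim);
          }
          sb.append(children[i]);
        }
        return sb.append(")").toString();
      }

      public static void main(String[] args) {
        // prints: ewah_bitmap_and(b1,b2)
        System.out.println(display("ewah_bitmap_and", new String[] { "b1", "b2" }, ","));
      }
    }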

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFReflect.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFReflect.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFReflect.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFReflect.java Thu Mar  5 18:51:32 2015
@@ -101,16 +101,7 @@ public abstract class AbstractGenericUDF
 
   @Override
   public String getDisplayString(String[] children) {
-    StringBuilder sb = new StringBuilder();
-    sb.append(functionName()).append('(');
-    for (int i = 0; i < children.length; i++) {
-      if (i > 0) {
-        sb.append(',');
-      }
-      sb.append(children[i]);
-    }
-    sb.append(')');
-    return sb.toString();
+    return getStandardDisplayString(functionName(), children, ",");
   }
 
   protected abstract String functionName();

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java Thu Mar  5 18:51:32 2015
@@ -231,6 +231,13 @@ public class GenericUDAFnGrams implement
       NGramAggBuf myagg = (NGramAggBuf) agg;
       List<Text> partialNGrams = (List<Text>) loi.getList(partial);
       int n = Integer.parseInt(partialNGrams.get(partialNGrams.size()-1).toString());
+
+      // A value of 0 for n indicates that the mapper processed data that does not meet
+      // the filter criteria, so merge() should be a no-op.
+      if (n == 0) {
+        return;
+      }
+
       if(myagg.n > 0 && myagg.n != n) {
         throw new HiveException(getClass().getSimpleName() + ": mismatch in value for 'n'"
             + ", which usually is caused by a non-constant expression. Found '"+n+"' and '"

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java Thu Mar  5 18:51:32 2015
@@ -20,15 +20,34 @@ package org.apache.hadoop.hive.ql.udf.ge
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.sql.Timestamp;
+import java.text.ParseException;
+import java.util.Date;
 
-import org.apache.hadoop.hive.ql.exec.MapredContext;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.MapredContext;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.common.util.DateUtils;
 
 /**
  * A Generic User-defined function (GenericUDF) for the use with Hive.
@@ -45,6 +64,9 @@ import org.apache.hadoop.hive.serde2.obj
 @UDFType(deterministic = true)
 public abstract class GenericUDF implements Closeable {
 
+  private static final String[] ORDINAL_SUFFIXES = new String[] { "th", "st", "nd", "rd", "th",
+      "th", "th", "th", "th", "th" };
+
   /**
   * A DeferredObject allows us to do lazy evaluation and short-circuiting.
   * GenericUDF uses DeferredObject to pass arguments.
@@ -210,4 +232,314 @@ public abstract class GenericUDF impleme
           + " and " + newInstance.getClass().getName());
     }
   }
+
+  protected String getStandardDisplayString(String name, String[] children) {
+    return getStandardDisplayString(name, children, ", ");
+  }
+
+  protected String getStandardDisplayString(String name, String[] children, String delim) {
+    StringBuilder sb = new StringBuilder();
+    sb.append(name);
+    sb.append("(");
+    if (children.length > 0) {
+      sb.append(children[0]);
+      for (int i = 1; i < children.length; i++) {
+        sb.append(delim);
+        sb.append(children[i]);
+      }
+    }
+    sb.append(")");
+    return sb.toString();
+  }
+
+  protected String getFuncName() {
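+    // strips the "GenericUDF" prefix (10 characters) from the simple class
+    // name and lower-cases the rest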
+    return getClass().getSimpleName().substring(10).toLowerCase();
+  }
+
+  protected void checkArgsSize(ObjectInspector[] arguments, int min, int max)
+      throws UDFArgumentLengthException {
+    if (arguments.length < min || arguments.length > max) {
+      StringBuilder sb = new StringBuilder();
+      sb.append(getFuncName());
+      sb.append(" requires ");
+      if (min == max) {
+        sb.append(min);
+      } else {
+        sb.append(min).append("..").append(max);
+      }
+      sb.append(" argument(s), got ");
+      sb.append(arguments.length);
+      throw new UDFArgumentLengthException(sb.toString());
+    }
+  }
+
+  protected void checkArgPrimitive(ObjectInspector[] arguments, int i)
+      throws UDFArgumentTypeException {
+    ObjectInspector.Category oiCat = arguments[i].getCategory();
+    if (oiCat != ObjectInspector.Category.PRIMITIVE) {
+      throw new UDFArgumentTypeException(i, getFuncName() + " only takes primitive types as "
+          + getArgOrder(i) + " argument, got " + oiCat);
+    }
+  }
+
+  protected void checkArgGroups(ObjectInspector[] arguments, int i, PrimitiveCategory[] inputTypes,
+      PrimitiveGrouping... grps) throws UDFArgumentTypeException {
+    PrimitiveCategory inputType = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();
+    for (PrimitiveGrouping grp : grps) {
+      if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType) == grp) {
+        inputTypes[i] = inputType;
+        return;
+      }
+    }
+    // build error message
+    StringBuilder sb = new StringBuilder();
+    sb.append(getFuncName());
+    sb.append(" only takes ");
+    sb.append(grps[0]);
+    for (int j = 1; j < grps.length; j++) {
+      sb.append(", ");
+      sb.append(grps[j]);
+    }
+    sb.append(" types as ");
+    sb.append(getArgOrder(i));
+    sb.append(" argument, got ");
+    sb.append(inputType);
+    throw new UDFArgumentTypeException(i, sb.toString());
+  }
+
+  protected void obtainStringConverter(ObjectInspector[] arguments, int i,
+      PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
+    PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
+    PrimitiveCategory inputType = inOi.getPrimitiveCategory();
+
+    Converter converter = ObjectInspectorConverters.getConverter(inOi,
+        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
+    converters[i] = converter;
+    inputTypes[i] = inputType;
+  }
+
+  protected void obtainIntConverter(ObjectInspector[] arguments, int i,
+      PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
+    PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
+    PrimitiveCategory inputType = inOi.getPrimitiveCategory();
+    switch (inputType) {
+    case BYTE:
+    case SHORT:
+    case INT:
+      break;
+    default:
+      throw new UDFArgumentTypeException(i, getFuncName() + " only takes INT/SHORT/BYTE types as "
+          + getArgOrder(i) + " argument, got " + inputType);
+    }
+
+    Converter converter = ObjectInspectorConverters.getConverter(inOi,
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+    converters[i] = converter;
+    inputTypes[i] = inputType;
+  }
+
+  protected void obtainLongConverter(ObjectInspector[] arguments, int i,
+      PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
+    PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
+    PrimitiveCategory inputType = inOi.getPrimitiveCategory();
+    switch (inputType) {
+    case BYTE:
+    case SHORT:
+    case INT:
+    case LONG:
+      break;
+    default:
+      throw new UDFArgumentTypeException(i, getFuncName()
+          + " only takes LONG/INT/SHORT/BYTE types as " + getArgOrder(i) + " argument, got "
+          + inputType);
+    }
+
+    // getLongValue() casts the converted value to LongWritable, so the target
+    // must be the writable *long* inspector; converting to the int inspector
+    // would cause a ClassCastException at evaluate time.
+    Converter converter = ObjectInspectorConverters.getConverter(inOi,
+        PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+    converters[i] = converter;
+    inputTypes[i] = inputType;
+  }
+
+  protected void obtainDateConverter(ObjectInspector[] arguments, int i,
+      PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
+    PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
+    PrimitiveCategory inputType = inOi.getPrimitiveCategory();
+    ObjectInspector outOi;
+    switch (inputType) {
+    case STRING:
+    case VARCHAR:
+    case CHAR:
+      outOi = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+      break;
+    case TIMESTAMP:
+    case DATE:
+      outOi = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
+      break;
+    default:
+      throw new UDFArgumentTypeException(i, getFuncName()
+          + " only takes STRING_GROUP or DATE_GROUP types as " + getArgOrder(i) + " argument, got "
+          + inputType);
+    }
+    converters[i] = ObjectInspectorConverters.getConverter(inOi, outOi);
+    inputTypes[i] = inputType;
+  }
+
+  protected void obtainTimestampConverter(ObjectInspector[] arguments, int i,
+      PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
+    PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
+    PrimitiveCategory inputType = inOi.getPrimitiveCategory();
+    ObjectInspector outOi;
+    switch (inputType) {
+    case STRING:
+    case VARCHAR:
+    case CHAR:
+    case TIMESTAMP:
+    case DATE:
+      break;
+    default:
+      throw new UDFArgumentTypeException(i, getFuncName()
+          + " only takes STRING_GROUP or DATE_GROUP types as " + getArgOrder(i) + " argument, got "
+          + inputType);
+    }
+    outOi = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
+    converters[i] = ObjectInspectorConverters.getConverter(inOi, outOi);
+    inputTypes[i] = inputType;
+  }
+
+  protected String getStringValue(DeferredObject[] arguments, int i, Converter[] converters)
+      throws HiveException {
+    Object obj;
+    if ((obj = arguments[i].get()) == null) {
+      return null;
+    }
+    return converters[i].convert(obj).toString();
+  }
+
+  protected Integer getIntValue(DeferredObject[] arguments, int i, Converter[] converters)
+      throws HiveException {
+    Object obj;
+    if ((obj = arguments[i].get()) == null) {
+      return null;
+    }
+    Object writableValue = converters[i].convert(obj);
+    int v = ((IntWritable) writableValue).get();
+    return v;
+  }
+
+  protected Long getLongValue(DeferredObject[] arguments, int i, Converter[] converters)
+      throws HiveException {
+    Object obj;
+    if ((obj = arguments[i].get()) == null) {
+      return null;
+    }
+    Object writableValue = converters[i].convert(obj);
+    long v = ((LongWritable) writableValue).get();
+    return v;
+  }
+
+  protected Date getDateValue(DeferredObject[] arguments, int i, PrimitiveCategory[] inputTypes,
+      Converter[] converters) throws HiveException {
+    Object obj;
+    if ((obj = arguments[i].get()) == null) {
+      return null;
+    }
+
+    Date date;
+    switch (inputTypes[i]) {
+    case STRING:
+    case VARCHAR:
+    case CHAR:
+      String dateStr = converters[i].convert(obj).toString();
+      try {
+        date = DateUtils.getDateFormat().parse(dateStr);
+      } catch (ParseException e) {
+        return null;
+      }
+      break;
+    case TIMESTAMP:
+    case DATE:
+      Object writableValue = converters[i].convert(obj);
+      date = ((DateWritable) writableValue).get();
+      break;
+    default:
+      throw new UDFArgumentTypeException(i, getFuncName()
+          + " only takes STRING_GROUP and DATE_GROUP types, got " + inputTypes[i]);
+    }
+    return date;
+  }
+
+  protected Date getTimestampValue(DeferredObject[] arguments, int i, Converter[] converters)
+      throws HiveException {
+    Object obj;
+    if ((obj = arguments[i].get()) == null) {
+      return null;
+    }
+    Object writableValue = converters[i].convert(obj);
+    Timestamp ts = ((TimestampWritable) writableValue).getTimestamp();
+    return ts;
+  }
+
+  protected String getConstantStringValue(ObjectInspector[] arguments, int i) {
+    Object constValue = ((ConstantObjectInspector) arguments[i]).getWritableConstantValue();
+    String str = constValue == null ? null : constValue.toString();
+    return str;
+  }
+
+  protected Integer getConstantIntValue(ObjectInspector[] arguments, int i)
+      throws UDFArgumentTypeException {
+    Object constValue = ((ConstantObjectInspector) arguments[i]).getWritableConstantValue();
+    if (constValue == null) {
+      return null;
+    }
+    int v;
+    if (constValue instanceof IntWritable) {
+      v = ((IntWritable) constValue).get();
+    } else if (constValue instanceof ShortWritable) {
+      v = ((ShortWritable) constValue).get();
+    } else if (constValue instanceof ByteWritable) {
+      v = ((ByteWritable) constValue).get();
+    } else {
+      throw new UDFArgumentTypeException(i, getFuncName() + " only takes INT/SHORT/BYTE types as "
+          + getArgOrder(i) + " argument, got " + constValue.getClass());
+    }
+    return v;
+  }
+
+  protected Long getConstantLongValue(ObjectInspector[] arguments, int i)
+      throws UDFArgumentTypeException {
+    Object constValue = ((ConstantObjectInspector) arguments[i]).getWritableConstantValue();
+    if (constValue == null) {
+      return null;
+    }
+    long v;
+    if (constValue instanceof LongWritable) {
+      v = ((LongWritable) constValue).get();
+    } else if (constValue instanceof IntWritable) {
+      v = ((IntWritable) constValue).get();
+    } else if (constValue instanceof ShortWritable) {
+      v = ((ShortWritable) constValue).get();
+    } else if (constValue instanceof ByteWritable) {
+      v = ((ByteWritable) constValue).get();
+    } else {
+      throw new UDFArgumentTypeException(i, getFuncName()
+          + " only takes LONG/INT/SHORT/BYTE types as " + getArgOrder(i) + " argument, got "
+          + constValue.getClass());
+    }
+    return v;
+  }
+
+  protected String getArgOrder(int i) {
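+    // map the 0-based argument index to a 1-based English ordinal;
+    // 11th, 12th and 13th are irregular, hence the switch on i % 100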
+    i++;
+    switch (i % 100) {
+    case 11:
+    case 12:
+    case 13:
+      return i + "th";
+    default:
+      return i + ORDINAL_SUFFIXES[i % 10];
+    }
+  }
 }
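
The helpers above standardize argument validation, converter setup, and typed value extraction for GenericUDF subclasses. As a rough sketch of the intended pattern, a one-argument string UDF now reduces to the following (the class and function name are hypothetical, not part of this commit; only methods whose signatures appear in the hunk above are used):

// Hypothetical example, not part of this commit.
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;

import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

@Description(name = "first_letter", value = "_FUNC_(str) - returns the first letter of str")
public class GenericUDFFirstLetter extends GenericUDF {
  private transient Converter[] converters = new Converter[1];
  private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[1];
  private final Text output = new Text();

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkArgsSize(arguments, 1, 1);   // "first_letter requires 1 argument(s), got N"
    checkArgPrimitive(arguments, 0);  // rejects LIST/MAP/STRUCT/UNION categories
    checkArgGroups(arguments, 0, inputTypes, STRING_GROUP);
    obtainStringConverter(arguments, 0, inputTypes, converters);
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    String val = getStringValue(arguments, 0, converters);
    if (val == null || val.isEmpty()) {
      return null;
    }
    output.set(val.substring(0, 1));
    return output;
  }

  @Override
  public String getDisplayString(String[] children) {
    return getStandardDisplayString(getFuncName(), children);
  }

  @Override
  protected String getFuncName() {
    // without this override, the default would derive "firstletter"
    // from the class name via substring(10)
    return "first_letter";
  }
}

The getArgOrder() helper used in the error messages maps the 0-based argument index to a 1-based English ordinal: index 0 renders as "1st", index 10 as "11th" (not "11st"), and index 21 as "22nd".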

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAbs.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAbs.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAbs.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAbs.java Thu Mar  5 18:51:32 2015
@@ -150,17 +150,7 @@ public class GenericUDFAbs extends Gener
 
   @Override
   public String getDisplayString(String[] children) {
-    StringBuilder sb = new StringBuilder();
-    sb.append("abs(");
-    if (children.length > 0) {
-      sb.append(children[0]);
-      for (int i = 1; i < children.length; i++) {
-        sb.append(",");
-        sb.append(children[i]);
-      }
-    }
-    sb.append(")");
-    return sb.toString();
+    return getStandardDisplayString("abs", children);
   }
 
 }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java Thu Mar  5 18:51:32 2015
@@ -17,29 +17,23 @@
  */
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.sql.Timestamp;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
+import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.DATE_GROUP;
+import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.NUMERIC_GROUP;
+import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
+
 import java.util.Calendar;
 import java.util.Date;
 
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hive.common.util.DateUtils;
 
 /**
  * GenericUDFAddMonths.
@@ -55,118 +49,68 @@ import org.apache.hadoop.io.Text;
         + "ignored.\n"
         + "Example:\n " + " > SELECT _FUNC_('2009-08-31', 1) FROM src LIMIT 1;\n" + " '2009-09-30'")
 public class GenericUDFAddMonths extends GenericUDF {
-  private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
-  private transient TimestampConverter timestampConverter;
-  private transient Converter textConverter;
-  private transient Converter dateWritableConverter;
-  private transient Converter intWritableConverter;
-  private transient PrimitiveCategory inputType1;
-  private transient PrimitiveCategory inputType2;
+  private transient Converter[] converters = new Converter[2];
+  private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[2];
   private final Calendar calendar = Calendar.getInstance();
   private final Text output = new Text();
+  private transient Integer numMonthsConst;
+  private transient boolean isNumMonthsConst;
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
-    if (arguments.length != 2) {
-      throw new UDFArgumentLengthException("add_months() requires 2 argument, got "
-          + arguments.length);
-    }
-    if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
-      throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
-          + arguments[0].getTypeName() + " is passed as first arguments");
-    }
-    if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
-      throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but "
-          + arguments[1].getTypeName() + " is passed as second arguments");
+    checkArgsSize(arguments, 2, 2);
+
+    checkArgPrimitive(arguments, 0);
+    checkArgPrimitive(arguments, 1);
+
+    checkArgGroups(arguments, 0, inputTypes, STRING_GROUP, DATE_GROUP);
+    checkArgGroups(arguments, 1, inputTypes, NUMERIC_GROUP);
+
+    obtainDateConverter(arguments, 0, inputTypes, converters);
+    obtainIntConverter(arguments, 1, inputTypes, converters);
+
+    if (arguments[1] instanceof ConstantObjectInspector) {
+      numMonthsConst = getConstantIntValue(arguments, 1);
+      isNumMonthsConst = true;
     }
-    inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
+
     ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
-    switch (inputType1) {
-    case STRING:
-    case VARCHAR:
-    case CHAR:
-      inputType1 = PrimitiveCategory.STRING;
-      textConverter = ObjectInspectorConverters.getConverter(
-          (PrimitiveObjectInspector) arguments[0],
-          PrimitiveObjectInspectorFactory.writableStringObjectInspector);
-      break;
-    case TIMESTAMP:
-      timestampConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0],
-          PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
-      break;
-    case DATE:
-      dateWritableConverter = ObjectInspectorConverters.getConverter(
-          (PrimitiveObjectInspector) arguments[0],
-          PrimitiveObjectInspectorFactory.writableDateObjectInspector);
-      break;
-    default:
-      throw new UDFArgumentTypeException(0,
-          "ADD_MONTHS() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got "
-              + inputType1);
-    }
-    inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
-    if (inputType2 != PrimitiveCategory.INT) {
-      throw new UDFArgumentTypeException(1,
-          "ADD_MONTHS() only takes INT types as second argument, got " + inputType2);
-    }
-    intWritableConverter = ObjectInspectorConverters.getConverter(
-        (PrimitiveObjectInspector) arguments[1],
-        PrimitiveObjectInspectorFactory.writableIntObjectInspector);
     return outputOI;
   }
 
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    if (arguments[0].get() == null) {
-      return null;
+    Integer numMonthV;
+    if (isNumMonthsConst) {
+      numMonthV = numMonthsConst;
+    } else {
+      numMonthV = getIntValue(arguments, 1, converters);
     }
-    IntWritable toBeAdded = (IntWritable) intWritableConverter.convert(arguments[1].get());
-    if (toBeAdded == null) {
+
+    if (numMonthV == null) {
       return null;
     }
-    Date date;
-    switch (inputType1) {
-    case STRING:
-      String dateString = textConverter.convert(arguments[0].get()).toString();
-      try {
-        date = formatter.parse(dateString.toString());
-      } catch (ParseException e) {
-        return null;
-      }
-      break;
-    case TIMESTAMP:
-      Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get()))
-          .getTimestamp();
-      date = ts;
-      break;
-    case DATE:
-      DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
-      date = dw.get();
-      break;
-    default:
-      throw new UDFArgumentTypeException(0,
-          "ADD_MONTHS() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
+
+    int numMonthInt = numMonthV.intValue();
+    Date date = getDateValue(arguments, 0, inputTypes, converters);
+    if (date == null) {
+      return null;
     }
-    int numMonth = toBeAdded.get();
-    addMonth(date, numMonth);
+
+    addMonth(date, numMonthInt);
     Date newDate = calendar.getTime();
-    output.set(formatter.format(newDate));
+    output.set(DateUtils.getDateFormat().format(newDate));
     return output;
   }
 
   @Override
   public String getDisplayString(String[] children) {
-    StringBuilder sb = new StringBuilder();
-    sb.append("add_months(");
-    if (children.length > 0) {
-      sb.append(children[0]);
-      for (int i = 1; i < children.length; i++) {
-        sb.append(", ");
-        sb.append(children[i]);
-      }
-    }
-    sb.append(")");
-    return sb.toString();
+    return getStandardDisplayString(getFuncName(), children);
+  }
+
+  @Override
+  protected String getFuncName() {
+    return "add_months";
   }
 
   protected Calendar addMonth(Date d, int numMonths) {
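
Since the refactor reroutes the old type-specific branches through the shared helpers, a quick end-to-end check is useful. The driver below is illustrative only (the class name and setup are not part of this commit); the expected value comes from the @Description example in the hunk above:

// Illustrative driver, not part of this commit. Exercises the refactored
// GenericUDFAddMonths with a non-constant second argument, which is
// converted per row via obtainIntConverter()/getIntValue().
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFAddMonths;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class AddMonthsSmokeTest {
  public static void main(String[] args) throws Exception {
    GenericUDFAddMonths udf = new GenericUDFAddMonths();
    ObjectInspector[] ois = {
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        PrimitiveObjectInspectorFactory.writableIntObjectInspector };
    udf.initialize(ois);

    DeferredObject[] row = {
        new DeferredJavaObject(new Text("2009-08-31")),
        new DeferredJavaObject(new IntWritable(1)) };
    System.out.println(udf.evaluate(row)); // 2009-09-30, per the @Description above
    udf.close();
  }
}

Had the second inspector been a ConstantObjectInspector instead, initialize() would have folded the month count once through getConstantIntValue() rather than converting it on every row.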

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java Thu Mar  5 18:51:32 2015
@@ -80,15 +80,6 @@ public class GenericUDFArray extends Gen
 
   @Override
   public String getDisplayString(String[] children) {
-    StringBuilder sb = new StringBuilder();
-    sb.append("array(");
-    for (int i = 0; i < children.length; i++) {
-      sb.append(children[i]);
-      if (i + 1 != children.length) {
-        sb.append(",");
-      }
-    }
-    sb.append(")");
-    return sb.toString();
+    return getStandardDisplayString("array", children, ",");
   }
 }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAssertTrue.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAssertTrue.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAssertTrue.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAssertTrue.java Thu Mar  5 18:51:32 2015
@@ -75,15 +75,6 @@ public class GenericUDFAssertTrue extend
 
  @Override
   public String getDisplayString(String[] children) {
-    StringBuilder sb = new StringBuilder();
-    sb.append("assert_true(");
-    for (int i = 0; i < children.length; ++i) {
-      sb.append(children[i]);
-      if (i != children.length - 1) {
-        sb.append(", ");
-      }
-    }
-    sb.append(")");
-    return sb.toString();
+    return getStandardDisplayString("assert_true", children);
   }  
 }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java Thu Mar  5 18:51:32 2015
@@ -44,7 +44,7 @@ import org.apache.hadoop.io.BooleanWrita
  * GenericUDF Base Class for operations.
  */
 @Description(name = "op", value = "a op b - Returns the result of operation")
-public abstract class GenericUDFBaseCompare extends GenericUDF {
+public abstract class GenericUDFBaseCompare extends GenericUDFBaseBinary {
   public enum CompareType {
     // Now only string, text, int, long, byte and boolean comparisons are
     // treated as special cases.
@@ -53,9 +53,6 @@ public abstract class GenericUDFBaseComp
     COMPARE_BOOL, SAME_TYPE, NEED_CONVERT
   }
 
-  protected String opName;
-  protected String opDisplayName;
-
   protected transient ObjectInspector[] argumentOIs;
 
   protected transient ReturnObjectInspectorResolver conversionHelper = null;
@@ -182,12 +179,4 @@ public abstract class GenericUDFBaseComp
           o0, argumentOIs[0], o1, argumentOIs[1]);
     }
   }
-
-  @Override
-  public String getDisplayString(String[] children) {
-    assert (children.length == 2);
-    return "(" + children[0] + " " + opDisplayName + " " + children[1] + ")";
-
-  }
-
 }
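
Both this class and GenericUDFBaseNumeric below now extend GenericUDFBaseBinary, which is not included in this part of the diff. Judging from the members the two hunks remove (opName, opDisplayName, and the infix getDisplayString()), the new base class presumably looks roughly like this sketch (an assumption, not the committed file):

package org.apache.hadoop.hive.ql.udf.generic;

public abstract class GenericUDFBaseBinary extends GenericUDF {
  protected String opName;        // e.g. the implementing class's simple name
  protected String opDisplayName; // infix symbol used in explain output

  public GenericUDFBaseBinary() {
    opName = getClass().getSimpleName();
  }

  @Override
  public String getDisplayString(String[] children) {
    assert (children.length == 2) : opDisplayName + " with " + children.length + " children";
    return "(" + children[0] + " " + opDisplayName + " " + children[1] + ")";
  }
}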

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java Thu Mar  5 18:51:32 2015
@@ -22,7 +22,6 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException;
@@ -60,9 +59,7 @@ import org.apache.hive.common.HiveCompat
  * GenericUDF Base Class for operations.
  */
 @Description(name = "op", value = "a op b - Returns the result of operation")
-public abstract class GenericUDFBaseNumeric extends GenericUDF {
-  protected String opName;
-  protected String opDisplayName;
+public abstract class GenericUDFBaseNumeric extends GenericUDFBaseBinary {
 
   protected transient PrimitiveObjectInspector leftOI;
   protected transient PrimitiveObjectInspector rightOI;
@@ -83,7 +80,6 @@ public abstract class GenericUDFBaseNume
   protected boolean ansiSqlArithmetic = false;
 
   public GenericUDFBaseNumeric() {
-    opName = getClass().getSimpleName();
   }
 
   @Override
@@ -292,12 +288,6 @@ public abstract class GenericUDFBaseNume
 
   protected abstract DecimalTypeInfo deriveResultDecimalTypeInfo(int prec1, int scale1, int prec2, int scale2);
 
-  @Override
-  public String getDisplayString(String[] children) {
-    assert (children.length == 2) : opDisplayName + " with " + children.length + " children";
-    return "(" + children[0] + " " + opDisplayName + " " + children[1] + ")";
-  }
-
   public void copyToNewInstance(Object newInstance) throws UDFArgumentException {
     super.copyToNewInstance(newInstance);
     GenericUDFBaseNumeric other = (GenericUDFBaseNumeric) newInstance;
@@ -320,17 +310,4 @@ public abstract class GenericUDFBaseNume
   public void setAnsiSqlArithmetic(boolean ansiSqlArithmetic) {
     this.ansiSqlArithmetic = ansiSqlArithmetic;
   }
-
-  public PrimitiveTypeInfo deriveMinArgumentCast(
-      ExprNodeDesc childExpr, TypeInfo targetType) {
-    assert targetType instanceof PrimitiveTypeInfo : "Not a primitive type" + targetType;
-    PrimitiveTypeInfo pti = (PrimitiveTypeInfo)targetType;
-    // We only do the minimum cast for decimals. Other types are assumed safe; fix if needed.
-    // We also don't do anything for non-primitive children (maybe we should assert).
-    if ((pti.getPrimitiveCategory() != PrimitiveCategory.DECIMAL)
-        || (!(childExpr.getTypeInfo() instanceof PrimitiveTypeInfo))) return pti;
-    PrimitiveTypeInfo childTi = (PrimitiveTypeInfo)childExpr.getTypeInfo();
-    // If the child is also decimal, no cast is needed (we hope - can target type be narrower?).
-    return HiveDecimalUtils.getDecimalTypeForPrimitiveCategory(childTi);
-  }
 }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBasePad.java Thu Mar  5 18:51:32 2015
@@ -18,16 +18,15 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 
@@ -85,7 +84,7 @@ public abstract class GenericUDFBasePad
 
   @Override
   public String getDisplayString(String[] children) {
-    return udfName + "(" + StringUtils.join(children, ", ") + ")";
+    return getStandardDisplayString(udfName, children);
   }
 
   protected abstract void performOp(byte[] data, byte[] txt, byte[] padTxt, int len, Text str,

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseTrim.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseTrim.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseTrim.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseTrim.java Thu Mar  5 18:51:32 2015
@@ -78,7 +78,7 @@ public abstract class GenericUDFBaseTrim
 
   @Override
   public String getDisplayString(String[] children) {
-    return udfName + "(" + StringUtils.join(children, ", ") + ")";
+    return getStandardDisplayString(udfName, children);
   }
 
   protected abstract String performOp(String val);

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCoalesce.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCoalesce.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCoalesce.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCoalesce.java Thu Mar  5 18:51:32 2015
@@ -69,17 +69,7 @@ public class GenericUDFCoalesce extends
 
   @Override
   public String getDisplayString(String[] children) {
-    StringBuilder sb = new StringBuilder();
-    sb.append("COALESCE(");
-    if (children.length > 0) {
-      sb.append(children[0]);
-      for (int i = 1; i < children.length; i++) {
-        sb.append(",");
-        sb.append(children[i]);
-      }
-    }
-    sb.append(")");
-    return sb.toString();
+    return getStandardDisplayString("COALESCE", children, ",");
   }
 
 }
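
Note the explicit "," in the call above: the two-argument overload of getStandardDisplayString() joins children with ", ", while COALESCE here (and array earlier) pass "," so their explain strings keep the historical space-free form. From inside any GenericUDF subclass (the helper is protected), the difference is:

// Illustrative fragment; runs only inside a GenericUDF subclass.
String[] children = { "a", "b" };
String s1 = getStandardDisplayString("COALESCE", children, ","); // "COALESCE(a,b)"
String s2 = getStandardDisplayString("concat", children);        // "concat(a, b)"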

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java Thu Mar  5 18:51:32 2015
@@ -205,17 +205,7 @@ public class GenericUDFConcat extends Ge
 
   @Override
   public String getDisplayString(String[] children) {
-    StringBuilder sb = new StringBuilder();
-    sb.append("concat(");
-    if (children.length > 0) {
-      sb.append(children[0]);
-      for (int i = 1; i < children.length; i++) {
-        sb.append(", ");
-        sb.append(children[i]);
-      }
-    }
-    sb.append(")");
-    return sb.toString();
+    return getStandardDisplayString("concat", children);
   }
 
 }


