hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From hashut...@apache.org
Subject svn commit: r1660751 [2/23] - in /hive/branches/cbo: ./ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/ accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/ accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/ accumulo-h...
Date Wed, 18 Feb 2015 22:28:40 GMT
Modified: hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/TestLazyAccumuloRow.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/TestLazyAccumuloRow.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/TestLazyAccumuloRow.java (original)
+++ hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/TestLazyAccumuloRow.java Wed Feb 18 22:28:35 2015
@@ -33,7 +33,7 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.lazy.LazyString;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
 import org.apache.hadoop.hive.serde2.lazydio.LazyDioInteger;
@@ -58,7 +58,7 @@ public class TestLazyAccumuloRow {
         TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo);
 
     LazySimpleStructObjectInspector objectInspector = (LazySimpleStructObjectInspector) LazyFactory
-        .createLazyStructInspector(columns, types, LazySimpleSerDe.DefaultSeparators, new Text(
+        .createLazyStructInspector(columns, types, LazySerDeParameters.DefaultSeparators, new Text(
             "\\N"), false, false, (byte) '\\');
 
     DefaultAccumuloRowIdFactory rowIdFactory = new DefaultAccumuloRowIdFactory();
@@ -119,7 +119,7 @@ public class TestLazyAccumuloRow {
         TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo);
 
     LazySimpleStructObjectInspector objectInspector = (LazySimpleStructObjectInspector) LazyFactory
-        .createLazyStructInspector(columns, types, LazySimpleSerDe.DefaultSeparators, new Text(
+        .createLazyStructInspector(columns, types, LazySerDeParameters.DefaultSeparators, new Text(
             "\\N"), false, false, (byte) '\\');
 
     DefaultAccumuloRowIdFactory rowIdFactory = new DefaultAccumuloRowIdFactory();
@@ -202,7 +202,7 @@ public class TestLazyAccumuloRow {
         TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME));
 
     LazySimpleStructObjectInspector objectInspector = (LazySimpleStructObjectInspector) LazyFactory
-        .createLazyStructInspector(columns, types, LazySimpleSerDe.DefaultSeparators, new Text(
+        .createLazyStructInspector(columns, types, LazySerDeParameters.DefaultSeparators, new Text(
             "\\N"), false, false, (byte) '\\');
 
     DefaultAccumuloRowIdFactory rowIdFactory = new DefaultAccumuloRowIdFactory();

Modified: hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableOutputFormat.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableOutputFormat.java (original)
+++ hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTableOutputFormat.java Wed Feb 18 22:28:35 2015
@@ -43,7 +43,7 @@ import org.apache.hadoop.hive.accumulo.s
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
@@ -150,7 +150,7 @@ public class TestHiveAccumuloTableOutput
     tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
     AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
         tableProperties, AccumuloSerDe.class.getSimpleName());
-    SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
+    LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
 
     AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
         accumuloSerDeParams.getColumnMappings(), AccumuloSerDeParameters.DEFAULT_VISIBILITY_LABEL,
@@ -243,7 +243,7 @@ public class TestHiveAccumuloTableOutput
     tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
     AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
         tableProperties, AccumuloSerDe.class.getSimpleName());
-    SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
+    LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
 
     AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
         accumuloSerDeParams.getColumnMappings(), new ColumnVisibility("foo"),
@@ -332,7 +332,7 @@ public class TestHiveAccumuloTableOutput
     tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
     AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
         tableProperties, AccumuloSerDe.class.getSimpleName());
-    SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
+    LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
 
     AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
         accumuloSerDeParams.getColumnMappings(), AccumuloSerDeParameters.DEFAULT_VISIBILITY_LABEL,
@@ -432,7 +432,7 @@ public class TestHiveAccumuloTableOutput
     tableProperties.setProperty(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE, ColumnEncoding.BINARY.getName());
     AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
         tableProperties, AccumuloSerDe.class.getSimpleName());
-    SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
+    LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
 
     AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
         accumuloSerDeParams.getColumnMappings(), AccumuloSerDeParameters.DEFAULT_VISIBILITY_LABEL,

Modified: hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloRowSerializer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloRowSerializer.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloRowSerializer.java (original)
+++ hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloRowSerializer.java Wed Feb 18 22:28:35 2015
@@ -35,7 +35,7 @@ import org.apache.hadoop.hive.serde2.Byt
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
@@ -109,7 +109,7 @@ public class TestAccumuloRowSerializer {
     tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
     AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
         tableProperties, AccumuloSerDe.class.getSimpleName());
-    SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
+    LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
 
     LazySimpleStructObjectInspector oi = (LazySimpleStructObjectInspector) LazyFactory
         .createLazyStructInspector(columns, types, serDeParams.getSeparators(),
@@ -178,7 +178,7 @@ public class TestAccumuloRowSerializer {
     tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
     AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
         tableProperties, AccumuloSerDe.class.getSimpleName());
-    SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
+    LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
 
     LazySimpleStructObjectInspector oi = (LazySimpleStructObjectInspector) LazyFactory
         .createLazyStructInspector(columns, types, serDeParams.getSeparators(),
@@ -251,7 +251,7 @@ public class TestAccumuloRowSerializer {
     tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
     AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
         tableProperties, AccumuloSerDe.class.getSimpleName());
-    SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
+    LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
 
     TypeInfo stringTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
     LazyStringObjectInspector stringOI = (LazyStringObjectInspector) LazyFactory

Modified: hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java (original)
+++ hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestAccumuloSerDe.java Wed Feb 18 22:28:35 2015
@@ -44,7 +44,7 @@ import org.apache.hadoop.hive.serde2.laz
 import org.apache.hadoop.hive.serde2.lazy.LazyArray;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.lazy.LazyMap;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.lazy.LazyString;
 import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
@@ -389,7 +389,7 @@ public class TestAccumuloSerDe {
     tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
     AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
         tableProperties, AccumuloSerDe.class.getSimpleName());
-    SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
+    LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
 
     byte[] seps = serDeParams.getSeparators();
 

Modified: hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestDefaultAccumuloRowIdFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestDefaultAccumuloRowIdFactory.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestDefaultAccumuloRowIdFactory.java (original)
+++ hive/branches/cbo/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/serde/TestDefaultAccumuloRowIdFactory.java Wed Feb 18 22:28:35 2015
@@ -25,7 +25,7 @@ import org.apache.hadoop.hive.accumulo.c
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.lazy.LazyString;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
@@ -33,8 +33,6 @@ import org.apache.hadoop.hive.serde2.laz
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaStringObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.junit.Assert;
 import org.junit.Test;
@@ -60,7 +58,7 @@ public class TestDefaultAccumuloRowIdFac
     AccumuloRowIdFactory factory = accumuloSerDe.getParams().getRowIdFactory();
     List<TypeInfo> columnTypes = accumuloSerDe.getParams().getHiveColumnTypes();
     ColumnMapper mapper = accumuloSerDe.getParams().getColumnMapper();
-    SerDeParameters serDeParams = accumuloSerDe.getParams().getSerDeParameters();
+    LazySerDeParameters serDeParams = accumuloSerDe.getParams().getSerDeParameters();
 
     List<ObjectInspector> OIs = accumuloSerDe.getColumnObjectInspectors(columnTypes, serDeParams, mapper.getColumnMappings(), factory);
 
@@ -85,7 +83,7 @@ public class TestDefaultAccumuloRowIdFac
     AccumuloRowIdFactory factory = accumuloSerDe.getParams().getRowIdFactory();
     List<TypeInfo> columnTypes = accumuloSerDe.getParams().getHiveColumnTypes();
     ColumnMapper mapper = accumuloSerDe.getParams().getColumnMapper();
-    SerDeParameters serDeParams = accumuloSerDe.getParams().getSerDeParameters();
+    LazySerDeParameters serDeParams = accumuloSerDe.getParams().getSerDeParameters();
 
     List<ObjectInspector> OIs = accumuloSerDe.getColumnObjectInspectors(columnTypes, serDeParams, mapper.getColumnMappings(), factory);
 

Modified: hive/branches/cbo/bin/ext/beeline.sh
URL: http://svn.apache.org/viewvc/hive/branches/cbo/bin/ext/beeline.sh?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/bin/ext/beeline.sh (original)
+++ hive/branches/cbo/bin/ext/beeline.sh Wed Feb 18 22:28:35 2015
@@ -25,7 +25,12 @@ beeline () {
   superCsvJarPath=`ls ${HIVE_LIB}/super-csv-*.jar`
   jlineJarPath=`ls ${HIVE_LIB}/jline-*.jar`
   jdbcStandaloneJarPath=`ls ${HIVE_LIB}/hive-jdbc-*-standalone.jar`
-  export HADOOP_CLASSPATH=${HIVE_CONF_DIR}:${beelineJarPath}:${superCsvJarPath}:${jlineJarPath}:${jdbcStandaloneJarPath}
+  hadoopClasspath=""
+  if [[ -n "${HADOOP_CLASSPATH}" ]]
+  then
+    hadoopClasspath="${HADOOP_CLASSPATH}:"
+  fi
+  export HADOOP_CLASSPATH="${hadoopClasspath}${HIVE_CONF_DIR}:${beelineJarPath}:${superCsvJarPath}:${jlineJarPath}:${jdbcStandaloneJarPath}"
   export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configuration=beeline-log4j.properties "
 
   exec $HADOOP jar ${beelineJarPath} $CLASS $HIVE_OPTS "$@"

Modified: hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java (original)
+++ hive/branches/cbo/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java Wed Feb 18 22:28:35 2015
@@ -70,9 +70,9 @@ public final class JavaUtils {
     return classLoader;
   }
 
-  public static void closeClassLoadersTo(ClassLoader current, ClassLoader stop) {
+  public static boolean closeClassLoadersTo(ClassLoader current, ClassLoader stop) {
     if (!isValidHierarchy(current, stop)) {
-      return;
+      return false;
     }
     for (; current != null && current != stop; current = current.getParent()) {
       try {
@@ -82,6 +82,7 @@ public final class JavaUtils {
             Arrays.toString(((URLClassLoader) current).getURLs()), e);
       }
     }
+    return true;
   }
 
   // check before closing loaders, not to close app-classloader, etc. by mistake

Modified: hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/cbo/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Wed Feb 18 22:28:35 2015
@@ -149,6 +149,7 @@ public class HiveConf extends Configurat
       HiveConf.ConfVars.METASTORE_EVENT_LISTENERS,
       HiveConf.ConfVars.METASTORE_EVENT_CLEAN_FREQ,
       HiveConf.ConfVars.METASTORE_EVENT_EXPIRY_DURATION,
+      HiveConf.ConfVars.METASTORE_FILTER_HOOK,
       HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
       HiveConf.ConfVars.METASTORE_END_FUNCTION_LISTENERS,
       HiveConf.ConfVars.METASTORE_PART_INHERIT_TBL_PROPS,
@@ -332,7 +333,7 @@ public class HiveConf extends Configurat
        "When hive.exec.mode.local.auto is true, the number of tasks should be less than this for local mode."),
 
     DROPIGNORESNONEXISTENT("hive.exec.drop.ignorenonexistent", true,
-        "Do not report an error if DROP TABLE/VIEW/Index specifies a non-existent table/view/index"),
+        "Do not report an error if DROP TABLE/VIEW/Index/Function specifies a non-existent table/view/index/function"),
 
     HIVEIGNOREMAPJOINHINT("hive.ignore.mapjoin.hint", true, "Ignore the mapjoin hint"),
 
@@ -592,7 +593,8 @@ public class HiveConf extends Configurat
         "List of comma separated keys occurring in table properties which will get inherited to newly created partitions. \n" +
         "* implies all the keys will get inherited."),
     METASTORE_FILTER_HOOK("hive.metastore.filter.hook", "org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl",
-        "Metastore hook class for filtering the metadata read results"),
+        "Metastore hook class for filtering the metadata read results. If hive.security.authorization.manager"
+        + " is set to instance of HiveAuthorizerFactory, then this value is ignored."),
     FIRE_EVENTS_FOR_DML("hive.metastore.dml.events", false, "If true, the metastore will be asked" +
         " to fire events for DML operations"),
 

Modified: hive/branches/cbo/contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java (original)
+++ hive/branches/cbo/contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java Wed Feb 18 22:28:35 2015
@@ -29,7 +29,6 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.UDFType;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -159,17 +158,7 @@ public class GenericUDFDBOutput extends
 
   @Override
   public String getDisplayString(String[] children) {
-    StringBuilder sb = new StringBuilder();
-    sb.append("dboutput(");
-    if (children.length > 0) {
-      sb.append(children[0]);
-      for (int i = 1; i < children.length; i++) {
-        sb.append(",");
-        sb.append(children[i]);
-      }
-    }
-    sb.append(")");
-    return sb.toString();
+    return getStandardDisplayString("dboutput", children, ",");
   }
 
 }

Modified: hive/branches/cbo/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/MultiDelimitSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/MultiDelimitSerDe.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/MultiDelimitSerDe.java (original)
+++ hive/branches/cbo/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/MultiDelimitSerDe.java Wed Feb 18 22:28:35 2015
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.serde2.laz
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -62,7 +63,8 @@ import org.apache.hadoop.io.Writable;
     serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
     serdeConstants.ESCAPE_CHAR,
     serdeConstants.SERIALIZATION_ENCODING,
-    LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS})
+    LazySerDeParameters.SERIALIZATION_EXTEND_NESTING_LEVELS,
+    LazySerDeParameters.SERIALIZATION_EXTEND_ADDITIONAL_NESTING_LEVELS})
 public class MultiDelimitSerDe extends AbstractSerDe {
   private static final Log LOG = LogFactory.getLog(MultiDelimitSerDe.class.getName());
   private static final byte[] DEFAULT_SEPARATORS = {(byte) 1, (byte) 2, (byte) 3};
@@ -85,7 +87,7 @@ public class MultiDelimitSerDe extends A
   // The wrapper for byte array
   private ByteArrayRef byteArrayRef;
 
-  private LazySimpleSerDe.SerDeParameters serdeParams = null;
+  private LazySerDeParameters serdeParams = null;
   // The output stream of serialized objects
   private final ByteStream.Output serializeStream = new ByteStream.Output();
   // The Writable to return in serialize
@@ -94,7 +96,7 @@ public class MultiDelimitSerDe extends A
   @Override
   public void initialize(Configuration conf, Properties tbl) throws SerDeException {
     // get the SerDe parameters
-    serdeParams = LazySimpleSerDe.initSerdeParams(conf, tbl, getClass().getName());
+    serdeParams = new LazySerDeParameters(conf, tbl, getClass().getName());
 
     fieldDelimited = tbl.getProperty(serdeConstants.FIELD_DELIM);
     if (fieldDelimited == null || fieldDelimited.isEmpty()) {
@@ -103,12 +105,12 @@ public class MultiDelimitSerDe extends A
 
     // get the collection separator and map key separator
     // TODO: use serdeConstants.COLLECTION_DELIM when the typo is fixed
-    collSep = LazySimpleSerDe.getByte(tbl.getProperty(COLLECTION_DELIM),
+    collSep = LazyUtils.getByte(tbl.getProperty(COLLECTION_DELIM),
         DEFAULT_SEPARATORS[1]);
-    keySep = LazySimpleSerDe.getByte(tbl.getProperty(serdeConstants.MAPKEY_DELIM),
+    keySep = LazyUtils.getByte(tbl.getProperty(serdeConstants.MAPKEY_DELIM),
         DEFAULT_SEPARATORS[2]);
-    serdeParams.getSeparators()[1] = collSep;
-    serdeParams.getSeparators()[2] = keySep;
+    serdeParams.setSeparator(1, collSep);
+    serdeParams.setSeparator(2, keySep);
 
     // Create the ObjectInspectors for the fields
     cachedObjectInspector = LazyFactory.createLazyStructInspector(serdeParams

Modified: hive/branches/cbo/contrib/src/test/results/clientnegative/invalid_row_sequence.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/contrib/src/test/results/clientnegative/invalid_row_sequence.q.out?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/contrib/src/test/results/clientnegative/invalid_row_sequence.q.out (original)
+++ hive/branches/cbo/contrib/src/test/results/clientnegative/invalid_row_sequence.q.out Wed Feb 18 22:28:35 2015
@@ -2,12 +2,10 @@ PREHOOK: query: -- Verify that a statefu
 
 drop temporary function row_sequence
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: row_sequence
 POSTHOOK: query: -- Verify that a stateful UDF cannot be used outside of the SELECT list
 
 drop temporary function row_sequence
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: row_sequence
 PREHOOK: query: create temporary function row_sequence as 
 'org.apache.hadoop.hive.contrib.udf.UDFRowSequence'
 PREHOOK: type: CREATEFUNCTION

Propchange: hive/branches/cbo/hbase-handler/pom.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Feb 18 22:28:35 2015
@@ -1,5 +1,5 @@
 /hive/branches/branch-0.11/hbase-handler/pom.xml:1480385,1480458,1481120,1481344,1481346,1481348,1481352,1483872,1505184
-/hive/branches/spark/hbase-handler/pom.xml:1608589-1657401
+/hive/branches/spark/hbase-handler/pom.xml:1608589-1660298
 /hive/branches/tez/hbase-handler/pom.xml:1494760-1622766
 /hive/branches/vectorization/hbase-handler/pom.xml:1466908-1527856
-/hive/trunk/hbase-handler/pom.xml:1494760-1537575,1605012-1659342
+/hive/trunk/hbase-handler/pom.xml:1494760-1537575,1605012-1660746

Modified: hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java (original)
+++ hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java Wed Feb 18 22:28:35 2015
@@ -22,10 +22,11 @@ import java.io.IOException;
 import java.util.Properties;
 
 import com.google.common.annotations.VisibleForTesting;
+
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
@@ -33,7 +34,7 @@ import org.apache.hadoop.hive.serde2.typ
 
 public class DefaultHBaseKeyFactory extends AbstractHBaseKeyFactory implements HBaseKeyFactory {
 
-  protected SerDeParameters serdeParams;
+  protected LazySerDeParameters serdeParams;
   protected HBaseRowSerializer serializer;
 
   @Override
@@ -59,7 +60,7 @@ public class DefaultHBaseKeyFactory exte
   }
 
   @VisibleForTesting
-  static DefaultHBaseKeyFactory forTest(SerDeParameters params, ColumnMappings mappings) {
+  static DefaultHBaseKeyFactory forTest(LazySerDeParameters params, ColumnMappings mappings) {
     DefaultHBaseKeyFactory factory = new DefaultHBaseKeyFactory();
     factory.serdeParams = params;
     factory.keyMapping = mappings.getKeyMapping();

Modified: hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java (original)
+++ hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java Wed Feb 18 22:28:35 2015
@@ -23,7 +23,7 @@ import java.util.List;
 
 import org.apache.hadoop.hive.hbase.struct.HBaseValueFactory;
 import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParametersImpl;
@@ -37,7 +37,7 @@ import org.apache.hadoop.io.Text;
 public class HBaseLazyObjectFactory {
 
   public static ObjectInspector createLazyHBaseStructInspector(
-      SerDeParameters serdeParams, int index, HBaseKeyFactory keyFactory, List<HBaseValueFactory> valueFactories) throws SerDeException {
+      LazySerDeParameters serdeParams, int index, HBaseKeyFactory keyFactory, List<HBaseValueFactory> valueFactories) throws SerDeException {
     List<TypeInfo> columnTypes = serdeParams.getColumnTypes();
     ArrayList<ObjectInspector> columnObjectInspectors = new ArrayList<ObjectInspector>(
         columnTypes.size());

Modified: hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java (original)
+++ hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java Wed Feb 18 22:28:35 2015
@@ -27,8 +27,8 @@ import org.apache.hadoop.hive.hbase.Colu
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -44,7 +44,7 @@ public class HBaseRowSerializer {
 
   private final HBaseKeyFactory keyFactory;
   private final HBaseSerDeParameters hbaseParam;
-  private final LazySimpleSerDe.SerDeParameters serdeParam;
+  private final LazySerDeParameters serdeParam;
 
   private final int keyIndex;
   private final int timestampIndex;
@@ -54,9 +54,7 @@ public class HBaseRowSerializer {
   private final byte[] separators;      // the separators array
   private final boolean escaped;        // whether we need to escape the data when writing out
   private final byte escapeChar;        // which char to use as the escape char, e.g. '\\'
-  private final boolean[] needsEscape;  // which chars need to be escaped. This array should have size
-                                        // of 128. Negative byte values (or byte values >= 128) are
-                                        // never escaped.
+  private final boolean[] needsEscape;  // which chars need to be escaped. 
 
   private final long putTimestamp;
   private final ByteStream.Output output = new ByteStream.Output();

Modified: hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java (original)
+++ hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java Wed Feb 18 22:28:35 2015
@@ -34,7 +34,7 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Writable;
@@ -51,7 +51,8 @@ import org.apache.hadoop.mapred.JobConf;
     serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
     serdeConstants.ESCAPE_CHAR,
     serdeConstants.SERIALIZATION_ENCODING,
-    LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS,
+    LazySerDeParameters.SERIALIZATION_EXTEND_NESTING_LEVELS,
+    LazySerDeParameters.SERIALIZATION_EXTEND_ADDITIONAL_NESTING_LEVELS,
     HBaseSerDe.HBASE_COLUMNS_MAPPING,
     HBaseSerDe.HBASE_TABLE_NAME,
     HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE,
@@ -240,7 +241,7 @@ public class HBaseSerDe extends Abstract
     return new ColumnMappings(columnsMapping, rowKeyIndex, timestampIndex);
   }
 
-  public LazySimpleSerDe.SerDeParameters getSerdeParams() {
+  public LazySerDeParameters getSerdeParams() {
     return serdeParams.getSerdeParams();
   }
 

Modified: hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java (original)
+++ hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java Wed Feb 18 22:28:35 2015
@@ -33,8 +33,7 @@ import org.apache.hadoop.hive.hbase.stru
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -50,7 +49,7 @@ public class HBaseSerDeParameters {
   public static final String AVRO_SERIALIZATION_TYPE = "avro";
   public static final String STRUCT_SERIALIZATION_TYPE = "struct";
 
-  private final SerDeParameters serdeParams;
+  private final LazySerDeParameters serdeParams;
 
   private final Configuration job;
 
@@ -92,7 +91,7 @@ public class HBaseSerDeParameters {
           columnMappings.toTypesString(tbl, job, autogenerate));
     }
 
-    this.serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, serdeName);
+    this.serdeParams = new LazySerDeParameters(job, tbl, serdeName);
     this.putTimestamp = Long.valueOf(tbl.getProperty(HBaseSerDe.HBASE_PUT_TIMESTAMP, "-1"));
 
     columnMappings.setHiveColumnDescription(serdeName, serdeParams.getColumnNames(),
@@ -114,7 +113,7 @@ public class HBaseSerDeParameters {
     return serdeParams.getColumnTypes();
   }
 
-  public SerDeParameters getSerdeParams() {
+  public LazySerDeParameters getSerdeParams() {
     return serdeParams;
   }
 

Modified: hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java (original)
+++ hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java Wed Feb 18 22:28:35 2015
@@ -88,10 +88,16 @@ public class HBaseStorageHandler extends
   private static final String HBASE_SNAPSHOT_TABLE_DIR_KEY = "hbase.TableSnapshotInputFormat.table.dir";
   /** HBase-internal config by which input format received restore dir after HBASE-11335. */
   private static final String HBASE_SNAPSHOT_RESTORE_DIR_KEY = "hbase.TableSnapshotInputFormat.restore.dir";
-  /** HBase config by which a SlabCache is sized. */
-  private static final String HBASE_OFFHEAP_PCT_KEY = "hbase.offheapcache.percentage";
-  /** HBase config by which a BucketCache is sized. */
-  private static final String HBASE_BUCKETCACHE_SIZE_KEY = "hbase.bucketcache.size";
+  private static final String[] HBASE_CACHE_KEYS = new String[] {
+      /** HBase config by which a SlabCache is sized. From HBase [0.98.3, 1.0.0) */
+      "hbase.offheapcache.percentage",
+      /** HBase config by which a BucketCache is sized. */
+      "hbase.bucketcache.size",
+      /** HBase config by which the bucket cache implementation is chosen. From HBase 0.98.10+ */
+      "hbase.bucketcache.ioengine",
+      /** HBase config by which a BlockCache is sized. */
+      "hfile.block.cache.size"
+  };
 
   final static public String DEFAULT_PREFIX = "default.";
 
@@ -395,8 +401,14 @@ public class HBaseStorageHandler extends
 
           TableMapReduceUtil.resetCacheConfig(hbaseConf);
           // copy over configs touched by above method
-          jobProperties.put(HBASE_OFFHEAP_PCT_KEY, hbaseConf.get(HBASE_OFFHEAP_PCT_KEY));
-          jobProperties.put(HBASE_BUCKETCACHE_SIZE_KEY, hbaseConf.get(HBASE_BUCKETCACHE_SIZE_KEY));
+          for (String cacheKey : HBASE_CACHE_KEYS) {
+            final String value = hbaseConf.get(cacheKey);
+            if (value != null) {
+              jobProperties.put(cacheKey, value);
+            } else {
+              jobProperties.remove(cacheKey);
+            }
+          }
         } catch (IOException e) {
           throw new IllegalArgumentException(e);
         }

Modified: hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java (original)
+++ hive/branches/cbo/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java Wed Feb 18 22:28:35 2015
@@ -27,7 +27,7 @@ import org.apache.hadoop.hive.hbase.HBas
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -38,7 +38,7 @@ import org.apache.hadoop.hive.serde2.typ
  * */
 public class DefaultHBaseValueFactory implements HBaseValueFactory {
 
-  protected LazySimpleSerDe.SerDeParameters serdeParams;
+  protected LazySerDeParameters serdeParams;
   protected ColumnMappings columnMappings;
   protected HBaseSerDeParameters hbaseParams;
   protected Properties properties;

Modified: hive/branches/cbo/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java (original)
+++ hive/branches/cbo/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java Wed Feb 18 22:28:35 2015
@@ -61,8 +61,8 @@ import org.apache.hadoop.hive.serde2.io.
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.io.BooleanWritable;
@@ -1413,7 +1413,7 @@ public class TestHBaseSerDe extends Test
         "org.apache.hadoop.hive.hbase.avro.Employee");
     tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING, "cola:prefixB_.*");
     tbl.setProperty(HBaseSerDe.HBASE_AUTOGENERATE_STRUCT, "true");
-    tbl.setProperty(LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS, "true");
+    tbl.setProperty(LazySerDeParameters.SERIALIZATION_EXTEND_NESTING_LEVELS, "true");
 
     return tbl;
   }

Modified: hive/branches/cbo/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java (original)
+++ hive/branches/cbo/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java Wed Feb 18 22:28:35 2015
@@ -31,6 +31,7 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.io.BytesWritable;
 
 import java.io.IOException;
@@ -89,7 +90,7 @@ public class DelimitedInputWriter extend
           throws ClassNotFoundException, ConnectionError, SerializationError,
                  InvalidColumn, StreamingException {
      this(colNamesForFields, delimiter, endPoint, conf,
-             (char) LazySimpleSerDe.DefaultSeparators[0]);
+             (char) LazySerDeParameters.DefaultSeparators[0]);
    }
 
   /**

Modified: hive/branches/cbo/itests/hive-jmh/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/itests/hive-jmh/pom.xml?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/itests/hive-jmh/pom.xml (original)
+++ hive/branches/cbo/itests/hive-jmh/pom.xml Wed Feb 18 22:28:35 2015
@@ -62,6 +62,16 @@
 
   <profiles>
     <profile>
+      <id>hadoop-1</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
       <id>hadoop-2</id>
       <dependencies>
         <dependency>

Modified: hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java (original)
+++ hive/branches/cbo/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java Wed Feb 18 22:28:35 2015
@@ -17,19 +17,19 @@
  */
 package org.apache.hadoop.hive.metastore;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
 
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PartitionSpec;
@@ -53,7 +53,7 @@ public class TestFilterHooks {
     }
 
     @Override
-    public List<String> filterDatabases(List<String> dbList) {
+    public List<String> filterDatabases(List<String> dbList) throws MetaException  {
       if (blockResults) {
         return new ArrayList<String>();
       }
@@ -69,7 +69,7 @@ public class TestFilterHooks {
     }
 
     @Override
-    public List<String> filterTableNames(String dbName, List<String> tableList) {
+    public List<String> filterTableNames(String dbName, List<String> tableList) throws MetaException {
       if (blockResults) {
         return new ArrayList<String>();
       }
@@ -85,7 +85,7 @@ public class TestFilterHooks {
     }
 
     @Override
-    public List<Table> filterTables(List<Table> tableList) {
+    public List<Table> filterTables(List<Table> tableList) throws MetaException {
       if (blockResults) {
         return new ArrayList<Table>();
       }
@@ -93,7 +93,7 @@ public class TestFilterHooks {
     }
 
     @Override
-    public List<Partition> filterPartitions(List<Partition> partitionList) {
+    public List<Partition> filterPartitions(List<Partition> partitionList) throws MetaException {
       if (blockResults) {
         return new ArrayList<Partition>();
       }
@@ -102,7 +102,7 @@ public class TestFilterHooks {
 
     @Override
     public List<PartitionSpec> filterPartitionSpecs(
-        List<PartitionSpec> partitionSpecList) {
+        List<PartitionSpec> partitionSpecList) throws MetaException {
       if (blockResults) {
         return new ArrayList<PartitionSpec>();
       }
@@ -119,7 +119,7 @@ public class TestFilterHooks {
 
     @Override
     public List<String> filterPartitionNames(String dbName, String tblName,
-        List<String> partitionNames) {
+        List<String> partitionNames) throws MetaException {
       if (blockResults) {
         return new ArrayList<String>();
       }
@@ -136,7 +136,7 @@ public class TestFilterHooks {
 
     @Override
     public List<String> filterIndexNames(String dbName, String tblName,
-        List<String> indexList) {
+        List<String> indexList) throws MetaException {
       if (blockResults) {
         return new ArrayList<String>();
       }
@@ -144,7 +144,7 @@ public class TestFilterHooks {
     }
 
     @Override
-    public List<Index> filterIndexes(List<Index> indexeList) {
+    public List<Index> filterIndexes(List<Index> indexeList) throws MetaException {
       if (blockResults) {
         return new ArrayList<Index>();
       }

Modified: hive/branches/cbo/itests/src/test/resources/testconfiguration.properties
URL: http://svn.apache.org/viewvc/hive/branches/cbo/itests/src/test/resources/testconfiguration.properties?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/itests/src/test/resources/testconfiguration.properties (original)
+++ hive/branches/cbo/itests/src/test/resources/testconfiguration.properties Wed Feb 18 22:28:35 2015
@@ -140,6 +140,7 @@ minitez.query.files.shared=alter_merge_2
   orc_vectorization_ppd.q,\
   parallel.q,\
   ptf.q,\
+  ptf_matchpath.q,\
   ptf_streaming.q,\
   sample1.q,\
   selectDistinctStar.q,\
@@ -573,6 +574,7 @@ spark.query.files=add_part_multiple.q, \
   bucketsortoptimize_insert_6.q, \
   bucketsortoptimize_insert_7.q, \
   bucketsortoptimize_insert_8.q, \
+  cbo_gby_empty.q, \
   column_access_stats.q, \
   count.q, \
   create_merge_compressed.q, \
@@ -908,6 +910,7 @@ spark.query.files=add_part_multiple.q, \
   transform_ppr1.q, \
   transform_ppr2.q, \
   udf_example_add.q, \
+  udf_in_file.q, \
   union.q, \
   union10.q, \
   union11.q, \
@@ -954,6 +957,7 @@ spark.query.files=add_part_multiple.q, \
   union_remove_8.q, \
   union_remove_9.q, \
   uniquejoin.q, \
+  union_view.q, \
   varchar_join1.q, \
   vector_between_in.q, \
   vector_cast_constant.q, \

Modified: hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java (original)
+++ hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java Wed Feb 18 22:28:35 2015
@@ -76,6 +76,6 @@ public class GenericUDFEvaluateNPE exten
   @Override
   public String getDisplayString(String[] children) {
     assert (children.length == 1);
-    return "evaluate_npe(" + children[0] + ")";
+    return getStandardDisplayString("evaluate_npe", children);
   }
 }

Modified: hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java (original)
+++ hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java Wed Feb 18 22:28:35 2015
@@ -54,6 +54,6 @@ public class GenericUDFTestGetJavaBoolea
   @Override
   public String getDisplayString(String[] children) {
     assert (children.length == 1);
-    return "TestGetJavaBoolean(" + children[0] + ")";
+    return getStandardDisplayString("TestGetJavaBoolean", children);
   }
 }

Modified: hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java (original)
+++ hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java Wed Feb 18 22:28:35 2015
@@ -50,6 +50,6 @@ public class GenericUDFTestGetJavaString
   @Override
   public String getDisplayString(String[] children) {
     assert (children.length == 1);
-    return "GenericUDFTestGetJavaString(" + children[0] + ")";
+    return getStandardDisplayString("GenericUDFTestGetJavaString", children);
   }
 }

Modified: hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java (original)
+++ hive/branches/cbo/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java Wed Feb 18 22:28:35 2015
@@ -119,7 +119,6 @@ public class GenericUDFTestTranslate ext
   @Override
   public String getDisplayString(String[] children) {
     assert (children.length == 3);
-    return "translate(" + children[0] + "," + children[1] + "," + children[2]
-        + ")";
+    return getStandardDisplayString("translate", children, ",");
   }
 }

Modified: hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java (original)
+++ hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java Wed Feb 18 22:28:35 2015
@@ -23,6 +23,7 @@ import java.util.List;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PartitionSpec;
@@ -37,7 +38,7 @@ public class DefaultMetaStoreFilterHookI
   }
 
   @Override
-  public List<String> filterDatabases(List<String> dbList) {
+  public List<String> filterDatabases(List<String> dbList) throws MetaException {
     return dbList;
   }
 
@@ -47,7 +48,7 @@ public class DefaultMetaStoreFilterHookI
   }
 
   @Override
-  public List<String> filterTableNames(String dbName, List<String> tableList) {
+  public List<String> filterTableNames(String dbName, List<String> tableList) throws MetaException {
     return tableList;
   }
 
@@ -57,18 +58,18 @@ public class DefaultMetaStoreFilterHookI
   }
 
   @Override
-  public List<Table> filterTables(List<Table> tableList) {
+  public List<Table> filterTables(List<Table> tableList) throws MetaException {
     return tableList;
   }
 
   @Override
-  public List<Partition> filterPartitions(List<Partition> partitionList) {
+  public List<Partition> filterPartitions(List<Partition> partitionList) throws MetaException {
     return partitionList;
   }
 
   @Override
   public List<PartitionSpec> filterPartitionSpecs(
-      List<PartitionSpec> partitionSpecList) {
+      List<PartitionSpec> partitionSpecList) throws MetaException {
     return partitionSpecList;
   }
 
@@ -79,7 +80,7 @@ public class DefaultMetaStoreFilterHookI
 
   @Override
   public List<String> filterPartitionNames(String dbName, String tblName,
-      List<String> partitionNames) {
+      List<String> partitionNames) throws MetaException {
     return partitionNames;
   }
 
@@ -90,12 +91,12 @@ public class DefaultMetaStoreFilterHookI
 
   @Override
   public List<String> filterIndexNames(String dbName, String tblName,
-      List<String> indexList) {
+      List<String> indexList) throws MetaException {
     return indexList;
   }
 
   @Override
-  public List<Index> filterIndexes(List<Index> indexeList) {
+  public List<Index> filterIndexes(List<Index> indexeList) throws MetaException {
     return indexeList;
   }
 }

Modified: hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Wed Feb 18 22:28:35 2015
@@ -346,9 +346,11 @@ public class HiveMetaStore extends Thrif
       final Formatter fmt = auditFormatter.get();
       ((StringBuilder) fmt.out()).setLength(0);
 
-      String address;
+      String address = null;
       if (useSasl) {
-        address = saslServer.getRemoteAddress().toString();
+        if (saslServer != null && saslServer.getRemoteAddress() != null) {
+          address = String.valueOf(saslServer.getRemoteAddress());
+        }
       } else {
         address = getIpAddress();
       }

Modified: hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java (original)
+++ hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java Wed Feb 18 22:28:35 2015
@@ -24,6 +24,7 @@ import org.apache.hadoop.hive.common.cla
 import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PartitionSpec;
@@ -43,7 +44,7 @@ public interface MetaStoreFilterHook {
    * @param dbList
    * @return List of filtered Db names
    */
-  public List<String> filterDatabases(List<String> dbList);
+  public List<String> filterDatabases(List<String> dbList) throws MetaException;
 
   /**
    * filter to given database object if applicable
@@ -51,7 +52,7 @@ public interface MetaStoreFilterHook {
    * @return the same database if it's not filtered out
    * @throws NoSuchObjectException
    */
-  public Database filterDatabase(Database dataBase) throws NoSuchObjectException;
+  public Database filterDatabase(Database dataBase) throws MetaException, NoSuchObjectException;
 
   /**
    * Filter given list of tables
@@ -59,7 +60,7 @@ public interface MetaStoreFilterHook {
    * @param tableList
    * @returnList of filtered table names
    */
-  public List<String> filterTableNames(String dbName, List<String> tableList);
+  public List<String> filterTableNames(String dbName, List<String> tableList) throws MetaException;
 
   /**
    * filter to given table object if applicable
@@ -67,7 +68,7 @@ public interface MetaStoreFilterHook {
    * @return the same table if it's not filtered out
    * @throws NoSuchObjectException
    */
-  public Table filterTable(Table table) throws NoSuchObjectException;
+  public Table filterTable(Table table) throws MetaException, NoSuchObjectException;
 
   /**
    * Filter given list of tables
@@ -75,21 +76,22 @@ public interface MetaStoreFilterHook {
    * @param tableList
    * @returnList of filtered table names
    */
-  public List<Table> filterTables(List<Table> tableList);
+  public List<Table> filterTables(List<Table> tableList) throws MetaException;
 
   /**
    * Filter given list of partitions
    * @param partitionList
    * @return
    */
-  public List<Partition> filterPartitions(List<Partition> partitionList);
+  public List<Partition> filterPartitions(List<Partition> partitionList) throws MetaException;
 
   /**
    * Filter given list of partition specs
    * @param partitionSpecList
    * @return
    */
-  public List<PartitionSpec> filterPartitionSpecs(List<PartitionSpec> partitionSpecList);
+  public List<PartitionSpec> filterPartitionSpecs(List<PartitionSpec> partitionSpecList)
+      throws MetaException;
 
   /**
    * filter to given partition object if applicable
@@ -97,7 +99,7 @@ public interface MetaStoreFilterHook {
    * @return the same partition object if it's not filtered out
    * @throws NoSuchObjectException
    */
-  public Partition filterPartition(Partition partition) throws NoSuchObjectException;
+  public Partition filterPartition(Partition partition) throws MetaException, NoSuchObjectException;
 
   /**
    * Filter given list of partition names
@@ -107,9 +109,9 @@ public interface MetaStoreFilterHook {
    * @return
    */
   public List<String> filterPartitionNames(String dbName, String tblName,
-      List<String> partitionNames);
+      List<String> partitionNames) throws MetaException;
 
-  public Index filterIndex(Index index) throws NoSuchObjectException;
+  public Index filterIndex(Index index) throws MetaException, NoSuchObjectException;
 
   /**
    * Filter given list of index names
@@ -119,13 +121,13 @@ public interface MetaStoreFilterHook {
    * @return
    */
   public List<String> filterIndexNames(String dbName, String tblName,
-      List<String> indexList);
+      List<String> indexList) throws MetaException;
 
   /**
    * Filter given list of index objects
    * @param indexeList
    * @return
    */
-  public List<Index> filterIndexes(List<Index> indexeList);
+  public List<Index> filterIndexes(List<Index> indexeList) throws MetaException;
 }
 

Modified: hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java (original)
+++ hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java Wed Feb 18 22:28:35 2015
@@ -18,9 +18,9 @@
 
 package org.apache.hadoop.hive.metastore;
 
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
-import java.util.SortedSet;
 
 import junit.framework.Assert;
 
@@ -28,7 +28,6 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.Function;
@@ -44,12 +43,10 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PartitionEventType;
-import org.apache.hadoop.hive.metastore.api.PartitionsStatsRequest;
 import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.Role;
-import org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
@@ -145,13 +142,13 @@ public class DummyRawStoreForJdoConnecti
   @Override
   public List<String> getDatabases(String pattern) throws MetaException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<String> getAllDatabases() throws MetaException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
@@ -214,7 +211,7 @@ public class DummyRawStoreForJdoConnecti
   public List<Partition> getPartitions(String dbName, String tableName, int max)
       throws MetaException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
@@ -227,41 +224,41 @@ public class DummyRawStoreForJdoConnecti
   @Override
   public List<String> getTables(String dbName, String pattern) throws MetaException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<Table> getTableObjectsByName(String dbname, List<String> tableNames)
       throws MetaException, UnknownDBException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<String> getAllTables(String dbName) throws MetaException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<String> listTableNamesByFilter(String dbName, String filter, short max_tables)
       throws MetaException, UnknownDBException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<String> listPartitionNames(String db_name, String tbl_name, short max_parts)
       throws MetaException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<String> listPartitionNamesByFilter(String db_name, String tbl_name, String filter,
       short max_parts) throws MetaException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
@@ -310,7 +307,7 @@ public class DummyRawStoreForJdoConnecti
   public List<String> listIndexNames(String dbName, String origTableName, short max)
       throws MetaException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
@@ -324,14 +321,14 @@ public class DummyRawStoreForJdoConnecti
   public List<Partition> getPartitionsByFilter(String dbName, String tblName, String filter,
       short maxParts) throws MetaException, NoSuchObjectException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<Partition> getPartitionsByNames(String dbName, String tblName,
       List<String> partNames) throws MetaException, NoSuchObjectException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
@@ -425,35 +422,35 @@ public class DummyRawStoreForJdoConnecti
   public List<MGlobalPrivilege> listPrincipalGlobalGrants(String principalName,
       PrincipalType principalType) {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<MDBPrivilege> listPrincipalDBGrants(String principalName,
       PrincipalType principalType, String dbName) {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<MTablePrivilege> listAllTableGrants(String principalName,
       PrincipalType principalType, String dbName, String tableName) {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<MPartitionPrivilege> listPrincipalPartitionGrants(String principalName,
       PrincipalType principalType, String dbName, String tableName, String partName) {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<MTableColumnPrivilege> listPrincipalTableColumnGrants(String principalName,
       PrincipalType principalType, String dbName, String tableName, String columnName) {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
@@ -461,7 +458,7 @@ public class DummyRawStoreForJdoConnecti
       PrincipalType principalType, String dbName, String tableName, String partName,
       String columnName) {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
@@ -487,18 +484,18 @@ public class DummyRawStoreForJdoConnecti
   @Override
   public List<String> listRoleNames() {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<MRoleMap> listRoles(String principalName, PrincipalType principalType) {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<MRoleMap> listRoleMembers(String roleName) {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
@@ -514,14 +511,14 @@ public class DummyRawStoreForJdoConnecti
       String userName, List<String> groupNames) throws MetaException, NoSuchObjectException,
       InvalidObjectException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<String> listPartitionNamesPs(String db_name, String tbl_name, List<String> part_vals,
       short max_parts) throws MetaException, NoSuchObjectException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
@@ -529,7 +526,7 @@ public class DummyRawStoreForJdoConnecti
       List<String> part_vals, short max_parts, String userName, List<String> groupNames)
       throws MetaException, InvalidObjectException, NoSuchObjectException {
 
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
@@ -555,7 +552,7 @@ public class DummyRawStoreForJdoConnecti
 
   @Override
   public List<String> getAllTokenIdentifiers() {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
@@ -574,67 +571,67 @@ public class DummyRawStoreForJdoConnecti
 
   @Override
   public String[] getMasterKeys() {
-    return null;
+    return new String[0];
   }
 
   @Override
   public List<HiveObjectPrivilege> listPrincipalDBGrantsAll(
       String principalName, PrincipalType principalType) {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<HiveObjectPrivilege> listPrincipalTableGrantsAll(
       String principalName, PrincipalType principalType) {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<HiveObjectPrivilege> listPrincipalPartitionGrantsAll(
       String principalName, PrincipalType principalType) {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<HiveObjectPrivilege> listPrincipalTableColumnGrantsAll(
       String principalName, PrincipalType principalType) {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<HiveObjectPrivilege> listPrincipalPartitionColumnGrantsAll(
       String principalName, PrincipalType principalType) {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<HiveObjectPrivilege> listGlobalGrantsAll() {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<HiveObjectPrivilege> listDBGrantsAll(String dbName) {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<HiveObjectPrivilege> listPartitionColumnGrantsAll(String dbName, String tableName, String partitionName, String columnName) {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<HiveObjectPrivilege> listTableGrantsAll(String dbName, String tableName) {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<HiveObjectPrivilege> listPartitionGrantsAll(String dbName, String tableName, String partitionName) {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
   public List<HiveObjectPrivilege> listTableColumnGrantsAll(String dbName, String tableName, String columnName) {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
@@ -689,7 +686,7 @@ public class DummyRawStoreForJdoConnecti
   public List<ColumnStatistics> getPartitionColumnStatistics(String dbName,
       String tblName, List<String> colNames, List<String> partNames)
       throws MetaException, NoSuchObjectException {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override
@@ -738,7 +735,7 @@ public class DummyRawStoreForJdoConnecti
   @Override
   public List<String> getFunctions(String dbName, String pattern)
       throws MetaException {
-    return null;
+    return Collections.emptyList();
   }
 
   @Override

Modified: hive/branches/cbo/packaging/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/packaging/pom.xml?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/packaging/pom.xml (original)
+++ hive/branches/cbo/packaging/pom.xml Wed Feb 18 22:28:35 2015
@@ -86,28 +86,6 @@
               </execution>
             </executions>
           </plugin>
-          <plugin>
-            <groupId>org.codehaus.mojo</groupId>
-            <artifactId>build-helper-maven-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>attach-jdbc-driver</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>attach-artifact</goal>
-                </goals>
-                <configuration>
-                  <artifacts>
-                    <artifact>
-                      <file>${project.build.directory}/${hive.jdbc.driver.jar}</file>
-                      <type>jar</type>
-                      <classifier>${hive.jdbc.driver.classifier}</classifier>
-                    </artifact>
-                  </artifacts>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
         </plugins>
       </build>
     </profile>

Modified: hive/branches/cbo/packaging/src/main/assembly/src.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/packaging/src/main/assembly/src.xml?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/packaging/src/main/assembly/src.xml (original)
+++ hive/branches/cbo/packaging/src/main/assembly/src.xml Wed Feb 18 22:28:35 2015
@@ -40,6 +40,7 @@
         <exclude>**/.project</exclude>
         <exclude>**/.settings/**</exclude>
         <exclude>itests/thirdparty/**</exclude>
+        <exclude>testutils/ptest2/velocity.log*</exclude>
       </excludes>
 
       <includes>
@@ -74,6 +75,8 @@
         <include>jdbc/**/*</include>
         <include>contrib/**/*</include>
         <include>metastore/**/*</include>
+        <include>lib/**/*</include>
+        <include>findbugs/**/*</include>
         <include>odbc/**/*</include>
         <include>packaging/pom.xml</include>
         <include>packaging/src/**/*</include>
@@ -83,7 +86,6 @@
         <include>service/**/*</include>
         <include>shims/**/*</include>
         <include>spark-client/**/*</include>
-        <include>testlibs/**/*</include>
         <include>testutils/**/*</include>
       </includes>
       <outputDirectory>/</outputDirectory>

Modified: hive/branches/cbo/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/pom.xml?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/pom.xml (original)
+++ hive/branches/cbo/pom.xml Wed Feb 18 22:28:35 2015
@@ -487,10 +487,20 @@
       </dependency>
       <dependency>
         <groupId>org.apache.curator</groupId>
+        <artifactId>curator-client</artifactId>
+        <version>${curator.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.curator</groupId>
         <artifactId>curator-framework</artifactId>
         <version>${curator.version}</version>
       </dependency>
       <dependency>
+        <groupId>org.apache.curator</groupId>
+        <artifactId>curator-recipes</artifactId>
+        <version>${curator.version}</version>
+      </dependency>
+      <dependency>
         <groupId>org.codehaus.groovy</groupId>
         <artifactId>groovy-all</artifactId>
         <version>${groovy.version}</version>

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Wed Feb 18 22:28:35 2015
@@ -47,7 +47,6 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
-import org.apache.hadoop.hive.ql.exec.MoveTask;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -64,8 +63,8 @@ import org.apache.hadoop.hive.ql.hooks.H
 import org.apache.hadoop.hive.ql.hooks.PostExecute;
 import org.apache.hadoop.hive.ql.hooks.PreExecute;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.hooks.Redactor;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockObj;
@@ -153,6 +152,9 @@ public class Driver implements CommandPr
 
   private String userName;
 
+  // HS2 operation handle guid string
+  private String operationId;
+
   private boolean checkConcurrency() {
     boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
     if (!supportConcurrency) {
@@ -1328,7 +1330,8 @@ public class Driver implements CommandPr
       resStream = null;
 
       SessionState ss = SessionState.get();
-      HookContext hookContext = new HookContext(plan, conf, ctx.getPathToCS(), ss.getUserName(), ss.getUserIpAddress());
+      HookContext hookContext = new HookContext(plan, conf, ctx.getPathToCS(), ss.getUserName(),
+          ss.getUserIpAddress(), operationId);
       hookContext.setHookType(HookContext.HookType.PRE_EXEC_HOOK);
 
       for (Hook peh : getHooks(HiveConf.ConfVars.PREEXECHOOKS)) {
@@ -1625,8 +1628,7 @@ public class Driver implements CommandPr
 
     cxt.launching(tskRun);
     // Launch Task
-    if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.EXECPARALLEL)
-        && (tsk.isMapRedTask() || (tsk instanceof MoveTask))) {
+    if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.EXECPARALLEL) && tsk.isMapRedTask()) {
       // Launch it in the parallel mode, as a separate thread only for MR tasks
       if (LOG.isInfoEnabled()){
         LOG.info("Starting task [" + tsk + "] in parallel");
@@ -1784,4 +1786,12 @@ public class Driver implements CommandPr
     return errorMessage;
   }
 
+  /**
+   * Set the HS2 operation handle's guid string
+   * @param opId base64 encoded guid string
+   */
+  public void setOperationId(String opId) {
+    this.operationId = opId;
+  }
+
 }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java Wed Feb 18 22:28:35 2015
@@ -428,6 +428,7 @@ public enum ErrorMsg {
   ALTER_TABLE_PARTITION_CASCADE_NOT_SUPPORTED(10300,
       "Alter table partition type {0} does not support cascade", true),
 
+  DROP_NATIVE_FUNCTION(10301, "Cannot drop native function"),
 
   //========================== 20000 range starts here ========================//
   SCRIPT_INIT_ERROR(20000, "Unable to initialize custom script."),

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Wed Feb 18 22:28:35 2015
@@ -33,7 +33,6 @@ import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -478,11 +477,11 @@ public class ExplainTask extends Task<Ex
           json.put(ent.getKey().toString(), ent.getValue().toString());
         }
       }
-      else if (ent.getValue() instanceof Serializable) {
+      else if (ent.getValue() != null) {
         if (out != null) {
           out.println();
         }
-        JSONObject jsonOut = outputPlan((Serializable) ent.getValue(), out,
+        JSONObject jsonOut = outputPlan(ent.getValue(), out,
             extended, jsonOutput, jsonOutput ? 0 : indent + 2);
         if (jsonOutput) {
           json.put(ent.getKey().toString(), jsonOut);
@@ -518,11 +517,11 @@ public class ExplainTask extends Task<Ex
         }
         nl = true;
       }
-      else if (o instanceof Serializable) {
+      else {
         if (first_el && (out != null) && hasHeader) {
           out.println();
         }
-        JSONObject jsonOut = outputPlan((Serializable) o, out, extended,
+        JSONObject jsonOut = outputPlan(o, out, extended,
             jsonOutput, jsonOutput ? 0 : (hasHeader ? indent + 2 : indent));
         if (jsonOutput) {
           outputArray.put(jsonOut);
@@ -553,12 +552,12 @@ public class ExplainTask extends Task<Ex
     return false;
   }
 
-  private JSONObject outputPlan(Serializable work,
+  private JSONObject outputPlan(Object work,
       PrintStream out, boolean extended, boolean jsonOutput, int indent) throws Exception {
     return outputPlan(work, out, extended, jsonOutput, indent, "");
   }
 
-  private JSONObject outputPlan(Serializable work, PrintStream out,
+  private JSONObject outputPlan(Object work, PrintStream out,
       boolean extended, boolean jsonOutput, int indent, String appendToHeader) throws Exception {
     // Check if work has an explain annotation
     Annotation note = AnnotationUtils.getAnnotation(work.getClass(), Explain.class);
@@ -678,7 +677,7 @@ public class ExplainTask extends Task<Ex
           }
 
           // Try this as a map
-          try {
+          if (val instanceof Map) {
             // Go through the map and print out the stuff
             Map<?, ?> mp = (Map<?, ?>) val;
 
@@ -692,22 +691,10 @@ public class ExplainTask extends Task<Ex
             }
             continue;
           }
-          catch (ClassCastException ce) {
-            // Ignore - all this means is that this is not a map
-          }
 
           // Try this as a list
-          try {
-            List l;
-
-            try {
-              l = (List) val;
-            } catch (ClassCastException e) {
-              Set s = (Set) val;
-              l = new LinkedList();
-              l.addAll(s);
-            }
-
+          if (val instanceof List || val instanceof Set) {
+            List l = val instanceof List ? (List)val : new ArrayList((Set)val);
             if (out != null && !skipHeader && l != null && !l.isEmpty()) {
               out.print(header);
             }
@@ -720,18 +707,13 @@ public class ExplainTask extends Task<Ex
 
             continue;
           }
-          catch (ClassCastException ce) {
-            // Ignore
-          }
 
           // Finally check if it is serializable
           try {
-            Serializable s = (Serializable) val;
-
             if (!skipHeader && out != null) {
               out.println(header);
             }
-            JSONObject jsonOut = outputPlan(s, out, extended, jsonOutput, ind);
+            JSONObject jsonOut = outputPlan(val, out, extended, jsonOutput, ind);
             if (jsonOutput) {
               if (!skipHeader) {
                 json.put(header, jsonOut);
@@ -779,7 +761,7 @@ public class ExplainTask extends Task<Ex
     return true;
   }
 
-  private JSONObject outputPlan(Task<? extends Serializable> task,
+  private JSONObject outputPlan(Task<?> task,
       PrintStream out, JSONObject parentJSON, boolean extended,
       boolean jsonOutput, int indent) throws Exception {
 
@@ -805,7 +787,7 @@ public class ExplainTask extends Task<Ex
     return null;
   }
 
-  private JSONObject outputDependencies(Task<? extends Serializable> task,
+  private JSONObject outputDependencies(Task<?> task,
       PrintStream out, JSONObject parentJson, boolean jsonOutput, boolean taskType, int indent)
       throws Exception {
 
@@ -830,7 +812,7 @@ public class ExplainTask extends Task<Ex
     else {
       StringBuffer s = new StringBuffer();
       first = true;
-      for (Task<? extends Serializable> parent : task.getParentTasks()) {
+      for (Task<?> parent : task.getParentTasks()) {
         if (!first) {
           s.append(", ");
         }
@@ -847,7 +829,7 @@ public class ExplainTask extends Task<Ex
       }
     }
 
-    Task<? extends Serializable> currBackupTask = task.getBackupTask();
+    Task<?> currBackupTask = task.getBackupTask();
     if (currBackupTask != null) {
       if (out != null) {
         out.print(" has a backup stage: ");
@@ -862,7 +844,7 @@ public class ExplainTask extends Task<Ex
         && ((ConditionalTask) task).getListTasks() != null) {
       StringBuffer s = new StringBuffer();
       first = true;
-      for (Task<? extends Serializable> con : ((ConditionalTask) task).getListTasks()) {
+      for (Task<?> con : ((ConditionalTask) task).getListTasks()) {
         if (!first) {
           s.append(", ");
         }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java Wed Feb 18 22:28:35 2015
@@ -418,11 +418,13 @@ public class FileSinkOperator extends Te
 
       numRows = 0;
 
-      String context = jc.get(Operator.CONTEXT_NAME_KEY, "");
-      if (context != null && !context.isEmpty()) {
-        context = "_" + context.replace(" ","_");
+      String suffix = Integer.toString(conf.getDestTableId());
+      String fullName = conf.getTableInfo().getTableName();
+      if (fullName != null) {
+        suffix = suffix + "_" + fullName.toLowerCase();
       }
-      statsMap.put(Counter.RECORDS_OUT + context, row_count);
+
+      statsMap.put(Counter.RECORDS_OUT + "_" + suffix, row_count);
 
       initializeChildren(hconf);
     } catch (HiveException e) {



Mime
View raw message