hive-commits mailing list archives

From: br...@apache.org
Subject: svn commit: r1664455 [3/30] - in /hive/branches/parquet: ./ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/...
Date: Thu, 05 Mar 2015 18:51:39 GMT
Modified: hive/branches/parquet/data/files/parquet_types.txt
URL: http://svn.apache.org/viewvc/hive/branches/parquet/data/files/parquet_types.txt?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/data/files/parquet_types.txt (original)
+++ hive/branches/parquet/data/files/parquet_types.txt Thu Mar  5 18:51:32 2015
@@ -1,22 +1,22 @@
-100|1|1|1.0|0.0|abc|2011-01-01 01:01:01.111111111|a   |a  |B4F3CAFDBEDD|k1:v1|101,200|10,abc
-101|2|2|1.1|0.3|def|2012-02-02 02:02:02.222222222|ab  |ab |68692CCAC0BDE7|k2:v2|102,200|10,def
-102|3|3|1.2|0.6|ghi|2013-03-03 03:03:03.333333333|abc|abc|B4F3CAFDBEDD|k3:v3|103,200|10,ghi
-103|1|4|1.3|0.9|jkl|2014-04-04 04:04:04.444444444|abcd|abcd|68692CCAC0BDE7|k4:v4|104,200|10,jkl
-104|2|5|1.4|1.2|mno|2015-05-05 05:05:05.555555555|abcde|abcde|B4F3CAFDBEDD|k5:v5|105,200|10,mno
-105|3|1|1.0|1.5|pqr|2016-06-06 06:06:06.666666666|abcdef|abcdef|68692CCAC0BDE7|k6:v6|106,200|10,pqr
-106|1|2|1.1|1.8|stu|2017-07-07 07:07:07.777777777|abcdefg|abcdefg|B4F3CAFDBEDD|k7:v7|107,200|10,stu
-107|2|3|1.2|2.1|vwx|2018-08-08 08:08:08.888888888|bcdefg|abcdefgh|68692CCAC0BDE7|k8:v8|108,200|10,vwx
-108|3|4|1.3|2.4|yza|2019-09-09 09:09:09.999999999|cdefg|B4F3CAFDBEDD|68656C6C6F|k9:v9|109,200|10,yza
-109|1|5|1.4|2.7|bcd|2020-10-10 10:10:10.101010101|klmno|abcdedef|68692CCAC0BDE7|k10:v10|110,200|10,bcd
-110|2|1|1.0|3.0|efg|2021-11-11 11:11:11.111111111|pqrst|abcdede|B4F3CAFDBEDD|k11:v11|111,200|10,efg
-111|3|2|1.1|3.3|hij|2022-12-12 12:12:12.121212121|nopqr|abcded|68692CCAC0BDE7|k12:v12|112,200|10,hij
-112|1|3|1.2|3.6|klm|2023-01-02 13:13:13.131313131|opqrs|abcdd|B4F3CAFDBEDD|k13:v13|113,200|10,klm
-113|2|4|1.3|3.9|nop|2024-02-02 14:14:14.141414141|pqrst|abc|68692CCAC0BDE7|k14:v14|114,200|10,nop
-114|3|5|1.4|4.2|qrs|2025-03-03 15:15:15.151515151|qrstu|b|B4F3CAFDBEDD|k15:v15|115,200|10,qrs
-115|1|1|1.0|4.5|qrs|2026-04-04 16:16:16.161616161|rstuv|abcded|68692CCAC0BDE7|k16:v16|116,200|10,qrs
-116|2|2|1.1|4.8|wxy|2027-05-05 17:17:17.171717171|stuvw|abcded|B4F3CAFDBEDD|k17:v17|117,200|10,wxy
-117|3|3|1.2|5.1|zab|2028-06-06 18:18:18.181818181|tuvwx|abcded|68692CCAC0BDE7|k18:v18|118,200|10,zab
-118|1|4|1.3|5.4|cde|2029-07-07 19:19:19.191919191|uvwzy|abcdede|B4F3CAFDBEDD|k19:v19|119,200|10,cde
-119|2|5|1.4|5.7|fgh|2030-08-08 20:20:20.202020202|vwxyz|abcdede|68692CCAC0BDE7|k20:v20|120,200|10,fgh
-120|3|1|1.0|6.0|ijk|2031-09-09 21:21:21.212121212|wxyza|abcde|B4F3CAFDBEDD|k21:v21|121,200|10,ijk
-121|1|2|1.1|6.3|lmn|2032-10-10 22:22:22.222222222|bcdef|abcde||k22:v22|122,200|10,lmn
\ No newline at end of file
+100|1|1|1.0|0.0|abc|2011-01-01 01:01:01.111111111|a   |a  |B4F3CAFDBEDD|k1:v1|101,200|10,abc|2011-01-01
+101|2|2|1.1|0.3|def|2012-02-02 02:02:02.222222222|ab  |ab |68692CCAC0BDE7|k2:v2|102,200|10,def|2012-02-02
+102|3|3|1.2|0.6|ghi|2013-03-03 03:03:03.333333333|abc|abc|B4F3CAFDBEDD|k3:v3|103,200|10,ghi|2013-03-03
+103|1|4|1.3|0.9|jkl|2014-04-04 04:04:04.444444444|abcd|abcd|68692CCAC0BDE7|k4:v4|104,200|10,jkl|2014-04-04
+104|2|5|1.4|1.2|mno|2015-05-05 05:05:05.555555555|abcde|abcde|B4F3CAFDBEDD|k5:v5|105,200|10,mno|2015-05-05
+105|3|1|1.0|1.5|pqr|2016-06-06 06:06:06.666666666|abcdef|abcdef|68692CCAC0BDE7|k6:v6|106,200|10,pqr|2016-06-06
+106|1|2|1.1|1.8|stu|2017-07-07 07:07:07.777777777|abcdefg|abcdefg|B4F3CAFDBEDD|k7:v7|107,200|10,stu|2017-07-07
+107|2|3|1.2|2.1|vwx|2018-08-08 08:08:08.888888888|bcdefg|abcdefgh|68692CCAC0BDE7|k8:v8|108,200|10,vwx|2018-08-08
+108|3|4|1.3|2.4|yza|2019-09-09 09:09:09.999999999|cdefg|B4F3CAFDBEDD|68656C6C6F|k9:v9|109,200|10,yza|2019-09-09
+109|1|5|1.4|2.7|bcd|2020-10-10 10:10:10.101010101|klmno|abcdedef|68692CCAC0BDE7|k10:v10|110,200|10,bcd|2020-10-10
+110|2|1|1.0|3.0|efg|2021-11-11 11:11:11.111111111|pqrst|abcdede|B4F3CAFDBEDD|k11:v11|111,200|10,efg|2021-11-11
+111|3|2|1.1|3.3|hij|2022-12-12 12:12:12.121212121|nopqr|abcded|68692CCAC0BDE7|k12:v12|112,200|10,hij|2022-12-12
+112|1|3|1.2|3.6|klm|2023-01-02 13:13:13.131313131|opqrs|abcdd|B4F3CAFDBEDD|k13:v13|113,200|10,klm|2023-01-02
+113|2|4|1.3|3.9|nop|2024-02-02 14:14:14.141414141|pqrst|abc|68692CCAC0BDE7|k14:v14|114,200|10,nop|2024-02-02
+114|3|5|1.4|4.2|qrs|2025-03-03 15:15:15.151515151|qrstu|b|B4F3CAFDBEDD|k15:v15|115,200|10,qrs|2025-03-03
+115|1|1|1.0|4.5|qrs|2026-04-04 16:16:16.161616161|rstuv|abcded|68692CCAC0BDE7|k16:v16|116,200|10,qrs|2026-04-04
+116|2|2|1.1|4.8|wxy|2027-05-05 17:17:17.171717171|stuvw|abcded|B4F3CAFDBEDD|k17:v17|117,200|10,wxy|2027-05-05
+117|3|3|1.2|5.1|zab|2028-06-06 18:18:18.181818181|tuvwx|abcded|68692CCAC0BDE7|k18:v18|118,200|10,zab|2028-06-06
+118|1|4|1.3|5.4|cde|2029-07-07 19:19:19.191919191|uvwzy|abcdede|B4F3CAFDBEDD|k19:v19|119,200|10,cde|2029-07-07
+119|2|5|1.4|5.7|fgh|2030-08-08 20:20:20.202020202|vwxyz|abcdede|68692CCAC0BDE7|k20:v20|120,200|10,fgh|2030-08-08
+120|3|1|1.0|6.0|ijk|2031-09-09 21:21:21.212121212|wxyza|abcde|B4F3CAFDBEDD|k21:v21|121,200|10,ijk|2031-09-09
+121|1|2|1.1|6.3|lmn|2032-10-10 22:22:22.222222222|bcdef|abcde||k22:v22|122,200|10,lmn|2032-10-10
\ No newline at end of file

Propchange: hive/branches/parquet/hbase-handler/pom.xml
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Mar  5 18:51:32 2015
@@ -1,6 +1,6 @@
 /hive/branches/branch-0.11/hbase-handler/pom.xml:1480385,1480458,1481120,1481344,1481346,1481348,1481352,1483872,1505184
 /hive/branches/cbo/hbase-handler/pom.xml:1605012-1627125
-/hive/branches/spark/hbase-handler/pom.xml:1608589-1657401
+/hive/branches/spark/hbase-handler/pom.xml:1608589-1660298
 /hive/branches/tez/hbase-handler/pom.xml:1494760-1622766
 /hive/branches/vectorization/hbase-handler/pom.xml:1466908-1527856
-/hive/trunk/hbase-handler/pom.xml:1494760-1537575,1656617-1659011
+/hive/trunk/hbase-handler/pom.xml:1494760-1537575,1656617-1664450

Modified: hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java (original)
+++ hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java Thu Mar  5 18:51:32 2015
@@ -22,10 +22,11 @@ import java.io.IOException;
 import java.util.Properties;
 
 import com.google.common.annotations.VisibleForTesting;
+
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
@@ -33,7 +34,7 @@ import org.apache.hadoop.hive.serde2.typ
 
 public class DefaultHBaseKeyFactory extends AbstractHBaseKeyFactory implements HBaseKeyFactory {
 
-  protected SerDeParameters serdeParams;
+  protected LazySerDeParameters serdeParams;
   protected HBaseRowSerializer serializer;
 
   @Override
@@ -59,7 +60,7 @@ public class DefaultHBaseKeyFactory exte
   }
 
   @VisibleForTesting
-  static DefaultHBaseKeyFactory forTest(SerDeParameters params, ColumnMappings mappings) {
+  static DefaultHBaseKeyFactory forTest(LazySerDeParameters params, ColumnMappings mappings) {
     DefaultHBaseKeyFactory factory = new DefaultHBaseKeyFactory();
     factory.serdeParams = params;
     factory.keyMapping = mappings.getKeyMapping();

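For readers tracking the SerDeParameters migration in the hunk above: the nested LazySimpleSerDe.SerDeParameters type is replaced throughout this commit by the standalone LazySerDeParameters, which is constructed directly instead of going through LazySimpleSerDe.initSerdeParams (see the HBaseSerDeParameters hunk further below). A minimal sketch of the before/after call, illustrative only and not part of the commit; the wrapper class and the "sketch" serde name are hypothetical:

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;

public class SerdeParamsSketch {
  static LazySerDeParameters init(Configuration conf, Properties tbl) throws SerDeException {
    // Old API: LazySimpleSerDe.initSerdeParams(conf, tbl, "sketch");
    // New API: construct LazySerDeParameters directly from the job conf and table properties.
    return new LazySerDeParameters(conf, tbl, "sketch");
  }
}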
Modified: hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java (original)
+++ hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseLazyObjectFactory.java Thu Mar  5 18:51:32 2015
@@ -23,7 +23,7 @@ import java.util.List;
 
 import org.apache.hadoop.hive.hbase.struct.HBaseValueFactory;
 import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParametersImpl;
@@ -37,7 +37,7 @@ import org.apache.hadoop.io.Text;
 public class HBaseLazyObjectFactory {
 
   public static ObjectInspector createLazyHBaseStructInspector(
-      SerDeParameters serdeParams, int index, HBaseKeyFactory keyFactory, List<HBaseValueFactory> valueFactories) throws SerDeException {
+      LazySerDeParameters serdeParams, int index, HBaseKeyFactory keyFactory, List<HBaseValueFactory> valueFactories) throws SerDeException {
     List<TypeInfo> columnTypes = serdeParams.getColumnTypes();
     ArrayList<ObjectInspector> columnObjectInspectors = new ArrayList<ObjectInspector>(
         columnTypes.size());

Modified: hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java (original)
+++ hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java Thu Mar  5 18:51:32 2015
@@ -27,8 +27,8 @@ import org.apache.hadoop.hive.hbase.Colu
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -44,7 +44,7 @@ public class HBaseRowSerializer {
 
   private final HBaseKeyFactory keyFactory;
   private final HBaseSerDeParameters hbaseParam;
-  private final LazySimpleSerDe.SerDeParameters serdeParam;
+  private final LazySerDeParameters serdeParam;
 
   private final int keyIndex;
   private final int timestampIndex;
@@ -54,9 +54,7 @@ public class HBaseRowSerializer {
   private final byte[] separators;      // the separators array
   private final boolean escaped;        // whether we need to escape the data when writing out
   private final byte escapeChar;        // which char to use as the escape char, e.g. '\\'
-  private final boolean[] needsEscape;  // which chars need to be escaped. This array should have size
-                                        // of 128. Negative byte values (or byte values >= 128) are
-                                        // never escaped.
+  private final boolean[] needsEscape;  // which chars need to be escaped. 
 
   private final long putTimestamp;
   private final ByteStream.Output output = new ByteStream.Output();

Modified: hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java (original)
+++ hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java Thu Mar  5 18:51:32 2015
@@ -34,7 +34,7 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Writable;
@@ -51,7 +51,8 @@ import org.apache.hadoop.mapred.JobConf;
     serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
     serdeConstants.ESCAPE_CHAR,
     serdeConstants.SERIALIZATION_ENCODING,
-    LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS,
+    LazySerDeParameters.SERIALIZATION_EXTEND_NESTING_LEVELS,
+    LazySerDeParameters.SERIALIZATION_EXTEND_ADDITIONAL_NESTING_LEVELS,
     HBaseSerDe.HBASE_COLUMNS_MAPPING,
     HBaseSerDe.HBASE_TABLE_NAME,
     HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE,
@@ -240,7 +241,7 @@ public class HBaseSerDe extends Abstract
     return new ColumnMappings(columnsMapping, rowKeyIndex, timestampIndex);
   }
 
-  public LazySimpleSerDe.SerDeParameters getSerdeParams() {
+  public LazySerDeParameters getSerdeParams() {
     return serdeParams.getSerdeParams();
   }
 

Modified: hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java (original)
+++ hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java Thu Mar  5 18:51:32 2015
@@ -36,6 +36,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -466,7 +467,7 @@ public class HBaseSerDeHelper {
       throws SerDeException {
     Class<?> serClass;
     try {
-      serClass = Class.forName(serClassName);
+      serClass = JavaUtils.loadClass(serClassName);
     } catch (ClassNotFoundException e) {
       throw new SerDeException("Error obtaining descriptor for " + serClassName, e);
     }
@@ -562,7 +563,7 @@ public class HBaseSerDeHelper {
 
     Class<?> keyClass;
     try {
-      keyClass = Class.forName(compKeyClassName);
+      keyClass = JavaUtils.loadClass(compKeyClassName);
       keyFactory = new CompositeHBaseKeyFactory(keyClass);
     } catch (Exception e) {
       throw new SerDeException(e);

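The Class.forName to JavaUtils.loadClass change in the hunk above recurs across several files in this commit (HBaseSerDeParameters, FosterStorageHandler, HCatSplit, MessageFactory, JobState, AbstractRecordWriter). A minimal sketch of the pattern, illustrative only; the assumption, not stated in the diff, is that JavaUtils.loadClass resolves the class through the classloader Hive tracks for the current session or thread rather than the caller's own, so classes shipped in added jars can be found:

import org.apache.hadoop.hive.common.JavaUtils;

public class LoadClassSketch {
  static Class<?> resolve(String className) throws ClassNotFoundException {
    // Old: Class.forName(className), bound to the caller's defining classloader.
    // New: JavaUtils.loadClass(className); assumption: uses the Hive session/thread
    // context classloader, which also knows about jars added at runtime.
    return JavaUtils.loadClass(className);
  }
}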
Modified: hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java (original)
+++ hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java Thu Mar  5 18:51:32 2015
@@ -25,6 +25,7 @@ import java.util.Properties;
 import org.apache.avro.Schema;
 import org.apache.avro.reflect.ReflectData;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
 import org.apache.hadoop.hive.hbase.struct.AvroHBaseValueFactory;
 import org.apache.hadoop.hive.hbase.struct.DefaultHBaseValueFactory;
@@ -33,8 +34,7 @@ import org.apache.hadoop.hive.hbase.stru
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -50,7 +50,7 @@ public class HBaseSerDeParameters {
   public static final String AVRO_SERIALIZATION_TYPE = "avro";
   public static final String STRUCT_SERIALIZATION_TYPE = "struct";
 
-  private final SerDeParameters serdeParams;
+  private final LazySerDeParameters serdeParams;
 
   private final Configuration job;
 
@@ -92,7 +92,7 @@ public class HBaseSerDeParameters {
           columnMappings.toTypesString(tbl, job, autogenerate));
     }
 
-    this.serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, serdeName);
+    this.serdeParams = new LazySerDeParameters(job, tbl, serdeName);
     this.putTimestamp = Long.valueOf(tbl.getProperty(HBaseSerDe.HBASE_PUT_TIMESTAMP, "-1"));
 
     columnMappings.setHiveColumnDescription(serdeName, serdeParams.getColumnNames(),
@@ -114,7 +114,7 @@ public class HBaseSerDeParameters {
     return serdeParams.getColumnTypes();
   }
 
-  public SerDeParameters getSerdeParams() {
+  public LazySerDeParameters getSerdeParams() {
     return serdeParams;
   }
 
@@ -201,7 +201,7 @@ public class HBaseSerDeParameters {
     if (configuration != null) {
       return configuration.getClassByName(className);
     }
-    return Class.forName(className);
+    return JavaUtils.loadClass(className);
   }
 
   private List<HBaseValueFactory> initValueFactories(Configuration conf, Properties tbl)

Modified: hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java (original)
+++ hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java Thu Mar  5 18:51:32 2015
@@ -37,6 +37,8 @@ import org.apache.hadoop.hbase.HBaseConf
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.mapred.TableOutputFormat;
 import org.apache.hadoop.hbase.mapreduce.TableInputFormatBase;
@@ -65,6 +67,7 @@ import org.apache.hadoop.hive.serde2.Des
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.security.token.TokenUtil;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
@@ -88,10 +91,16 @@ public class HBaseStorageHandler extends
   private static final String HBASE_SNAPSHOT_TABLE_DIR_KEY = "hbase.TableSnapshotInputFormat.table.dir";
   /** HBase-internal config by which input format received restore dir after HBASE-11335. */
   private static final String HBASE_SNAPSHOT_RESTORE_DIR_KEY = "hbase.TableSnapshotInputFormat.restore.dir";
-  /** HBase config by which a SlabCache is sized. */
-  private static final String HBASE_OFFHEAP_PCT_KEY = "hbase.offheapcache.percentage";
-  /** HBase config by which a BucketCache is sized. */
-  private static final String HBASE_BUCKETCACHE_SIZE_KEY = "hbase.bucketcache.size";
+  private static final String[] HBASE_CACHE_KEYS = new String[] {
+      /** HBase config by which a SlabCache is sized. From HBase [0.98.3, 1.0.0) */
+      "hbase.offheapcache.percentage",
+      /** HBase config by which a BucketCache is sized. */
+      "hbase.bucketcache.size",
+      /** HBase config by which the bucket cache implementation is chosen. From HBase 0.98.10+ */
+      "hbase.bucketcache.ioengine",
+      /** HBase config by which a BlockCache is sized. */
+      "hfile.block.cache.size"
+  };
 
   final static public String DEFAULT_PREFIX = "default.";
 
@@ -395,8 +404,14 @@ public class HBaseStorageHandler extends
 
           TableMapReduceUtil.resetCacheConfig(hbaseConf);
           // copy over configs touched by above method
-          jobProperties.put(HBASE_OFFHEAP_PCT_KEY, hbaseConf.get(HBASE_OFFHEAP_PCT_KEY));
-          jobProperties.put(HBASE_BUCKETCACHE_SIZE_KEY, hbaseConf.get(HBASE_BUCKETCACHE_SIZE_KEY));
+          for (String cacheKey : HBASE_CACHE_KEYS) {
+            final String value = hbaseConf.get(cacheKey);
+            if (value != null) {
+              jobProperties.put(cacheKey, value);
+            } else {
+              jobProperties.remove(cacheKey);
+            }
+          }
         } catch (IOException e) {
           throw new IllegalArgumentException(e);
         }
@@ -456,35 +471,17 @@ public class HBaseStorageHandler extends
 
   private void addHBaseDelegationToken(Configuration conf) throws IOException {
     if (User.isHBaseSecurityEnabled(conf)) {
+      HConnection conn = HConnectionManager.createConnection(conf);
       try {
         User curUser = User.getCurrent();
-        Token<AuthenticationTokenIdentifier> authToken = getAuthToken(conf, curUser);
         Job job = new Job(conf);
-        if (authToken == null) {
-          curUser.obtainAuthTokenForJob(conf,job);
-        } else {
-          job.getCredentials().addToken(authToken.getService(), authToken);
-        }
+        TokenUtil.addTokenForJob(conn, curUser, job);
       } catch (InterruptedException e) {
         throw new IOException("Error while obtaining hbase delegation token", e);
       }
-    }
-  }
-
-  /**
-   * Get the authentication token of the user for the cluster specified in the configuration
-   * @return null if the user does not have the token, otherwise the auth token for the cluster.
-   */
-  private static Token<AuthenticationTokenIdentifier> getAuthToken(Configuration conf, User user)
-      throws IOException, InterruptedException {
-    ZooKeeperWatcher zkw = new ZooKeeperWatcher(conf, "mr-init-credentials", null);
-    try {
-      String clusterId = ZKClusterId.readClusterIdZNode(zkw);
-      return new AuthenticationTokenSelector().selectToken(new Text(clusterId), user.getUGI().getTokens());
-    } catch (KeeperException e) {
-      throw new IOException(e);
-    } finally {
-      zkw.close();
+      finally {
+        conn.close();
+      }
     }
   }
 

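The delegation-token rewrite in the hunk above drops the manual ZooKeeper cluster-id lookup and token selection in favor of TokenUtil.addTokenForJob over an explicitly created HConnection. A minimal sketch of the same lifecycle, illustrative only and not part of the commit; the wrapper class name is hypothetical, and the connection is closed in a finally block so a token failure cannot leak it:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.token.TokenUtil;
import org.apache.hadoop.mapreduce.Job;

public class DelegationTokenSketch {
  static void addHBaseDelegationToken(Configuration conf) throws IOException {
    if (!User.isHBaseSecurityEnabled(conf)) {
      return;
    }
    HConnection conn = HConnectionManager.createConnection(conf);
    try {
      // Asks HBase to issue an authentication token and attaches it to the job credentials.
      TokenUtil.addTokenForJob(conn, User.getCurrent(), new Job(conf));
    } catch (InterruptedException e) {
      throw new IOException("Error while obtaining hbase delegation token", e);
    } finally {
      conn.close();
    }
  }
}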
Modified: hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java (original)
+++ hive/branches/parquet/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java Thu Mar  5 18:51:32 2015
@@ -27,7 +27,7 @@ import org.apache.hadoop.hive.hbase.HBas
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -38,7 +38,7 @@ import org.apache.hadoop.hive.serde2.typ
  * */
 public class DefaultHBaseValueFactory implements HBaseValueFactory {
 
-  protected LazySimpleSerDe.SerDeParameters serdeParams;
+  protected LazySerDeParameters serdeParams;
   protected ColumnMappings columnMappings;
   protected HBaseSerDeParameters hbaseParams;
   protected Properties properties;

Modified: hive/branches/parquet/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java (original)
+++ hive/branches/parquet/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java Thu Mar  5 18:51:32 2015
@@ -61,8 +61,8 @@ import org.apache.hadoop.hive.serde2.io.
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.io.BooleanWritable;
@@ -1413,7 +1413,7 @@ public class TestHBaseSerDe extends Test
         "org.apache.hadoop.hive.hbase.avro.Employee");
     tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING, "cola:prefixB_.*");
     tbl.setProperty(HBaseSerDe.HBASE_AUTOGENERATE_STRUCT, "true");
-    tbl.setProperty(LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS, "true");
+    tbl.setProperty(LazySerDeParameters.SERIALIZATION_EXTEND_NESTING_LEVELS, "true");
 
     return tbl;
   }

Modified: hive/branches/parquet/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java (original)
+++ hive/branches/parquet/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java Thu Mar  5 18:51:32 2015
@@ -22,6 +22,7 @@ package org.apache.hive.hcatalog.mapredu
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
@@ -56,9 +57,9 @@ public class FosterStorageHandler extend
   private Class<? extends SerDe> serDeClass;
 
   public FosterStorageHandler(String ifName, String ofName, String serdeName) throws ClassNotFoundException {
-    this((Class<? extends InputFormat>) Class.forName(ifName),
-      (Class<? extends OutputFormat>) Class.forName(ofName),
-      (Class<? extends SerDe>) Class.forName(serdeName));
+    this((Class<? extends InputFormat>) JavaUtils.loadClass(ifName),
+      (Class<? extends OutputFormat>) JavaUtils.loadClass(ofName),
+      (Class<? extends SerDe>) JavaUtils.loadClass(serdeName));
   }
 
   public FosterStorageHandler(Class<? extends InputFormat> ifClass,

Modified: hive/branches/parquet/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatSplit.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatSplit.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatSplit.java (original)
+++ hive/branches/parquet/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatSplit.java Thu Mar  5 18:51:32 2015
@@ -23,6 +23,7 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
@@ -142,7 +143,7 @@ public class HCatSplit extends InputSpli
     org.apache.hadoop.mapred.InputSplit split;
     try {
       Class<? extends org.apache.hadoop.mapred.InputSplit> splitClass =
-        (Class<? extends org.apache.hadoop.mapred.InputSplit>) Class.forName(baseSplitClassName);
+        (Class<? extends org.apache.hadoop.mapred.InputSplit>) JavaUtils.loadClass(baseSplitClassName);
 
       //Class.forName().newInstance() does not work if the underlying
       //InputSplit has package visibility

Modified: hive/branches/parquet/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java (original)
+++ hive/branches/parquet/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java Thu Mar  5 18:51:32 2015
@@ -19,6 +19,7 @@
 
 package org.apache.hive.hcatalog.messaging;
 
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -66,7 +67,7 @@ public abstract class MessageFactory {
 
   private static MessageFactory getInstance(String className) {
     try {
-      return (MessageFactory)ReflectionUtils.newInstance(Class.forName(className), hiveConf);
+      return (MessageFactory)ReflectionUtils.newInstance(JavaUtils.loadClass(className), hiveConf);
     }
     catch (ClassNotFoundException classNotFound) {
       throw new IllegalStateException("Could not construct MessageFactory implementation: ", classNotFound);

Modified: hive/branches/parquet/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm (original)
+++ hive/branches/parquet/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm Thu Mar  5 18:51:32 2015
@@ -917,6 +917,9 @@ sub compare
 
           my %r_userargs = %{$res_hash->{'userargs'}};
           foreach my $key( keys %exp_userargs){
+            if($key eq 'inputreader'){
+              next;
+            }
             if( !defined $r_userargs{$key}){
               print $log "$0::$subName INFO $key not found in userargs \n";
               $result = 0;

Modified: hive/branches/parquet/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf (original)
+++ hive/branches/parquet/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf Thu Mar  5 18:51:32 2015
@@ -110,6 +110,20 @@ $cfg =
      'check_job_exit_value' => 1,
      'check_call_back' => 1,
     },
+    {
+     #streaming xml
+     'num' => 5,
+     'method' => 'POST',
+     'url' => ':TEMPLETON_URL:/templeton/v1/mapreduce/streaming',
+     'post_options' => ['user.name=:UNAME:','input=:INPDIR_HDFS:/xml/file1.xml','input=:INPDIR_HDFS:/xml/file2.xml','output=:OUTDIR:/xmlout',
+                        'mapper=python :INPDIR_HDFS:/xmlmapper.py','reducer=python :INPDIR_HDFS:/xmlreducer.py', 'inputreader="StreamXmlRecordReader,begin=xml,end=/xml"'],
+     'json_field_substr_match' => { 'id' => '\d+'},
+     				                 #results
+     'status_code' => 200,
+     'check_job_created' => 1,
+     'check_job_complete' => 'SUCCESS',
+     'check_call_back' => 1,
+    },
    ]
   },
 ##=============================================================================================================

Modified: hive/branches/parquet/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java (original)
+++ hive/branches/parquet/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java Thu Mar  5 18:51:32 2015
@@ -22,6 +22,7 @@ package org.apache.hive.hcatalog.streami
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -70,7 +71,7 @@ abstract class AbstractRecordWriter impl
                 + endPoint);
       }
       String outFormatName = this.tbl.getSd().getOutputFormat();
-      outf = (AcidOutputFormat<?,?>) ReflectionUtils.newInstance(Class.forName(outFormatName), conf);
+      outf = (AcidOutputFormat<?,?>) ReflectionUtils.newInstance(JavaUtils.loadClass(outFormatName), conf);
     } catch (MetaException e) {
       throw new ConnectionError(endPoint, e);
     } catch (NoSuchObjectException e) {

Modified: hive/branches/parquet/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java (original)
+++ hive/branches/parquet/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java Thu Mar  5 18:51:32 2015
@@ -31,6 +31,7 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.io.BytesWritable;
 
 import java.io.IOException;
@@ -89,7 +90,7 @@ public class DelimitedInputWriter extend
           throws ClassNotFoundException, ConnectionError, SerializationError,
                  InvalidColumn, StreamingException {
      this(colNamesForFields, delimiter, endPoint, conf,
-             (char) LazySimpleSerDe.DefaultSeparators[0]);
+             (char) LazySerDeParameters.DefaultSeparators[0]);
    }
 
   /**

Modified: hive/branches/parquet/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java (original)
+++ hive/branches/parquet/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java Thu Mar  5 18:51:32 2015
@@ -635,6 +635,7 @@ public class Server {
   @Path("mapreduce/streaming")
   @Produces({MediaType.APPLICATION_JSON})
   public EnqueueBean mapReduceStreaming(@FormParam("input") List<String> inputs,
+		      @FormParam("inputreader") String inputreader,
                       @FormParam("output") String output,
                       @FormParam("mapper") String mapper,
                       @FormParam("reducer") String reducer,
@@ -657,6 +658,7 @@ public class Server {
     Map<String, Object> userArgs = new HashMap<String, Object>();
     userArgs.put("user.name", getDoAsUser());
     userArgs.put("input", inputs);
+    userArgs.put("inputreader", inputreader);
     userArgs.put("output", output);
     userArgs.put("mapper", mapper);
     userArgs.put("reducer", reducer);
@@ -672,7 +674,7 @@ public class Server {
     checkEnableLogPrerequisite(enablelog, statusdir);
 
     StreamingDelegator d = new StreamingDelegator(appConf);
-    return d.run(getDoAsUser(), userArgs, inputs, output, mapper, reducer, combiner,
+    return d.run(getDoAsUser(), userArgs, inputs, inputreader, output, mapper, reducer, combiner,
       fileList, files, defines, cmdenvs, args,
       statusdir, callback, getCompletedUrl(), enablelog, JobType.STREAMING);
   }

Modified: hive/branches/parquet/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java (original)
+++ hive/branches/parquet/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java Thu Mar  5 18:51:32 2015
@@ -38,7 +38,7 @@ public class StreamingDelegator extends
   }
 
   public EnqueueBean run(String user, Map<String, Object> userArgs,
-               List<String> inputs, String output,
+               List<String> inputs, String inputreader, String output,
                String mapper, String reducer, String combiner,
                List<String> fileList,
                String files, List<String> defines,
@@ -51,7 +51,7 @@ public class StreamingDelegator extends
                JobType jobType)
     throws NotAuthorizedException, BadParam, BusyException, QueueException,
     ExecuteException, IOException, InterruptedException {
-    List<String> args = makeArgs(inputs, output, mapper, reducer, combiner,
+      List<String> args = makeArgs(inputs, inputreader, output, mapper, reducer, combiner,
       fileList, cmdenvs, jarArgs);
 
     JarDelegator d = new JarDelegator(appConf);
@@ -62,6 +62,7 @@ public class StreamingDelegator extends
   }
 
   private List<String> makeArgs(List<String> inputs,
+                  String inputreader,
                   String output,
                   String mapper,
                   String reducer,
@@ -82,6 +83,11 @@ public class StreamingDelegator extends
     args.add(mapper);
     args.add("-reducer");
     args.add(reducer);
+    
+    if (inputreader != null && !inputreader.isEmpty()) {
+      args.add("-inputreader");
+      args.add(inputreader);
+    }
 
     if (TempletonUtils.isset(combiner)) {
       args.add("-combiner");

Modified: hive/branches/parquet/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java (original)
+++ hive/branches/parquet/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java Thu Mar  5 18:51:32 2015
@@ -26,6 +26,7 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hive.hcatalog.templeton.JsonBuilder;
 
 /**
@@ -70,7 +71,7 @@ public class JobState {
     TempletonStorage storage = null;
     try {
       storage = (TempletonStorage)
-        Class.forName(conf.get(TempletonStorage.STORAGE_CLASS))
+          JavaUtils.loadClass(conf.get(TempletonStorage.STORAGE_CLASS))
           .newInstance();
     } catch (Exception e) {
       LOG.warn("No storage method found: " + e.getMessage());

Modified: hive/branches/parquet/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java (original)
+++ hive/branches/parquet/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java Thu Mar  5 18:51:32 2015
@@ -438,6 +438,41 @@ public class TestDbNotificationListener
   }
 
   @Test
+  public void sqlCTAS() throws Exception {
+
+    driver.run("create table ctas_source (c int)");
+    driver.run("insert into table ctas_source values (1)");
+    driver.run("create table ctas_target as select c from ctas_source");
+
+    NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null);
+
+    assertEquals(6, rsp.getEventsSize());
+    NotificationEvent event = rsp.getEvents().get(0);
+    assertEquals(firstEventId + 1, event.getEventId());
+    assertEquals(HCatConstants.HCAT_CREATE_TABLE_EVENT, event.getEventType());
+    event = rsp.getEvents().get(2);
+    assertEquals(firstEventId + 3, event.getEventId());
+    assertEquals(HCatConstants.HCAT_INSERT_EVENT, event.getEventType());
+    // Make sure the files are listed in the insert
+    assertTrue(event.getMessage().matches(".*\"files\":\\[\"pfile.*"));
+    event = rsp.getEvents().get(4);
+    assertEquals(firstEventId + 5, event.getEventId());
+    assertEquals(HCatConstants.HCAT_CREATE_TABLE_EVENT, event.getEventType());
+  }
+
+  @Test
+  public void sqlTempTable() throws Exception {
+
+    LOG.info("XXX Starting temp table");
+    driver.run("create temporary table tmp1 (c int)");
+    driver.run("insert into table tmp1 values (1)");
+
+    NotificationEventResponse rsp = msClient.getNextNotification(firstEventId, 0, null);
+
+    assertEquals(0, rsp.getEventsSize());
+  }
+
+  @Test
   public void sqlDb() throws Exception {
 
     driver.run("create database sd");

Modified: hive/branches/parquet/itests/hive-jmh/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/parquet/itests/hive-jmh/pom.xml?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/itests/hive-jmh/pom.xml (original)
+++ hive/branches/parquet/itests/hive-jmh/pom.xml Thu Mar  5 18:51:32 2015
@@ -62,6 +62,16 @@
 
   <profiles>
     <profile>
+      <id>hadoop-1</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop-20S.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
       <id>hadoop-2</id>
       <dependencies>
         <dependency>

Modified: hive/branches/parquet/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java (original)
+++ hive/branches/parquet/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java Thu Mar  5 18:51:32 2015
@@ -17,19 +17,19 @@
  */
 package org.apache.hadoop.hive.metastore;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
 
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PartitionSpec;
@@ -53,7 +53,7 @@ public class TestFilterHooks {
     }
 
     @Override
-    public List<String> filterDatabases(List<String> dbList) {
+    public List<String> filterDatabases(List<String> dbList) throws MetaException  {
       if (blockResults) {
         return new ArrayList<String>();
       }
@@ -69,7 +69,7 @@ public class TestFilterHooks {
     }
 
     @Override
-    public List<String> filterTableNames(String dbName, List<String> tableList) {
+    public List<String> filterTableNames(String dbName, List<String> tableList) throws MetaException {
       if (blockResults) {
         return new ArrayList<String>();
       }
@@ -85,7 +85,7 @@ public class TestFilterHooks {
     }
 
     @Override
-    public List<Table> filterTables(List<Table> tableList) {
+    public List<Table> filterTables(List<Table> tableList) throws MetaException {
       if (blockResults) {
         return new ArrayList<Table>();
       }
@@ -93,7 +93,7 @@ public class TestFilterHooks {
     }
 
     @Override
-    public List<Partition> filterPartitions(List<Partition> partitionList) {
+    public List<Partition> filterPartitions(List<Partition> partitionList) throws MetaException {
       if (blockResults) {
         return new ArrayList<Partition>();
       }
@@ -102,7 +102,7 @@ public class TestFilterHooks {
 
     @Override
     public List<PartitionSpec> filterPartitionSpecs(
-        List<PartitionSpec> partitionSpecList) {
+        List<PartitionSpec> partitionSpecList) throws MetaException {
       if (blockResults) {
         return new ArrayList<PartitionSpec>();
       }
@@ -119,7 +119,7 @@ public class TestFilterHooks {
 
     @Override
     public List<String> filterPartitionNames(String dbName, String tblName,
-        List<String> partitionNames) {
+        List<String> partitionNames) throws MetaException {
       if (blockResults) {
         return new ArrayList<String>();
       }
@@ -136,7 +136,7 @@ public class TestFilterHooks {
 
     @Override
     public List<String> filterIndexNames(String dbName, String tblName,
-        List<String> indexList) {
+        List<String> indexList) throws MetaException {
       if (blockResults) {
         return new ArrayList<String>();
       }
@@ -144,7 +144,7 @@ public class TestFilterHooks {
     }
 
     @Override
-    public List<Index> filterIndexes(List<Index> indexeList) {
+    public List<Index> filterIndexes(List<Index> indexeList) throws MetaException {
       if (blockResults) {
         return new ArrayList<Index>();
       }

Modified: hive/branches/parquet/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java (original)
+++ hive/branches/parquet/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetastoreVersion.java Thu Mar  5 18:51:32 2015
@@ -23,21 +23,24 @@ import java.util.Random;
 
 import junit.framework.TestCase;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.common.util.HiveStringUtils;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.ObjectStore;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 public class TestMetastoreVersion extends TestCase {
-
+  private static final Log LOG = LogFactory.getLog(TestMetastoreVersion.class);
   protected HiveConf hiveConf;
   private Driver driver;
   private String metaStoreRoot;
   private String testMetastoreDB;
-  Random randomNum = new Random();
 
   @Override
   protected void setUp() throws Exception {
@@ -45,13 +48,19 @@ public class TestMetastoreVersion extend
     Field defDb = HiveMetaStore.HMSHandler.class.getDeclaredField("currentUrl");
     defDb.setAccessible(true);
     defDb.set(null, null);
+    // reset defaults
+    ObjectStore.setSchemaVerified(false);
+    System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false");
+    System.setProperty(HiveConf.ConfVars.METASTORE_AUTO_CREATE_SCHEMA.toString(), "true");
+    System.setProperty(HiveConf.ConfVars.METASTORE_FIXED_DATASTORE.toString(), "false");
     hiveConf = new HiveConf(this.getClass());
+    System.setProperty("hive.support.concurrency", "false");
     System.setProperty("hive.metastore.event.listeners",
         DummyListener.class.getName());
     System.setProperty("hive.metastore.pre.event.listeners",
         DummyPreListener.class.getName());
     testMetastoreDB = System.getProperty("java.io.tmpdir") +
-    File.separator + "test_metastore-" + randomNum.nextInt();
+      File.separator + "test_metastore-" + System.currentTimeMillis();
     System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname,
         "jdbc:derby:" + testMetastoreDB + ";create=true");
     metaStoreRoot = System.getProperty("test.tmp.dir");
@@ -92,7 +101,10 @@ public class TestMetastoreVersion extend
       SessionState.start(new CliSessionState(hiveConf));
       fail("Expected exception");
     } catch (RuntimeException re) {
-      assertTrue(re.getCause().getCause().getCause() instanceof MetaException);
+      LOG.info("Exception in testVersionRestriction: " + re, re);
+      String msg = HiveStringUtils.stringifyException(re);
+      assertTrue("Expected 'Version information not found in metastore' in: " + msg, msg
+        .contains("Version information not found in metastore"));
     }
   }
 
@@ -149,7 +161,7 @@ public class TestMetastoreVersion extend
     SessionState.start(new CliSessionState(hiveConf));
     driver = new Driver(hiveConf);
     CommandProcessorResponse proc = driver.run("show tables");
-    assertFalse(proc.getResponseCode() == 0);
+    assertEquals(0, proc.getResponseCode());
   }
 
   //  write the given version to metastore

Modified: hive/branches/parquet/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java (original)
+++ hive/branches/parquet/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java Thu Mar  5 18:51:32 2015
@@ -352,7 +352,7 @@ public class TestBeeLineWithArgs {
     argList.add("--outputformat=dsv");
     argList.add("--delimiterForDSV=;");
 
-    final String EXPECTED_PATTERN = "1;NULL;defg;\"ab\"\"c\";1.0";
+    final String EXPECTED_PATTERN = "1;NULL;defg;ab\"c;1.0";
     testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
   }
 
@@ -365,7 +365,7 @@ public class TestBeeLineWithArgs {
     List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
     argList.add("--outputformat=tsv2");
 
-    final String EXPECTED_PATTERN = "1\tNULL\tdefg\t\"ab\"\"c\"\t1.0";
+    final String EXPECTED_PATTERN = "1\tNULL\tdefg\tab\"c\t1.0";
     testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
   }
 
@@ -382,6 +382,81 @@ public class TestBeeLineWithArgs {
     testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
   }
 
+  /**
+   * Test writing output using new TSV format
+   */
+  @Test
+  public void testTSV2OutputWithDoubleQuotes() throws Throwable {
+    String SCRIPT_TEXT = getFormatTestQueryForEableQuotes();
+    List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
+    argList.add("--outputformat=tsv2");
+    System.setProperty(SeparatedValuesOutputFormat.DISABLE_QUOTING_FOR_SV,"false");
+
+    final String EXPECTED_PATTERN = "1\tNULL\tdefg\t\"ab\"\"c\"\t\"\"\"aa\"\"\"\t1.0";
+    testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+    System.setProperty(SeparatedValuesOutputFormat.DISABLE_QUOTING_FOR_SV, "true");
+  }
+
+  /**
+   * Test writing output using TSV deprecated format
+   */
+  @Test
+  public void testTSVOutputWithDoubleQuotes() throws Throwable {
+    String SCRIPT_TEXT = getFormatTestQueryForEableQuotes();
+    List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
+    argList.add("--outputformat=tsv");
+    System.setProperty(SeparatedValuesOutputFormat.DISABLE_QUOTING_FOR_SV, "false");
+
+    final String EXPECTED_PATTERN = "'1'\t'NULL'\t'defg'\t'ab\"c'\t'\"aa\"'\t'1.0'";
+    testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+    System.setProperty(SeparatedValuesOutputFormat.DISABLE_QUOTING_FOR_SV, "true");
+  }
+
+  /**
+   * Test writing output using new CSV format
+   */
+  @Test
+  public void testCSV2OutputWithDoubleQuotes() throws Throwable {
+    String SCRIPT_TEXT = getFormatTestQueryForEableQuotes();
+    List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
+    argList.add("--outputformat=csv2");
+    System.setProperty(SeparatedValuesOutputFormat.DISABLE_QUOTING_FOR_SV, "false");
+
+    final String EXPECTED_PATTERN = "1,NULL,defg,\"ab\"\"c\",\"\"\"aa\"\"\",1.0";
+    testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+    System.setProperty(SeparatedValuesOutputFormat.DISABLE_QUOTING_FOR_SV, "true");
+  }
+
+  /**
+   * Test writing output using the deprecated CSV format with quoting enabled
+   */
+  @Test
+  public void testCSVOutputWithDoubleQuotes() throws Throwable {
+    String SCRIPT_TEXT = getFormatTestQueryForEnableQuotes();
+    List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
+    argList.add("--outputformat=csv");
+    System.setProperty(SeparatedValuesOutputFormat.DISABLE_QUOTING_FOR_SV, "false");
+
+    final String EXPECTED_PATTERN = "'1','NULL','defg','ab\"c','\"aa\"','1.0'";
+    testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+    System.setProperty(SeparatedValuesOutputFormat.DISABLE_QUOTING_FOR_SV, "true");
+  }
+
+  /**
+   * Test writing output using the DSV format with custom delimiter ";" and quoting enabled
+   */
+  @Test
+  public void testDSVOutputWithDoubleQuotes() throws Throwable {
+    String SCRIPT_TEXT = getFormatTestQueryForEnableQuotes();
+    List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
+    argList.add("--outputformat=dsv");
+    argList.add("--delimiterForDSV=;");
+    System.setProperty(SeparatedValuesOutputFormat.DISABLE_QUOTING_FOR_SV, "false");
+
+    final String EXPECTED_PATTERN = "1;NULL;defg;\"ab\"\"c\";\"\"\"aa\"\"\";1.0";
+    testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+    System.setProperty(SeparatedValuesOutputFormat.DISABLE_QUOTING_FOR_SV, "true");
+  }
 
   /**
   * Test writing output using the deprecated TSV format
@@ -428,6 +503,12 @@ public class TestBeeLineWithArgs {
     return "set hive.support.concurrency = false;\n" +
         "select 1, null, 'defg', 'ab\"c', 1.0D from " + tableName + " limit 1 ;\n";
   }
+
+  private String getFormatTestQueryForEnableQuotes() {
+    return "set hive.support.concurrency = false;\n" +
+        "select 1, null, 'defg', 'ab\"c', '\"aa\"', 1.0D from " + tableName + " limit 1 ;\n";
+  }
+
   /**
   * Select null from table, check if setting null to empty string works - using the beeline cmd line
   *  argument.

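The five tests added above flip SeparatedValuesOutputFormat.DISABLE_QUOTING_FOR_SV to "false" and assert that, with quoting enabled, the csv2/tsv2/dsv writers wrap each field in double quotes and double any quote character embedded in the value, while the deprecated csv/tsv writers keep their single-quote wrapping. A minimal sketch of that doubling rule, offered only as an illustration of what the expected patterns encode (this helper is hypothetical, not BeeLine code):

// Hypothetical illustration of the quoting rule the expected patterns rely on:
// wrap the field in the quote character and double any embedded occurrence.
public class QuotingSketch {
  static String quote(String field, char q) {
    String doubled = field.replace(String.valueOf(q), String.valueOf(q) + q);
    return q + doubled + q;
  }

  public static void main(String[] args) {
    System.out.println(quote("ab\"c", '"'));   // prints "ab""c"  (see testCSV2OutputWithDoubleQuotes)
    System.out.println(quote("\"aa\"", '"'));  // prints """aa"""
  }
}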
Modified: hive/branches/parquet/itests/src/test/resources/testconfiguration.properties
URL: http://svn.apache.org/viewvc/hive/branches/parquet/itests/src/test/resources/testconfiguration.properties?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/itests/src/test/resources/testconfiguration.properties (original)
+++ hive/branches/parquet/itests/src/test/resources/testconfiguration.properties Thu Mar  5 18:51:32 2015
@@ -140,6 +140,7 @@ minitez.query.files.shared=alter_merge_2
   orc_vectorization_ppd.q,\
   parallel.q,\
   ptf.q,\
+  ptf_matchpath.q,\
   ptf_streaming.q,\
   sample1.q,\
   selectDistinctStar.q,\
@@ -573,6 +574,7 @@ spark.query.files=add_part_multiple.q, \
   bucketsortoptimize_insert_6.q, \
   bucketsortoptimize_insert_7.q, \
   bucketsortoptimize_insert_8.q, \
+  cbo_gby_empty.q, \
   column_access_stats.q, \
   count.q, \
   create_merge_compressed.q, \
@@ -908,6 +910,7 @@ spark.query.files=add_part_multiple.q, \
   transform_ppr1.q, \
   transform_ppr2.q, \
   udf_example_add.q, \
+  udf_in_file.q, \
   union.q, \
   union10.q, \
   union11.q, \
@@ -954,6 +957,7 @@ spark.query.files=add_part_multiple.q, \
   union_remove_8.q, \
   union_remove_9.q, \
   uniquejoin.q, \
+  union_view.q, \
   varchar_join1.q, \
   vector_between_in.q, \
   vector_cast_constant.q, \

Modified: hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java (original)
+++ hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java Thu Mar  5 18:51:32 2015
@@ -76,6 +76,6 @@ public class GenericUDFEvaluateNPE exten
   @Override
   public String getDisplayString(String[] children) {
     assert (children.length == 1);
-    return "evaluate_npe(" + children[0] + ")";
+    return getStandardDisplayString("evaluate_npe", children);
   }
 }

Modified: hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java (original)
+++ hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java Thu Mar  5 18:51:32 2015
@@ -54,6 +54,6 @@ public class GenericUDFTestGetJavaBoolea
   @Override
   public String getDisplayString(String[] children) {
     assert (children.length == 1);
-    return "TestGetJavaBoolean(" + children[0] + ")";
+    return getStandardDisplayString("TestGetJavaBoolean", children);
   }
 }

Modified: hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java (original)
+++ hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java Thu Mar  5 18:51:32 2015
@@ -50,6 +50,6 @@ public class GenericUDFTestGetJavaString
   @Override
   public String getDisplayString(String[] children) {
     assert (children.length == 1);
-    return "GenericUDFTestGetJavaString(" + children[0] + ")";
+    return getStandardDisplayString("GenericUDFTestGetJavaString", children);
   }
 }

Modified: hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java (original)
+++ hive/branches/parquet/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java Thu Mar  5 18:51:32 2015
@@ -119,7 +119,6 @@ public class GenericUDFTestTranslate ext
   @Override
   public String getDisplayString(String[] children) {
     assert (children.length == 3);
-    return "translate(" + children[0] + "," + children[1] + "," + children[2]
-        + ")";
+    return getStandardDisplayString("translate", children, ",");
   }
 }

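The four test-UDF changes above replace hand-built display strings with GenericUDF's getStandardDisplayString() helper, so every UDF renders as "name(child, child, ...)" in explain output. A rough approximation of the formatting those calls rely on, given only as an illustration of the expected result (this is not the helper's actual source):

// Illustration only: approximates the text getStandardDisplayString(name, children, delim)
// is expected to produce for the calls shown above.
class DisplayStringSketch {
  static String standardDisplayString(String name, String[] children, String delim) {
    StringBuilder sb = new StringBuilder(name).append("(");
    for (int i = 0; i < children.length; i++) {
      if (i > 0) {
        sb.append(delim);
      }
      sb.append(children[i]);
    }
    return sb.append(")").toString();
  }

  public static void main(String[] args) {
    // Prints "translate(a,b,c)" -- the same text the removed concatenation built by hand.
    System.out.println(standardDisplayString("translate", new String[] {"a", "b", "c"}, ","));
  }
}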
Modified: hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java (original)
+++ hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java Thu Mar  5 18:51:32 2015
@@ -23,6 +23,7 @@ import java.util.List;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PartitionSpec;
@@ -37,7 +38,7 @@ public class DefaultMetaStoreFilterHookI
   }
 
   @Override
-  public List<String> filterDatabases(List<String> dbList) {
+  public List<String> filterDatabases(List<String> dbList) throws MetaException {
     return dbList;
   }
 
@@ -47,7 +48,7 @@ public class DefaultMetaStoreFilterHookI
   }
 
   @Override
-  public List<String> filterTableNames(String dbName, List<String> tableList) {
+  public List<String> filterTableNames(String dbName, List<String> tableList) throws MetaException {
     return tableList;
   }
 
@@ -57,18 +58,18 @@ public class DefaultMetaStoreFilterHookI
   }
 
   @Override
-  public List<Table> filterTables(List<Table> tableList) {
+  public List<Table> filterTables(List<Table> tableList) throws MetaException {
     return tableList;
   }
 
   @Override
-  public List<Partition> filterPartitions(List<Partition> partitionList) {
+  public List<Partition> filterPartitions(List<Partition> partitionList) throws MetaException {
     return partitionList;
   }
 
   @Override
   public List<PartitionSpec> filterPartitionSpecs(
-      List<PartitionSpec> partitionSpecList) {
+      List<PartitionSpec> partitionSpecList) throws MetaException {
     return partitionSpecList;
   }
 
@@ -79,7 +80,7 @@ public class DefaultMetaStoreFilterHookI
 
   @Override
   public List<String> filterPartitionNames(String dbName, String tblName,
-      List<String> partitionNames) {
+      List<String> partitionNames) throws MetaException {
     return partitionNames;
   }
 
@@ -90,12 +91,12 @@ public class DefaultMetaStoreFilterHookI
 
   @Override
   public List<String> filterIndexNames(String dbName, String tblName,
-      List<String> indexList) {
+      List<String> indexList) throws MetaException {
     return indexList;
   }
 
   @Override
-  public List<Index> filterIndexes(List<Index> indexeList) {
+  public List<Index> filterIndexes(List<Index> indexeList) throws MetaException {
     return indexeList;
   }
 }

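The hunk above only widens the default hook's method signatures to declare MetaException; the pass-through behaviour is unchanged. The wider contract lets custom MetaStoreFilterHook implementations fail a listing instead of silently filtering it. A hypothetical example, assuming the base class keeps its HiveConf constructor (the class name and the "analytics" policy below are invented for illustration):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl;
import org.apache.hadoop.hive.metastore.api.MetaException;

// Hypothetical filter hook: only the "analytics" database is visible, and a null
// listing is now reported as a MetaException rather than passed through.
public class SingleDbFilterHook extends DefaultMetaStoreFilterHookImpl {

  public SingleDbFilterHook(HiveConf conf) {
    super(conf);
  }

  @Override
  public List<String> filterDatabases(List<String> dbList) throws MetaException {
    if (dbList == null) {
      throw new MetaException("database list was null");
    }
    List<String> filtered = new ArrayList<String>();
    for (String db : dbList) {
      if ("analytics".equals(db)) {
        filtered.add(db);
      }
    }
    return filtered;
  }
}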
Modified: hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Thu Mar  5 18:51:32 2015
@@ -220,6 +220,7 @@ import org.apache.thrift.transport.TTran
 
 import com.facebook.fb303.FacebookBase;
 import com.facebook.fb303.fb_status;
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Splitter;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableListMultimap;
@@ -237,6 +238,12 @@ public class HiveMetaStore extends Thrif
   // embedded metastore or a remote one
   private static boolean isMetaStoreRemote = false;
 
+  // Used for testing to simulate method timeout.
+  @VisibleForTesting
+  static boolean TEST_TIMEOUT_ENABLED = false;
+  @VisibleForTesting
+  static long TEST_TIMEOUT_VALUE = -1;
+
   /** A fixed date format to be used for hive partition column values. */
   public static final ThreadLocal<DateFormat> PARTITION_DATE_FORMAT =
        new ThreadLocal<DateFormat>() {
@@ -346,9 +353,11 @@ public class HiveMetaStore extends Thrif
       final Formatter fmt = auditFormatter.get();
       ((StringBuilder) fmt.out()).setLength(0);
 
-      String address;
+      String address = null;
       if (useSasl) {
-        address = saslServer.getRemoteAddress().toString();
+        if (saslServer != null && saslServer.getRemoteAddress() != null) {
+          address = String.valueOf(saslServer.getRemoteAddress());
+        }
       } else {
         address = getIpAddress();
       }
@@ -468,6 +477,7 @@ public class HiveMetaStore extends Thrif
           hiveConf.getVar(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS));
       listeners = MetaStoreUtils.getMetaStoreListeners(MetaStoreEventListener.class, hiveConf,
           hiveConf.getVar(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS));
+      listeners.add(new SessionPropertiesListener(hiveConf));
       endFunctionListeners = MetaStoreUtils.getMetaStoreListeners(
           MetaStoreEndFunctionListener.class, hiveConf,
           hiveConf.getVar(HiveConf.ConfVars.METASTORE_END_FUNCTION_LISTENERS));
@@ -881,6 +891,15 @@ public class HiveMetaStore extends Thrif
           // expected
         }
 
+        if (TEST_TIMEOUT_ENABLED) {
+          try {
+            Thread.sleep(TEST_TIMEOUT_VALUE);
+          } catch (InterruptedException e) {
+            // do nothing
+          }
+          Deadline.checkTimeout();
+        }
+
         create_database_core(getMS(), db);
         success = true;
       } catch (Exception e) {
@@ -1516,8 +1535,16 @@ public class HiveMetaStore extends Thrif
         if (!success) {
           ms.rollbackTransaction();
         } else if (deleteData && !isExternal) {
-          boolean ifPurge = envContext != null &&
-              Boolean.parseBoolean(envContext.getProperties().get("ifPurge"));
+          // Data needs deletion (deleteData == true and the table is not external).
+          // Decide whether the trash directory may be skipped. Trash is skipped iff either:
+          //  1. the user specified PURGE on the command line (ifPurge in the EnvironmentContext), or
+          //  2. the table is set to auto-purge (table property auto.purge=true).
+          boolean ifPurge = ((envContext != null) && Boolean.parseBoolean(envContext.getProperties().get("ifPurge")))
+              || (tbl.isSetParameters() && "true".equalsIgnoreCase(tbl.getParameters().get("auto.purge")));
           // Delete the data in the partitions which have other locations
           deletePartitionData(partPaths, ifPurge);
           // Delete the data in the table
@@ -2559,15 +2586,31 @@ public class HiveMetaStore extends Thrif
           ms.rollbackTransaction();
         } else if (deleteData && ((partPath != null) || (archiveParentDir != null))) {
           if (tbl != null && !isExternal(tbl)) {
+            // Data needs deletion (deleteData == true and the table is not external).
+            // Decide whether the trash directory may be skipped. Trash is skipped iff either:
+            //  1. the user specified PURGE on the command line (ifPurge in the EnvironmentContext), or
+            //  2. the table is set to auto-purge (table property auto.purge=true).
+            boolean mustPurge = ((envContext != null) && Boolean.parseBoolean(envContext.getProperties().get("ifPurge")))
+                || (tbl.isSetParameters() && "true".equalsIgnoreCase(tbl.getParameters().get("auto.purge")));
+            if (mustPurge) {
+              LOG.info("dropPartition() will purge " + partPath + " directly, skipping trash.");
+            } else {
+              LOG.info("dropPartition() will move " + partPath + " to trash-directory.");
+            }
             // Archived partitions have har:/to_har_file as their location.
             // The original directory was saved in params
             if (isArchived) {
               assert (archiveParentDir != null);
-              wh.deleteDir(archiveParentDir, true);
+              wh.deleteDir(archiveParentDir, true, mustPurge);
             } else {
               assert (partPath != null);
-              wh.deleteDir(partPath, true);
-              deleteParentRecursive(partPath.getParent(), part_vals.size() - 1);
+              wh.deleteDir(partPath, true, mustPurge);
+              deleteParentRecursive(partPath.getParent(), part_vals.size() - 1, mustPurge);
             }
             // ok even if the data is not deleted
           }
@@ -2582,10 +2625,10 @@ public class HiveMetaStore extends Thrif
       return true;
     }
 
-    private void deleteParentRecursive(Path parent, int depth) throws IOException, MetaException {
+    private void deleteParentRecursive(Path parent, int depth, boolean mustPurge) throws IOException, MetaException {
       if (depth > 0 && parent != null && wh.isWritable(parent) && wh.isEmpty(parent)) {
-        wh.deleteDir(parent, true);
-        deleteParentRecursive(parent.getParent(), depth - 1);
+        wh.deleteDir(parent, true, mustPurge);
+        deleteParentRecursive(parent.getParent(), depth - 1, mustPurge);
       }
     }
 
@@ -2712,15 +2755,28 @@ public class HiveMetaStore extends Thrif
         if (!success) {
           ms.rollbackTransaction();
         } else if (deleteData && !isExternal(tbl)) {
+          // Data needs deletion (deleteData == true and the table is not external).
+          // Decide whether the trash directory may be skipped. Trash is skipped iff either:
+          //  1. the user specified PURGE on the command line (ifPurge in the EnvironmentContext), or
+          //  2. the table is set to auto-purge (table property auto.purge=true).
+          boolean mustPurge = ((envContext != null) && Boolean.parseBoolean(envContext.getProperties().get("ifPurge")))
+              || (tbl.isSetParameters() && "true".equalsIgnoreCase(tbl.getParameters().get("auto.purge")));
+          LOG.info(mustPurge
+              ? "dropPartition() will purge partition-directories directly, skipping trash."
+              : "dropPartition() will move partition-directories to trash-directory.");
           // Archived partitions have har:/to_har_file as their location.
           // The original directory was saved in params
           for (Path path : archToDelete) {
-            wh.deleteDir(path, true);
+            wh.deleteDir(path, true, mustPurge);
           }
           for (PathAndPartValSize p : dirsToDelete) {
-            wh.deleteDir(p.path, true);
+            wh.deleteDir(p.path, true, mustPurge);
             try {
-              deleteParentRecursive(p.path.getParent(), p.partValSize - 1);
+              deleteParentRecursive(p.path.getParent(), p.partValSize - 1, mustPurge);
             } catch (IOException ex) {
               LOG.warn("Error from deleteParentRecursive", ex);
               throw new MetaException("Failed to delete parent: " + ex.getMessage());

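The repeated ifPurge/mustPurge expression above is the heart of the change: data that is being deleted anyway bypasses the trash directory when the client asked for PURGE or the table carries the auto.purge property. Restated as a standalone helper purely for readability (a sketch, not code from the patch; EnvironmentContext and Table are the metastore Thrift classes):

import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
import org.apache.hadoop.hive.metastore.api.Table;

// Sketch of the skip-trash decision used in the dropTable/dropPartition paths above.
class PurgeDecisionSketch {
  static boolean mustPurge(EnvironmentContext envContext, Table tbl) {
    // An explicit PURGE from the client arrives as "ifPurge" in the EnvironmentContext.
    boolean purgeRequested = (envContext != null)
        && Boolean.parseBoolean(envContext.getProperties().get("ifPurge"));
    // A table can opt in permanently via the auto.purge table property.
    boolean autoPurgeTable = (tbl != null) && tbl.isSetParameters()
        && "true".equalsIgnoreCase(tbl.getParameters().get("auto.purge"));
    return purgeRequested || autoPurgeTable;
  }
}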
Modified: hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Thu Mar  5 18:51:32 2015
@@ -34,6 +34,7 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -729,7 +730,6 @@ public class HiveMetaStoreClient impleme
     client.drop_database(name, deleteData, cascade);
   }
 
-
   /**
    * @param tbl_name
    * @param db_name
@@ -758,6 +758,21 @@ public class HiveMetaStoreClient impleme
     return dropPartition(dbName, tableName, partName, deleteData, null);
   }
 
+  private static EnvironmentContext getEnvironmentContextWithIfPurgeSet() {
+    Map<String, String> warehouseOptions = new HashMap<String, String>();
+    warehouseOptions.put("ifPurge", "TRUE");
+    return new EnvironmentContext(warehouseOptions);
+  }
+
+  /*
+  public boolean dropPartition(String dbName, String tableName, String partName, boolean deleteData, boolean ifPurge)
+      throws NoSuchObjectException, MetaException, TException {
+
+    return dropPartition(dbName, tableName, partName, deleteData,
+                         ifPurge? getEnvironmentContextWithIfPurgeSet() : null);
+  }
+  */
+
   public boolean dropPartition(String dbName, String tableName, String partName, boolean deleteData,
       EnvironmentContext envContext) throws NoSuchObjectException, MetaException, TException {
     return client.drop_partition_by_name_with_environment_context(dbName, tableName, partName,
@@ -784,6 +799,13 @@ public class HiveMetaStoreClient impleme
     return dropPartition(db_name, tbl_name, part_vals, deleteData, null);
   }
 
+  @Override
+  public boolean dropPartition(String db_name, String tbl_name,
+      List<String> part_vals, PartitionDropOptions options) throws TException {
+    return dropPartition(db_name, tbl_name, part_vals, options.deleteData,
+                         options.purgeData? getEnvironmentContextWithIfPurgeSet() : null);
+  }
+
   public boolean dropPartition(String db_name, String tbl_name, List<String> part_vals,
       boolean deleteData, EnvironmentContext envContext) throws NoSuchObjectException,
       MetaException, TException {
@@ -793,8 +815,8 @@ public class HiveMetaStoreClient impleme
 
   @Override
   public List<Partition> dropPartitions(String dbName, String tblName,
-      List<ObjectPair<Integer, byte[]>> partExprs, boolean deleteData, boolean ignoreProtection,
-      boolean ifExists) throws NoSuchObjectException, MetaException, TException {
+                                        List<ObjectPair<Integer, byte[]>> partExprs, PartitionDropOptions options)
+      throws TException {
     RequestPartsSpec rps = new RequestPartsSpec();
     List<DropPartitionsExpr> exprs = new ArrayList<DropPartitionsExpr>(partExprs.size());
     for (ObjectPair<Integer, byte[]> partExpr : partExprs) {
@@ -805,13 +827,43 @@ public class HiveMetaStoreClient impleme
     }
     rps.setExprs(exprs);
     DropPartitionsRequest req = new DropPartitionsRequest(dbName, tblName, rps);
-    req.setDeleteData(deleteData);
-    req.setIgnoreProtection(ignoreProtection);
-    req.setNeedResult(true);
-    req.setIfExists(ifExists);
+    req.setDeleteData(options.deleteData);
+    req.setIgnoreProtection(options.ignoreProtection);
+    req.setNeedResult(options.returnResults);
+    req.setIfExists(options.ifExists);
+    if (options.purgeData) {
+      LOG.info("Dropped partitions will be purged!");
+      req.setEnvironmentContext(getEnvironmentContextWithIfPurgeSet());
+    }
     return client.drop_partitions_req(req).getPartitions();
   }
 
+  @Override
+  public List<Partition> dropPartitions(String dbName, String tblName,
+      List<ObjectPair<Integer, byte[]>> partExprs, boolean deleteData, boolean ignoreProtection,
+      boolean ifExists, boolean needResult) throws NoSuchObjectException, MetaException, TException {
+
+    return dropPartitions(dbName, tblName, partExprs,
+                          PartitionDropOptions.instance()
+                                              .deleteData(deleteData)
+                                              .ignoreProtection(ignoreProtection)
+                                              .ifExists(ifExists)
+                                              .returnResults(needResult));
+
+  }
+
+  @Override
+  public List<Partition> dropPartitions(String dbName, String tblName,
+      List<ObjectPair<Integer, byte[]>> partExprs, boolean deleteData, boolean ignoreProtection,
+      boolean ifExists) throws NoSuchObjectException, MetaException, TException {
+    // By default, we need the results from dropPartitions();
+    return dropPartitions(dbName, tblName, partExprs,
+                          PartitionDropOptions.instance()
+                                              .deleteData(deleteData)
+                                              .ignoreProtection(ignoreProtection)
+                                              .ifExists(ifExists));
+  }
+
   /**
    * {@inheritDoc}
    * @see #dropTable(String, String, boolean, boolean, EnvironmentContext)

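From the caller's side, the PartitionDropOptions object used above replaces the growing list of boolean flags. A hypothetical usage sketch follows; the builder methods are the ones visible in this patch plus purgeData(), which is assumed to be the setter for the purgeData field that dropPartitions() reads, and the package locations are assumed from the surrounding imports:

import java.util.List;

import org.apache.hadoop.hive.common.ObjectPair;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.PartitionDropOptions;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.thrift.TException;

// Hypothetical caller-side usage of the options-based dropPartitions() overload.
class DropPartitionsExample {
  static List<Partition> dropMatchingPartitions(IMetaStoreClient client, String dbName,
      String tblName, List<ObjectPair<Integer, byte[]>> partExprs) throws TException {
    return client.dropPartitions(dbName, tblName, partExprs,
        PartitionDropOptions.instance()
            .deleteData(true)    // remove the partition data, not just the metadata
            .ifExists(true)      // do not fail when nothing matches the expressions
            .purgeData(true));   // assumed setter: skip trash by sending ifPurge=TRUE
  }
}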
Modified: hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (original)
+++ hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java Thu Mar  5 18:51:32 2015
@@ -23,6 +23,7 @@ import org.apache.hadoop.hive.common.Val
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.CompactionType;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
+import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
 import org.apache.hadoop.hive.metastore.api.FireEventRequest;
 import org.apache.hadoop.hive.metastore.api.FireEventResponse;
 import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse;
@@ -660,10 +661,39 @@ public interface IMetaStoreClient {
       List<String> part_vals, boolean deleteData) throws NoSuchObjectException,
       MetaException, TException;
 
+  /**
+   * Drop a single partition, with the option to purge the partition data directly
+   * rather than move it to trash.
+   * @param db_name Name of the database.
+   * @param tbl_name Name of the table.
+   * @param part_vals Values identifying the partition being dropped.
+   * @param options PartitionDropOptions for the operation.
+   * @return True if the partition was dropped, else false.
+   * @throws TException On failure.
+   */
+  boolean dropPartition(String db_name, String tbl_name, List<String> part_vals,
+                        PartitionDropOptions options) throws TException;
+
   List<Partition> dropPartitions(String dbName, String tblName,
       List<ObjectPair<Integer, byte[]>> partExprs, boolean deleteData, boolean ignoreProtection,
       boolean ifExists) throws NoSuchObjectException, MetaException, TException;
 
+  List<Partition> dropPartitions(String dbName, String tblName,
+      List<ObjectPair<Integer, byte[]>> partExprs, boolean deleteData, boolean ignoreProtection,
+      boolean ifExists, boolean needResults) throws NoSuchObjectException, MetaException, TException;
+
+  /**
+   * Generalization of dropPartitions() that takes a PartitionDropOptions object.
+   * @param dbName Name of the database
+   * @param tblName Name of the table
+   * @param partExprs Partition specification (expressions)
+   * @param options Options controlling how the partitions are dropped
+   * @return List of Partitions dropped
+   * @throws TException On failure
+   */
+  List<Partition> dropPartitions(String dbName, String tblName,
+                                 List<ObjectPair<Integer, byte[]>> partExprs, PartitionDropOptions options) throws TException;
+
   boolean dropPartition(String db_name, String tbl_name,
       String name, boolean deleteData) throws NoSuchObjectException,
       MetaException, TException;

Modified: hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java?rev=1664455&r1=1664454&r2=1664455&view=diff
==============================================================================
--- hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java (original)
+++ hive/branches/parquet/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java Thu Mar  5 18:51:32 2015
@@ -32,7 +32,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 
-import javax.jdo.JDODataStoreException;
 import javax.jdo.PersistenceManager;
 import javax.jdo.Query;
 import javax.jdo.Transaction;
@@ -41,7 +40,6 @@ import javax.jdo.datastore.JDOConnection
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.derby.iapi.error.StandardException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -497,6 +495,7 @@ class MetaStoreDirectSql {
     @SuppressWarnings("unchecked")
     List<Object[]> sqlResult = executeWithArray(query, null, queryText);
     long queryTime = doTrace ? System.nanoTime() : 0;
+    Deadline.checkTimeout();
 
     // Read all the fields and create partitions, SDs and serdes.
     TreeMap<Long, Partition> partitions = new TreeMap<Long, Partition>();
@@ -585,6 +584,7 @@ class MetaStoreDirectSql {
       serde.setSerializationLib((String)fields[13]);
       serdeSb.append(serdeId).append(",");
       sd.setSerdeInfo(serde);
+      Deadline.checkTimeout();
     }
     query.closeAll();
     timingTrace(doTrace, queryText, start, queryTime);

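Together with the TEST_TIMEOUT_ENABLED hooks added to HiveMetaStore above, the extra Deadline.checkTimeout() calls make the long direct-SQL loops cooperate with the metastore's per-call deadline: the check is cheap enough to run once per fetched row, so a slow or oversized query fails fast instead of holding the metastore thread. The general pattern, in the abstract (DeadlineSketch below is illustrative and is not the metastore's Deadline class):

// Illustration of cooperative timeout checking inside a long-running loop.
final class DeadlineSketch {
  private final long expiresAtMillis;

  DeadlineSketch(long timeoutMillis) {
    this.expiresAtMillis = System.currentTimeMillis() + timeoutMillis;
  }

  // Called periodically from tight loops; throws once the time budget is spent.
  void checkTimeout() {
    if (System.currentTimeMillis() > expiresAtMillis) {
      throw new IllegalStateException("metastore operation exceeded its deadline");
    }
  }
}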

