hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From hashut...@apache.org
Subject svn commit: r1660751 [23/23] - in /hive/branches/cbo: ./ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/ accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/ accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/ accumulo-...
Date Wed, 18 Feb 2015 22:28:40 GMT
Modified: hive/branches/cbo/ql/src/test/results/clientpositive/windowing_streaming.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/windowing_streaming.q.out?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/windowing_streaming.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/windowing_streaming.q.out Wed Feb 18 22:28:35 2015
@@ -80,6 +80,25 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
           PTF Operator
+            Function definitions:
+                Input definition
+                  input alias: ptf_0
+                  output shape: _col0: string, _col1: string
+                  type: WINDOWING
+                Windowing table definition
+                  input alias: ptf_1
+                  name: windowingtablefunction
+                  order by: _col0
+                  partition by: _col1
+                  raw input shape:
+                  window functions:
+                      window function definition
+                        alias: _wcol0
+                        arguments: _col0
+                        name: rank
+                        window function: GenericUDAFRankEvaluator
+                        window frame: PRECEDING(MAX)~FOLLOWING(MAX)
+                        isPivotResult: true
             Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: _col1 (type: string), _wcol0 (type: int)
@@ -136,6 +155,25 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
           PTF Operator
+            Function definitions:
+                Input definition
+                  input alias: ptf_0
+                  output shape: _col0: string, _col1: string
+                  type: WINDOWING
+                Windowing table definition
+                  input alias: ptf_1
+                  name: windowingtablefunction
+                  order by: _col0
+                  partition by: _col1
+                  raw input shape:
+                  window functions:
+                      window function definition
+                        alias: _wcol0
+                        arguments: _col0
+                        name: rank
+                        window function: GenericUDAFRankEvaluator
+                        window frame: PRECEDING(MAX)~FOLLOWING(MAX)
+                        isPivotResult: true
             Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (_wcol0 < 4) (type: boolean)
@@ -290,6 +328,25 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
           PTF Operator
+            Function definitions:
+                Input definition
+                  input alias: ptf_0
+                  output shape: _col0: tinyint, _col1: double
+                  type: WINDOWING
+                Windowing table definition
+                  input alias: ptf_1
+                  name: windowingtablefunction
+                  order by: _col1
+                  partition by: _col0
+                  raw input shape:
+                  window functions:
+                      window function definition
+                        alias: _wcol0
+                        arguments: _col1
+                        name: rank
+                        window function: GenericUDAFRankEvaluator
+                        window frame: PRECEDING(MAX)~FOLLOWING(MAX)
+                        isPivotResult: true
             Statistics: Num rows: 12288 Data size: 2641964 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (_wcol0 < 5) (type: boolean)

Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/DelimitedJSONSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/DelimitedJSONSerDe.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/DelimitedJSONSerDe.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/DelimitedJSONSerDe.java Wed Feb 18 22:28:35 2015
@@ -24,6 +24,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -53,7 +54,7 @@ public class DelimitedJSONSerDe extends
 
   @Override
   protected void serializeField(ByteStream.Output out, Object obj, ObjectInspector objInspector,
-      SerDeParameters serdeParams) throws SerDeException {
+      LazySerDeParameters serdeParams) throws SerDeException {
     if (!objInspector.getCategory().equals(Category.PRIMITIVE) || (objInspector.getTypeName().equalsIgnoreCase(serdeConstants.BINARY_TYPE_NAME))) {
       //do this for all complex types and binary
       try {

Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java Wed Feb 18 22:28:35 2015
@@ -29,14 +29,12 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
-import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParametersImpl;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -60,7 +58,9 @@ import org.apache.hadoop.io.Writable;
     serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
     serdeConstants.ESCAPE_CHAR,
     serdeConstants.SERIALIZATION_ENCODING,
-    LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS})
+    LazySerDeParameters.SERIALIZATION_EXTEND_NESTING_LEVELS,
+    LazySerDeParameters.SERIALIZATION_EXTEND_ADDITIONAL_NESTING_LEVELS
+    })
 public class ColumnarSerDe extends ColumnarSerDeBase {
 
   @Override
@@ -82,7 +82,7 @@ public class ColumnarSerDe extends Colum
   public ColumnarSerDe() throws SerDeException {
   }
 
-  protected SerDeParameters serdeParams = null;
+  protected LazySerDeParameters serdeParams = null;
 
   /**
    * Initialize the SerDe given the parameters.
@@ -92,7 +92,7 @@ public class ColumnarSerDe extends Colum
   @Override
   public void initialize(Configuration conf, Properties tbl) throws SerDeException {
 
-    serdeParams = LazySimpleSerDe.initSerdeParams(conf, tbl, getClass().getName());
+    serdeParams = new LazySerDeParameters(conf, tbl, getClass().getName());
 
     // Create the ObjectInspectors for the fields. Note: Currently
     // ColumnarObject uses same ObjectInpector as LazyStruct

Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyBinaryColumnarSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyBinaryColumnarSerDe.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyBinaryColumnarSerDe.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyBinaryColumnarSerDe.java Wed Feb 18 22:28:35 2015
@@ -26,8 +26,7 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
-import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryFactory;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -63,8 +62,8 @@ public class LazyBinaryColumnarSerDe ext
 
   @Override
   public void initialize(Configuration conf, Properties tbl) throws SerDeException {
-    SerDeParameters serdeParams = new SerDeParameters();
-    LazyUtils.extractColumnInfo(tbl, serdeParams, getClass().getName());
+    LazySerDeParameters serdeParams = new LazySerDeParameters(conf, tbl, getClass().getName());
+    
     columnNames = serdeParams.getColumnNames();
     columnTypes = serdeParams.getColumnTypes();
 

Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java Wed Feb 18 22:28:35 2015
@@ -220,9 +220,9 @@ public final class LazyFactory {
    */
   @Deprecated
   public static ObjectInspector createLazyObjectInspector(TypeInfo typeInfo,
-      byte[] separator, int separatorIndex, Text nullSequence, boolean escaped,
+  		byte[] separators, int separatorIndex, Text nullSequence, boolean escaped,
       byte escapeChar, ObjectInspectorOptions option) throws SerDeException {
-    return createLazyObjectInspector(typeInfo, separator, separatorIndex, nullSequence,
+    return createLazyObjectInspector(typeInfo, separators, separatorIndex, nullSequence,
         escaped, escapeChar, false, option);
   }
   
@@ -245,9 +245,9 @@ public final class LazyFactory {
    */
   @Deprecated
   public static ObjectInspector createLazyObjectInspector(TypeInfo typeInfo,
-      byte[] separator, int separatorIndex, Text nullSequence, boolean escaped,
+  		byte[] separators, int separatorIndex, Text nullSequence, boolean escaped,
       byte escapeChar) throws SerDeException {
-    return createLazyObjectInspector(typeInfo, separator, separatorIndex, nullSequence,
+    return createLazyObjectInspector(typeInfo, separators, separatorIndex, nullSequence,
         escaped, escapeChar, false, ObjectInspectorOptions.JAVA);
   }
 
@@ -267,9 +267,9 @@ public final class LazyFactory {
    */
   @Deprecated
   public static ObjectInspector createLazyObjectInspector(TypeInfo typeInfo,
-      byte[] separator, int separatorIndex, Text nullSequence, boolean escaped,
+  		byte[] separators, int separatorIndex, Text nullSequence, boolean escaped,
       byte escapeChar, boolean extendedBooleanLiteral) throws SerDeException {
-    return createLazyObjectInspector(typeInfo, separator, separatorIndex, nullSequence, escaped,
+    return createLazyObjectInspector(typeInfo, separators, separatorIndex, nullSequence, escaped,
         escapeChar, extendedBooleanLiteral, ObjectInspectorOptions.JAVA);
   }
   
@@ -289,10 +289,10 @@ public final class LazyFactory {
    */
   @Deprecated
   public static ObjectInspector createLazyObjectInspector(TypeInfo typeInfo,
-      byte[] separator, int separatorIndex, Text nullSequence, boolean escaped,
+  		byte[] separators, int separatorIndex, Text nullSequence, boolean escaped,
       byte escapeChar, boolean extendedBooleanLiteral, ObjectInspectorOptions option) throws SerDeException {
     LazyObjectInspectorParametersImpl lazyParams = new LazyObjectInspectorParametersImpl(
-        escaped, escapeChar, extendedBooleanLiteral, null, separator, nullSequence);
+        escaped, escapeChar, extendedBooleanLiteral, null, separators, nullSequence);
     return createLazyObjectInspector(typeInfo, separatorIndex, lazyParams, option);
   }
 
@@ -332,7 +332,7 @@ public final class LazyFactory {
           .getListElementTypeInfo(), separatorIndex + 1,
           lazyParams, option), LazyUtils.getSeparator(lazyParams.getSeparators(), separatorIndex),
           lazyParams);
-    case STRUCT:
+   case STRUCT:
       StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
       List<String> fieldNames = structTypeInfo.getAllStructFieldNames();
       List<TypeInfo> fieldTypeInfos = structTypeInfo
@@ -347,7 +347,7 @@ public final class LazyFactory {
           fieldNames, fieldObjectInspectors, null,
           LazyUtils.getSeparator(lazyParams.getSeparators(), separatorIndex),
           lazyParams, option);
-    case UNION:
+   case UNION:
       UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
       List<ObjectInspector> lazyOIs = new ArrayList<ObjectInspector>();
       for (TypeInfo uti : unionTypeInfo.getAllUnionObjectTypeInfos()) {
@@ -357,7 +357,7 @@ public final class LazyFactory {
       return LazyObjectInspectorFactory.getLazyUnionObjectInspector(lazyOIs,
           LazyUtils.getSeparator(lazyParams.getSeparators(), separatorIndex),
           lazyParams);
-    }
+   }
 
     throw new RuntimeException("Hive LazySerDe Internal error.");
   }
@@ -396,7 +396,7 @@ public final class LazyFactory {
    */
   @Deprecated
   public static ObjectInspector createLazyStructInspector(
-      List<String> columnNames, List<TypeInfo> typeInfos, byte[] separators,
+     List<String> columnNames, List<TypeInfo> typeInfos, byte[] separators,
       Text nullSequence, boolean lastColumnTakesRest, boolean escaped,
       byte escapeChar, boolean extendedBooleanLiteral) throws SerDeException {
     LazyObjectInspectorParametersImpl lazyParams = new LazyObjectInspectorParametersImpl(

Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java Wed Feb 18 22:28:35 2015
@@ -19,13 +19,14 @@
 package org.apache.hadoop.hive.serde2.lazy;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
+import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -33,7 +34,6 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractEncodingAwareSerDe;
 import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
@@ -58,6 +58,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hive.common.util.HiveStringUtils;
 
+
 /**
  * LazySimpleSerDe can be used to read the same data format as
  * MetadataTypedColumnsetSerDe and TCTLSeparatedProtocol.
@@ -75,16 +76,15 @@ import org.apache.hive.common.util.HiveS
     serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
     serdeConstants.ESCAPE_CHAR,
     serdeConstants.SERIALIZATION_ENCODING,
-    LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS})
+    LazySerDeParameters.SERIALIZATION_EXTEND_NESTING_LEVELS,
+    LazySerDeParameters.SERIALIZATION_EXTEND_ADDITIONAL_NESTING_LEVELS
+    })
 public class LazySimpleSerDe extends AbstractEncodingAwareSerDe {
 
   public static final Log LOG = LogFactory.getLog(LazySimpleSerDe.class
       .getName());
 
-  public static final String SERIALIZATION_EXTEND_NESTING_LEVELS
-    = "hive.serialization.extend.nesting.levels";
-
-  public static final byte[] DefaultSeparators = {(byte) 1, (byte) 2, (byte) 3};
+  private LazySerDeParameters serdeParams = null;
 
   private ObjectInspector cachedObjectInspector;
 
@@ -97,11 +97,11 @@ public class LazySimpleSerDe extends Abs
   public String toString() {
     return getClass().toString()
         + "["
-        + Arrays.asList(serdeParams.separators)
+        + Arrays.asList(serdeParams.getSeparators())
         + ":"
-        + ((StructTypeInfo) serdeParams.rowTypeInfo).getAllStructFieldNames()
+        + ((StructTypeInfo) serdeParams.getRowTypeInfo()).getAllStructFieldNames()
         + ":"
-        + ((StructTypeInfo) serdeParams.rowTypeInfo)
+        + ((StructTypeInfo) serdeParams.getRowTypeInfo())
             .getAllStructFieldTypeInfos() + "]";
   }
 
@@ -109,100 +109,6 @@ public class LazySimpleSerDe extends Abs
   }
 
   /**
-   * Return the byte value of the number string.
-   *
-   * @param altValue
-   *          The string containing a number.
-   * @param defaultVal
-   *          If the altValue does not represent a number, return the
-   *          defaultVal.
-   */
-  public static byte getByte(String altValue, byte defaultVal) {
-    if (altValue != null && altValue.length() > 0) {
-      try {
-        return Byte.valueOf(altValue).byteValue();
-      } catch (NumberFormatException e) {
-        return (byte) altValue.charAt(0);
-      }
-    }
-    return defaultVal;
-  }
-
-  /**
-   * SerDeParameters.
-   *
-   */
-  public static class SerDeParameters implements LazyObjectInspectorParameters {
-    byte[] separators = DefaultSeparators;
-    String nullString;
-    Text nullSequence;
-    TypeInfo rowTypeInfo;
-    boolean lastColumnTakesRest;
-    List<String> columnNames;
-    List<TypeInfo> columnTypes;
-
-    boolean escaped;
-    byte escapeChar;
-    boolean[] needsEscape;
-
-    boolean extendedBooleanLiteral;
-    List<String> timestampFormats;
-
-    public SerDeParameters() {
-    }
-
-    public List<TypeInfo> getColumnTypes() {
-      return columnTypes;
-    }
-
-    public List<String> getColumnNames() {
-      return columnNames;
-    }
-
-    public byte[] getSeparators() {
-      return separators;
-    }
-
-    public String getNullString() {
-      return nullString;
-    }
-
-    public Text getNullSequence() {
-      return nullSequence;
-    }
-
-    public TypeInfo getRowTypeInfo() {
-      return rowTypeInfo;
-    }
-
-    public boolean isLastColumnTakesRest() {
-      return lastColumnTakesRest;
-    }
-
-    public boolean isEscaped() {
-      return escaped;
-    }
-
-    public byte getEscapeChar() {
-      return escapeChar;
-    }
-
-    public boolean[] getNeedsEscape() {
-      return needsEscape;
-    }
-
-    public boolean isExtendedBooleanLiteral() {
-      return extendedBooleanLiteral;
-    }
-
-    public List<String> getTimestampFormats() {
-      return timestampFormats;
-    }
-  }
-
-  SerDeParameters serdeParams = null;
-
-  /**
    * Initialize the SerDe given the parameters. serialization.format: separator
    * char or byte code (only supports byte-value up to 127) columns:
    * ","-separated column names columns.types: ",", ":", or ";"-separated column
@@ -216,8 +122,7 @@ public class LazySimpleSerDe extends Abs
 
     super.initialize(job, tbl);
 
-    serdeParams = LazySimpleSerDe.initSerdeParams(job, tbl, getClass()
-        .getName());
+    serdeParams = new LazySerDeParameters(job, tbl, getClass().getName());
 
     // Create the ObjectInspectors for the fields
     cachedObjectInspector = LazyFactory.createLazyStructInspector(serdeParams
@@ -227,10 +132,10 @@ public class LazySimpleSerDe extends Abs
         .createLazyObject(cachedObjectInspector);
 
     LOG.debug(getClass().getName() + " initialized with: columnNames="
-        + serdeParams.columnNames + " columnTypes=" + serdeParams.columnTypes
-        + " separator=" + Arrays.asList(serdeParams.separators)
-        + " nullstring=" + serdeParams.nullString + " lastColumnTakesRest="
-        + serdeParams.lastColumnTakesRest + " timestampFormats=" + serdeParams.timestampFormats);
+        + serdeParams.getColumnNames() + " columnTypes=" + serdeParams.getColumnTypes()
+        + " separator=" + Arrays.asList(serdeParams.getSeparators())
+        + " nullstring=" + serdeParams.getNullString() + " lastColumnTakesRest="
+        + serdeParams.isLastColumnTakesRest() + " timestampFormats=" + serdeParams.getTimestampFormats());
 
     serializedSize = 0;
     stats = new SerDeStats();
@@ -238,115 +143,6 @@ public class LazySimpleSerDe extends Abs
     lastOperationDeserialize = false;
   }
 
-  public static SerDeParameters initSerdeParams(Configuration job,
-      Properties tbl, String serdeName) throws SerDeException {
-    SerDeParameters serdeParams = new SerDeParameters();
-    // Read the separators: We use 8 levels of separators by default,
-    // and 24 if SERIALIZATION_EXTEND_NESTING_LEVELS is set to true
-    // The levels possible are the set of control chars that we can use as
-    // special delimiters, ie they should absent in the data or escaped.
-    // To increase this level further, we need to stop relying
-    // on single control chars delimiters
-
-    serdeParams.separators = new byte[8];
-    serdeParams.separators[0] = getByte(tbl.getProperty(serdeConstants.FIELD_DELIM,
-        tbl.getProperty(serdeConstants.SERIALIZATION_FORMAT)), DefaultSeparators[0]);
-    serdeParams.separators[1] = getByte(tbl
-        .getProperty(serdeConstants.COLLECTION_DELIM), DefaultSeparators[1]);
-    serdeParams.separators[2] = getByte(
-        tbl.getProperty(serdeConstants.MAPKEY_DELIM), DefaultSeparators[2]);
-    String extendedNesting =
-        tbl.getProperty(SERIALIZATION_EXTEND_NESTING_LEVELS);
-    if(extendedNesting == null || !extendedNesting.equalsIgnoreCase("true")){
-      //use the default smaller set of separators for backward compatibility
-      for (int i = 3; i < serdeParams.separators.length; i++) {
-        serdeParams.separators[i] = (byte) (i + 1);
-      }
-    }
-    else{
-      //If extended nesting is enabled, set the extended set of separator chars
-
-      final int MAX_CTRL_CHARS = 29;
-      byte[] extendedSeparators = new byte[MAX_CTRL_CHARS];
-      int extendedSeparatorsIdx = 0;
-
-      //get the first 3 separators that have already been set (defaults to 1,2,3)
-      for(int i = 0; i < 3; i++){
-        extendedSeparators[extendedSeparatorsIdx++] = serdeParams.separators[i];
-      }
-
-      for (byte asciival = 4; asciival <= MAX_CTRL_CHARS; asciival++) {
-
-        //use only control chars that are very unlikely to be part of the string
-        // the following might/likely to be used in text files for strings
-        // 9 (horizontal tab, HT, \t, ^I)
-        // 10 (line feed, LF, \n, ^J),
-        // 12 (form feed, FF, \f, ^L),
-        // 13 (carriage return, CR, \r, ^M),
-        // 27 (escape, ESC, \e [GCC only], ^[).
-
-        //reserving the following values for future dynamic level impl
-        // 30
-        // 31
-
-        switch(asciival){
-        case 9:
-        case 10:
-        case 12:
-        case 13:
-        case 27:
-          continue;
-        }
-        extendedSeparators[extendedSeparatorsIdx++] = asciival;
-      }
-
-      serdeParams.separators =
-          Arrays.copyOfRange(extendedSeparators, 0, extendedSeparatorsIdx);
-    }
-
-    serdeParams.nullString = tbl.getProperty(
-        serdeConstants.SERIALIZATION_NULL_FORMAT, "\\N");
-    serdeParams.nullSequence = new Text(serdeParams.nullString);
-
-    String lastColumnTakesRestString = tbl
-        .getProperty(serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST);
-    serdeParams.lastColumnTakesRest = (lastColumnTakesRestString != null && lastColumnTakesRestString
-        .equalsIgnoreCase("true"));
-
-    LazyUtils.extractColumnInfo(tbl, serdeParams, serdeName);
-
-    // Create the LazyObject for storing the rows
-    serdeParams.rowTypeInfo = TypeInfoFactory.getStructTypeInfo(
-        serdeParams.columnNames, serdeParams.columnTypes);
-
-    // Get the escape information
-    String escapeProperty = tbl.getProperty(serdeConstants.ESCAPE_CHAR);
-    serdeParams.escaped = (escapeProperty != null);
-    if (serdeParams.escaped) {
-      serdeParams.escapeChar = getByte(escapeProperty, (byte) '\\');
-    }
-    if (serdeParams.escaped) {
-      serdeParams.needsEscape = new boolean[128];
-      for (int i = 0; i < 128; i++) {
-        serdeParams.needsEscape[i] = false;
-      }
-      serdeParams.needsEscape[serdeParams.escapeChar] = true;
-      for (int i = 0; i < serdeParams.separators.length; i++) {
-        serdeParams.needsEscape[serdeParams.separators[i]] = true;
-      }
-    }
-
-    serdeParams.extendedBooleanLiteral = job == null ? false :
-        job.getBoolean(ConfVars.HIVE_LAZYSIMPLE_EXTENDED_BOOLEAN_LITERAL.varname, false);
-
-    String[] timestampFormatsArray =
-        HiveStringUtils.splitAndUnEscape(tbl.getProperty(serdeConstants.TIMESTAMP_FORMATS));
-    if (timestampFormatsArray != null) {
-      serdeParams.timestampFormats = Arrays.asList(timestampFormatsArray);
-    }
-    return serdeParams;
-  }
-
   // The object for storing row data
   LazyStruct cachedLazyStruct;
 
@@ -420,7 +216,7 @@ public class LazySimpleSerDe extends Abs
     StructObjectInspector soi = (StructObjectInspector) objInspector;
     List<? extends StructField> fields = soi.getAllStructFieldRefs();
     List<Object> list = soi.getStructFieldsDataAsList(obj);
-    List<? extends StructField> declaredFields = (serdeParams.rowTypeInfo != null && ((StructTypeInfo) serdeParams.rowTypeInfo)
+    List<? extends StructField> declaredFields = (serdeParams.getRowTypeInfo() != null && ((StructTypeInfo) serdeParams.getRowTypeInfo())
         .getAllStructFieldNames().size() > 0) ? ((StructObjectInspector) getObjectInspector())
         .getAllStructFieldRefs()
         : null;
@@ -432,7 +228,7 @@ public class LazySimpleSerDe extends Abs
     for (int i = 0; i < fields.size(); i++) {
       // Append the separator if needed.
       if (i > 0) {
-        serializeStream.write(serdeParams.separators[0]);
+        serializeStream.write(serdeParams.getSeparators()[0]);
       }
       // Get the field objectInspector and the field object.
       ObjectInspector foi = fields.get(i).getFieldObjectInspector();
@@ -441,7 +237,7 @@ public class LazySimpleSerDe extends Abs
       if (declaredFields != null && i >= declaredFields.size()) {
         throw new SerDeException("Error: expecting " + declaredFields.size()
             + " but asking for field " + i + "\n" + "data=" + obj + "\n"
-            + "tableType=" + serdeParams.rowTypeInfo.toString() + "\n"
+            + "tableType=" + serdeParams.getRowTypeInfo().toString() + "\n"
             + "dataType="
             + TypeInfoUtils.getTypeInfoFromObjectInspector(objInspector));
       }
@@ -460,10 +256,10 @@ public class LazySimpleSerDe extends Abs
   }
 
   protected void serializeField(ByteStream.Output out, Object obj, ObjectInspector objInspector,
-      SerDeParameters serdeParams) throws SerDeException {
+      LazySerDeParameters serdeParams) throws SerDeException {
     try {
-      serialize(out, obj, objInspector, serdeParams.separators, 1, serdeParams.nullSequence,
-          serdeParams.escaped, serdeParams.escapeChar, serdeParams.needsEscape);
+      serialize(out, obj, objInspector, serdeParams.getSeparators(), 1, serdeParams.getNullSequence(),
+          serdeParams.isEscaped(), serdeParams.getEscapeChar(), serdeParams.getNeedsEscape());
     } catch (IOException e) {
       throw new SerDeException(e);
     }
@@ -489,9 +285,7 @@ public class LazySimpleSerDe extends Abs
    * @param escapeChar
    *          Which char to use as the escape char, e.g. '\\'
    * @param needsEscape
-   *          Which chars needs to be escaped. This array should have size of
-   *          128. Negative byte values (or byte values >= 128) are never
-   *          escaped.
+   *          Which byte needs to be escaped for 256 bytes. 
    * @throws IOException
    * @throws SerDeException
    */

Modified: hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (original)
+++ hive/branches/cbo/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java Wed Feb 18 22:28:35 2015
@@ -23,16 +23,13 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
-import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Properties;
+import java.util.Map;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
@@ -48,7 +45,6 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 
@@ -140,10 +136,10 @@ public final class LazyUtils {
    * @param escaped
    *          Whether the data should be written out in an escaped way.
    * @param escapeChar
-   *          if escaped, the char for prefixing special characters.
+   *          If escaped, the char for prefixing special characters.
    * @param needsEscape
-   *          if escaped, whether a specific character needs escaping. This
-   *          array should have size of 128.
+   *          If escaped, whether a specific character needs escaping. This
+   *          array should have size of 256.
    */
   public static void writeEscaped(OutputStream out, byte[] bytes, int start,
       int len, boolean escaped, byte escapeChar, boolean[] needsEscape)
@@ -151,7 +147,7 @@ public final class LazyUtils {
     if (escaped) {
       int end = start + len;
       for (int i = start; i <= end; i++) {
-        if (i == end || (bytes[i] >= 0 && needsEscape[bytes[i]])) {
+        if (i == end || needsEscape[bytes[i] & 0xFF]) {  // Converts negative byte to positive index
           if (i > start) {
             out.write(bytes, start, i - start);
           }
@@ -176,8 +172,7 @@ public final class LazyUtils {
    * @param o
    *          The primitive Object
    * @param needsEscape
-   *          Whether a character needs escaping. This array should have size of
-   *          128.
+   *          Whether a character needs escaping. This array should have size of 256.
    */
   public static void writePrimitiveUTF8(OutputStream out, Object o,
       PrimitiveObjectInspector oi, boolean escaped, byte escapeChar,
@@ -341,42 +336,7 @@ public final class LazyUtils {
     return hash;
   }
 
-  public static void extractColumnInfo(Properties tbl, SerDeParameters serdeParams,
-      String serdeName) throws SerDeException {
-    // Read the configuration parameters
-    String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
-    // NOTE: if "columns.types" is missing, all columns will be of String type
-    String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
 
-    // Parse the configuration parameters
-
-    if (columnNameProperty != null && columnNameProperty.length() > 0) {
-      serdeParams.columnNames = Arrays.asList(columnNameProperty.split(","));
-    } else {
-      serdeParams.columnNames = new ArrayList<String>();
-    }
-    if (columnTypeProperty == null) {
-      // Default type: all string
-      StringBuilder sb = new StringBuilder();
-      for (int i = 0; i < serdeParams.columnNames.size(); i++) {
-        if (i > 0) {
-          sb.append(":");
-        }
-        sb.append(serdeConstants.STRING_TYPE_NAME);
-      }
-      columnTypeProperty = sb.toString();
-    }
-
-    serdeParams.columnTypes = TypeInfoUtils
-        .getTypeInfosFromTypeString(columnTypeProperty);
-
-    if (serdeParams.columnNames.size() != serdeParams.columnTypes.size()) {
-      throw new SerDeException(serdeName + ": columns has "
-          + serdeParams.columnNames.size()
-          + " elements while columns.types has "
-          + serdeParams.columnTypes.size() + " elements!");
-    }
-  }
 
   /**
    * gets a byte[] with copy of data from source BytesWritable
@@ -404,10 +364,15 @@ public final class LazyUtils {
       String msg = "Number of levels of nesting supported for " +
           "LazySimpleSerde is " + (separators.length - 1) +
           " Unable to work with level " + level;
+      
+      String txt = ". Use %s serde property for tables using LazySimpleSerde.";
+      
       if(separators.length < 9){
-        msg += ". Use " + LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS +
-            " serde property for tables using LazySimpleSerde.";
+        msg += String.format(txt, LazySerDeParameters.SERIALIZATION_EXTEND_NESTING_LEVELS);
+      } else if (separators.length < 25) {
+        msg += String.format(txt, LazySerDeParameters.SERIALIZATION_EXTEND_ADDITIONAL_NESTING_LEVELS);
       }
+      
       throw new SerDeException(msg, e);
     }
   }
@@ -450,6 +415,26 @@ public final class LazyUtils {
     }
   }
 
+  /**
+   * Return the byte value of the number string.
+   *
+   * @param altValue
+   *          The string containing a number.
+   * @param defaultVal
+   *          If the altValue does not represent a number, return the
+   *          defaultVal.
+   */
+  public static byte getByte(String altValue, byte defaultVal) {
+    if (altValue != null && altValue.length() > 0) {
+      try {
+        return Byte.valueOf(altValue).byteValue();
+      } catch (NumberFormatException e) {
+        return (byte) altValue.charAt(0);
+      }
+    }
+    return defaultVal;
+  }
+  
   private LazyUtils() {
     // prevent instantiation
   }

Modified: hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java (original)
+++ hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java Wed Feb 18 22:28:35 2015
@@ -638,7 +638,7 @@ public class TestLazyArrayMapStruct exte
   private void testNestedinArrayAtLevelExtended(int nestingLevel,
       ObjectInspector.Category dtype) throws SerDeException {
     Properties tableProp = new Properties();
-    tableProp.setProperty(LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS, "true");
+    tableProp.setProperty(LazySerDeParameters.SERIALIZATION_EXTEND_NESTING_LEVELS, "true");
     testNestedinArrayAtLevel(nestingLevel, dtype, tableProp);
   }
 
@@ -693,9 +693,10 @@ public class TestLazyArrayMapStruct exte
     tableProp.setProperty("columns", "narray");
     tableProp.setProperty("columns.types", schema.toString());
     SerDeUtils.initializeSerDe(serDe, conf, tableProp, null);
-
+    LazySerDeParameters serdeParams = new LazySerDeParameters(conf, tableProp, LazySimpleSerDe.class.getName());
+    
     //create the serialized string for type
-    byte[] separators = serDe.serdeParams.getSeparators();
+    byte[] separators = serdeParams.getSeparators();
     System.err.println("Using separator " +  (char)separators[nestingLevel]);
     byte [] serializedRow = null;
     switch(dtype){

Modified: hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java (original)
+++ hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java Wed Feb 18 22:28:35 2015
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hive.serde2.lazy;
 
+import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
 
@@ -25,13 +27,20 @@ import junit.framework.TestCase;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.SimpleMapEqualComparer;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
+import org.apache.hadoop.hive.serde2.objectinspector.TestSimpleMapEqualComparer.TextStringMapHolder;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
@@ -79,41 +88,7 @@ public class TestLazySimpleSerDe extends
     }
   }
 
-  private void deserializeAndSerialize(LazySimpleSerDe serDe, Text t, String s,
-      Object[] expectedFieldsData) throws SerDeException {
-    // Get the row structure
-    StructObjectInspector oi = (StructObjectInspector) serDe
-        .getObjectInspector();
-    List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
-    assertEquals(expectedFieldsData.length, fieldRefs.size());
 
-    // Deserialize
-    Object row = serDe.deserialize(t);
-    for (int i = 0; i < fieldRefs.size(); i++) {
-      Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
-      if (fieldData != null) {
-        fieldData = ((LazyPrimitive) fieldData).getWritableObject();
-      }
-      assertEquals("Field " + i, expectedFieldsData[i], fieldData);
-    }
-    // Serialize
-    assertEquals(Text.class, serDe.getSerializedClass());
-    Text serializedText = (Text) serDe.serialize(row, oi);
-    assertEquals("Serialized data", s, serializedText.toString());
-  }
-
-  private Properties createProperties() {
-    Properties tbl = new Properties();
-
-    // Set the configuration parameters
-    tbl.setProperty(serdeConstants.SERIALIZATION_FORMAT, "9");
-    tbl.setProperty("columns",
-        "abyte,ashort,aint,along,adouble,astring,anullint,anullstring");
-    tbl.setProperty("columns.types",
-        "tinyint:smallint:int:bigint:double:string:int:string");
-    tbl.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, "NULL");
-    return tbl;
-  }
 
   /**
    * Test the LazySimpleSerDe class with LastColumnTakesRest option.
@@ -199,5 +174,53 @@ public class TestLazySimpleSerDe extends
       throw e;
     }
   }
+  
+  Object serializeAndDeserialize(List<Integer> o1, StructObjectInspector oi1,
+      LazySimpleSerDe serde,
+      LazySerDeParameters serdeParams) throws IOException, SerDeException {
+    ByteStream.Output serializeStream = new ByteStream.Output();
+    LazySimpleSerDe.serialize(serializeStream, o1, oi1, serdeParams
+        .getSeparators(), 0, serdeParams.getNullSequence(), serdeParams
+        .isEscaped(), serdeParams.getEscapeChar(), serdeParams
+        .getNeedsEscape());
+    Text t = new Text(serializeStream.toByteArray());
+    return serde.deserialize(t);
+  }
+  
+  
+  private void deserializeAndSerialize(LazySimpleSerDe serDe, Text t, String s,
+      Object[] expectedFieldsData) throws SerDeException {
+    // Get the row structure
+    StructObjectInspector oi = (StructObjectInspector) serDe
+        .getObjectInspector();
+    List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
+    assertEquals(expectedFieldsData.length, fieldRefs.size());
+
+    // Deserialize
+    Object row = serDe.deserialize(t);
+    for (int i = 0; i < fieldRefs.size(); i++) {
+      Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
+      if (fieldData != null) {
+        fieldData = ((LazyPrimitive) fieldData).getWritableObject();
+      }
+      assertEquals("Field " + i, expectedFieldsData[i], fieldData);
+    }
+    // Serialize
+    assertEquals(Text.class, serDe.getSerializedClass());
+    Text serializedText = (Text) serDe.serialize(row, oi);
+    assertEquals("Serialized data", s, serializedText.toString());
+  }
 
+  private Properties createProperties() {
+    Properties tbl = new Properties();
+
+    // Set the configuration parameters
+    tbl.setProperty(serdeConstants.SERIALIZATION_FORMAT, "9");
+    tbl.setProperty("columns",
+        "abyte,ashort,aint,along,adouble,astring,anullint,anullstring");
+    tbl.setProperty("columns.types",
+        "tinyint:smallint:int:bigint:double:string:int:string");
+    tbl.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT, "NULL");
+    return tbl;
+  }
 }

Modified: hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestCrossMapEqualComparer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestCrossMapEqualComparer.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestCrossMapEqualComparer.java (original)
+++ hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestCrossMapEqualComparer.java Wed Feb 18 22:28:35 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.serde2.Byt
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
 import org.apache.hadoop.io.Text;
 
@@ -78,7 +78,7 @@ public class TestCrossMapEqualComparer e
 
   Object serializeAndDeserialize(TextStringMapHolder o1, StructObjectInspector oi1,
       LazySimpleSerDe serde,
-      SerDeParameters serdeParams) throws IOException, SerDeException {
+      LazySerDeParameters serdeParams) throws IOException, SerDeException {
     ByteStream.Output serializeStream = new ByteStream.Output();
     LazySimpleSerDe.serialize(serializeStream, o1, oi1, serdeParams
         .getSeparators(), 0, serdeParams.getNullSequence(), serdeParams
@@ -99,8 +99,7 @@ public class TestCrossMapEqualComparer e
     Properties tbl = new Properties();
     tbl.setProperty(serdeConstants.LIST_COLUMNS, ObjectInspectorUtils.getFieldNames(oi1));
     tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, ObjectInspectorUtils.getFieldTypes(oi1));
-    SerDeParameters serdeParams = LazySimpleSerDe.initSerdeParams(conf, tbl,
-        LazySimpleSerDe.class.getName());
+    LazySerDeParameters serdeParams = new LazySerDeParameters(conf, tbl, LazySimpleSerDe.class.getName());
     SerDeUtils.initializeSerDe(serde, conf, tbl, null);
     ObjectInspector oi2 = serde.getObjectInspector();
 
@@ -132,7 +131,7 @@ public class TestCrossMapEqualComparer e
 
   Object serializeAndDeserialize(StringTextMapHolder o1, StructObjectInspector oi1,
       LazySimpleSerDe serde,
-      SerDeParameters serdeParams) throws IOException, SerDeException {
+      LazySerDeParameters serdeParams) throws IOException, SerDeException {
     ByteStream.Output serializeStream = new ByteStream.Output();
     LazySimpleSerDe.serialize(serializeStream, o1, oi1, serdeParams
         .getSeparators(), 0, serdeParams.getNullSequence(), serdeParams
@@ -153,8 +152,7 @@ public class TestCrossMapEqualComparer e
     Properties tbl = new Properties();
     tbl.setProperty(serdeConstants.LIST_COLUMNS, ObjectInspectorUtils.getFieldNames(oi1));
     tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, ObjectInspectorUtils.getFieldTypes(oi1));
-    SerDeParameters serdeParams = LazySimpleSerDe.initSerdeParams(conf, tbl,
-        LazySimpleSerDe.class.getName());
+    LazySerDeParameters serdeParams = new LazySerDeParameters(conf, tbl, LazySimpleSerDe.class.getName());
     SerDeUtils.initializeSerDe(serde, conf, tbl, null);
     ObjectInspector oi2 = serde.getObjectInspector();
 

Modified: hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestSimpleMapEqualComparer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestSimpleMapEqualComparer.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestSimpleMapEqualComparer.java (original)
+++ hive/branches/cbo/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestSimpleMapEqualComparer.java Wed Feb 18 22:28:35 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.serde2.Byt
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
 import org.apache.hadoop.io.Text;
 
@@ -78,7 +78,7 @@ public class TestSimpleMapEqualComparer
 
   Object serializeAndDeserialize(TextStringMapHolder o1, StructObjectInspector oi1,
       LazySimpleSerDe serde,
-      SerDeParameters serdeParams) throws IOException, SerDeException {
+      LazySerDeParameters serdeParams) throws IOException, SerDeException {
     ByteStream.Output serializeStream = new ByteStream.Output();
     LazySimpleSerDe.serialize(serializeStream, o1, oi1, serdeParams
         .getSeparators(), 0, serdeParams.getNullSequence(), serdeParams
@@ -99,8 +99,7 @@ public class TestSimpleMapEqualComparer
     Properties tbl = new Properties();
     tbl.setProperty(serdeConstants.LIST_COLUMNS, ObjectInspectorUtils.getFieldNames(oi1));
     tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, ObjectInspectorUtils.getFieldTypes(oi1));
-    SerDeParameters serdeParams = LazySimpleSerDe.initSerdeParams(conf, tbl,
-        LazySimpleSerDe.class.getName());
+    LazySerDeParameters serdeParams = new LazySerDeParameters(conf, tbl, LazySimpleSerDe.class.getName());
     SerDeUtils.initializeSerDe(serde, conf, tbl, null);
     ObjectInspector oi2 = serde.getObjectInspector();
 
@@ -132,7 +131,7 @@ public class TestSimpleMapEqualComparer
 
   Object serializeAndDeserialize(StringTextMapHolder o1, StructObjectInspector oi1,
       LazySimpleSerDe serde,
-      SerDeParameters serdeParams) throws IOException, SerDeException {
+      LazySerDeParameters serdeParams) throws IOException, SerDeException {
     ByteStream.Output serializeStream = new ByteStream.Output();
     LazySimpleSerDe.serialize(serializeStream, o1, oi1, serdeParams
         .getSeparators(), 0, serdeParams.getNullSequence(), serdeParams
@@ -153,8 +152,7 @@ public class TestSimpleMapEqualComparer
     Properties tbl = new Properties();
     tbl.setProperty(serdeConstants.LIST_COLUMNS, ObjectInspectorUtils.getFieldNames(oi1));
     tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES, ObjectInspectorUtils.getFieldTypes(oi1));
-    SerDeParameters serdeParams = LazySimpleSerDe.initSerdeParams(conf, tbl,
-        LazySimpleSerDe.class.getName());
+    LazySerDeParameters serdeParams = new LazySerDeParameters(conf, tbl, LazySimpleSerDe.class.getName());
     SerDeUtils.initializeSerDe(serde, conf, tbl, null);
     ObjectInspector oi2 = serde.getObjectInspector();
 

Modified: hive/branches/cbo/service/src/java/org/apache/hive/service/cli/CLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/java/org/apache/hive/service/cli/CLIService.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/service/src/java/org/apache/hive/service/cli/CLIService.java (original)
+++ hive/branches/cbo/service/src/java/org/apache/hive/service/cli/CLIService.java Wed Feb 18 22:28:35 2015
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -77,11 +78,6 @@ public class CLIService extends Composit
 
   @Override
   public synchronized void init(HiveConf hiveConf) {
-    try {
-      applyAuthorizationConfigPolicy(hiveConf);
-    } catch (HiveException e) {
-      throw new RuntimeException("Error applying authorization policy on hive configuration", e);
-    }
     this.hiveConf = hiveConf;
     sessionManager = new SessionManager(hiveServer2);
     addService(sessionManager);
@@ -111,11 +107,19 @@ public class CLIService extends Composit
         }
       }
     }
+    // creates connection to HMS and thus *must* occur after kerberos login above
+    try {
+      applyAuthorizationConfigPolicy(hiveConf);
+    } catch (Exception e) {
+      throw new RuntimeException("Error applying authorization policy on hive configuration: "
+          + e.getMessage(), e);
+    }
     setupBlockedUdfs();
     super.init(hiveConf);
   }
 
-  private void applyAuthorizationConfigPolicy(HiveConf newHiveConf) throws HiveException {
+  private void applyAuthorizationConfigPolicy(HiveConf newHiveConf) throws HiveException,
+      MetaException {
     // authorization setup using SessionState should be revisited eventually, as
     // authorization and authentication are not session specific settings
     SessionState ss = new SessionState(newHiveConf);

Modified: hive/branches/cbo/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java (original)
+++ hive/branches/cbo/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java Wed Feb 18 22:28:35 2015
@@ -85,7 +85,8 @@ public class LogDivertAppender extends W
     } else {
       // in non verbose mode, show only select logger messages
       String[] inclLoggerNames = { "org.apache.hadoop.mapreduce.JobSubmitter",
-          "org.apache.hadoop.mapreduce.Job", "SessionState", Task.class.getName() };
+          "org.apache.hadoop.mapreduce.Job", "SessionState", Task.class.getName(),
+          "org.apache.hadoop.hive.ql.exec.spark.status.SparkJobMonitor"};
       addFilter(new NameFilter(false, inclLoggerNames));
     }
   }

Modified: hive/branches/cbo/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java (original)
+++ hive/branches/cbo/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java Wed Feb 18 22:28:35 2015
@@ -30,6 +30,7 @@ import java.util.Properties;
 import java.util.concurrent.Future;
 import java.util.concurrent.RejectedExecutionException;
 
+import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
@@ -92,6 +93,14 @@ public class SQLOperation extends Execut
 
     try {
       driver = new Driver(sqlOperationConf, getParentSession().getUserName());
+
+      // set the operation handle information in Driver, so that thrift API users
+      // can use the operation handle they receive, to lookup query information in
+      // Yarn ATS
+      String guid64 = Base64.encodeBase64URLSafeString(getHandle().getHandleIdentifier()
+          .toTHandleIdentifier().getGuid()).trim();
+      driver.setOperationId(guid64);
+
       // In Hive server mode, we are not able to retry in the FetchTask
       // case, when calling fetch queries since execute() has returned.
       // For now, we disable the test attempts.

Modified: hive/branches/cbo/shims/common/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/shims/common/pom.xml?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/shims/common/pom.xml (original)
+++ hive/branches/cbo/shims/common/pom.xml Wed Feb 18 22:28:35 2015
@@ -74,7 +74,6 @@
     <dependency>
       <groupId>org.apache.curator</groupId>
       <artifactId>curator-framework</artifactId>
-       <version>${curator.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.zookeeper</groupId>

Modified: hive/branches/cbo/spark-client/src/main/java/org/apache/hive/spark/client/SparkClient.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/spark-client/src/main/java/org/apache/hive/spark/client/SparkClient.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/spark-client/src/main/java/org/apache/hive/spark/client/SparkClient.java (original)
+++ hive/branches/cbo/spark-client/src/main/java/org/apache/hive/spark/client/SparkClient.java Wed Feb 18 22:28:35 2015
@@ -18,7 +18,7 @@
 package org.apache.hive.spark.client;
 
 import java.io.Serializable;
-import java.net.URL;
+import java.net.URI;
 import java.util.concurrent.Future;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
@@ -68,10 +68,10 @@ public interface SparkClient extends Ser
    * in cluster mode, it may reside on a different host, meaning "file:" URLs have to exist
    * on that node (and not on the client machine).
    *
-   * @param url The location of the jar file.
+   * @param uri The location of the jar file.
    * @return A future that can be used to monitor the operation.
    */
-  Future<?> addJar(URL url);
+  Future<?> addJar(URI uri);
 
   /**
    * Adds a file to the running remote context.
@@ -80,10 +80,10 @@ public interface SparkClient extends Ser
    * in cluster mode, it may reside on a different host, meaning "file:" URLs have to exist
    * on that node (and not on the client machine).
    *
-   * @param url The location of the file.
+   * @param uri The location of the file.
    * @return A future that can be used to monitor the operation.
    */
-  Future<?> addFile(URL url);
+  Future<?> addFile(URI uri);
 
   /**
    * Get the count of executors.

Modified: hive/branches/cbo/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java (original)
+++ hive/branches/cbo/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java Wed Feb 18 22:28:35 2015
@@ -17,6 +17,14 @@
 
 package org.apache.hive.spark.client;
 
+import com.google.common.base.Charsets;
+import com.google.common.base.Joiner;
+import com.google.common.base.Strings;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Throwables;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.util.concurrent.GenericFutureListener;
 import io.netty.util.concurrent.Promise;
@@ -30,14 +38,12 @@ import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
 import java.io.Serializable;
 import java.io.Writer;
-import java.net.URL;
+import java.net.URI;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 import java.util.UUID;
 import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -49,14 +55,6 @@ import org.apache.spark.SparkException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.base.Charsets;
-import com.google.common.base.Joiner;
-import com.google.common.base.Strings;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-
 class SparkClientImpl implements SparkClient {
   private static final long serialVersionUID = 1L;
 
@@ -155,13 +153,13 @@ class SparkClientImpl implements SparkCl
   }
 
   @Override
-  public Future<?> addJar(URL url) {
-    return run(new AddJarJob(url.toString()));
+  public Future<?> addJar(URI uri) {
+    return run(new AddJarJob(uri.toString()));
   }
 
   @Override
-  public Future<?> addFile(URL url) {
-    return run(new AddFileJob(url.toString()));
+  public Future<?> addFile(URI uri) {
+    return run(new AddFileJob(uri.toString()));
   }
 
   @Override

Modified: hive/branches/cbo/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java?rev=1660751&r1=1660750&r2=1660751&view=diff
==============================================================================
--- hive/branches/cbo/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java (original)
+++ hive/branches/cbo/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java Wed Feb 18 22:28:35 2015
@@ -22,7 +22,7 @@ import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.InputStream;
 import java.io.Serializable;
-import java.net.URL;
+import java.net.URI;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
@@ -204,7 +204,7 @@ public class TestSparkClient {
           jarFile.closeEntry();
           jarFile.close();
 
-          client.addJar(new URL("file:" + jar.getAbsolutePath()))
+          client.addJar(new URI("file:" + jar.getAbsolutePath()))
             .get(TIMEOUT, TimeUnit.SECONDS);
 
           // Need to run a Spark job to make sure the jar is added to the class loader. Monitoring
@@ -220,7 +220,7 @@ public class TestSparkClient {
           fileStream.write("test file".getBytes("UTF-8"));
           fileStream.close();
 
-          client.addJar(new URL("file:" + file.getAbsolutePath()))
+          client.addJar(new URI("file:" + file.getAbsolutePath()))
             .get(TIMEOUT, TimeUnit.SECONDS);
 
           // The same applies to files added with "addFile". They're only guaranteed to be available



Mime
View raw message