hadoop-hive-commits mailing list archives

From: zs...@apache.org
Subject: svn commit: r901625 [1/6] - in /hadoop/hive/trunk: ./ serde/src/java/org/apache/hadoop/hive/serde2/ serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/ serde/src/java/org/apache/hadoop/hive/serde2/columnar/ serde/src/java/org/apache/hadoop/hiv...
Date: Thu, 21 Jan 2010 09:52:48 GMT
Author: zshao
Date: Thu Jan 21 09:52:44 2010
New Revision: 901625

URL: http://svn.apache.org/viewvc?rev=901625&view=rev
Log:
HIVE-1081. Automated source code cleanup - Part 2 - serde. (Carl Steinbach via zshao)

Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStream.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStreamTypedSerDe.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnSet.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeException.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/InputByteBuffer.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/OutputByteBuffer.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefArrayWritable.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefWritable.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarStruct.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyDecompressionCallback.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/ByteWritable.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/DoubleWritable.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/ShortWritable.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ConfigurableTProtocol.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TBinarySortableProtocol.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TReflectionUtils.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftByteStreamTypedSerDe.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftDeserializer.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/WriteNullsProtocol.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/WriteTextProtocol.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/ListTypeInfo.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/MapTypeInfo.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/PrimitiveTypeInfo.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/StructTypeInfo.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestInnerStruct.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/dynamic_type/TestDynamicSerDe.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyArrayMapStruct.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazySimpleSerDe.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/MyStruct.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorConverters.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestObjectInspectorUtils.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestReflectionObjectInspectors.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestThriftObjectInspectors.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestUnionStructObjectInspector.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/thrift_test/CreateSequenceFile.java

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Thu Jan 21 09:52:44 2010
@@ -22,6 +22,9 @@
     HIVE-1081. Automated source code cleanup - Part 1.
     (Carl Steinbach via zshao)
 
+    HIVE-1081. Automated source code cleanup - Part 2 - serde.
+    (Carl Steinbach via zshao)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStream.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStream.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStream.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStream.java Thu Jan 21 09:52:44 2010
@@ -18,38 +18,58 @@
 
 package org.apache.hadoop.hive.serde2;
 
-
 import org.apache.hadoop.hive.common.io.NonSyncByteArrayInputStream;
 import org.apache.hadoop.hive.common.io.NonSyncByteArrayOutputStream;
 
 /**
  * Extensions to bytearrayinput/output streams
- *
+ * 
  */
 public class ByteStream {
   public static class Input extends NonSyncByteArrayInputStream {
-    public byte[] getData() { return buf; }
-    public int getCount() { return count;}
-    public void reset(byte [] argBuf, int argCount) {
-      buf = argBuf; mark = pos = 0; count = argCount;
+    public byte[] getData() {
+      return buf;
+    }
+
+    public int getCount() {
+      return count;
     }
+
+    public void reset(byte[] argBuf, int argCount) {
+      buf = argBuf;
+      mark = pos = 0;
+      count = argCount;
+    }
+
     public Input() {
-      super(new byte [1]);
+      super(new byte[1]);
     }
 
     public Input(byte[] buf) {
       super(buf);
     }
+
     public Input(byte[] buf, int offset, int length) {
       super(buf, offset, length);
     }
   }
-    
+
   public static class Output extends NonSyncByteArrayOutputStream {
-    public byte[] getData() { return buf; }
-    public int getCount() { return count;}
+    @Override
+    public byte[] getData() {
+      return buf;
+    }
+
+    public int getCount() {
+      return count;
+    }
 
-    public Output() { super(); }
-    public Output(int size) { super(size); }
+    public Output() {
+      super();
+    }
+
+    public Output(int size) {
+      super(size);
+    }
   }
 }
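
The reformatted Input/Output classes above deliberately expose their internal buffers so callers can avoid copies when re-reading records. A minimal usage sketch (the class name and byte values are illustrative; the ByteStream API is as shown in the hunk above):

    import org.apache.hadoop.hive.serde2.ByteStream;

    public class ByteStreamExample {
      public static void main(String[] args) throws Exception {
        // Collect bytes in the non-synchronized output stream.
        ByteStream.Output out = new ByteStream.Output();
        out.write(new byte[] { 1, 2, 3 });

        // getData() returns the internal buffer, which may be longer than
        // the valid data, so it must always be paired with getCount().
        ByteStream.Input in = new ByteStream.Input();
        in.reset(out.getData(), out.getCount());

        System.out.println(in.read()); // prints 1
        System.out.println(in.read()); // prints 2
      }
    }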

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStreamTypedSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStreamTypedSerDe.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStreamTypedSerDe.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ByteStreamTypedSerDe.java Thu Jan 21 09:52:44 2010
@@ -21,7 +21,6 @@
 import java.lang.reflect.Type;
 
 import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 
 public abstract class ByteStreamTypedSerDe extends TypedSerDe {
@@ -35,12 +34,12 @@
     bis = new ByteStream.Input();
   }
 
+  @Override
   public Object deserialize(Writable field) throws SerDeException {
     Object retObj = super.deserialize(field);
-    BytesWritable b = (BytesWritable)field;
+    BytesWritable b = (BytesWritable) field;
     bis.reset(b.get(), b.getSize());
     return (retObj);
   }
 
-
 }

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnProjectionUtils.java Thu Jan 21 09:52:44 2010
@@ -25,83 +25,86 @@
 
 public class ColumnProjectionUtils {
 
-	public static String READ_COLUMN_IDS_CONF_STR = "hive.io.file.readcolumn.ids";
+  public static String READ_COLUMN_IDS_CONF_STR = "hive.io.file.readcolumn.ids";
 
-	/**
-	 * Sets read columns' ids(start from zero) for RCFile's Reader. Once a column
-	 * is included in the list, RCFile's reader will not skip its value.
-	 * 
-	 */
-	public static void setReadColumnIDs(Configuration conf, ArrayList<Integer> ids) {
-		String id = toReadColumnIDString(ids);
-		setReadColumnIDConf(conf, id);
-	}
-
-	/**
-	 * Sets read columns' ids(start from zero) for RCFile's Reader. Once a column
-	 * is included in the list, RCFile's reader will not skip its value.
-	 * 
-	 */
-	public static void appendReadColumnIDs(Configuration conf,
-	    ArrayList<Integer> ids) {
-		String id = toReadColumnIDString(ids);
-		if(id != null) {
-			String old = conf.get(READ_COLUMN_IDS_CONF_STR, null);
-			String newConfStr = id;
-			if (old != null)
-				newConfStr = newConfStr + StringUtils.COMMA_STR + old;
-
-			setReadColumnIDConf(conf, newConfStr);
-		}
-	}
-
-	private static void setReadColumnIDConf(Configuration conf, String id) {
-		if (id == null || id.length() <= 0) {
-			conf.set(READ_COLUMN_IDS_CONF_STR, "");
-			return;
-		}
-
-		conf.set(READ_COLUMN_IDS_CONF_STR, id);
-	}
-
-	private static String toReadColumnIDString(ArrayList<Integer> ids) {
-		String id = null;
-		if (ids != null) {
-			for (int i = 0; i < ids.size(); i++) {
-				if (i == 0) {
-					id = "" + ids.get(i);
-				} else {
-					id = id + StringUtils.COMMA_STR + ids.get(i);
-				}
-			}
-		}
-		return id;
-	}
-
-	/**
-	 * Returns an array of column ids(start from zero) which is set in the given
-	 * parameter <tt>conf</tt>.
-	 */
-	public static ArrayList<Integer> getReadColumnIDs(Configuration conf) {
-		if( conf == null )
-			return new ArrayList<Integer>(0);
-		String skips = conf.get(READ_COLUMN_IDS_CONF_STR, "");
-		String[] list = StringUtils.split(skips);
-		ArrayList<Integer> result = new ArrayList<Integer>(list.length);
-		for (int i = 0; i < list.length; i++) {
-			//it may contain duplicates, remove duplicates
-			Integer toAdd = Integer.parseInt(list[i]);
-			if (!result.contains(toAdd))
-				result.add(toAdd);
-		}
-		return result;
-	}
-
-	/**
-	 * Clears the read column ids set in the conf, and will read all columns.
-	 */
-	public static void setFullyReadColumns(Configuration conf) {
-		conf.set(READ_COLUMN_IDS_CONF_STR, "");
-	}
+  /**
+   * Sets read columns' ids(start from zero) for RCFile's Reader. Once a column
+   * is included in the list, RCFile's reader will not skip its value.
+   * 
+   */
+  public static void setReadColumnIDs(Configuration conf, ArrayList<Integer> ids) {
+    String id = toReadColumnIDString(ids);
+    setReadColumnIDConf(conf, id);
+  }
+
+  /**
+   * Sets read columns' ids(start from zero) for RCFile's Reader. Once a column
+   * is included in the list, RCFile's reader will not skip its value.
+   * 
+   */
+  public static void appendReadColumnIDs(Configuration conf,
+      ArrayList<Integer> ids) {
+    String id = toReadColumnIDString(ids);
+    if (id != null) {
+      String old = conf.get(READ_COLUMN_IDS_CONF_STR, null);
+      String newConfStr = id;
+      if (old != null) {
+        newConfStr = newConfStr + StringUtils.COMMA_STR + old;
+      }
+
+      setReadColumnIDConf(conf, newConfStr);
+    }
+  }
+
+  private static void setReadColumnIDConf(Configuration conf, String id) {
+    if (id == null || id.length() <= 0) {
+      conf.set(READ_COLUMN_IDS_CONF_STR, "");
+      return;
+    }
+
+    conf.set(READ_COLUMN_IDS_CONF_STR, id);
+  }
+
+  private static String toReadColumnIDString(ArrayList<Integer> ids) {
+    String id = null;
+    if (ids != null) {
+      for (int i = 0; i < ids.size(); i++) {
+        if (i == 0) {
+          id = "" + ids.get(i);
+        } else {
+          id = id + StringUtils.COMMA_STR + ids.get(i);
+        }
+      }
+    }
+    return id;
+  }
+
+  /**
+   * Returns an array of column ids(start from zero) which is set in the given
+   * parameter <tt>conf</tt>.
+   */
+  public static ArrayList<Integer> getReadColumnIDs(Configuration conf) {
+    if (conf == null) {
+      return new ArrayList<Integer>(0);
+    }
+    String skips = conf.get(READ_COLUMN_IDS_CONF_STR, "");
+    String[] list = StringUtils.split(skips);
+    ArrayList<Integer> result = new ArrayList<Integer>(list.length);
+    for (String element : list) {
+      // it may contain duplicates, remove duplicates
+      Integer toAdd = Integer.parseInt(element);
+      if (!result.contains(toAdd)) {
+        result.add(toAdd);
+      }
+    }
+    return result;
+  }
+
+  /**
+   * Clears the read column ids set in the conf, and will read all columns.
+   */
+  public static void setFullyReadColumns(Configuration conf) {
+    conf.set(READ_COLUMN_IDS_CONF_STR, "");
+  }
 
 }
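
As the javadoc above notes, these helpers just manage a comma-separated id list under the hive.io.file.readcolumn.ids key. A short sketch of how a caller might drive them (a usage sketch only; method names and semantics are taken from the code above):

    import java.util.ArrayList;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;

    public class ProjectionExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();

        // Ask RCFile's reader to materialize only columns 0 and 2.
        ArrayList<Integer> ids = new ArrayList<Integer>();
        ids.add(0);
        ids.add(2);
        ColumnProjectionUtils.setReadColumnIDs(conf, ids);

        // appendReadColumnIDs prepends to the existing list; duplicates
        // are only removed later, when the list is parsed back.
        ArrayList<Integer> more = new ArrayList<Integer>();
        more.add(2);
        more.add(5);
        ColumnProjectionUtils.appendReadColumnIDs(conf, more);

        // Parses "2,5,0,2" back into de-duplicated ids: [2, 5, 0]
        System.out.println(ColumnProjectionUtils.getReadColumnIDs(conf));
      }
    }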

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnSet.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnSet.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnSet.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/ColumnSet.java Thu Jan 21 09:52:44 2010
@@ -26,15 +26,14 @@
   public ColumnSet() {
   }
 
-  public ColumnSet(ArrayList<String> col)
-  {
+  public ColumnSet(ArrayList<String> col) {
     this();
     this.col = col;
   }
 
+  @Override
   public String toString() {
     return col.toString();
   }
-  
-}
 
+}

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java Thu Jan 21 09:52:44 2010
@@ -18,36 +18,44 @@
 
 package org.apache.hadoop.hive.serde2;
 
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.conf.Configuration;
-import java.util.Properties;
 
 /**
- * HiveDeserializer is used to deserialize the data from hadoop Writable to a 
+ * HiveDeserializer is used to deserialize the data from hadoop Writable to a
  * custom java object that can be of any type that the developer wants.
  * 
- * HiveDeserializer also provides the ObjectInspector which can be used to inspect 
- * the internal structure of the object (that is returned by deserialize function).
- *
+ * HiveDeserializer also provides the ObjectInspector which can be used to
+ * inspect the internal structure of the object (that is returned by deserialize
+ * function).
+ * 
  */
 public interface Deserializer {
 
   /**
    * Initialize the HiveDeserializer.
-   * @param conf System properties
-   * @param tbl  table properties
+   * 
+   * @param conf
+   *          System properties
+   * @param tbl
+   *          table properties
    * @throws SerDeException
    */
-  public void initialize(Configuration conf, Properties tbl) throws SerDeException;
-  
+  public void initialize(Configuration conf, Properties tbl)
+      throws SerDeException;
+
   /**
-   * Deserialize an object out of a Writable blob.
-   * In most cases, the return value of this function will be constant since the function
-   * will reuse the returned object.
-   * If the client wants to keep a copy of the object, the client needs to clone the
-   * returned value by calling ObjectInspectorUtils.getStandardObject().
-   * @param blob The Writable object containing a serialized object
+   * Deserialize an object out of a Writable blob. In most cases, the return
+   * value of this function will be constant since the function will reuse the
+   * returned object. If the client wants to keep a copy of the object, the
+   * client needs to clone the returned value by calling
+   * ObjectInspectorUtils.getStandardObject().
+   * 
+   * @param blob
+   *          The Writable object containing a serialized object
    * @return A Java object representing the contents in the blob.
    */
   public Object deserialize(Writable blob) throws SerDeException;
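
The contract in the javadoc above (the returned object may be reused across calls, so callers must clone anything they keep) is the part implementers most often get wrong. A minimal, hypothetical implementation sketch, assuming the interface's third method, getObjectInspector(), which this hunk truncates:

    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.Deserializer;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;

    /** Hypothetical deserializer exposing each Text row as one String. */
    public class SingleStringDeserializer implements Deserializer {

      public void initialize(Configuration conf, Properties tbl)
          throws SerDeException {
        // This toy example needs no table properties.
      }

      public Object deserialize(Writable blob) throws SerDeException {
        // A real SerDe would typically reuse the returned object, which
        // is why the javadoc above tells callers to clone what they keep.
        return ((Text) blob).toString();
      }

      public ObjectInspector getObjectInspector() throws SerDeException {
        return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
      }
    }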

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java Thu Jan 21 09:52:44 2010
@@ -38,35 +38,38 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 
-
 public class MetadataTypedColumnsetSerDe implements SerDe {
 
-  public static final Log LOG = LogFactory.getLog(MetadataTypedColumnsetSerDe.class.getName());
+  public static final Log LOG = LogFactory
+      .getLog(MetadataTypedColumnsetSerDe.class.getName());
 
   static {
     StackTraceElement[] sTrace = new Exception().getStackTrace();
     String className = sTrace[0].getClassName();
     try {
-      // For backward compatibility: this class replaces the columnsetSerDe class.
-      SerDeUtils.registerSerDe("org.apache.hadoop.hive.serde.thrift.columnsetSerDe", 
-          Class.forName(className));
-      } catch(Exception e) {
+      // For backward compatibility: this class replaces the columnsetSerDe
+      // class.
+      SerDeUtils.registerSerDe(
+          "org.apache.hadoop.hive.serde.thrift.columnsetSerDe", Class
+              .forName(className));
+    } catch (Exception e) {
       throw new RuntimeException(e);
     }
   }
-  
+
   final public static String DefaultSeparator = "\001";
   private String separator;
 
   final public static String defaultNullString = "\\N";
-  private String nullString; 
+  private String nullString;
 
   private List<String> columnNames;
   private ObjectInspector cachedObjectInspector;
 
   private boolean lastColumnTakesRest = false;
   private int splitLimit = -1;
-  
+
+  @Override
   public String toString() {
     return "MetaDataTypedColumnsetSerDe[" + separator + "," + columnNames + "]";
   }
@@ -78,17 +81,18 @@
   private String getByteValue(String altValue, String defaultVal) {
     if (altValue != null && altValue.length() > 0) {
       try {
-        byte b [] = new byte[1];
+        byte b[] = new byte[1];
         b[0] = Byte.valueOf(altValue).byteValue();
         return new String(b);
-      } catch(NumberFormatException e) {
+      } catch (NumberFormatException e) {
         return altValue;
       }
     }
     return defaultVal;
   }
 
-  public void initialize(Configuration job, Properties tbl) throws SerDeException {
+  public void initialize(Configuration job, Properties tbl)
+      throws SerDeException {
     String alt_sep = tbl.getProperty(Constants.SERIALIZATION_FORMAT);
     separator = getByteValue(alt_sep, DefaultSeparator);
 
@@ -97,34 +101,46 @@
 
     String columnProperty = tbl.getProperty("columns");
     String serdeName = tbl.getProperty(Constants.SERIALIZATION_LIB);
-    // tables that were serialized with columnsetSerDe doesn't have metadata 
-    // so this hack applies to all such tables 
+    // tables that were serialized with columnsetSerDe doesn't have metadata
+    // so this hack applies to all such tables
     boolean columnsetSerDe = false;
-    if ((serdeName != null) && serdeName.equals("org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) {
+    if ((serdeName != null)
+        && serdeName
+            .equals("org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) {
       columnsetSerDe = true;
     }
-    if (columnProperty == null || columnProperty.length() == 0 
+    if (columnProperty == null || columnProperty.length() == 0
         || columnsetSerDe) {
       // Hack for tables with no columns
       // Treat it as a table with a single column called "col"
-      cachedObjectInspector = ObjectInspectorFactory.getReflectionObjectInspector(
-          ColumnSet.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
+      cachedObjectInspector = ObjectInspectorFactory
+          .getReflectionObjectInspector(ColumnSet.class,
+              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     } else {
       columnNames = Arrays.asList(columnProperty.split(","));
-      cachedObjectInspector = MetadataListStructObjectInspector.getInstance(columnNames);
+      cachedObjectInspector = MetadataListStructObjectInspector
+          .getInstance(columnNames);
     }
-    
-    String lastColumnTakesRestString = tbl.getProperty(Constants.SERIALIZATION_LAST_COLUMN_TAKES_REST);
-    lastColumnTakesRest = (lastColumnTakesRestString != null && lastColumnTakesRestString.equalsIgnoreCase("true"));
-    splitLimit = (lastColumnTakesRest && columnNames != null) ? columnNames.size() : -1; 
-    
-    LOG.debug(getClass().getName() + ": initialized with columnNames: " + columnNames + " and separator code=" + (int)separator.charAt(0) 
-        + " lastColumnTakesRest=" + lastColumnTakesRest + " splitLimit=" + splitLimit);
+
+    String lastColumnTakesRestString = tbl
+        .getProperty(Constants.SERIALIZATION_LAST_COLUMN_TAKES_REST);
+    lastColumnTakesRest = (lastColumnTakesRestString != null && lastColumnTakesRestString
+        .equalsIgnoreCase("true"));
+    splitLimit = (lastColumnTakesRest && columnNames != null) ? columnNames
+        .size() : -1;
+
+    LOG.debug(getClass().getName() + ": initialized with columnNames: "
+        + columnNames + " and separator code=" + (int) separator.charAt(0)
+        + " lastColumnTakesRest=" + lastColumnTakesRest + " splitLimit="
+        + splitLimit);
   }
-  
+
   /**
    * Split the row into columns.
-   * @param limit  up to limit columns will be produced (the last column takes all the rest), -1 for unlimited.
+   * 
+   * @param limit
+   *          up to limit columns will be produced (the last column takes all
+   *          the rest), -1 for unlimited.
    * @return The ColumnSet object
    * @throws Exception
    */
@@ -135,9 +151,9 @@
     } else {
       c.col.clear();
     }
-    String [] l1 = row.split(sep, limit);
+    String[] l1 = row.split(sep, limit);
 
-    for(String s: l1) {
+    for (String s : l1) {
       if (s.equals(nullString)) {
         c.col.add(null);
       } else {
@@ -146,12 +162,13 @@
     }
     return (c);
   }
-  
+
   ColumnSet deserializeCache = new ColumnSet();
+
   public Object deserialize(Writable field) throws SerDeException {
     String row = null;
     if (field instanceof BytesWritable) {
-      BytesWritable b = (BytesWritable)field;
+      BytesWritable b = (BytesWritable) field;
       try {
         row = Text.decode(b.get(), 0, b.getSize());
       } catch (CharacterCodingException e) {
@@ -163,17 +180,17 @@
     try {
       deserialize(deserializeCache, row, separator, nullString, splitLimit);
       if (columnNames != null) {
-        assert(columnNames.size() == deserializeCache.col.size());
+        assert (columnNames.size() == deserializeCache.col.size());
       }
       return deserializeCache;
     } catch (ClassCastException e) {
-      throw new SerDeException( this.getClass().getName() + " expects Text or BytesWritable", e);
+      throw new SerDeException(this.getClass().getName()
+          + " expects Text or BytesWritable", e);
     } catch (Exception e) {
       throw new SerDeException(e);
     }
   }
-  
-  
+
   public ObjectInspector getObjectInspector() throws SerDeException {
     return cachedObjectInspector;
   }
@@ -181,27 +198,33 @@
   public Class<? extends Writable> getSerializedClass() {
     return Text.class;
   }
-  
+
   Text serializeCache = new Text();
-  public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
+
+  public Writable serialize(Object obj, ObjectInspector objInspector)
+      throws SerDeException {
 
     if (objInspector.getCategory() != Category.STRUCT) {
-      throw new SerDeException(getClass().toString() 
-          + " can only serialize struct types, but we got: " + objInspector.getTypeName());
+      throw new SerDeException(getClass().toString()
+          + " can only serialize struct types, but we got: "
+          + objInspector.getTypeName());
     }
     StructObjectInspector soi = (StructObjectInspector) objInspector;
     List<? extends StructField> fields = soi.getAllStructFieldRefs();
-    
+
     StringBuilder sb = new StringBuilder();
-    for(int i=0; i<fields.size(); i++) {
-      if (i>0) sb.append(separator);
+    for (int i = 0; i < fields.size(); i++) {
+      if (i > 0) {
+        sb.append(separator);
+      }
       Object column = soi.getStructFieldData(obj, fields.get(i));
       if (fields.get(i).getFieldObjectInspector().getCategory() == Category.PRIMITIVE) {
         // For primitive object, serialize to plain string
         sb.append(column == null ? nullString : column.toString());
       } else {
         // For complex object, serialize to JSON format
-        sb.append(SerDeUtils.getJSONString(column, fields.get(i).getFieldObjectInspector()));
+        sb.append(SerDeUtils.getJSONString(column, fields.get(i)
+            .getFieldObjectInspector()));
       }
     }
     serializeCache.set(sb.toString());
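
The round trip above is a simple delimited text format: fields joined by the \001 separator, nulls written as \N, and, when the SERIALIZATION_LAST_COLUMN_TAKES_REST table property is set, a split limit so the final declared column keeps everything that remains. A self-contained sketch of that split behavior using plain java.lang.String, mirroring the row.split(sep, limit) call in the code above:

    public class ColumnsetSplitExample {
      public static void main(String[] args) {
        String sep = "\001";
        String nullString = "\\N";
        String row = "7" + sep + nullString + sep + "a" + sep + "b";

        // limit = 3 emulates lastColumnTakesRest with three declared
        // columns: the third field keeps the remaining "a\001b" intact.
        String[] cols = row.split(sep, 3);
        for (String c : cols) {
          System.out.println(c.equals(nullString) ? "NULL" : c);
        }
        // prints: 7, NULL, a<\001>b
      }
    }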

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java Thu Jan 21 09:52:44 2010
@@ -21,12 +21,12 @@
 /**
  * A union of HiveDeserializer and HiveSerializer interface.
  * 
- * If a developer wants his hive table to be read-only, then he just want to 
- * return 
+ * If a developer wants his hive table to be read-only, then he just want to
+ * return
  * 
  * both readable and writable, then
- *   
- *
+ * 
+ * 
  */
 public interface SerDe extends Deserializer, Serializer {
 

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeException.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeException.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeException.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeException.java Thu Jan 21 09:52:44 2010
@@ -20,7 +20,7 @@
 
 /**
  * Generic exception class for SerDes
- *
+ * 
  */
 
 public class SerDeException extends Exception {
@@ -42,4 +42,3 @@
     super(message, cause);
   }
 }
-

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java Thu Jan 21 09:52:44 2010
@@ -18,14 +18,15 @@
 
 package org.apache.hadoop.hive.serde2;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 import org.apache.hadoop.hive.common.JavaUtils;
-import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
@@ -37,11 +38,9 @@
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
-import org.apache.hadoop.io.Text;
 
 public class SerDeUtils {
 
-
   public static final char QUOTE = '"';
   public static final char COLON = ':';
   public static final char COMMA = ',';
@@ -50,40 +49,46 @@
   public static final String LBRACE = "{";
   public static final String RBRACE = "}";
 
-  private static HashMap<String, Class<?>> serdes = new HashMap<String, Class<?>> ();
+  private static HashMap<String, Class<?>> serdes = new HashMap<String, Class<?>>();
 
   public static void registerSerDe(String name, Class<?> serde) {
-    if(serdes.containsKey(name)) {
+    if (serdes.containsKey(name)) {
       throw new RuntimeException("double registering serde " + name);
     }
     serdes.put(name, serde);
   }
 
-  public static Deserializer lookupDeserializer(String name) throws SerDeException {
+  public static Deserializer lookupDeserializer(String name)
+      throws SerDeException {
     Class<?> c;
-    if(serdes.containsKey(name)) {
-        c = serdes.get(name);
+    if (serdes.containsKey(name)) {
+      c = serdes.get(name);
     } else {
       try {
         c = Class.forName(name, true, JavaUtils.getClassLoader());
-      } catch(ClassNotFoundException e) {
+      } catch (ClassNotFoundException e) {
         throw new SerDeException("SerDe " + name + " does not exist");
       }
     }
     try {
-      return (Deserializer)c.newInstance();
-    } catch(Exception e) {
+      return (Deserializer) c.newInstance();
+    } catch (Exception e) {
       throw new SerDeException(e);
     }
   }
 
-  private static List<String> nativeSerDeNames = new ArrayList<String>(); 
+  private static List<String> nativeSerDeNames = new ArrayList<String>();
   static {
-    nativeSerDeNames.add(org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe.class.getName());
-    nativeSerDeNames.add(org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class.getName());
+    nativeSerDeNames
+        .add(org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe.class
+            .getName());
+    nativeSerDeNames
+        .add(org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class
+            .getName());
     // For backward compatibility
     nativeSerDeNames.add("org.apache.hadoop.hive.serde.thrift.columnsetSerDe");
-    nativeSerDeNames.add(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
+    nativeSerDeNames
+        .add(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
   }
 
   public static boolean isNativeSerDe(String serde) {
@@ -91,16 +96,23 @@
   }
 
   private static boolean initCoreSerDes = registerCoreSerDes();
-  
+
   protected static boolean registerCoreSerDes() {
-    // Eagerly load SerDes so they will register their symbolic names even on Lazy Loading JVMs
+    // Eagerly load SerDes so they will register their symbolic names even on
+    // Lazy Loading JVMs
     try {
       // loading these classes will automatically register the short names
-      Class.forName(org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class.getName());
-      Class.forName(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
-      Class.forName(org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer.class.getName());
+      Class
+          .forName(org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.class
+              .getName());
+      Class.forName(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class
+          .getName());
+      Class
+          .forName(org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer.class
+              .getName());
     } catch (ClassNotFoundException e) {
-      throw new RuntimeException("IMPOSSIBLE Exception: Unable to initialize core serdes", e);
+      throw new RuntimeException(
+          "IMPOSSIBLE Exception: Unable to initialize core serdes", e);
     }
     return true;
   }
@@ -159,7 +171,6 @@
     return (escape.toString());
   }
 
-
   public static String lightEscapeString(String str) {
     int length = str.length();
     StringBuilder escape = new StringBuilder(length + 16);
@@ -193,123 +204,129 @@
     return sb.toString();
   }
 
-
   static void buildJSONString(StringBuilder sb, Object o, ObjectInspector oi) {
 
-    switch(oi.getCategory()) {
-      case PRIMITIVE: {
-        PrimitiveObjectInspector poi = (PrimitiveObjectInspector)oi;
-        if (o == null) {
-          sb.append("null");
-        } else {
-          switch (poi.getPrimitiveCategory()) {
-          case BOOLEAN: {
-            boolean b = ((BooleanObjectInspector)poi).get(o);
-            sb.append(b ? "true" : "false");
-            break;
-          }
-          case BYTE: {
-            sb.append(((ByteObjectInspector)poi).get(o));
-            break;
-          }
-          case SHORT: {
-            sb.append(((ShortObjectInspector)poi).get(o));
-            break;
-          }
-          case INT: {
-            sb.append(((IntObjectInspector)poi).get(o));
-            break;
-          }
-          case LONG: {
-            sb.append(((LongObjectInspector)poi).get(o));
-            break;
-          }
-          case FLOAT: {
-            sb.append(((FloatObjectInspector)poi).get(o));
-            break;
-          }
-          case DOUBLE: {
-            sb.append(((DoubleObjectInspector)poi).get(o));
-            break;
-          }
-          case STRING: {
-            sb.append('"'); 
-            sb.append(escapeString(((StringObjectInspector)poi).getPrimitiveJavaObject(o)));
-            sb.append('"'); 
-            break;
-          }
-          default:
-            throw new RuntimeException("Unknown primitive type: " + poi.getPrimitiveCategory());
-          }
+    switch (oi.getCategory()) {
+    case PRIMITIVE: {
+      PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
+      if (o == null) {
+        sb.append("null");
+      } else {
+        switch (poi.getPrimitiveCategory()) {
+        case BOOLEAN: {
+          boolean b = ((BooleanObjectInspector) poi).get(o);
+          sb.append(b ? "true" : "false");
+          break;
+        }
+        case BYTE: {
+          sb.append(((ByteObjectInspector) poi).get(o));
+          break;
+        }
+        case SHORT: {
+          sb.append(((ShortObjectInspector) poi).get(o));
+          break;
+        }
+        case INT: {
+          sb.append(((IntObjectInspector) poi).get(o));
+          break;
+        }
+        case LONG: {
+          sb.append(((LongObjectInspector) poi).get(o));
+          break;
+        }
+        case FLOAT: {
+          sb.append(((FloatObjectInspector) poi).get(o));
+          break;
+        }
+        case DOUBLE: {
+          sb.append(((DoubleObjectInspector) poi).get(o));
+          break;
+        }
+        case STRING: {
+          sb.append('"');
+          sb.append(escapeString(((StringObjectInspector) poi)
+              .getPrimitiveJavaObject(o)));
+          sb.append('"');
+          break;
+        }
+        default:
+          throw new RuntimeException("Unknown primitive type: "
+              + poi.getPrimitiveCategory());
         }
-        break;
       }
-      case LIST: {
-        ListObjectInspector loi = (ListObjectInspector)oi;
-        ObjectInspector listElementObjectInspector = loi.getListElementObjectInspector();
-        List<?> olist = loi.getList(o);
-        if (olist == null) {
-          sb.append("null");
-        } else {
-          sb.append(LBRACKET);
-          for (int i=0; i<olist.size(); i++) {
-            if (i>0) sb.append(COMMA);
-            buildJSONString(sb, olist.get(i), listElementObjectInspector);
+      break;
+    }
+    case LIST: {
+      ListObjectInspector loi = (ListObjectInspector) oi;
+      ObjectInspector listElementObjectInspector = loi
+          .getListElementObjectInspector();
+      List<?> olist = loi.getList(o);
+      if (olist == null) {
+        sb.append("null");
+      } else {
+        sb.append(LBRACKET);
+        for (int i = 0; i < olist.size(); i++) {
+          if (i > 0) {
+            sb.append(COMMA);
           }
-          sb.append(RBRACKET);
+          buildJSONString(sb, olist.get(i), listElementObjectInspector);
         }
-        break;
+        sb.append(RBRACKET);
       }
-      case MAP: {
-        MapObjectInspector moi = (MapObjectInspector)oi;
-        ObjectInspector mapKeyObjectInspector = moi.getMapKeyObjectInspector();
-        ObjectInspector mapValueObjectInspector = moi.getMapValueObjectInspector();
-        Map<?,?> omap = moi.getMap(o);
-        if (omap == null) {
-          sb.append("null");
-        } else {
-          sb.append(LBRACE);
-          boolean first = true;
-          for(Object entry : omap.entrySet()) {
-            if (first) {
-              first = false;
-            } else {
-              sb.append(COMMA);
-            }
-            Map.Entry<?,?> e = (Map.Entry<?,?>)entry;
-            buildJSONString(sb, e.getKey(), mapKeyObjectInspector);
-            sb.append(COLON);
-            buildJSONString(sb, e.getValue(), mapValueObjectInspector);
-          }
-          sb.append(RBRACE);
+      break;
+    }
+    case MAP: {
+      MapObjectInspector moi = (MapObjectInspector) oi;
+      ObjectInspector mapKeyObjectInspector = moi.getMapKeyObjectInspector();
+      ObjectInspector mapValueObjectInspector = moi
+          .getMapValueObjectInspector();
+      Map<?, ?> omap = moi.getMap(o);
+      if (omap == null) {
+        sb.append("null");
+      } else {
+        sb.append(LBRACE);
+        boolean first = true;
+        for (Object entry : omap.entrySet()) {
+          if (first) {
+            first = false;
+          } else {
+            sb.append(COMMA);
+          }
+          Map.Entry<?, ?> e = (Map.Entry<?, ?>) entry;
+          buildJSONString(sb, e.getKey(), mapKeyObjectInspector);
+          sb.append(COLON);
+          buildJSONString(sb, e.getValue(), mapValueObjectInspector);
         }
-        break;
+        sb.append(RBRACE);
       }
-      case STRUCT: {
-        StructObjectInspector soi = (StructObjectInspector)oi;
-        List<? extends StructField> structFields = soi.getAllStructFieldRefs();
-        if (o == null) {
-          sb.append("null");
-        } else {
-          sb.append(LBRACE);
-          for(int i=0; i<structFields.size(); i++) {
-            if (i>0) {
-              sb.append(COMMA);
-            }
-            sb.append(QUOTE);
-            sb.append(structFields.get(i).getFieldName());
-            sb.append(QUOTE);
-            sb.append(COLON);
-            buildJSONString(sb, soi.getStructFieldData(o, structFields.get(i)), 
-                structFields.get(i).getFieldObjectInspector());          
-          }
-          sb.append(RBRACE);
+      break;
+    }
+    case STRUCT: {
+      StructObjectInspector soi = (StructObjectInspector) oi;
+      List<? extends StructField> structFields = soi.getAllStructFieldRefs();
+      if (o == null) {
+        sb.append("null");
+      } else {
+        sb.append(LBRACE);
+        for (int i = 0; i < structFields.size(); i++) {
+          if (i > 0) {
+            sb.append(COMMA);
+          }
+          sb.append(QUOTE);
+          sb.append(structFields.get(i).getFieldName());
+          sb.append(QUOTE);
+          sb.append(COLON);
+          buildJSONString(sb, soi.getStructFieldData(o, structFields.get(i)),
+              structFields.get(i).getFieldObjectInspector());
         }
-        break;
+        sb.append(RBRACE);
       }
-      default:
-        throw new RuntimeException("Unknown type in ObjectInspector!");
-    };
-    
-  }  
+      break;
+    }
+    default:
+      throw new RuntimeException("Unknown type in ObjectInspector!");
+    }
+    ;
+
+  }
 }
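
buildJSONString above renders an arbitrary object graph to JSON by dispatching on the ObjectInspector category and recursing for lists, maps, and structs. A hedged usage sketch through the public getJSONString entry point (the struct class and its field names are invented for illustration; both factory calls appear elsewhere in this commit):

    import org.apache.hadoop.hive.serde2.SerDeUtils;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;

    public class JsonExample {
      /** Hypothetical struct: its fields become the JSON keys. */
      public static class Point {
        public int x = 3;
        public String label = "origin";
      }

      public static void main(String[] args) {
        ObjectInspector oi = ObjectInspectorFactory.getReflectionObjectInspector(
            Point.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
        // Expected output: {"x":3,"label":"origin"}
        System.out.println(SerDeUtils.getJSONString(new Point(), oi));
      }
    }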

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java Thu Jan 21 09:52:44 2010
@@ -18,40 +18,46 @@
 
 package org.apache.hadoop.hive.serde2;
 
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.conf.Configuration;
-import java.util.Properties;
 
 /**
- * HiveSerializer is used to serialize data to a Hadoop Writable object.
- * The serialize 
- * In addition to the interface below, all implementations are assume to have a ctor
- * that takes a single 'Table' object as argument.
- *
+ * HiveSerializer is used to serialize data to a Hadoop Writable object. The
+ * serialize In addition to the interface below, all implementations are assume
+ * to have a ctor that takes a single 'Table' object as argument.
+ * 
  */
 public interface Serializer {
 
   /**
    * Initialize the HiveSerializer.
-   * @param conf System properties
-   * @param tbl  table properties
+   * 
+   * @param conf
+   *          System properties
+   * @param tbl
+   *          table properties
    * @throws SerDeException
    */
-  public void initialize(Configuration conf, Properties tbl) throws SerDeException;
-  
+  public void initialize(Configuration conf, Properties tbl)
+      throws SerDeException;
+
   /**
    * Returns the Writable class that would be returned by the serialize method.
    * This is used to initialize SequenceFile header.
    */
   public Class<? extends Writable> getSerializedClass();
+
   /**
-   * Serialize an object by navigating inside the Object with the ObjectInspector.
-   * In most cases, the return value of this function will be constant since the function
-   * will reuse the Writable object.
-   * If the client wants to keep a copy of the Writable, the client needs to clone the
+   * Serialize an object by navigating inside the Object with the
+   * ObjectInspector. In most cases, the return value of this function will be
+   * constant since the function will reuse the Writable object. If the client
+   * wants to keep a copy of the Writable, the client needs to clone the
    * returned value.
    */
-  public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException;
+  public Writable serialize(Object obj, ObjectInspector objInspector)
+      throws SerDeException;
 
 }
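
As with Deserializer, the reuse contract documented above means serialize() may hand back the same Writable on every call. A minimal hypothetical implementation, reusing one Text the way the javadoc describes:

    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.Serializer;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;

    /** Hypothetical serializer: renders any object with toString(). */
    public class ToStringSerializer implements Serializer {

      // Reused across calls, per the interface contract above.
      private final Text out = new Text();

      public void initialize(Configuration conf, Properties tbl)
          throws SerDeException {
        // Nothing to configure in this sketch.
      }

      public Class<? extends Writable> getSerializedClass() {
        return Text.class;
      }

      public Writable serialize(Object obj, ObjectInspector objInspector)
          throws SerDeException {
        out.set(obj == null ? "\\N" : obj.toString());
        return out; // callers must clone this if they keep it
      }
    }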

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/TypedSerDe.java Thu Jan 21 09:52:44 2010
@@ -37,20 +37,22 @@
   public TypedSerDe(Type objectType) throws SerDeException {
     this.objectType = objectType;
     if (objectType instanceof Class) {
-      objectClass = (Class<?>)objectType;
+      objectClass = (Class<?>) objectType;
     } else if (objectType instanceof ParameterizedType) {
-      objectClass = (Class<?>)(((ParameterizedType)objectType).getRawType());
+      objectClass = (Class<?>) (((ParameterizedType) objectType).getRawType());
     } else {
-      throw new SerDeException("Cannot create TypedSerDe with type " + objectType);
+      throw new SerDeException("Cannot create TypedSerDe with type "
+          + objectType);
     }
   }
 
   protected Object deserializeCache;
+
   public Object deserialize(Writable blob) throws SerDeException {
     if (deserializeCache == null) {
       return ReflectionUtils.newInstance(objectClass, null);
     } else {
-      assert(deserializeCache.getClass().equals(objectClass));
+      assert (deserializeCache.getClass().equals(objectClass));
       return deserializeCache;
     }
   }
@@ -63,7 +65,7 @@
   protected ObjectInspectorFactory.ObjectInspectorOptions getObjectInspectorOptions() {
     return ObjectInspectorFactory.ObjectInspectorOptions.JAVA;
   }
-  
+
   public void initialize(Configuration job, Properties tbl)
       throws SerDeException {
     // do nothing
@@ -72,7 +74,9 @@
   public Class<? extends Writable> getSerializedClass() {
     return BytesWritable.class;
   }
-  public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
+
+  public Writable serialize(Object obj, ObjectInspector objInspector)
+      throws SerDeException {
     throw new RuntimeException("not supported");
   }
 

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java Thu Jan 21 09:52:44 2010
@@ -65,43 +65,41 @@
 import org.apache.hadoop.io.Writable;
 
 /**
- * BinarySortableSerDe can be used to write data in a way that the data can be 
- * compared byte-by-byte with the same order. 
+ * BinarySortableSerDe can be used to write data in a way that the data can be
+ * compared byte-by-byte with the same order.
  * 
- * The data format:
- * NULL:  a single byte \0
- * NON-NULL Primitives: ALWAYS prepend a single byte \1, and then:
- *   Boolean: FALSE = \1, TRUE = \2
- *   Byte:    flip the sign-bit to make sure negative comes before positive
- *   Short:   flip the sign-bit to make sure negative comes before positive
- *   Int:     flip the sign-bit to make sure negative comes before positive
- *   Long:    flip the sign-bit to make sure negative comes before positive
- *   Double:  flip the sign-bit for positive double, and all bits for negative double values
- *   String:  NULL-terminated UTF-8 string, with NULL escaped to \1 \1, and \1 escaped to \1 \2
- * NON-NULL Complex Types: ALWAYS prepend a single byte \1, and then:
- *   Struct:  one field by one field.
- *   List:    \1 followed by each element, and \0 to terminate 
- *   Map:     \1 followed by each key and then each value, and \0 to terminate
- *   
- * This SerDe takes an additional parameter SERIALIZATION_SORT_ORDER which is a string containing only "+" and "-".
- * The length of the string should equal to the number of fields in the top-level struct for serialization.
- * "+" means the field should be sorted ascendingly, and "-" means descendingly. The sub fields in the same top-level
- * field will have the same sort order. 
+ * The data format: NULL: a single byte \0 NON-NULL Primitives: ALWAYS prepend a
+ * single byte \1, and then: Boolean: FALSE = \1, TRUE = \2 Byte: flip the
+ * sign-bit to make sure negative comes before positive Short: flip the sign-bit
+ * to make sure negative comes before positive Int: flip the sign-bit to make
+ * sure negative comes before positive Long: flip the sign-bit to make sure
+ * negative comes before positive Double: flip the sign-bit for positive double,
+ * and all bits for negative double values String: NULL-terminated UTF-8 string,
+ * with NULL escaped to \1 \1, and \1 escaped to \1 \2 NON-NULL Complex Types:
+ * ALWAYS prepend a single byte \1, and then: Struct: one field by one field.
+ * List: \1 followed by each element, and \0 to terminate Map: \1 followed by
+ * each key and then each value, and \0 to terminate
+ * 
+ * This SerDe takes an additional parameter SERIALIZATION_SORT_ORDER which is a
+ * string containing only "+" and "-". The length of the string should equal to
+ * the number of fields in the top-level struct for serialization. "+" means the
+ * field should be sorted ascendingly, and "-" means descendingly. The sub
+ * fields in the same top-level field will have the same sort order.
  * 
  */
 public class BinarySortableSerDe implements SerDe {
 
-  public static final Log LOG = LogFactory.getLog(
-      BinarySortableSerDe.class.getName());
-  
+  public static final Log LOG = LogFactory.getLog(BinarySortableSerDe.class
+      .getName());
+
   List<String> columnNames;
   List<TypeInfo> columnTypes;
-  
+
   TypeInfo rowTypeInfo;
   StructObjectInspector rowObjectInspector;
-  
+
   boolean[] columnSortOrderIsDesc;
-  
+
   @Override
   public void initialize(Configuration conf, Properties tbl)
       throws SerDeException {
@@ -117,27 +115,30 @@
     if (columnTypeProperty.length() == 0) {
       columnTypes = new ArrayList<TypeInfo>();
     } else {
-      columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
+      columnTypes = TypeInfoUtils
+          .getTypeInfosFromTypeString(columnTypeProperty);
     }
-    assert(columnNames.size() == columnTypes.size());
-    
+    assert (columnNames.size() == columnTypes.size());
+
     // Create row related objects
     rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
-    rowObjectInspector = (StructObjectInspector)TypeInfoUtils
+    rowObjectInspector = (StructObjectInspector) TypeInfoUtils
         .getStandardWritableObjectInspectorFromTypeInfo(rowTypeInfo);
     row = new ArrayList<Object>(columnNames.size());
-    for (int i=0; i<columnNames.size(); i++) {
+    for (int i = 0; i < columnNames.size(); i++) {
       row.add(null);
     }
-    
+
     // Get the sort order
-    String columnSortOrder = tbl.getProperty(Constants.SERIALIZATION_SORT_ORDER);
+    String columnSortOrder = tbl
+        .getProperty(Constants.SERIALIZATION_SORT_ORDER);
     columnSortOrderIsDesc = new boolean[columnNames.size()];
-    for (int i=0; i<columnSortOrderIsDesc.length; i++) {
-      columnSortOrderIsDesc[i] = (columnSortOrder != null && columnSortOrder.charAt(i) == '-');
+    for (int i = 0; i < columnSortOrderIsDesc.length; i++) {
+      columnSortOrderIsDesc[i] = (columnSortOrder != null && columnSortOrder
+          .charAt(i) == '-');
     }
   }
-  
+
   @Override
   public Class<? extends Writable> getSerializedClass() {
     return BytesWritable.class;
@@ -150,260 +151,268 @@
 
   ArrayList<Object> row;
   InputByteBuffer inputByteBuffer = new InputByteBuffer();
+
   @Override
   public Object deserialize(Writable blob) throws SerDeException {
-    BytesWritable data = (BytesWritable)blob;
+    BytesWritable data = (BytesWritable) blob;
     inputByteBuffer.reset(data.get(), 0, data.getSize());
-    
+
     try {
-      for (int i=0; i<columnNames.size(); i++) {
-        row.set(i, deserialize(inputByteBuffer, columnTypes.get(i), 
+      for (int i = 0; i < columnNames.size(); i++) {
+        row.set(i, deserialize(inputByteBuffer, columnTypes.get(i),
             columnSortOrderIsDesc[i], row.get(i)));
       }
     } catch (IOException e) {
       throw new SerDeException(e);
     }
-    
+
     return row;
   }
 
-  static Object deserialize(InputByteBuffer buffer, TypeInfo type, 
+  static Object deserialize(InputByteBuffer buffer, TypeInfo type,
       boolean invert, Object reuse) throws IOException {
-    
+
     // Is this field a null?
     byte isNull = buffer.read(invert);
     if (isNull == 0) {
       return null;
     }
-    assert(isNull == 1);
-    
+    assert (isNull == 1);
+
     switch (type.getCategory()) {
-      case PRIMITIVE: {
-        PrimitiveTypeInfo ptype = (PrimitiveTypeInfo)type;
-        switch (ptype.getPrimitiveCategory()) {
-          case VOID: {
-            return null;
-          }
-          case BOOLEAN: {
-            BooleanWritable r = reuse == null ? new BooleanWritable() : (BooleanWritable)reuse;
-            byte b = buffer.read(invert);
-            assert(b == 1 || b == 2);
-            r.set(b == 2);
-            return r;
-          }
-          case BYTE: {
-            ByteWritable r = reuse == null ? new ByteWritable() : (ByteWritable)reuse;
-            r.set((byte)(buffer.read(invert) ^ 0x80));
-            return r;
-          }
-          case SHORT: {
-            ShortWritable r = reuse == null ? new ShortWritable() : (ShortWritable)reuse;
-            int v = buffer.read(invert) ^ 0x80;
-            v = (v << 8) + (buffer.read(invert) & 0xff);
-            r.set((short)v);
-            return r;
-          }
-          case INT: {
-            IntWritable r = reuse == null ? new IntWritable() : (IntWritable)reuse;
-            int v = buffer.read(invert) ^ 0x80;
-            for (int i=0; i<3; i++) {
-              v = (v << 8) + (buffer.read(invert) & 0xff);
-            }
-            r.set(v);
-            return r;
-          }
-          case LONG: {
-            LongWritable r = reuse == null ? new LongWritable() : (LongWritable)reuse;
-            long v = buffer.read(invert) ^ 0x80;
-            for (int i=0; i<7; i++) {
-              v = (v << 8) + (buffer.read(invert) & 0xff);
-            }
-            r.set(v);
-            return r;
-          }
-          case FLOAT: {
-            FloatWritable r = reuse == null ? new FloatWritable() : (FloatWritable)reuse;
-            int v = 0;
-            for (int i=0; i<4; i++) {
-              v = (v << 8) + (buffer.read(invert) & 0xff);
-            }
-            if ((v & (1<<31)) == 0) {
-              // negative number, flip all bits
-              v = ~v;
-            } else {
-              // positive number, flip the first bit
-              v = v ^ (1<<31);
-            }
-            r.set(Float.intBitsToFloat(v));
-            return r;
-          }
-          case DOUBLE: {
-            DoubleWritable r = reuse == null ? new DoubleWritable() : (DoubleWritable)reuse;
-            long v = 0;
-            for (int i=0; i<8; i++) {
-              v = (v << 8) + (buffer.read(invert) & 0xff);
-            }
-            if ((v & (1L<<63)) == 0) {
-              // negative number, flip all bits
-              v = ~v;
-            } else {
-              // positive number, flip the first bit
-              v = v ^ (1L<<63);
-            }
-            r.set(Double.longBitsToDouble(v));
-            return r;
-          }
-          case STRING: {
-            Text r = reuse == null ? new Text() : (Text)reuse;
-            // Get the actual length first
-            int start = buffer.tell();
-            int length = 0;
-            do {
-              byte b = buffer.read(invert);
-              if (b == 0) {
-                // end of string
-                break;
-              }
-              if (b == 1) {
-                // the last char is an escape char; read the actual char
-                buffer.read(invert);
-              }
-              length ++;
-            } while (true);
-            
-            if (length == buffer.tell() - start) {
-              // No escaping happened, so we are already done.
-              r.set(buffer.getData(), start, length);
-            } else {
-              // Escaping happened, we need to copy byte-by-byte.
-              // 1. Set the length first.
-              r.set(buffer.getData(), start, length);
-              // 2. Reset the pointer.
-              buffer.seek(start);
-              // 3. Copy the data.
-              byte[] rdata = r.getBytes();
-              for (int i=0; i<length; i++) {
-                byte b = buffer.read(invert);
-                if (b == 1) {
-                  // The last char is an escape char, read the actual char.
-                  // The serialization format escapes \0 to \1, and \1 to \2,
-                  // to make sure the string is null-terminated.
-                  b = (byte)(buffer.read(invert) - 1);
-                }
-                rdata[i] = b;
-              }
-              // 4. Read the null terminator.
-              byte b = buffer.read(invert);
-              assert(b == 0);
-            }
-            return r;
-          }
-          default: {
-            throw new RuntimeException("Unrecognized type: " + ptype.getPrimitiveCategory());
-          }
+    case PRIMITIVE: {
+      PrimitiveTypeInfo ptype = (PrimitiveTypeInfo) type;
+      switch (ptype.getPrimitiveCategory()) {
+      case VOID: {
+        return null;
+      }
+      case BOOLEAN: {
+        BooleanWritable r = reuse == null ? new BooleanWritable()
+            : (BooleanWritable) reuse;
+        byte b = buffer.read(invert);
+        assert (b == 1 || b == 2);
+        r.set(b == 2);
+        return r;
+      }
+      case BYTE: {
+        ByteWritable r = reuse == null ? new ByteWritable()
+            : (ByteWritable) reuse;
+        r.set((byte) (buffer.read(invert) ^ 0x80));
+        return r;
+      }
+      case SHORT: {
+        ShortWritable r = reuse == null ? new ShortWritable()
+            : (ShortWritable) reuse;
+        int v = buffer.read(invert) ^ 0x80;
+        v = (v << 8) + (buffer.read(invert) & 0xff);
+        r.set((short) v);
+        return r;
+      }
+      case INT: {
+        IntWritable r = reuse == null ? new IntWritable() : (IntWritable) reuse;
+        int v = buffer.read(invert) ^ 0x80;
+        for (int i = 0; i < 3; i++) {
+          v = (v << 8) + (buffer.read(invert) & 0xff);
         }
+        r.set(v);
+        return r;
       }
-      case LIST: {
-        ListTypeInfo ltype = (ListTypeInfo)type;
-        TypeInfo etype = ltype.getListElementTypeInfo();
-        
-        // Create the list if needed
-        ArrayList<Object> r = reuse == null ? new ArrayList<Object>() : (ArrayList<Object>)reuse;
-
-        // Read the list
-        int size = 0;
-        while (true) {
-          int more = buffer.read(invert);
-          if (more == 0) {
-            // \0 to terminate
-            break;
-          }
-          // \1 followed by each element
-          assert(more == 1);
-          if (size == r.size()) {
-            r.add(null);
-          }
-          r.set(size, deserialize(buffer, etype, invert, r.get(size)));
-          size++;
+      case LONG: {
+        LongWritable r = reuse == null ? new LongWritable()
+            : (LongWritable) reuse;
+        long v = buffer.read(invert) ^ 0x80;
+        for (int i = 0; i < 7; i++) {
+          v = (v << 8) + (buffer.read(invert) & 0xff);
         }
-        // Remove additional elements if the list is reused
-        while (r.size() > size) {
-          r.remove(r.size()-1);
+        r.set(v);
+        return r;
+      }
+      case FLOAT: {
+        FloatWritable r = reuse == null ? new FloatWritable()
+            : (FloatWritable) reuse;
+        int v = 0;
+        for (int i = 0; i < 4; i++) {
+          v = (v << 8) + (buffer.read(invert) & 0xff);
+        }
+        if ((v & (1 << 31)) == 0) {
+          // negative number, flip all bits
+          v = ~v;
+        } else {
+          // positive number, flip the first bit
+          v = v ^ (1 << 31);
         }
+        r.set(Float.intBitsToFloat(v));
         return r;
-      }      
-      case MAP: {
-        MapTypeInfo mtype = (MapTypeInfo)type;
-        TypeInfo ktype = mtype.getMapKeyTypeInfo();
-        TypeInfo vtype = mtype.getMapValueTypeInfo();
-        
-        // Create the map if needed
-        Map<Object, Object> r;
-        if (reuse == null) {
-          r = new HashMap<Object, Object>();
+      }
+      case DOUBLE: {
+        DoubleWritable r = reuse == null ? new DoubleWritable()
+            : (DoubleWritable) reuse;
+        long v = 0;
+        for (int i = 0; i < 8; i++) {
+          v = (v << 8) + (buffer.read(invert) & 0xff);
+        }
+        if ((v & (1L << 63)) == 0) {
+          // negative number, flip all bits
+          v = ~v;
         } else {
-          r = (HashMap<Object, Object>)reuse;
-          r.clear();
+          // positive number, flip the first bit
+          v = v ^ (1L << 63);
         }
-
-        // Read the map
-        int size = 0;
-        while (true) {
-          int more = buffer.read(invert);
-          if (more == 0) {
-            // \0 to terminate
+        r.set(Double.longBitsToDouble(v));
+        return r;
+      }
+      case STRING: {
+        Text r = reuse == null ? new Text() : (Text) reuse;
+        // Get the actual length first
+        int start = buffer.tell();
+        int length = 0;
+        do {
+          byte b = buffer.read(invert);
+          if (b == 0) {
+            // end of string
             break;
           }
-          // \1 followed by each key and then each value
-          assert(more == 1);
-          Object k = deserialize(buffer, ktype, invert, null);
-          Object v = deserialize(buffer, vtype, invert, null);
-          r.put(k, v);
+          if (b == 1) {
+            // the last char is an escape char; read the actual char
+            buffer.read(invert);
+          }
+          length++;
+        } while (true);
+
+        if (length == buffer.tell() - start) {
+          // No escaping happened, so we are already done.
+          r.set(buffer.getData(), start, length);
+        } else {
+          // Escaping happened, we need to copy byte-by-byte.
+          // 1. Set the length first.
+          r.set(buffer.getData(), start, length);
+          // 2. Reset the pointer.
+          buffer.seek(start);
+          // 3. Copy the data.
+          byte[] rdata = r.getBytes();
+          for (int i = 0; i < length; i++) {
+            byte b = buffer.read(invert);
+            if (b == 1) {
+              // The last char is an escape char, read the actual char.
+              // The serialization format escapes \0 to \1, and \1 to \2,
+              // to make sure the string is null-terminated.
+              b = (byte) (buffer.read(invert) - 1);
+            }
+            rdata[i] = b;
+          }
+          // 4. Read the null terminator.
+          byte b = buffer.read(invert);
+          assert (b == 0);
         }
         return r;
       }
-      case STRUCT: {
-        StructTypeInfo stype = (StructTypeInfo)type;
-        List<TypeInfo> fieldTypes = stype.getAllStructFieldTypeInfos();
-        int size = fieldTypes.size();
-        // Create the struct if needed
-        ArrayList<Object> r = reuse == null ? new ArrayList<Object>(size) : (ArrayList<Object>)reuse;
-        assert(r.size() <= size);
-        // Set the size of the struct
-        while (r.size() < size) {
+      default: {
+        throw new RuntimeException("Unrecognized type: "
+            + ptype.getPrimitiveCategory());
+      }
+      }
+    }
+    case LIST: {
+      ListTypeInfo ltype = (ListTypeInfo) type;
+      TypeInfo etype = ltype.getListElementTypeInfo();
+
+      // Create the list if needed
+      ArrayList<Object> r = reuse == null ? new ArrayList<Object>()
+          : (ArrayList<Object>) reuse;
+
+      // Read the list
+      int size = 0;
+      while (true) {
+        int more = buffer.read(invert);
+        if (more == 0) {
+          // \0 to terminate
+          break;
+        }
+        // \1 followed by each element
+        assert (more == 1);
+        if (size == r.size()) {
           r.add(null);
         }
-        // Read one field by one field
-        for (int eid = 0; eid < size; eid++) {
-          r.set(eid, deserialize(buffer, fieldTypes.get(eid), invert, r.get(eid)));
+        r.set(size, deserialize(buffer, etype, invert, r.get(size)));
+        size++;
+      }
+      // Remove additional elements if the list is reused
+      while (r.size() > size) {
+        r.remove(r.size() - 1);
+      }
+      return r;
+    }
+    case MAP: {
+      MapTypeInfo mtype = (MapTypeInfo) type;
+      TypeInfo ktype = mtype.getMapKeyTypeInfo();
+      TypeInfo vtype = mtype.getMapValueTypeInfo();
+
+      // Create the map if needed
+      Map<Object, Object> r;
+      if (reuse == null) {
+        r = new HashMap<Object, Object>();
+      } else {
+        r = (HashMap<Object, Object>) reuse;
+        r.clear();
+      }
+
+      while (true) {
+        int more = buffer.read(invert);
+        if (more == 0) {
+          // \0 to terminate
+          break;
         }
-        return r;
+        // \1 followed by each key and then each value
+        assert (more == 1);
+        Object k = deserialize(buffer, ktype, invert, null);
+        Object v = deserialize(buffer, vtype, invert, null);
+        r.put(k, v);
       }
-      default: {
-        throw new RuntimeException("Unrecognized type: " + type.getCategory());
+      return r;
+    }
+    case STRUCT: {
+      StructTypeInfo stype = (StructTypeInfo) type;
+      List<TypeInfo> fieldTypes = stype.getAllStructFieldTypeInfos();
+      int size = fieldTypes.size();
+      // Create the struct if needed
+      ArrayList<Object> r = reuse == null ? new ArrayList<Object>(size)
+          : (ArrayList<Object>) reuse;
+      assert (r.size() <= size);
+      // Set the size of the struct
+      while (r.size() < size) {
+        r.add(null);
       }
+      // Read one field by one field
+      for (int eid = 0; eid < size; eid++) {
+        r
+            .set(eid, deserialize(buffer, fieldTypes.get(eid), invert, r
+                .get(eid)));
+      }
+      return r;
+    }
+    default: {
+      throw new RuntimeException("Unrecognized type: " + type.getCategory());
+    }
     }
   }
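
The STRING branch above undoes an escaping scheme that is easier to see from
the writing side: \0 becomes the pair \1\1 and \1 becomes \1\2, so that a
bare \0 is free to terminate the field. A minimal sketch of that encoding
(escapeForSort is a hypothetical helper, not Hive API):

    static byte[] escapeForSort(byte[] s) {
      java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
      for (byte b : s) {
        if (b == 0 || b == 1) {
          out.write(1);       // escape marker
          out.write(b + 1);   // \0 -> \1, \1 -> \2
        } else {
          out.write(b);
        }
      }
      out.write(0);           // unescaped null terminator
      return out.toByteArray();
    }
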
-  
+
   BytesWritable serializeBytesWritable = new BytesWritable();
   OutputByteBuffer outputByteBuffer = new OutputByteBuffer();
-  
+
   @Override
   public Writable serialize(Object obj, ObjectInspector objInspector)
       throws SerDeException {
     outputByteBuffer.reset();
-    StructObjectInspector soi = (StructObjectInspector)objInspector;
+    StructObjectInspector soi = (StructObjectInspector) objInspector;
     List<? extends StructField> fields = soi.getAllStructFieldRefs();
-    
-    for (int i=0; i<columnNames.size(); i++) {
-      serialize(outputByteBuffer, 
-          soi.getStructFieldData(obj, fields.get(i)),
-          fields.get(i).getFieldObjectInspector(), 
-          columnSortOrderIsDesc[i]);
-    }
-    
-    serializeBytesWritable.set(outputByteBuffer.getData(), 
-        0, outputByteBuffer.getLength());
+
+    for (int i = 0; i < columnNames.size(); i++) {
+      serialize(outputByteBuffer, soi.getStructFieldData(obj, fields.get(i)),
+          fields.get(i).getFieldObjectInspector(), columnSortOrderIsDesc[i]);
+    }
+
+    serializeBytesWritable.set(outputByteBuffer.getData(), 0, outputByteBuffer
+        .getLength());
     return serializeBytesWritable;
   }
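
Note that serialize() above returns the same BytesWritable instance on every
call (and deserialize() likewise returns one reused ArrayList), so callers
that keep a result across calls must copy it first. A hedged usage sketch,
where serde and inspector stand for an already-configured
BinarySortableSerDe and its ObjectInspector:

    BytesWritable out = (BytesWritable) serde.serialize(obj, inspector);
    BytesWritable kept = new BytesWritable();
    kept.set(out);   // copy now; 'out' is overwritten by the next serialize()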
 
@@ -411,163 +420,161 @@
       boolean invert) {
     // Is this field a null?
     if (o == null) {
-      buffer.write((byte)0, invert);
+      buffer.write((byte) 0, invert);
       return;
     }
     // This field is not a null.
-    buffer.write((byte)1, invert);
-    
+    buffer.write((byte) 1, invert);
+
     switch (oi.getCategory()) {
-      case PRIMITIVE: {
-        PrimitiveObjectInspector poi = (PrimitiveObjectInspector)oi;
-        switch (poi.getPrimitiveCategory()) {
-          case VOID: {
-            return;
-          }
-          case BOOLEAN: {
-            BooleanObjectInspector boi = (BooleanObjectInspector)poi;
-            boolean v = ((BooleanObjectInspector)poi).get(o);
-            buffer.write((byte)(v ? 2 : 1), invert);
-            return;
-          }
-          case BYTE: {
-            ByteObjectInspector boi = (ByteObjectInspector)poi;
-            byte v = boi.get(o);
-            buffer.write((byte)(v ^ 0x80), invert);
-            return;
-          }
-          case SHORT: {
-            ShortObjectInspector spoi = (ShortObjectInspector)poi;
-            short v = spoi.get(o);
-            buffer.write((byte)((v >> 8) ^ 0x80), invert);
-            buffer.write((byte)v, invert);
-            return;
-          }
-          case INT: {
-            IntObjectInspector ioi = (IntObjectInspector)poi;
-            int v = ioi.get(o);
-            buffer.write((byte)((v >> 24) ^ 0x80), invert);
-            buffer.write((byte)(v >> 16), invert);
-            buffer.write((byte)(v >> 8), invert);
-            buffer.write((byte)v, invert);
-            return;
-          }
-          case LONG: {
-            LongObjectInspector loi = (LongObjectInspector)poi;
-            long v = loi.get(o);
-            buffer.write((byte)((v >> 56) ^ 0x80), invert);
-            buffer.write((byte)(v >> 48), invert);
-            buffer.write((byte)(v >> 40), invert);
-            buffer.write((byte)(v >> 32), invert);
-            buffer.write((byte)(v >> 24), invert);
-            buffer.write((byte)(v >> 16), invert);
-            buffer.write((byte)(v >> 8), invert);
-            buffer.write((byte)v, invert);
-            return;
-          }
-          case FLOAT: {
-            FloatObjectInspector foi = (FloatObjectInspector)poi;
-            int v = Float.floatToIntBits(foi.get(o));
-            if ((v & (1<<31)) != 0) {
-              // negative number, flip all bits
-              v = ~v;
-            } else {
-              // positive number, flip the first bit
-              v = v ^ (1<<31);
-            }
-            buffer.write((byte)(v >> 24), invert);
-            buffer.write((byte)(v >> 16), invert);
-            buffer.write((byte)(v >> 8), invert);
-            buffer.write((byte)v, invert);
-            return;
-          }
-          case DOUBLE: {
-            DoubleObjectInspector doi = (DoubleObjectInspector)poi;
-            long v = Double.doubleToLongBits(doi.get(o));
-            if ((v & (1L<<63)) != 0) {
-              // negative number, flip all bits
-              v = ~v;
-            } else {
-              // positive number, flip the first bit
-              v = v ^ (1L<<63);
-            }
-            buffer.write((byte)(v >> 56), invert);
-            buffer.write((byte)(v >> 48), invert);
-            buffer.write((byte)(v >> 40), invert);
-            buffer.write((byte)(v >> 32), invert);
-            buffer.write((byte)(v >> 24), invert);
-            buffer.write((byte)(v >> 16), invert);
-            buffer.write((byte)(v >> 8), invert);
-            buffer.write((byte)v, invert);
-            return;
-          }
-          case STRING: {
-            StringObjectInspector soi = (StringObjectInspector)poi;
-            Text t = soi.getPrimitiveWritableObject(o);
-            byte[] data = t.getBytes();
-            int length = t.getLength();
-            for (int i=0; i<length; i++) {
-              if (data[i] == 0 || data[i] == 1) {
-                buffer.write((byte)1, invert);
-                buffer.write((byte)(data[i]+1), invert);
-              } else {
-                buffer.write(data[i], invert);
-              }
-            }
-            buffer.write((byte)0, invert);
-            return;
-          }
-          default: {
-            throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
-          }
-        }
+    case PRIMITIVE: {
+      PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
+      switch (poi.getPrimitiveCategory()) {
+      case VOID: {
+        return;
       }
-      case LIST: {
-        ListObjectInspector loi = (ListObjectInspector)oi;
-        ObjectInspector eoi = loi.getListElementObjectInspector();
-        
-        // \1 followed by each element
-        int size = loi.getListLength(o);
-        for (int eid = 0; eid < size; eid++) {
-          buffer.write((byte)1, invert);
-          serialize(buffer, loi.getListElement(o, eid), eoi, invert);
+      case BOOLEAN: {
+        boolean v = ((BooleanObjectInspector) poi).get(o);
+        buffer.write((byte) (v ? 2 : 1), invert);
+        return;
+      }
+      case BYTE: {
+        ByteObjectInspector boi = (ByteObjectInspector) poi;
+        byte v = boi.get(o);
+        buffer.write((byte) (v ^ 0x80), invert);
+        return;
+      }
+      case SHORT: {
+        ShortObjectInspector spoi = (ShortObjectInspector) poi;
+        short v = spoi.get(o);
+        buffer.write((byte) ((v >> 8) ^ 0x80), invert);
+        buffer.write((byte) v, invert);
+        return;
+      }
+      case INT: {
+        IntObjectInspector ioi = (IntObjectInspector) poi;
+        int v = ioi.get(o);
+        buffer.write((byte) ((v >> 24) ^ 0x80), invert);
+        buffer.write((byte) (v >> 16), invert);
+        buffer.write((byte) (v >> 8), invert);
+        buffer.write((byte) v, invert);
+        return;
+      }
+      case LONG: {
+        LongObjectInspector loi = (LongObjectInspector) poi;
+        long v = loi.get(o);
+        buffer.write((byte) ((v >> 56) ^ 0x80), invert);
+        buffer.write((byte) (v >> 48), invert);
+        buffer.write((byte) (v >> 40), invert);
+        buffer.write((byte) (v >> 32), invert);
+        buffer.write((byte) (v >> 24), invert);
+        buffer.write((byte) (v >> 16), invert);
+        buffer.write((byte) (v >> 8), invert);
+        buffer.write((byte) v, invert);
+        return;
+      }
+      case FLOAT: {
+        FloatObjectInspector foi = (FloatObjectInspector) poi;
+        int v = Float.floatToIntBits(foi.get(o));
+        if ((v & (1 << 31)) != 0) {
+          // negative number, flip all bits
+          v = ~v;
+        } else {
+          // positive number, flip the first bit
+          v = v ^ (1 << 31);
         }
-        // and \0 to terminate
-        buffer.write((byte)0, invert);
+        buffer.write((byte) (v >> 24), invert);
+        buffer.write((byte) (v >> 16), invert);
+        buffer.write((byte) (v >> 8), invert);
+        buffer.write((byte) v, invert);
         return;
-      }      
-      case MAP: {
-        MapObjectInspector moi = (MapObjectInspector)oi;
-        ObjectInspector koi = moi.getMapKeyObjectInspector();
-        ObjectInspector voi = moi.getMapValueObjectInspector();
-        
-        // \1 followed by each key and then each value
-        Map<?, ?> map = moi.getMap(o);
-        for(Map.Entry<?, ?> entry: map.entrySet()) {
-          buffer.write((byte)1, invert);
-          serialize(buffer, entry.getKey(), koi, invert);
-          serialize(buffer, entry.getValue(), voi, invert);
+      }
+      case DOUBLE: {
+        DoubleObjectInspector doi = (DoubleObjectInspector) poi;
+        long v = Double.doubleToLongBits(doi.get(o));
+        if ((v & (1L << 63)) != 0) {
+          // negative number, flip all bits
+          v = ~v;
+        } else {
+          // positive number, flip the first bit
+          v = v ^ (1L << 63);
         }
-        // and \0 to terminate
-        buffer.write((byte)0, invert);
+        buffer.write((byte) (v >> 56), invert);
+        buffer.write((byte) (v >> 48), invert);
+        buffer.write((byte) (v >> 40), invert);
+        buffer.write((byte) (v >> 32), invert);
+        buffer.write((byte) (v >> 24), invert);
+        buffer.write((byte) (v >> 16), invert);
+        buffer.write((byte) (v >> 8), invert);
+        buffer.write((byte) v, invert);
         return;
       }
-      case STRUCT: {
-        StructObjectInspector soi = (StructObjectInspector)oi;
-        List<? extends StructField> fields = soi.getAllStructFieldRefs();
-        
-        for (int i=0; i<fields.size(); i++) {
-          serialize(buffer, 
-              soi.getStructFieldData(o, fields.get(i)),
-              fields.get(i).getFieldObjectInspector(), 
-              invert);
+      case STRING: {
+        StringObjectInspector soi = (StringObjectInspector) poi;
+        Text t = soi.getPrimitiveWritableObject(o);
+        byte[] data = t.getBytes();
+        int length = t.getLength();
+        for (int i = 0; i < length; i++) {
+          if (data[i] == 0 || data[i] == 1) {
+            buffer.write((byte) 1, invert);
+            buffer.write((byte) (data[i] + 1), invert);
+          } else {
+            buffer.write(data[i], invert);
+          }
         }
+        buffer.write((byte) 0, invert);
         return;
       }
       default: {
-        throw new RuntimeException("Unrecognized type: " + oi.getCategory());
+        throw new RuntimeException("Unrecognized type: "
+            + poi.getPrimitiveCategory());
       }
+      }
+    }
+    case LIST: {
+      ListObjectInspector loi = (ListObjectInspector) oi;
+      ObjectInspector eoi = loi.getListElementObjectInspector();
+
+      // \1 followed by each element
+      int size = loi.getListLength(o);
+      for (int eid = 0; eid < size; eid++) {
+        buffer.write((byte) 1, invert);
+        serialize(buffer, loi.getListElement(o, eid), eoi, invert);
+      }
+      // and \0 to terminate
+      buffer.write((byte) 0, invert);
+      return;
+    }
+    case MAP: {
+      MapObjectInspector moi = (MapObjectInspector) oi;
+      ObjectInspector koi = moi.getMapKeyObjectInspector();
+      ObjectInspector voi = moi.getMapValueObjectInspector();
+
+      // \1 followed by each key and then each value
+      Map<?, ?> map = moi.getMap(o);
+      for (Map.Entry<?, ?> entry : map.entrySet()) {
+        buffer.write((byte) 1, invert);
+        serialize(buffer, entry.getKey(), koi, invert);
+        serialize(buffer, entry.getValue(), voi, invert);
+      }
+      // and \0 to terminate
+      buffer.write((byte) 0, invert);
+      return;
+    }
+    case STRUCT: {
+      StructObjectInspector soi = (StructObjectInspector) oi;
+      List<? extends StructField> fields = soi.getAllStructFieldRefs();
+
+      for (int i = 0; i < fields.size(); i++) {
+        serialize(buffer, soi.getStructFieldData(o, fields.get(i)), fields.get(
+            i).getFieldObjectInspector(), invert);
+      }
+      return;
+    }
+    default: {
+      throw new RuntimeException("Unrecognized type: " + oi.getCategory());
     }
-    
+    }
+
   }
 }
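
To see why the XOR with 0x80 in the INT branches yields a memcmp-sortable
encoding: flipping the sign bit maps two's-complement order onto unsigned
lexicographic order. A self-contained demo (SortableIntDemo is illustrative,
not part of the patch):

    public class SortableIntDemo {
      // Mirrors the INT case of serialize() above, with invert == false.
      static byte[] encode(int v) {
        return new byte[] { (byte) ((v >> 24) ^ 0x80), (byte) (v >> 16),
            (byte) (v >> 8), (byte) v };
      }

      // Unsigned lexicographic comparison, as a raw byte sort would do.
      static int compare(byte[] a, byte[] b) {
        for (int i = 0; i < a.length; i++) {
          int diff = (a[i] & 0xff) - (b[i] & 0xff);
          if (diff != 0) {
            return diff;
          }
        }
        return 0;
      }

      public static void main(String[] args) {
        int[] vals = { Integer.MIN_VALUE, -2, -1, 0, 1, Integer.MAX_VALUE };
        for (int i = 0; i + 1 < vals.length; i++) {
          // every encoding must sort strictly below the next one: prints true
          System.out.println(compare(encode(vals[i]), encode(vals[i + 1])) < 0);
        }
      }
    }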

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/InputByteBuffer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/InputByteBuffer.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/InputByteBuffer.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/InputByteBuffer.java Thu Jan 21 09:52:44 2010
@@ -21,15 +21,15 @@
 import java.io.IOException;
 
 /**
- * This class is much more efficient than ByteArrayInputStream
- * because none of the methods are synchronized.
+ * This class is much more efficient than ByteArrayInputStream because none of
+ * the methods are synchronized.
  */
 public class InputByteBuffer {
-  
+
   byte[] data;
   int start;
   int end;
-  
+
   /**
    * Reset the byte buffer to the given byte range.
    */
@@ -38,66 +38,63 @@
     this.start = start;
     this.end = end;
   }
-  
+
   public final byte read() throws IOException {
     return read(false);
   }
-  
+
   /**
-   * Read one byte from the byte buffer.
-   * Final method to help inlining.
-   * @param invert whether we want to invert all the bits. 
+   * Read one byte from the byte buffer. Final method to help inlining.
+   * 
+   * @param invert
+   *          whether we want to invert all the bits.
    */
   public final byte read(boolean invert) throws IOException {
     if (start >= end) {
       throw new EOFException();
     }
     if (invert) {
-      return (byte)(0xff ^ data[start++]);
+      return (byte) (0xff ^ data[start++]);
     } else {
       return data[start++];
     }
   }
-  
+
   /**
-   * Return the current position.
-   * Final method to help inlining.
+   * Return the current position. Final method to help inlining.
    */
   public final int tell() {
     return start;
   }
-  
+
   /**
-   * Set the current position.
-   * Final method to help inlining.
+   * Set the current position. Final method to help inlining.
    */
   public final void seek(int position) {
     start = position;
   }
-  
+
   public final int getEnd() {
     return end;
   }
-  
+
   /**
    * Returns the underlying byte array.
    */
   public final byte[] getData() {
     return data;
   }
-  
+
   /**
    * Return the bytes in hex format.
    */
   public String dumpHex() {
     StringBuilder sb = new StringBuilder();
-    for (int i=start; i<end; i++) {
+    for (int i = start; i < end; i++) {
       byte b = data[i];
-      int v = (b<0 ? 256 + b : b);
+      int v = (b < 0 ? 256 + b : b);
       sb.append(String.format("x%02x", v));
     }
     return sb.toString();
   }
 }
-
-
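
A side note on dumpHex() above: (b < 0 ? 256 + b : b) is just the unsigned
value of the byte, and (b & 0xff) computes the same thing:

    byte x = (byte) 0xe7;
    assert (x < 0 ? 256 + x : x) == (x & 0xff);   // both evaluate to 231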

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/OutputByteBuffer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/OutputByteBuffer.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/OutputByteBuffer.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/OutputByteBuffer.java Thu Jan 21 09:52:44 2010
@@ -20,48 +20,49 @@
 import java.util.Arrays;
 
 /**
- * This class is much more efficient than ByteArrayOutputStream
- * because none of the methods are synchronized.
+ * This class is much more efficient than ByteArrayOutputStream because none of
+ * the methods are synchronized.
  */
 public class OutputByteBuffer {
-  
+
   byte[] data = new byte[128];
   int length;
-  
+
   /**
    * Reset the byte buffer.
    */
   public void reset() {
     length = 0;
   }
-  
+
   public final void write(byte b) {
     write(b, false);
   }
-  
+
   /**
-   * Write one byte to the byte buffer.
-   * Final method to help inlining.
-   * @param invert whether we want to invert all the bits. 
+   * Write one byte to the byte buffer. Final method to help inlining.
+   * 
+   * @param invert
+   *          whether we want to invert all the bits.
    */
   public final void write(byte b, boolean invert) {
     if (length == data.length) {
-      data = Arrays.copyOf(data, data.length*2);
+      data = Arrays.copyOf(data, data.length * 2);
     }
     if (invert) {
-      data[length++] = (byte)(0xff ^ b);
+      data[length++] = (byte) (0xff ^ b);
     } else {
       data[length++] = b;
     }
   }
-  
+
   /**
    * Returns the underlying byte array.
    */
   public final byte[] getData() {
     return data;
   }
-  
+
   /**
    * Returns the current length.
    */
@@ -74,12 +75,12 @@
    */
   public String dumpHex() {
     StringBuilder sb = new StringBuilder();
-    for (int i=0; i<length; i++) {
+    for (int i = 0; i < length; i++) {
       byte b = data[i];
-      int v = (b<0 ? 256 + b : b);
+      int v = (b < 0 ? 256 + b : b);
       sb.append(String.format("x%02x", v));
     }
     return sb.toString();
   }
-  
+
 }
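
The invert flag shared by these two buffer classes is self-inverse: XOR-ing
with 0xff twice restores the original byte, which is what lets a descending
column be written with write(b, true) and read back with read(true). A
minimal sketch:

    byte original = 0x2a;
    byte stored = (byte) (0xff ^ original);   // what write(original, true) stores
    byte restored = (byte) (0xff ^ stored);   // what read(true) returns
    assert restored == original;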

Modified: hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefArrayWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefArrayWritable.java?rev=901625&r1=901624&r2=901625&view=diff
==============================================================================
--- hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefArrayWritable.java (original)
+++ hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/columnar/BytesRefArrayWritable.java Thu Jan 21 09:52:44 2010
@@ -54,8 +54,9 @@
    *              if the specified initial capacity is negative
    */
   public BytesRefArrayWritable(int capacity) {
-    if (capacity < 0)
+    if (capacity < 0) {
       throw new IllegalArgumentException("Capacity can not be negative.");
+    }
     bytesRefWritables = new BytesRefWritable[0];
     ensureCapacity(capacity);
   }
@@ -85,10 +86,10 @@
    * @throws IndexOutOfBoundsException
    */
   public BytesRefWritable get(int index) {
-    if (index >= valid)
+    if (index >= valid) {
       throw new IndexOutOfBoundsException(
-          "This BytesRefArrayWritable only has " + valid
-              + " valid values.");
+          "This BytesRefArrayWritable only has " + valid + " valid values.");
+    }
     return bytesRefWritables[index];
   }
 
@@ -115,12 +116,14 @@
    *           if the specified new element is null
    */
   public void set(int index, BytesRefWritable bytesRefWritable) {
-    if (bytesRefWritable == null)
+    if (bytesRefWritable == null) {
       throw new IllegalArgumentException("Can not assign null.");
+    }
     ensureCapacity(index + 1);
     bytesRefWritables[index] = bytesRefWritable;
-    if (valid <= index)
+    if (valid <= index) {
       valid = index + 1;
+    }
   }
 
   /**
@@ -128,18 +131,22 @@
    */
   @Override
   public int compareTo(BytesRefArrayWritable other) {
-    if (other == null)
+    if (other == null) {
       throw new IllegalArgumentException("Argument can not be null.");
-    if (this == other)
+    }
+    if (this == other) {
       return 0;
+    }
     int sizeDiff = valid - other.valid;
-    if (sizeDiff != 0)
+    if (sizeDiff != 0) {
       return sizeDiff;
+    }
     for (int i = 0; i < valid; i++) {
-      if (other.contains(bytesRefWritables[i]))
+      if (other.contains(bytesRefWritables[i])) {
         continue;
-      else
+      } else {
         return 1;
+      }
     }
     return 0;
   }
@@ -155,11 +162,13 @@
    *           if the specified element is null
    */
   public boolean contains(BytesRefWritable bytesRefWritable) {
-    if (bytesRefWritable == null)
+    if (bytesRefWritable == null) {
       throw new IllegalArgumentException("Argument can not be null.");
+    }
     for (int i = 0; i < valid; i++) {
-      if (bytesRefWritables[i].equals(bytesRefWritable))
+      if (bytesRefWritables[i].equals(bytesRefWritable)) {
         return true;
+      }
     }
     return false;
   }
@@ -167,9 +176,11 @@
   /**
    * {@inheritDoc}
    */
+  @Override
   public boolean equals(Object o) {
-    if (o == null || !(o instanceof BytesRefArrayWritable))
+    if (o == null || !(o instanceof BytesRefArrayWritable)) {
       return false;
+    }
     return compareTo((BytesRefArrayWritable) o) == 0;
   }
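
Worth noting about compareTo() above: once the sizes match, it only tests
containment, so element order is ignored and a mismatch can only ever yield
1, never -1. A self-contained analogue over plain byte arrays (hypothetical,
not the Hive classes) with the same semantics:

    static int compareLikeAbove(byte[][] a, byte[][] b) {
      int sizeDiff = a.length - b.length;
      if (sizeDiff != 0) {
        return sizeDiff;
      }
      outer: for (byte[] x : a) {
        for (byte[] y : b) {
          if (java.util.Arrays.equals(x, y)) {
            continue outer;
          }
        }
        return 1;   // an element of a is missing from b
      }
      return 0;     // same elements, in any order
    }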
 


