hive-commits mailing list archives

From: ga...@apache.org
Subject: svn commit: r1673437 [8/8] - in /hive/branches/hbase-metastore: ./ beeline/src/java/org/apache/hive/beeline/ bin/ cli/src/java/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hadoop/hive/conf/ ...
Date: Tue, 14 Apr 2015 14:47:33 GMT
Modified: hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java (original)
+++ hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java Tue Apr 14 14:47:30 2015
@@ -54,7 +54,7 @@ public class LazyBinary extends LazyPrim
   }
 
   // todo this should be configured in serde
-  private byte[] decodeIfNeeded(byte[] recv) {
+  public static byte[] decodeIfNeeded(byte[] recv) {
     boolean arrayByteBase64 = Base64.isArrayByteBase64(recv);
     if (DEBUG_LOG_ENABLED && arrayByteBase64) {
       LOG.debug("Data only contains Base64 alphabets only so try to decode the data.");

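For context on this hunk: decodeIfNeeded is now a public static helper, and it still relies on commons-codec's Base64 alphabet check to decide whether to decode. A minimal standalone sketch of that check (not Hive code; class and variable names here are illustrative, assuming commons-codec on the classpath):

import org.apache.commons.codec.binary.Base64;

public class Base64DecodeIfNeededSketch {
  // Decode only when every byte is in the Base64 alphabet; otherwise pass through.
  // Note: plain text made solely of Base64-alphabet characters would also be decoded,
  // which is why the original file carries a TODO about making this configurable.
  public static byte[] decodeIfNeeded(byte[] recv) {
    if (Base64.isArrayByteBase64(recv)) {
      return Base64.decodeBase64(recv);
    }
    return recv;
  }

  public static void main(String[] args) {
    byte[] encoded = Base64.encodeBase64("hello".getBytes());
    System.out.println(new String(decodeIfNeeded(encoded)));                  // hello
    System.out.println(new String(decodeIfNeeded("not base64!".getBytes()))); // not base64!
  }
}
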
Modified: hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java (original)
+++ hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java Tue Apr 14 14:47:30 2015
@@ -161,7 +161,7 @@ public final class LazyUtils {
         }
       }
     } else {
-      out.write(bytes, 0, len);
+      out.write(bytes, start, len);
     }
   }
 
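The one-character change above is a correctness fix: the slice being written begins at the caller-supplied start offset, not at 0, so writing from 0 emits the wrong bytes whenever start is non-zero. A standalone illustration of the bug class (not Hive code):

import java.io.ByteArrayOutputStream;

public class OffsetWriteSketch {
  public static void main(String[] args) {
    byte[] buffer = "XXXXhello".getBytes();
    int start = 4, len = 5;

    ByteArrayOutputStream wrong = new ByteArrayOutputStream();
    wrong.write(buffer, 0, len);        // bug: emits "XXXXh"

    ByteArrayOutputStream right = new ByteArrayOutputStream();
    right.write(buffer, start, len);    // fix: emits "hello"

    System.out.println(wrong.toString());
    System.out.println(right.toString());
  }
}
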

Modified: hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java (original)
+++ hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryStruct.java Tue Apr 14 14:47:30 2015
@@ -166,15 +166,18 @@ public class LazyBinaryStruct extends La
     // Extra bytes at the end?
     if (!extraFieldWarned && lastFieldByteEnd < structByteEnd) {
       extraFieldWarned = true;
-      LOG.warn("Extra bytes detected at the end of the row! Ignoring similar "
-          + "problems.");
+      LOG.warn("Extra bytes detected at the end of the row! " +
+           "Last field end " + lastFieldByteEnd + " and serialize buffer end " + structByteEnd + ". " +
+               "Ignoring similar problems.");
     }
 
     // Missing fields?
     if (!missingFieldWarned && lastFieldByteEnd > structByteEnd) {
       missingFieldWarned = true;
-      LOG.info("Missing fields! Expected " + fields.length + " fields but "
-          + "only got " + fieldId + "! Ignoring similar problems.");
+      LOG.info("Missing fields! Expected " + fields.length + " fields but " +
+              "only got " + fieldId + "! " +
+          "Last field end " + lastFieldByteEnd + " and serialize buffer end " + structByteEnd + ". " +
+          "Ignoring similar problems.");
     }
 
     Arrays.fill(fieldInited, false);

Modified: hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveDecimalObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveDecimalObjectInspector.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveDecimalObjectInspector.java (original)
+++ hive/branches/hbase-metastore/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveDecimalObjectInspector.java Tue Apr 14 14:47:30 2015
@@ -29,7 +29,7 @@ implements SettableHiveDecimalObjectInsp
   public WritableHiveDecimalObjectInspector() {
   }
 
-  protected WritableHiveDecimalObjectInspector(DecimalTypeInfo typeInfo) {
+  public WritableHiveDecimalObjectInspector(DecimalTypeInfo typeInfo) {
     super(typeInfo);
   }
 

Modified: hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java (original)
+++ hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java Tue Apr 14 14:47:30 2015
@@ -30,7 +30,9 @@ import org.apache.hadoop.hive.common.typ
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct;
+import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass;
 import org.apache.hadoop.hive.serde2.binarysortable.TestBinarySortableSerDe;
+import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
 import org.apache.hadoop.hive.serde2.columnar.BytesRefWritable;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
@@ -104,24 +106,11 @@ public class TestStatsSerde extends Test
       int num = 1000;
       Random r = new Random(1234);
       MyTestClass rows[] = new MyTestClass[num];
+
       for (int i = 0; i < num; i++) {
-        int randField = r.nextInt(12);
-        Byte b = randField > 0 ? null : Byte.valueOf((byte) r.nextInt());
-        Short s = randField > 1 ? null : Short.valueOf((short) r.nextInt());
-        Integer n = randField > 2 ? null : Integer.valueOf(r.nextInt());
-        Long l = randField > 3 ? null : Long.valueOf(r.nextLong());
-        Float f = randField > 4 ? null : Float.valueOf(r.nextFloat());
-        Double d = randField > 5 ? null : Double.valueOf(r.nextDouble());
-        String st = randField > 6 ? null : TestBinarySortableSerDe
-            .getRandString(r);
-	HiveDecimal bd = randField > 7 ? null : TestBinarySortableSerDe.getRandHiveDecimal(r);
-	      Date date = randField > 8 ? null : TestBinarySortableSerDe.getRandDate(r);
-        MyTestInnerStruct is = randField > 9 ? null : new MyTestInnerStruct(r
-            .nextInt(5) - 2, r.nextInt(5) - 2);
-        List<Integer> li = randField > 10 ? null : TestBinarySortableSerDe
-            .getRandIntegerArray(r);
-        byte[] ba = TestBinarySortableSerDe.getRandBA(r, i);
-        MyTestClass t = new MyTestClass(b, s, n, l, f, d, st, bd, date, is, li,ba);
+        MyTestClass t = new MyTestClass();
+        ExtraTypeInfo extraTypeInfo = new ExtraTypeInfo();
+        t.randomFill(r, extraTypeInfo);
         rows[i] = t;
       }
 

Modified: hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java (original)
+++ hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java Tue Apr 14 14:47:30 2015
@@ -18,40 +18,82 @@
 package org.apache.hadoop.hive.serde2.binarysortable;
 
 import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.ArrayList;
 import java.util.List;
+import java.util.Random;
 
+import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;
 
 public class MyTestClass {
-    Byte myByte;
-    Short myShort;
-    Integer myInt;
-    Long myLong;
-    Float myFloat;
-    Double myDouble;
-    String myString;
-    HiveDecimal myDecimal;
-    Date myDate;
-    MyTestInnerStruct myStruct;
-    List<Integer> myList;
-    byte[] myBA;
+
+    public Boolean myBool;
+    public Byte myByte;
+    public Short myShort;
+    public Integer myInt;
+    public Long myLong;
+    public Float myFloat;
+    public Double myDouble;
+    public String myString;
+    public HiveChar myHiveChar;
+    public HiveVarchar myHiveVarchar;
+    public byte[] myBinary;
+    public HiveDecimal myDecimal;
+    public Date myDate;
+    public Timestamp myTimestamp;
+    public HiveIntervalYearMonth myIntervalYearMonth;
+    public HiveIntervalDayTime myIntervalDayTime;
+
+    // Add more complex types.
+    public MyTestInnerStruct myStruct;
+    public  List<Integer> myList;
 
     public MyTestClass() {
     }
 
-    public MyTestClass(Byte b, Short s, Integer i, Long l, Float f, Double d,
-		       String st, HiveDecimal bd, Date date, MyTestInnerStruct is, List<Integer> li, byte[] ba) {
-	myByte = b;
-	myShort = s;
-	myInt = i;
-	myLong = l;
-	myFloat = f;
-	myDouble = d;
-	myString = st;
-	myDecimal = bd;
-	myDate = date;
-	myStruct = is;
-	myList = li;
-	myBA = ba;
+    public final static int fieldCount = 18;
+
+    public int randomFill(Random r, ExtraTypeInfo extraTypeInfo) {
+      int randField = r.nextInt(MyTestClass.fieldCount);
+      int field = 0;
+
+      myBool = (randField == field++) ? null : (r.nextInt(1) == 1);
+      myByte = (randField == field++) ? null : Byte.valueOf((byte) r.nextInt());
+      myShort = (randField == field++) ? null : Short.valueOf((short) r.nextInt());
+      myInt = (randField == field++) ? null : Integer.valueOf(r.nextInt());
+      myLong = (randField == field++) ? null : Long.valueOf(r.nextLong());
+      myFloat = (randField == field++) ? null : Float
+          .valueOf(r.nextFloat() * 10 - 5);
+      myDouble = (randField == field++) ? null : Double
+          .valueOf(r.nextDouble() * 10 - 5);
+      myString = (randField == field++) ? null : MyTestPrimitiveClass.getRandString(r);
+      myHiveChar = (randField == field++) ? null : MyTestPrimitiveClass.getRandHiveChar(r, extraTypeInfo);
+      myHiveVarchar = (randField == field++) ? null : MyTestPrimitiveClass.getRandHiveVarchar(r, extraTypeInfo);
+      myBinary = MyTestPrimitiveClass.getRandBinary(r, r.nextInt(1000));
+      myDecimal = (randField == field++) ? null : MyTestPrimitiveClass.getRandHiveDecimal(r, extraTypeInfo);
+      myDate = (randField == field++) ? null : MyTestPrimitiveClass.getRandDate(r);
+      myTimestamp = (randField == field++) ? null : MyTestPrimitiveClass.getRandTimestamp(r);
+      myIntervalYearMonth = (randField == field++) ? null : MyTestPrimitiveClass.getRandIntervalYearMonth(r);
+      myIntervalDayTime = (randField == field++) ? null : MyTestPrimitiveClass.getRandIntervalDayTime(r);
+
+      myStruct = (randField == field++) ? null : new MyTestInnerStruct(
+          r.nextInt(5) - 2, r.nextInt(5) - 2);
+      myList = (randField == field++) ? null : getRandIntegerArray(r);
+      return field;
+    }
+
+    public static List<Integer> getRandIntegerArray(Random r) {
+      int length = r.nextInt(10);
+      ArrayList<Integer> result = new ArrayList<Integer>(length);
+      for (int i = 0; i < length; i++) {
+        result.add(r.nextInt(128));
+      }
+      return result;
     }
+
 }

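The rewritten test class drops the long all-arguments constructor in favor of randomFill, which picks one field index at random and leaves exactly that field null while filling the rest. A minimal sketch of the randField == field++ idiom in isolation (illustrative names, not the Hive class):

import java.util.Random;

public class RandomFillSketch {
  Integer myInt;
  Long myLong;
  String myString;

  static final int FIELD_COUNT = 3;

  // Exactly one field (index randField) stays null; the rest get random values.
  int randomFill(Random r) {
    int randField = r.nextInt(FIELD_COUNT);
    int field = 0;
    myInt = (randField == field++) ? null : Integer.valueOf(r.nextInt());
    myLong = (randField == field++) ? null : Long.valueOf(r.nextLong());
    myString = (randField == field++) ? null : Long.toHexString(r.nextLong());
    return field; // number of fields visited
  }

  public static void main(String[] args) {
    RandomFillSketch row = new RandomFillSketch();
    row.randomFill(new Random(1234));
    System.out.println(row.myInt + " / " + row.myLong + " / " + row.myString);
  }
}
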
Modified: hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java (original)
+++ hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java Tue Apr 14 14:47:30 2015
@@ -26,11 +26,13 @@ import java.util.Random;
 
 import junit.framework.TestCase;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
+import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
@@ -64,7 +66,7 @@ public class TestBinarySortableSerDe ext
     return sb.toString();
   }
 
-  private SerDe getSerDe(String fieldNames, String fieldTypes, String order)
+  public static SerDe getSerDe(String fieldNames, String fieldTypes, String order)
       throws Throwable {
     Properties schema = new Properties();
     schema.setProperty(serdeConstants.LIST_COLUMNS, fieldNames);
@@ -124,7 +126,7 @@ public class TestBinarySortableSerDe ext
     }
   }
 
-  private void sort(Object[] structs, ObjectInspector oi) {
+  public static void sort(Object[] structs, ObjectInspector oi) {
     for (int i = 0; i < structs.length; i++) {
       for (int j = i + 1; j < structs.length; j++) {
         if (ObjectInspectorUtils.compare(structs[i], oi, structs[j], oi) > 0) {
@@ -136,66 +138,6 @@ public class TestBinarySortableSerDe ext
     }
   }
 
-  public static HiveDecimal getRandHiveDecimal(Random r) {
-    StringBuilder sb = new StringBuilder();
-    int l1 = 1+r.nextInt(18), l2 = r.nextInt(19);
-
-    if (r.nextBoolean()) {
-      sb.append("-");
-    }
-
-    sb.append(getRandString(r, DECIMAL_CHARS, l1));
-    if (l2 != 0) {
-      sb.append(".");
-      sb.append(getRandString(r, DECIMAL_CHARS, l2));
-    }
-
-    HiveDecimal bd = HiveDecimal.create(sb.toString());
-    return bd;
-  }
-
-  public static Date getRandDate(Random r) {
-    String dateStr = String.format("%d-%02d-%02d",
-        Integer.valueOf(1800 + r.nextInt(500)),  // year
-        Integer.valueOf(1 + r.nextInt(12)),      // month
-        Integer.valueOf(1 + r.nextInt(28)));     // day
-    Date dateVal = Date.valueOf(dateStr);
-    return dateVal;
-  }
-
-  public static String getRandString(Random r) {
-    return getRandString(r, null, r.nextInt(10));
-  }
-
-  public static String getRandString(Random r, String characters, int length) {
-    StringBuilder sb = new StringBuilder();
-    for (int i = 0; i < length; i++) {
-      if (characters == null) {
-        sb.append((char) (r.nextInt(128)));
-      } else {
-        sb.append(characters.charAt(r.nextInt(characters.length())));
-      }
-    }
-    return sb.toString();
-  }
-
-  public static List<Integer> getRandIntegerArray(Random r) {
-    int length = r.nextInt(10);
-    ArrayList<Integer> result = new ArrayList<Integer>(length);
-    for (int i = 0; i < length; i++) {
-      result.add(r.nextInt(128));
-    }
-    return result;
-  }
-
-  public static byte[] getRandBA(Random r, int len){
-    byte[] bytes = new byte[len];
-    for (int j = 0; j < len; j++){
-      bytes[j] = Byte.valueOf((byte) r.nextInt());
-    }
-    return bytes;
-  }
-
   public void testBinarySortableSerDe() throws Throwable {
     try {
 
@@ -206,23 +148,9 @@ public class TestBinarySortableSerDe ext
       MyTestClass rows[] = new MyTestClass[num];
 
       for (int i = 0; i < num; i++) {
-        int randField = r.nextInt(11);
         MyTestClass t = new MyTestClass();
-        t.myByte = randField > 0 ? null : Byte.valueOf((byte) r.nextInt());
-        t.myShort = randField > 1 ? null : Short.valueOf((short) r.nextInt());
-        t.myInt = randField > 2 ? null : Integer.valueOf(r.nextInt());
-        t.myLong = randField > 3 ? null : Long.valueOf(r.nextLong());
-        t.myFloat = randField > 4 ? null : Float
-            .valueOf(r.nextFloat() * 10 - 5);
-        t.myDouble = randField > 5 ? null : Double
-            .valueOf(r.nextDouble() * 10 - 5);
-        t.myString = randField > 6 ? null : getRandString(r);
-        t.myDecimal = randField > 7 ? null : getRandHiveDecimal(r);
-        t.myDate = randField > 8 ? null : getRandDate(r);
-        t.myStruct = randField > 9 ? null : new MyTestInnerStruct(
-            r.nextInt(5) - 2, r.nextInt(5) - 2);
-        t.myList = randField > 10 ? null : getRandIntegerArray(r);
-        t.myBA = getRandBA(r, i);
+        ExtraTypeInfo extraTypeInfo = new ExtraTypeInfo();
+        t.randomFill(r, extraTypeInfo);
         rows[i] = t;
       }
 
@@ -234,10 +162,13 @@ public class TestBinarySortableSerDe ext
       String fieldNames = ObjectInspectorUtils.getFieldNames(rowOI);
       String fieldTypes = ObjectInspectorUtils.getFieldTypes(rowOI);
 
+      String order;
+      order = StringUtils.leftPad("", MyTestClass.fieldCount, '+');
       testBinarySortableSerDe(rows, rowOI, getSerDe(fieldNames, fieldTypes,
-          "++++++++++++"), true);
+          order), true);
+      order = StringUtils.leftPad("", MyTestClass.fieldCount, '-');
       testBinarySortableSerDe(rows, rowOI, getSerDe(fieldNames, fieldTypes,
-          "------------"), false);
+          order), false);
 
       System.out.println("Test testTBinarySortableProtocol passed!");
     } catch (Throwable e) {

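The hard-coded "++++++++++++" and "------------" order strings are replaced above with strings sized to MyTestClass.fieldCount. A quick illustration of what StringUtils.leftPad produces here (assuming commons-lang on the classpath):

import org.apache.commons.lang.StringUtils;

public class OrderStringSketch {
  public static void main(String[] args) {
    int fieldCount = 18; // MyTestClass.fieldCount in this commit
    String ascending  = StringUtils.leftPad("", fieldCount, '+');  // 18 '+' characters
    String descending = StringUtils.leftPad("", fieldCount, '-');  // 18 '-' characters
    System.out.println(ascending);
    System.out.println(descending);
  }
}
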
Modified: hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java (original)
+++ hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java Tue Apr 14 14:47:30 2015
@@ -18,49 +18,119 @@
 package org.apache.hadoop.hive.serde2.lazybinary;
 
 import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Random;
 
+import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct;
+import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass;
+import org.apache.hadoop.hive.serde2.binarysortable.TestBinarySortableSerDe;
+import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;
 
 /**
  * MyTestClassBigger.
  *
  */
 public class MyTestClassBigger {
-    Byte myByte;
-    Short myShort;
-    Integer myInt;
-    Long myLong;
-    Float myFloat;
-    Double myDouble;
-    String myString;
-    HiveDecimal myDecimal;
-    Date myDate;
-    MyTestInnerStruct myStruct;
-    List<Integer> myList;
-    byte[] myBA;
+
+    // The primitives.
+    public Boolean myBool;
+    public Byte myByte;
+    public Short myShort;
+    public Integer myInt;
+    public Long myLong;
+    public Float myFloat;
+    public Double myDouble;
+    public String myString;
+    public HiveChar myHiveChar;
+    public HiveVarchar myHiveVarchar;
+    public byte[] myBinary;
+    public HiveDecimal myDecimal;
+    public Date myDate;
+    public Timestamp myTimestamp;
+    public HiveIntervalYearMonth myIntervalYearMonth;
+    public HiveIntervalDayTime myIntervalDayTime;
+
+
+    // Add more complex types.
+    public MyTestInnerStruct myStruct;
+    public  List<Integer> myList;
+
+    // Bigger addition.
     Map<String, List<MyTestInnerStruct>> myMap;
 
+    public final static int mapPos = 18;
+
     public MyTestClassBigger() {
     }
 
-    public MyTestClassBigger(Byte b, Short s, Integer i, Long l, Float f,
-			     Double d, String st, HiveDecimal bd, Date date, MyTestInnerStruct is, List<Integer> li,
-			     byte[] ba, Map<String, List<MyTestInnerStruct>> mp) {
-	myByte = b;
-	myShort = s;
-	myInt = i;
-	myLong = l;
-	myFloat = f;
-	myDouble = d;
-	myString = st;
-	myDecimal = bd;
-	myDate = date;
-	myStruct = is;
-	myList = li;
-	myBA = ba;
-	myMap = mp;
+    public final static int biggerCount = 19;
+
+    public int randomFill(Random r, ExtraTypeInfo extraTypeInfo) {
+      int randField = r.nextInt(biggerCount);
+      int field = 0;
+      myBool = (randField == field++) ? null : (r.nextInt(1) == 1);
+      myByte = (randField == field++) ? null : Byte.valueOf((byte) r.nextInt());
+      myShort = (randField == field++) ? null : Short.valueOf((short) r.nextInt());
+      myInt = (randField == field++) ? null : Integer.valueOf(r.nextInt());
+      myLong = (randField == field++) ? null : Long.valueOf(r.nextLong());
+      myFloat = (randField == field++) ? null : Float
+          .valueOf(r.nextFloat() * 10 - 5);
+      myDouble = (randField == field++) ? null : Double
+          .valueOf(r.nextDouble() * 10 - 5);
+      myString = (randField == field++) ? null : MyTestPrimitiveClass.getRandString(r);
+      myHiveChar = (randField == field++) ? null : MyTestPrimitiveClass.getRandHiveChar(r, extraTypeInfo);
+      myHiveVarchar = (randField == field++) ? null : MyTestPrimitiveClass.getRandHiveVarchar(r, extraTypeInfo);
+      myBinary = MyTestPrimitiveClass.getRandBinary(r, r.nextInt(1000));
+      myDecimal = (randField == field++) ? null : MyTestPrimitiveClass.getRandHiveDecimal(r, extraTypeInfo);
+      myDate = (randField == field++) ? null : MyTestPrimitiveClass.getRandDate(r);
+      myTimestamp = (randField == field++) ? null : MyTestPrimitiveClass.getRandTimestamp(r);
+      myIntervalYearMonth = (randField == field++) ? null : MyTestPrimitiveClass.getRandIntervalYearMonth(r);
+      myIntervalDayTime = (randField == field++) ? null : MyTestPrimitiveClass.getRandIntervalDayTime(r);
+
+      myStruct = (randField == field++) ? null : new MyTestInnerStruct(
+          r.nextInt(5) - 2, r.nextInt(5) - 2);
+      myList = (randField == field++) ? null : MyTestClass.getRandIntegerArray(r);
+
+      Map<String, List<MyTestInnerStruct>> mp = new HashMap<String, List<MyTestInnerStruct>>();
+      String key = MyTestPrimitiveClass.getRandString(r);
+      List<MyTestInnerStruct> value = randField > 9 ? null
+          : getRandStructArray(r);
+      mp.put(key, value);
+      String key1 = MyTestPrimitiveClass.getRandString(r);
+      mp.put(key1, null);
+      String key2 = MyTestPrimitiveClass.getRandString(r);
+      List<MyTestInnerStruct> value2 = getRandStructArray(r);
+      mp.put(key2, value2);
+      myMap = mp;
+      return field;
     }
+
+    /**
+     * Generate a random struct array.
+     *
+     * @param r
+     *          random number generator
+     * @return an struct array
+     */
+    static List<MyTestInnerStruct> getRandStructArray(Random r) {
+      int length = r.nextInt(10);
+      ArrayList<MyTestInnerStruct> result = new ArrayList<MyTestInnerStruct>(
+          length);
+      for (int i = 0; i < length; i++) {
+        MyTestInnerStruct ti = new MyTestInnerStruct(r.nextInt(), r.nextInt());
+        result.add(ti);
+      }
+      return result;
+    }
+
 }

Modified: hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java (original)
+++ hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java Tue Apr 14 14:47:30 2015
@@ -18,36 +18,68 @@
 package org.apache.hadoop.hive.serde2.lazybinary;
 
 import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.Random;
 
+import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct;
+import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass;
+import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;
 
 public class MyTestClassSmaller {
-    Byte myByte;
-    Short myShort;
-    Integer myInt;
-    Long myLong;
-    Float myFloat;
-    Double myDouble;
-    String myString;
-    HiveDecimal myDecimal;
-    Date myDate;
+
+    public Boolean myBool;
+    public Byte myByte;
+    public Short myShort;
+    public Integer myInt;
+    public Long myLong;
+    public Float myFloat;
+    public Double myDouble;
+    public String myString;
+    public HiveChar myHiveChar;
+    public HiveVarchar myHiveVarchar;
+    public byte[] myBinary;
+    public HiveDecimal myDecimal;
+    public Date myDate;
+    public Timestamp myTimestamp;
+    public HiveIntervalYearMonth myIntervalYearMonth;
+    public HiveIntervalDayTime myIntervalDayTime;
+
     MyTestInnerStruct myStruct;
 
-    public MyTestClassSmaller() {
-    }
+    public final static int smallerCount = 17;
 
-    public MyTestClassSmaller(Byte b, Short s, Integer i, Long l, Float f,
-			      Double d, String st, HiveDecimal bd, Date date, MyTestInnerStruct is) {
-	myByte = b;
-	myShort = s;
-	myInt = i;
-	myLong = l;
-	myFloat = f;
-	myDouble = d;
-	myString = st;
-	myDecimal = bd;
-	myDate = date;
-	myStruct = is;
+    public int randomFill(Random r, ExtraTypeInfo extraTypeInfo) {
+      int randField = r.nextInt(smallerCount);
+      int field = 0;
+
+      myBool = (randField == field++) ? null : (r.nextInt(1) == 1);
+      myByte = (randField == field++) ? null : Byte.valueOf((byte) r.nextInt());
+      myShort = (randField == field++) ? null : Short.valueOf((short) r.nextInt());
+      myInt = (randField == field++) ? null : Integer.valueOf(r.nextInt());
+      myLong = (randField == field++) ? null : Long.valueOf(r.nextLong());
+      myFloat = (randField == field++) ? null : Float
+          .valueOf(r.nextFloat() * 10 - 5);
+      myDouble = (randField == field++) ? null : Double
+          .valueOf(r.nextDouble() * 10 - 5);
+      myString = (randField == field++) ? null : MyTestPrimitiveClass.getRandString(r);
+      myHiveChar = (randField == field++) ? null : MyTestPrimitiveClass.getRandHiveChar(r, extraTypeInfo);
+      myHiveVarchar = (randField == field++) ? null : MyTestPrimitiveClass.getRandHiveVarchar(r, extraTypeInfo);
+      myBinary = MyTestPrimitiveClass.getRandBinary(r, r.nextInt(1000));
+      myDecimal = (randField == field++) ? null : MyTestPrimitiveClass.getRandHiveDecimal(r, extraTypeInfo);
+      myDate = (randField == field++) ? null : MyTestPrimitiveClass.getRandDate(r);
+      myTimestamp = (randField == field++) ? null : MyTestPrimitiveClass.getRandTimestamp(r);
+      myIntervalYearMonth = (randField == field++) ? null : MyTestPrimitiveClass.getRandIntervalYearMonth(r);
+      myIntervalDayTime = (randField == field++) ? null : MyTestPrimitiveClass.getRandIntervalDayTime(r);
+
+      myStruct = (randField == field++) ? null : new MyTestInnerStruct(
+          r.nextInt(5) - 2, r.nextInt(5) - 2);
+      return field;
     }
+
 }

Modified: hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java (original)
+++ hive/branches/hbase-metastore/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java Tue Apr 14 14:47:30 2015
@@ -36,7 +36,9 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct;
+import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass;
 import org.apache.hadoop.hive.serde2.binarysortable.TestBinarySortableSerDe;
+import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;
 import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
 import org.apache.hadoop.hive.serde2.lazy.LazyBinary;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
@@ -91,7 +93,7 @@ public class TestLazyBinarySerDe extends
    * @return the initialized LazyBinarySerDe
    * @throws Throwable
    */
-  private SerDe getSerDe(String fieldNames, String fieldTypes) throws Throwable {
+  protected static SerDe getSerDe(String fieldNames, String fieldTypes) throws Throwable {
     Properties schema = new Properties();
     schema.setProperty(serdeConstants.LIST_COLUMNS, fieldNames);
     schema.setProperty(serdeConstants.LIST_COLUMN_TYPES, fieldTypes);
@@ -194,46 +196,20 @@ public class TestLazyBinarySerDe extends
 
     int num = 100;
     for (int itest = 0; itest < num; itest++) {
-      int randField = r.nextInt(11);
-      Byte b = randField > 0 ? null : Byte.valueOf((byte) r.nextInt());
-      Short s = randField > 1 ? null : Short.valueOf((short) r.nextInt());
-      Integer n = randField > 2 ? null : Integer.valueOf(r.nextInt());
-      Long l = randField > 3 ? null : Long.valueOf(r.nextLong());
-      Float f = randField > 4 ? null : Float.valueOf(r.nextFloat());
-      Double d = randField > 5 ? null : Double.valueOf(r.nextDouble());
-      String st = randField > 6 ? null : TestBinarySortableSerDe
-          .getRandString(r);
-      HiveDecimal bd = randField > 7 ? null : TestBinarySortableSerDe.getRandHiveDecimal(r);
-      Date date = randField > 8 ? null : TestBinarySortableSerDe.getRandDate(r);
-      MyTestInnerStruct is = randField > 9 ? null : new MyTestInnerStruct(r
-          .nextInt(5) - 2, r.nextInt(5) - 2);
-      List<Integer> li = randField > 10 ? null : TestBinarySortableSerDe
-          .getRandIntegerArray(r);
-      byte[] ba  = TestBinarySortableSerDe.getRandBA(r, itest);
-      Map<String, List<MyTestInnerStruct>> mp = new HashMap<String, List<MyTestInnerStruct>>();
-      String key = TestBinarySortableSerDe.getRandString(r);
-      List<MyTestInnerStruct> value = randField > 9 ? null
-          : getRandStructArray(r);
-      mp.put(key, value);
-      String key1 = TestBinarySortableSerDe.getRandString(r);
-      mp.put(key1, null);
-      String key2 = TestBinarySortableSerDe.getRandString(r);
-      List<MyTestInnerStruct> value2 = getRandStructArray(r);
-      mp.put(key2, value2);
-
-      MyTestClassBigger input = new MyTestClassBigger(b, s, n, l, f, d, st, bd, date, is,
-          li, ba, mp);
-      BytesWritable bw = (BytesWritable) serde1.serialize(input, rowOI1);
+      MyTestClassBigger t = new MyTestClassBigger();
+      ExtraTypeInfo extraTypeInfo = new ExtraTypeInfo();
+      t.randomFill(r, extraTypeInfo);
+      BytesWritable bw = (BytesWritable) serde1.serialize(t, rowOI1);
       Object output = serde2.deserialize(bw);
 
-      if (0 != compareDiffSizedStructs(input, rowOI1, output, serdeOI2)) {
+      if (0 != compareDiffSizedStructs(t, rowOI1, output, serdeOI2)) {
         System.out.println("structs      = "
-            + SerDeUtils.getJSONString(input, rowOI1));
+            + SerDeUtils.getJSONString(t, rowOI1));
         System.out.println("deserialized = "
             + SerDeUtils.getJSONString(output, serdeOI2));
         System.out.println("serialized   = "
             + TestBinarySortableSerDe.hexString(bw));
-        assertEquals(input, output);
+        assertEquals(t, output);
       }
     }
   }
@@ -263,34 +239,20 @@ public class TestLazyBinarySerDe extends
 
     int num = 100;
     for (int itest = 0; itest < num; itest++) {
-      int randField = r.nextInt(12);
-      Byte b = randField > 0 ? null : Byte.valueOf((byte) r.nextInt());
-      Short s = randField > 1 ? null : Short.valueOf((short) r.nextInt());
-      Integer n = randField > 2 ? null : Integer.valueOf(r.nextInt());
-      Long l = randField > 3 ? null : Long.valueOf(r.nextLong());
-      Float f = randField > 4 ? null : Float.valueOf(r.nextFloat());
-      Double d = randField > 5 ? null : Double.valueOf(r.nextDouble());
-      String st = randField > 6 ? null : TestBinarySortableSerDe
-          .getRandString(r);
-      HiveDecimal bd = randField > 7 ? null : TestBinarySortableSerDe.getRandHiveDecimal(r);
-      Date date = randField > 8 ? null : TestBinarySortableSerDe.getRandDate(r);
-      MyTestInnerStruct is = randField > 9 ? null : new MyTestInnerStruct(r
-          .nextInt(5) - 2, r.nextInt(5) - 2);
-      List<Integer> li = randField > 10 ? null : TestBinarySortableSerDe
-          .getRandIntegerArray(r);
-      byte[] ba = TestBinarySortableSerDe.getRandBA(r, itest);
-      MyTestClass input = new MyTestClass(b, s, n, l, f, d, st, bd, date, is, li, ba);
-      BytesWritable bw = (BytesWritable) serde1.serialize(input, rowOI1);
+      MyTestClass t = new MyTestClass();
+      ExtraTypeInfo extraTypeInfo = new ExtraTypeInfo();
+      t.randomFill(r, extraTypeInfo);
+      BytesWritable bw = (BytesWritable) serde1.serialize(t, rowOI1);
       Object output = serde2.deserialize(bw);
 
-      if (0 != compareDiffSizedStructs(input, rowOI1, output, serdeOI2)) {
+      if (0 != compareDiffSizedStructs(t, rowOI1, output, serdeOI2)) {
         System.out.println("structs      = "
-            + SerDeUtils.getJSONString(input, rowOI1));
+            + SerDeUtils.getJSONString(t, rowOI1));
         System.out.println("deserialized = "
             + SerDeUtils.getJSONString(output, serdeOI2));
         System.out.println("serialized   = "
             + TestBinarySortableSerDe.hexString(bw));
-        assertEquals(input, output);
+        assertEquals(t, output);
       }
     }
   }
@@ -320,34 +282,21 @@ public class TestLazyBinarySerDe extends
 
     int num = 100;
     for (int itest = 0; itest < num; itest++) {
-      int randField = r.nextInt(12);
-      Byte b = randField > 0 ? null : Byte.valueOf((byte) r.nextInt());
-      Short s = randField > 1 ? null : Short.valueOf((short) r.nextInt());
-      Integer n = randField > 2 ? null : Integer.valueOf(r.nextInt());
-      Long l = randField > 3 ? null : Long.valueOf(r.nextLong());
-      Float f = randField > 4 ? null : Float.valueOf(r.nextFloat());
-      Double d = randField > 5 ? null : Double.valueOf(r.nextDouble());
-      String st = randField > 6 ? null : TestBinarySortableSerDe
-          .getRandString(r);
-      HiveDecimal bd = randField > 7 ? null : TestBinarySortableSerDe.getRandHiveDecimal(r);
-      Date date = randField > 8 ? null : TestBinarySortableSerDe.getRandDate(r);
-      MyTestInnerStruct is = randField > 9 ? null : new MyTestInnerStruct(r
-          .nextInt(5) - 2, r.nextInt(5) - 2);
-      List<Integer> li = randField > 10 ? null : TestBinarySortableSerDe
-          .getRandIntegerArray(r);
-      byte[] ba = TestBinarySortableSerDe.getRandBA(r, itest);
-      MyTestClass input = new MyTestClass(b, s, n, l, f, d, st, bd, date, is, li,ba);
-      BytesWritable bw = (BytesWritable) serde1.serialize(input, rowOI1);
+      MyTestClass t = new MyTestClass();
+      ExtraTypeInfo extraTypeInfo = new ExtraTypeInfo();
+      t.randomFill(r, extraTypeInfo);
+
+      BytesWritable bw = (BytesWritable) serde1.serialize(t, rowOI1);
       Object output = serde2.deserialize(bw);
 
-      if (0 != compareDiffSizedStructs(input, rowOI1, output, serdeOI2)) {
+      if (0 != compareDiffSizedStructs(t, rowOI1, output, serdeOI2)) {
         System.out.println("structs      = "
-            + SerDeUtils.getJSONString(input, rowOI1));
+            + SerDeUtils.getJSONString(t, rowOI1));
         System.out.println("deserialized = "
             + SerDeUtils.getJSONString(output, serdeOI2));
         System.out.println("serialized   = "
             + TestBinarySortableSerDe.hexString(bw));
-        assertEquals(input, output);
+        assertEquals(t, output);
       }
     }
   }
@@ -377,33 +326,20 @@ public class TestLazyBinarySerDe extends
 
     int num = 100;
     for (int itest = 0; itest < num; itest++) {
-      int randField = r.nextInt(9);
-      Byte b = randField > 0 ? null : Byte.valueOf((byte) r.nextInt());
-      Short s = randField > 1 ? null : Short.valueOf((short) r.nextInt());
-      Integer n = randField > 2 ? null : Integer.valueOf(r.nextInt());
-      Long l = randField > 3 ? null : Long.valueOf(r.nextLong());
-      Float f = randField > 4 ? null : Float.valueOf(r.nextFloat());
-      Double d = randField > 5 ? null : Double.valueOf(r.nextDouble());
-      String st = randField > 6 ? null : TestBinarySortableSerDe
-          .getRandString(r);
-      HiveDecimal bd = randField > 7 ? null : TestBinarySortableSerDe.getRandHiveDecimal(r);
-      Date date = randField > 7 ? null : TestBinarySortableSerDe.getRandDate(r);
-      MyTestInnerStruct is = randField > 7 ? null : new MyTestInnerStruct(r
-          .nextInt(5) - 2, r.nextInt(5) - 2);
-
-      MyTestClassSmaller input = new MyTestClassSmaller(b, s, n, l, f, d, st, bd, date,
-          is);
-      BytesWritable bw = (BytesWritable) serde1.serialize(input, rowOI1);
+      MyTestClassSmaller t = new MyTestClassSmaller();
+      ExtraTypeInfo extraTypeInfo = new ExtraTypeInfo();
+      t.randomFill(r, extraTypeInfo);
+      BytesWritable bw = (BytesWritable) serde1.serialize(t, rowOI1);
       Object output = serde2.deserialize(bw);
 
-      if (0 != compareDiffSizedStructs(input, rowOI1, output, serdeOI2)) {
+      if (0 != compareDiffSizedStructs(t, rowOI1, output, serdeOI2)) {
         System.out.println("structs      = "
-            + SerDeUtils.getJSONString(input, rowOI1));
+            + SerDeUtils.getJSONString(t, rowOI1));
         System.out.println("deserialized = "
             + SerDeUtils.getJSONString(output, serdeOI2));
         System.out.println("serialized   = "
             + TestBinarySortableSerDe.hexString(bw));
-        assertEquals(input, output);
+        assertEquals(t, output);
       }
     }
   }
@@ -421,13 +357,13 @@ public class TestLazyBinarySerDe extends
     StructObjectInspector soi1 = (StructObjectInspector) serdeOI;
     List<? extends StructField> fields1 = soi1.getAllStructFieldRefs();
     LazyBinaryMapObjectInspector lazympoi = (LazyBinaryMapObjectInspector) fields1
-        .get(12).getFieldObjectInspector();
+        .get(MyTestClassBigger.mapPos).getFieldObjectInspector();
     ObjectInspector lazympkeyoi = lazympoi.getMapKeyObjectInspector();
     ObjectInspector lazympvalueoi = lazympoi.getMapValueObjectInspector();
 
     StructObjectInspector soi2 = rowOI;
     List<? extends StructField> fields2 = soi2.getAllStructFieldRefs();
-    MapObjectInspector inputmpoi = (MapObjectInspector) fields2.get(12)
+    MapObjectInspector inputmpoi = (MapObjectInspector) fields2.get(MyTestClassBigger.mapPos)
         .getFieldObjectInspector();
     ObjectInspector inputmpkeyoi = inputmpoi.getMapKeyObjectInspector();
     ObjectInspector inputmpvalueoi = inputmpoi.getMapValueObjectInspector();
@@ -439,18 +375,19 @@ public class TestLazyBinarySerDe extends
 
       int randFields = r.nextInt(10);
       for (int i = 0; i < randFields; i++) {
-        String key = TestBinarySortableSerDe.getRandString(r);
+        String key = MyTestPrimitiveClass.getRandString(r);
         int randField = r.nextInt(10);
         List<MyTestInnerStruct> value = randField > 4 ? null
             : getRandStructArray(r);
         mp.put(key, value);
+
       }
 
-      MyTestClassBigger input = new MyTestClassBigger(null, null, null, null,
-						      null, null, null, null, null, null, null, null, mp);
-      BytesWritable bw = (BytesWritable) serde.serialize(input, rowOI);
+      MyTestClassBigger t = new MyTestClassBigger();
+      t.myMap = mp;
+      BytesWritable bw = (BytesWritable) serde.serialize(t, rowOI);
       Object output = serde.deserialize(bw);
-      Object lazyobj = soi1.getStructFieldData(output, fields1.get(12));
+      Object lazyobj = soi1.getStructFieldData(output, fields1.get(MyTestClassBigger.mapPos));
       Map<?, ?> outputmp = lazympoi.getMap(lazyobj);
 
       if (outputmp.size() != mp.size()) {
@@ -497,23 +434,9 @@ public class TestLazyBinarySerDe extends
       Random r = new Random(1234);
       MyTestClass rows[] = new MyTestClass[num];
       for (int i = 0; i < num; i++) {
-        int randField = r.nextInt(12);
-        Byte b = randField > 0 ? null : Byte.valueOf((byte) r.nextInt());
-        Short s = randField > 1 ? null : Short.valueOf((short) r.nextInt());
-        Integer n = randField > 2 ? null : Integer.valueOf(r.nextInt());
-        Long l = randField > 3 ? null : Long.valueOf(r.nextLong());
-        Float f = randField > 4 ? null : Float.valueOf(r.nextFloat());
-        Double d = randField > 5 ? null : Double.valueOf(r.nextDouble());
-        String st = randField > 6 ? null : TestBinarySortableSerDe
-            .getRandString(r);
-        HiveDecimal bd = randField > 7 ? null : TestBinarySortableSerDe.getRandHiveDecimal(r);
-        Date date = randField > 8 ? null : TestBinarySortableSerDe.getRandDate(r);
-        MyTestInnerStruct is = randField > 9 ? null : new MyTestInnerStruct(r
-            .nextInt(5) - 2, r.nextInt(5) - 2);
-        List<Integer> li = randField > 10 ? null : TestBinarySortableSerDe
-            .getRandIntegerArray(r);
-        byte[] ba = TestBinarySortableSerDe.getRandBA(r, i);
-        MyTestClass t = new MyTestClass(b, s, n, l, f, d, st, bd, date, is, li, ba);
+        MyTestClass t = new MyTestClass();
+        ExtraTypeInfo extraTypeInfo = new ExtraTypeInfo();
+        t.randomFill(r, extraTypeInfo);
         rows[i] = t;
       }
 

Modified: hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java (original)
+++ hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java Tue Apr 14 14:47:30 2015
@@ -18,6 +18,9 @@
 
 package org.apache.hive.service.cli;
 
+import org.apache.log4j.Layout;
+import org.apache.log4j.PatternLayout;
+
 /**
  * CLIServiceUtils.
  *
@@ -26,6 +29,10 @@ public class CLIServiceUtils {
 
 
   private static final char SEARCH_STRING_ESCAPE = '\\';
+  public static final Layout verboseLayout = new PatternLayout(
+    "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n");
+  public static final Layout nonVerboseLayout = new PatternLayout(
+    "%-5p : %m%n");
 
   /**
    * Convert a SQL search pattern into an equivalent Java Regex.

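For reference, the two constants added above are plain log4j 1.x PatternLayouts; a minimal standalone sketch of how they render (not Hive code, logger name illustrative):

import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;

public class LayoutSketch {
  public static void main(String[] args) {
    Logger log = Logger.getLogger("sketch.LayoutSketch");
    // Verbose: timestamp, level, last two components of the logger name, message.
    log.addAppender(new ConsoleAppender(new PatternLayout("%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n")));
    // Non-verbose: level and message only.
    log.addAppender(new ConsoleAppender(new PatternLayout("%-5p : %m%n")));
    log.info("hello"); // printed once per appender, once in each format
  }
}
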
Modified: hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java (original)
+++ hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java Tue Apr 14 14:47:30 2015
@@ -18,10 +18,16 @@
 
 package org.apache.hive.service.cli.operation;
 import java.io.CharArrayWriter;
+import java.util.Enumeration;
 import java.util.regex.Pattern;
 
 import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.session.OperationLog;
+import org.apache.hadoop.hive.ql.session.OperationLog.LoggingLevel;
+import org.apache.hive.service.cli.CLIServiceUtils;
+import org.apache.log4j.Appender;
+import org.apache.log4j.ConsoleAppender;
 import org.apache.log4j.Layout;
 import org.apache.log4j.Logger;
 import org.apache.log4j.WriterAppender;
@@ -36,6 +42,8 @@ import com.google.common.base.Joiner;
 public class LogDivertAppender extends WriterAppender {
   private static final Logger LOG = Logger.getLogger(LogDivertAppender.class.getName());
   private final OperationManager operationManager;
+  private boolean isVerbose;
+  private Layout verboseLayout;
 
   /**
    * A log filter that filters messages coming from the logger with the given names.
@@ -45,18 +53,71 @@ public class LogDivertAppender extends W
    * White list filter is used for less verbose log collection
    */
   private static class NameFilter extends Filter {
-    private final Pattern namePattern;
-    private final boolean excludeMatches;
+    private Pattern namePattern;
+    private LoggingLevel loggingMode;
+    private OperationManager operationManager;
+
+    /* Patterns that are excluded in verbose logging level.
+     * Filter out messages coming from log processing classes, or we'll run an infinite loop.
+     */
+    private static final Pattern verboseExcludeNamePattern = Pattern.compile(Joiner.on("|").
+      join(new String[] {LOG.getName(), OperationLog.class.getName(),
+      OperationManager.class.getName()}));
+
+    /* Patterns that are included in execution logging level.
+     * In execution mode, show only select logger messages.
+     */
+    private static final Pattern executionIncludeNamePattern = Pattern.compile(Joiner.on("|").
+      join(new String[] {"org.apache.hadoop.mapreduce.JobSubmitter",
+      "org.apache.hadoop.mapreduce.Job", "SessionState", Task.class.getName(),
+      "org.apache.hadoop.hive.ql.exec.spark.status.SparkJobMonitor"}));
+
+    /* Patterns that are included in performance logging level.
+     * In performance mode, show execution and performance logger messages.
+     */
+    private static final Pattern performanceIncludeNamePattern = Pattern.compile(
+      executionIncludeNamePattern.pattern() + "|" + PerfLogger.class.getName());
+
+    private void setCurrentNamePattern(OperationLog.LoggingLevel mode) {
+      if (mode == OperationLog.LoggingLevel.VERBOSE) {
+        this.namePattern = verboseExcludeNamePattern;
+      } else if (mode == OperationLog.LoggingLevel.EXECUTION) {
+        this.namePattern = executionIncludeNamePattern;
+      } else if (mode == OperationLog.LoggingLevel.PERFORMANCE) {
+        this.namePattern = performanceIncludeNamePattern;
+      }
+    }
 
-    public NameFilter(boolean isExclusionFilter, String [] loggerNames) {
-      this.excludeMatches = isExclusionFilter;
-      String matchRegex = Joiner.on("|").join(loggerNames);
-      this.namePattern = Pattern.compile(matchRegex);
+    public NameFilter(
+      OperationLog.LoggingLevel loggingMode, OperationManager op) {
+      this.operationManager = op;
+      this.loggingMode = loggingMode;
+      setCurrentNamePattern(loggingMode);
     }
 
     @Override
     public int decide(LoggingEvent ev) {
+      OperationLog log = operationManager.getOperationLogByThread();
+      boolean excludeMatches = (loggingMode == OperationLog.LoggingLevel.VERBOSE);
+
+      if (log == null) {
+        return Filter.DENY;
+      }
+
+      OperationLog.LoggingLevel currentLoggingMode = log.getOpLoggingLevel();
+      // If logging is disabled, deny everything.
+      if (currentLoggingMode == OperationLog.LoggingLevel.NONE) {
+        return Filter.DENY;
+      }
+      // Look at the current session's setting
+      // and set the pattern and excludeMatches accordingly.
+      if (currentLoggingMode != loggingMode) {
+        loggingMode = currentLoggingMode;
+        setCurrentNamePattern(loggingMode);
+      }
+
       boolean isMatch = namePattern.matcher(ev.getLoggerName()).matches();
+
       if (excludeMatches == isMatch) {
         // Deny if this is black-list filter (excludeMatches = true) and it
         // matched
@@ -70,25 +131,61 @@ public class LogDivertAppender extends W
   /** This is where the log message will go to */
   private final CharArrayWriter writer = new CharArrayWriter();
 
-  public LogDivertAppender(Layout layout, OperationManager operationManager, boolean isVerbose) {
-    setLayout(layout);
+  private void setLayout (boolean isVerbose, Layout lo) {
+    if (isVerbose) {
+      if (lo == null) {
+        lo = CLIServiceUtils.verboseLayout;
+        LOG.info("Cannot find a Layout from a ConsoleAppender. Using default Layout pattern.");
+      }
+    } else {
+      lo = CLIServiceUtils.nonVerboseLayout;
+    }
+    setLayout(lo);
+  }
+
+  private void initLayout(boolean isVerbose) {
+    // There should be a ConsoleAppender. Copy its Layout.
+    Logger root = Logger.getRootLogger();
+    Layout layout = null;
+
+    Enumeration<?> appenders = root.getAllAppenders();
+    while (appenders.hasMoreElements()) {
+      Appender ap = (Appender) appenders.nextElement();
+      if (ap.getClass().equals(ConsoleAppender.class)) {
+        layout = ap.getLayout();
+        break;
+      }
+    }
+    setLayout(isVerbose, layout);
+  }
+
+  public LogDivertAppender(OperationManager operationManager,
+    OperationLog.LoggingLevel loggingMode) {
+    isVerbose = (loggingMode == OperationLog.LoggingLevel.VERBOSE);
+    initLayout(isVerbose);
     setWriter(writer);
     setName("LogDivertAppender");
     this.operationManager = operationManager;
+    this.verboseLayout = isVerbose ? layout : CLIServiceUtils.verboseLayout;
+    addFilter(new NameFilter(loggingMode, operationManager));
+  }
 
-    if (isVerbose) {
-      // Filter out messages coming from log processing classes, or we'll run an
-      // infinite loop.
-      String[] exclLoggerNames = { LOG.getName(), OperationLog.class.getName(),
-          OperationManager.class.getName() };
-      addFilter(new NameFilter(true, exclLoggerNames));
-    } else {
-      // in non verbose mode, show only select logger messages
-      String[] inclLoggerNames = { "org.apache.hadoop.mapreduce.JobSubmitter",
-          "org.apache.hadoop.mapreduce.Job", "SessionState", Task.class.getName(),
-          "org.apache.hadoop.hive.ql.exec.spark.status.SparkJobMonitor"};
-      addFilter(new NameFilter(false, inclLoggerNames));
+  @Override
+  public void doAppend(LoggingEvent event) {
+    OperationLog log = operationManager.getOperationLogByThread();
+
+    // Set current layout depending on the verbose/non-verbose mode.
+    if (log != null) {
+      boolean isCurrModeVerbose = (log.getOpLoggingLevel() == OperationLog.LoggingLevel.VERBOSE);
+
+      // If there is a logging level change from verbose->non-verbose or vice-versa since
+      // the last subAppend call, change the layout to preserve consistency.
+      if (isCurrModeVerbose != isVerbose) {
+        isVerbose = isCurrModeVerbose;
+        setLayout(isVerbose, verboseLayout);
+      }
     }
+    super.doAppend(event);
   }
 
   /**

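The NameFilter rewrite above makes the filter mode-aware per event instead of fixed at construction time: verbose mode uses an exclusion pattern, the other modes use inclusion patterns, and the pattern is re-selected whenever the session's logging level changes. A standalone sketch of that shape with log4j 1.x (not the Hive class; the Mode enum and pattern contents are illustrative assumptions):

import java.util.regex.Pattern;

import org.apache.log4j.spi.Filter;
import org.apache.log4j.spi.LoggingEvent;

public class ModeAwareNameFilter extends Filter {
  enum Mode { VERBOSE, EXECUTION }

  private volatile Mode mode = Mode.VERBOSE;
  private static final Pattern EXCLUDE = Pattern.compile("noisy\\.logger|log\\.plumbing");
  private static final Pattern INCLUDE = Pattern.compile("org\\.example\\.JobRunner");

  void setMode(Mode m) { this.mode = m; }

  @Override
  public int decide(LoggingEvent event) {
    boolean excludeMatches = (mode == Mode.VERBOSE);
    Pattern p = excludeMatches ? EXCLUDE : INCLUDE;
    boolean isMatch = p.matcher(event.getLoggerName()).matches();
    // Exclusion filter: drop on match. Inclusion filter: drop on non-match.
    return (excludeMatches == isMatch) ? Filter.DENY : Filter.NEUTRAL;
  }
}
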
Modified: hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/Operation.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/Operation.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/Operation.java (original)
+++ hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/Operation.java Tue Apr 14 14:47:30 2015
@@ -210,7 +210,7 @@ public abstract class Operation {
 
       // create OperationLog object with above log file
       try {
-        operationLog = new OperationLog(opHandle.toString(), operationLogFile);
+        operationLog = new OperationLog(opHandle.toString(), operationLogFile, parentSession.getHiveConf());
       } catch (FileNotFoundException e) {
         LOG.warn("Unable to instantiate OperationLog object for operation: " +
             opHandle, e);

Modified: hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java?rev=1673437&r1=1673436&r2=1673437&view=diff
==============================================================================
--- hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java (original)
+++ hive/branches/hbase-metastore/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java Tue Apr 14 14:47:30 2015
@@ -20,7 +20,6 @@ package org.apache.hive.service.cli.oper
 
 import java.sql.SQLException;
 import java.util.ArrayList;
-import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -42,10 +41,7 @@ import org.apache.hive.service.cli.RowSe
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.session.HiveSession;
 import org.apache.log4j.Appender;
-import org.apache.log4j.ConsoleAppender;
-import org.apache.log4j.Layout;
 import org.apache.log4j.Logger;
-import org.apache.log4j.PatternLayout;
 
 /**
  * OperationManager.
@@ -54,7 +50,6 @@ import org.apache.log4j.PatternLayout;
 public class OperationManager extends AbstractService {
   private final Log LOG = LogFactory.getLog(OperationManager.class.getName());
 
-  private HiveConf hiveConf;
   private final Map<OperationHandle, Operation> handleToOperation =
       new HashMap<OperationHandle, Operation>();
 
@@ -64,10 +59,9 @@ public class OperationManager extends Ab
 
   @Override
   public synchronized void init(HiveConf hiveConf) {
-    this.hiveConf = hiveConf;
     if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
-      boolean isVerbose = hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_VERBOSE);
-      initOperationLogCapture(isVerbose);
+      initOperationLogCapture(hiveConf.getVar(
+        HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL));
     } else {
       LOG.debug("Operation level logging is turned off");
     }
@@ -86,34 +80,10 @@ public class OperationManager extends Ab
     super.stop();
   }
 
-  private void initOperationLogCapture(boolean isVerbose) {
-    // There should be a ConsoleAppender. Copy its Layout.
-    Logger root = Logger.getRootLogger();
-    Layout layout = null;
-
-    Enumeration<?> appenders = root.getAllAppenders();
-    while (appenders.hasMoreElements()) {
-      Appender ap = (Appender) appenders.nextElement();
-      if (ap.getClass().equals(ConsoleAppender.class)) {
-        layout = ap.getLayout();
-        break;
-      }
-    }
-
-    final String VERBOSE_PATTERN = "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n";
-    final String NONVERBOSE_PATTERN = "%-5p : %m%n";
-
-    if (isVerbose) {
-      if (layout == null) {
-        layout = new PatternLayout(VERBOSE_PATTERN);
-        LOG.info("Cannot find a Layout from a ConsoleAppender. Using default Layout pattern.");
-      }
-    } else {
-      layout = new PatternLayout(NONVERBOSE_PATTERN);
-    }
+  private void initOperationLogCapture(String loggingMode) {
     // Register another Appender (with the same layout) that talks to us.
-    Appender ap = new LogDivertAppender(layout, this, isVerbose);
-    root.addAppender(ap);
+    Appender ap = new LogDivertAppender(this, OperationLog.getLoggingLevel(loggingMode));
+    Logger.getRootLogger().addAppender(ap);
   }
 
   public ExecuteStatementOperation newExecuteStatementOperation(HiveSession parentSession,


