hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mmccl...@apache.org
Subject [01/18] hive git commit: HIVE-12625: Backport to branch-1 HIVE-11981 ORC Schema Evolution Issues (Vectorized, ACID, and Non-Vectorized) (Matt McCline, reviewed by Prasanth J) HIVE-12728: Apply DDL restrictions for ORC schema evolution (Prasanth Jayachandran)
Date Tue, 12 Jan 2016 17:56:32 GMT
Repository: hive
Updated Branches:
  refs/heads/branch-1 9b5f1ff3c -> 0fd9069e9


http://git-wip-us.apache.org/repos/asf/hive/blob/0fd9069e/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
index a18e8b8..7406697 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinaryDeserializeRead.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VLong;
@@ -39,6 +40,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.Pr
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 
 /*
@@ -58,7 +60,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 public class LazyBinaryDeserializeRead implements DeserializeRead {
   public static final Log LOG = LogFactory.getLog(LazyBinaryDeserializeRead.class.getName());
 
-  private PrimitiveTypeInfo[] primitiveTypeInfos;
+  private TypeInfo[] typeInfos;
 
   private byte[] bytes;
   private int start;
@@ -80,9 +82,9 @@ public class LazyBinaryDeserializeRead implements DeserializeRead {
   private boolean readBeyondBufferRangeWarned;
   private boolean bufferRangeHasExtraDataWarned;
 
-  public LazyBinaryDeserializeRead(PrimitiveTypeInfo[] primitiveTypeInfos) {
-    this.primitiveTypeInfos = primitiveTypeInfos;
-    fieldCount = primitiveTypeInfos.length;
+  public LazyBinaryDeserializeRead(TypeInfo[] typeInfos) {
+    this.typeInfos = typeInfos;
+    fieldCount = typeInfos.length;
     tempVInt = new VInt();
     tempVLong = new VLong();
     readBeyondConfiguredFieldsWarned = false;
@@ -95,10 +97,10 @@ public class LazyBinaryDeserializeRead implements DeserializeRead {
   }
 
   /*
-   * The primitive type information for all fields.
+   * The type information for all fields.
    */
-  public PrimitiveTypeInfo[] primitiveTypeInfos() {
-    return primitiveTypeInfos;
+  public TypeInfo[] typeInfos() {
+    return typeInfos;
   }
 
   /*
@@ -153,7 +155,7 @@ public class LazyBinaryDeserializeRead implements DeserializeRead {
 
       // We have a field and are positioned to it.
 
-      if (primitiveTypeInfos[fieldIndex].getPrimitiveCategory() != PrimitiveCategory.DECIMAL) {
+      if (((PrimitiveTypeInfo) typeInfos[fieldIndex]).getPrimitiveCategory() != PrimitiveCategory.DECIMAL) {
         return false;
       }
 
@@ -508,7 +510,7 @@ public class LazyBinaryDeserializeRead implements DeserializeRead {
     LazyBinaryReadHiveCharResults lazyBinaryReadHiveCharResults = (LazyBinaryReadHiveCharResults) readHiveCharResults;
 
     if (!lazyBinaryReadHiveCharResults.isInit()) {
-      lazyBinaryReadHiveCharResults.init((CharTypeInfo) primitiveTypeInfos[fieldIndex]);
+      lazyBinaryReadHiveCharResults.init((CharTypeInfo) typeInfos[fieldIndex]);
     }
 
     if (lazyBinaryReadHiveCharResults.readStringResults == null) {
@@ -559,7 +561,7 @@ public class LazyBinaryDeserializeRead implements DeserializeRead {
     LazyBinaryReadHiveVarcharResults lazyBinaryReadHiveVarcharResults = (LazyBinaryReadHiveVarcharResults) readHiveVarcharResults;
 
     if (!lazyBinaryReadHiveVarcharResults.isInit()) {
-      lazyBinaryReadHiveVarcharResults.init((VarcharTypeInfo) primitiveTypeInfos[fieldIndex]);
+      lazyBinaryReadHiveVarcharResults.init((VarcharTypeInfo) typeInfos[fieldIndex]);
     }
 
     if (lazyBinaryReadHiveVarcharResults.readStringResults == null) {
@@ -913,9 +915,10 @@ public class LazyBinaryDeserializeRead implements DeserializeRead {
     if (tempHiveDecimalWritable == null) {
       tempHiveDecimalWritable = new HiveDecimalWritable();
     }
-    tempHiveDecimalWritable.setFromBytes(bytes, saveStart, length);
+    LazyBinarySerDe.setFromBytes(bytes, saveStart, length,
+        tempHiveDecimalWritable);
 
-    saveDecimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfos[fieldIndex];
+    saveDecimalTypeInfo = (DecimalTypeInfo) typeInfos[fieldIndex];
 
     int precision = saveDecimalTypeInfo.getPrecision();
     int scale = saveDecimalTypeInfo.getScale();
@@ -939,4 +942,4 @@ public class LazyBinaryDeserializeRead implements DeserializeRead {
     // Now return whether it is NULL or NOT NULL.
     return (saveDecimal == null);
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/0fd9069e/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
index e0d9c0a..2d201ec 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
 import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
 import org.apache.hive.common.util.DateUtils;
@@ -742,4 +743,4 @@ public class LazyBinarySerializeWrite implements SerializeWrite {
       output.writeByte(nullOffset, nullByte);
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/0fd9069e/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
index a4323d1..1cae80c 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
@@ -760,11 +760,47 @@ public final class TypeInfoUtils {
     return result;
   }
 
+  public static ArrayList<TypeInfo> typeInfosFromStructObjectInspector(
+      StructObjectInspector structObjectInspector) {
+
+    List<? extends StructField> fields = structObjectInspector.getAllStructFieldRefs();
+    ArrayList<TypeInfo> typeInfoList = new ArrayList<TypeInfo>(fields.size());
+
+    for(StructField field : fields) {
+      TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(
+          field.getFieldObjectInspector().getTypeName());
+      typeInfoList.add(typeInfo);
+    }
+    return typeInfoList;
+  }
+
+  public static ArrayList<TypeInfo> typeInfosFromTypeNames(List<String> typeNames) {
+
+    ArrayList<TypeInfo> result = new ArrayList<TypeInfo>(typeNames.size());
+
+    for(int i = 0; i < typeNames.size(); i++) {
+      TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeNames.get(i));
+      result.add(typeInfo);
+    }
+    return result;
+  }
+
   public static ArrayList<TypeInfo> getTypeInfosFromTypeString(String typeString) {
     TypeInfoParser parser = new TypeInfoParser(typeString);
     return parser.parseTypeInfos();
   }
 
+  public static String getTypesString(List<TypeInfo> typeInfos) {
+    StringBuilder sb = new StringBuilder();
+    for (int i = 0; i < typeInfos.size(); i++) {
+      if (i > 0) {
+        sb.append(":");
+      }
+      sb.append(typeInfos.get(i).getTypeName());
+    }
+    return sb.toString();
+  }
+
   public static TypeInfo getTypeInfoFromTypeString(String typeString) {
     TypeInfoParser parser = new TypeInfoParser(typeString);
     return parser.parseTypeInfos().get(0);


Mime
View raw message