hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From br...@apache.org
Subject svn commit: r1536151 [4/4] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/common/type/ common/src/test/org/apache/hadoop/hive/common/type/ data/files/ jdbc/src/java/org/apache/hadoop/hive/jdbc/ jdbc/src/test/org/apache/hadoop/hive/jdbc/ jdbc/...
Date Sun, 27 Oct 2013 15:34:03 GMT
Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java?rev=1536151&r1=1536150&r2=1536151&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
(original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
Sun Oct 27 15:34:01 2013
@@ -22,7 +22,6 @@ import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
@@ -35,6 +34,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
@@ -82,7 +82,7 @@ public final class PrimitiveObjectInspec
   public static final WritableBinaryObjectInspector writableBinaryObjectInspector =
       new WritableBinaryObjectInspector();
   public static final WritableHiveDecimalObjectInspector writableHiveDecimalObjectInspector
=
-      new WritableHiveDecimalObjectInspector();
+      new WritableHiveDecimalObjectInspector(TypeInfoFactory.decimalTypeInfo);
 
   // Map from PrimitiveTypeInfo to AbstractPrimitiveWritableObjectInspector.
   private static HashMap<PrimitiveTypeInfo, AbstractPrimitiveWritableObjectInspector>
cachedPrimitiveWritableInspectorCache =
@@ -112,8 +112,7 @@ public final class PrimitiveObjectInspec
         writableTimestampObjectInspector);
     cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BINARY_TYPE_NAME),
         writableBinaryObjectInspector);
-    cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DECIMAL_TYPE_NAME),
-        writableHiveDecimalObjectInspector);
+    cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.decimalTypeInfo, writableHiveDecimalObjectInspector);
   }
 
   private static Map<PrimitiveCategory, AbstractPrimitiveWritableObjectInspector> primitiveCategoryToWritableOI
=
@@ -159,7 +158,7 @@ public final class PrimitiveObjectInspec
   public static final JavaBinaryObjectInspector javaByteArrayObjectInspector =
       new JavaBinaryObjectInspector();
   public static final JavaHiveDecimalObjectInspector javaHiveDecimalObjectInspector =
-      new JavaHiveDecimalObjectInspector();
+      new JavaHiveDecimalObjectInspector(TypeInfoFactory.decimalTypeInfo);
 
   // Map from PrimitiveTypeInfo to AbstractPrimitiveJavaObjectInspector.
   private static HashMap<PrimitiveTypeInfo, AbstractPrimitiveJavaObjectInspector> cachedPrimitiveJavaInspectorCache
=
@@ -189,8 +188,7 @@ public final class PrimitiveObjectInspec
         javaTimestampObjectInspector);
     cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BINARY_TYPE_NAME),
         javaByteArrayObjectInspector);
-    cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.DECIMAL_TYPE_NAME),
-        javaHiveDecimalObjectInspector);
+    cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.decimalTypeInfo, javaHiveDecimalObjectInspector);
   }
 
   private static Map<PrimitiveCategory, AbstractPrimitiveJavaObjectInspector> primitiveCategoryToJavaOI
=
@@ -244,8 +242,11 @@ public final class PrimitiveObjectInspec
     case VARCHAR:
       result = new WritableHiveVarcharObjectInspector((VarcharTypeInfo)typeInfo);
       break;
-      default:
-        throw new RuntimeException("Failed to create WritableHiveVarcharObjectInspector for
" + typeInfo );
+    case DECIMAL:
+      result = new WritableHiveDecimalObjectInspector((DecimalTypeInfo)typeInfo);
+      break;
+    default:
+      throw new RuntimeException("Failed to create object inspector for " + typeInfo );
     }
 
     cachedPrimitiveWritableInspectorCache.put(typeInfo, result);
@@ -286,7 +287,7 @@ public final class PrimitiveObjectInspec
     case TIMESTAMP:
       return new WritableConstantTimestampObjectInspector((TimestampWritable)value);
     case DECIMAL:
-      return new WritableConstantHiveDecimalObjectInspector((HiveDecimalWritable)value);
+      return new WritableConstantHiveDecimalObjectInspector((DecimalTypeInfo)typeInfo, (HiveDecimalWritable)value);
     case BINARY:
       return new WritableConstantBinaryObjectInspector((BytesWritable)value);
     case VOID:
@@ -330,6 +331,9 @@ public final class PrimitiveObjectInspec
     case VARCHAR:
       result = new JavaHiveVarcharObjectInspector((VarcharTypeInfo)typeInfo);
       break;
+    case DECIMAL:
+      result = new JavaHiveDecimalObjectInspector((DecimalTypeInfo)typeInfo);
+      break;
       default:
         throw new RuntimeException("Failed to create JavaHiveVarcharObjectInspector for "
+ typeInfo );
     }

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java?rev=1536151&r1=1536150&r2=1536151&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java
(original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveDecimalObjectInspector.java
Sun Oct 27 15:34:01 2013
@@ -17,27 +17,43 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 
 /**
  * A WritableConstantHiveDecimalObjectInspector is a WritableHiveDecimalObjectInspector
  * that implements ConstantObjectInspector.
  */
 public class WritableConstantHiveDecimalObjectInspector extends WritableHiveDecimalObjectInspector
-    implements ConstantObjectInspector {
+implements ConstantObjectInspector {
 
   private HiveDecimalWritable value;
 
   protected WritableConstantHiveDecimalObjectInspector() {
     super();
   }
-  WritableConstantHiveDecimalObjectInspector(HiveDecimalWritable value) {
+
+  WritableConstantHiveDecimalObjectInspector(DecimalTypeInfo typeInfo,
+      HiveDecimalWritable value) {
+    super(typeInfo);
     this.value = value;
   }
 
   @Override
   public HiveDecimalWritable getWritableConstantValue() {
-    return value;
+    // We need to enforce precision/scale here.
+    // A little inefficiency here as we need to create a HiveDecimal instance from the
+    // writable and recreate a HiveDecimalWritable instance on the HiveDecimal instance.
+    // However, we don't know the precision/scale of the original writable until we get a
+    // HiveDecimal instance from it.
+    DecimalTypeInfo decTypeInfo = (DecimalTypeInfo)typeInfo;
+    HiveDecimal dec = value == null ? null :
+      value.getHiveDecimal(decTypeInfo.precision(), decTypeInfo.scale());
+    if (dec == null) {
+      return null;
+    }
+    return new HiveDecimalWritable(dec);
   }
+
 }

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveDecimalObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveDecimalObjectInspector.java?rev=1536151&r1=1536150&r2=1536151&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveDecimalObjectInspector.java
(original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveDecimalObjectInspector.java
Sun Oct 27 15:34:01 2013
@@ -20,24 +20,27 @@ package org.apache.hadoop.hive.serde2.ob
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 
-public class WritableHiveDecimalObjectInspector
-    extends AbstractPrimitiveWritableObjectInspector
-    implements SettableHiveDecimalObjectInspector {
+public class WritableHiveDecimalObjectInspector extends AbstractPrimitiveWritableObjectInspector
+implements SettableHiveDecimalObjectInspector {
 
-  protected WritableHiveDecimalObjectInspector() {
-    super(TypeInfoFactory.decimalTypeInfo);
+  public WritableHiveDecimalObjectInspector() {
+  }
+
+  protected WritableHiveDecimalObjectInspector(DecimalTypeInfo typeInfo) {
+    super(typeInfo);
   }
 
   @Override
   public HiveDecimalWritable getPrimitiveWritableObject(Object o) {
-    return o == null ? null : (HiveDecimalWritable) o;
+    return enforcePrecisionScale(((HiveDecimalWritable) o));
   }
 
   @Override
   public HiveDecimal getPrimitiveJavaObject(Object o) {
-    return o == null ? null : ((HiveDecimalWritable) o).getHiveDecimal();
+    return enforcePrecisionScale(((HiveDecimalWritable)o).getHiveDecimal());
   }
 
   @Override
@@ -47,27 +50,34 @@ public class WritableHiveDecimalObjectIn
 
   @Override
   public Object set(Object o, byte[] bytes, int scale) {
-    ((HiveDecimalWritable) o).set(bytes, scale);
-    return o;
+    HiveDecimalWritable writable = (HiveDecimalWritable)create(bytes, scale);
+    if (writable != null) {
+      ((HiveDecimalWritable)o).set(writable);
+      return o;
+    } else {
+      return null;
+    }
   }
 
   @Override
   public Object set(Object o, HiveDecimal t) {
-    if (t == null) {
+    HiveDecimal dec = enforcePrecisionScale(t);
+    if (dec != null) {
+      ((HiveDecimalWritable) o).set(dec);
+      return o;
+    } else {
       return null;
     }
-
-    ((HiveDecimalWritable) o).set(t);
-    return o;
   }
 
   @Override
   public Object set(Object o, HiveDecimalWritable t) {
-    if (t == null) {
+    HiveDecimalWritable writable = enforcePrecisionScale(t);
+    if (writable == null) {
       return null;
     }
 
-    ((HiveDecimalWritable) o).set(t);
+    ((HiveDecimalWritable) o).set(writable);
     return o;
   }
 
@@ -78,11 +88,15 @@ public class WritableHiveDecimalObjectIn
 
   @Override
   public Object create(HiveDecimal t) {
-    if (t == null) {
-      return null;
-    }
+    return t == null ? null : new HiveDecimalWritable(t);
+  }
+
+  private HiveDecimal enforcePrecisionScale(HiveDecimal dec) {
+    return HiveDecimalUtils.enforcePrecisionScale(dec, (DecimalTypeInfo)typeInfo);
+  }
 
-    return new HiveDecimalWritable(t);
+  private HiveDecimalWritable enforcePrecisionScale(HiveDecimalWritable writable) {
+    return HiveDecimalUtils.enforcePrecisionScale(writable, (DecimalTypeInfo)typeInfo);
   }
 
 }

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/DecimalTypeInfo.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/DecimalTypeInfo.java?rev=1536151&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/DecimalTypeInfo.java
(added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/DecimalTypeInfo.java
Sun Oct 27 15:34:01 2013
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.typeinfo;
+
+import org.apache.hadoop.hive.serde.serdeConstants;
+
+public class DecimalTypeInfo extends PrimitiveTypeInfo {
+  private static final long serialVersionUID = 1L;
+
+  private int precision;
+  private int scale;
+
+  // no-arg constructor to make Kryo happy.
+  public DecimalTypeInfo() {
+  }
+
+  public DecimalTypeInfo(int precision, int scale) {
+    super(serdeConstants.DECIMAL_TYPE_NAME);
+    HiveDecimalUtils.validateParameter(precision, scale);
+    this.precision = precision;
+    this.scale = scale;
+  }
+
+  @Override
+  public String getTypeName() {
+    return getQualifiedName();
+  }
+
+  @Override
+  public boolean equals(Object other) {
+    if (other == null || !(other instanceof DecimalTypeInfo)) {
+      return false;
+    }
+
+    DecimalTypeInfo dti = (DecimalTypeInfo)other;
+
+    return this.precision() == dti.precision() && this.scale() == dti.scale();
+
+  }
+
+  /**
+   * Generate the hashCode for this TypeInfo.
+   */
+  @Override
+  public int hashCode() {
+    return 31 * (17 + precision) + scale;
+  }
+
+  @Override
+  public String toString() {
+    return getQualifiedName();
+  }
+
+  @Override
+  public String getQualifiedName() {
+    return getQualifiedName(precision, scale);
+  }
+
+  public static String getQualifiedName(int precision, int scale) {
+    StringBuilder sb = new StringBuilder(serdeConstants.DECIMAL_TYPE_NAME);
+    sb.append("(");
+    sb.append(precision);
+    sb.append(",");
+    sb.append(scale);
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public int precision() {
+    return precision;
+  }
+
+  public int scale() {
+    return scale;
+  }
+
+  @Override
+  public boolean accept(TypeInfo other) {
+    if (other == null || !(other instanceof DecimalTypeInfo)) {
+      return false;
+    }
+
+    DecimalTypeInfo dti = (DecimalTypeInfo)other;
+    // Make sure "this" has enough integer room to accommodate other's integer digits.
+    return this.precision() - this.scale() >= dti.precision() - dti.scale();
+  }
+
+}

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java?rev=1536151&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
(added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/HiveDecimalUtils.java
Sun Oct 27 15:34:01 2013
@@ -0,0 +1,121 @@
+package org.apache.hadoop.hive.serde2.typeinfo;
+
+import java.math.BigDecimal;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+
+public class HiveDecimalUtils {
+
+  public static HiveDecimal enforcePrecisionScale(HiveDecimal dec, DecimalTypeInfo typeInfo)
{
+    return enforcePrecisionScale(dec, typeInfo.precision(), typeInfo.scale());
+  }
+
+  public static HiveDecimal enforcePrecisionScale(HiveDecimal dec, int maxPrecision, int maxScale) {
+    if (dec == null) {
+      return null;
+    }
+
+    // Minor optimization, avoiding creating new objects.
+    if (dec.precision() - dec.scale() <= maxPrecision - maxScale && dec.scale()
<= maxScale) {
+      return dec;
+    }
+
+    BigDecimal bd = HiveDecimal.enforcePrecisionScale(dec.bigDecimalValue(),
+        maxPrecision, maxScale);
+    if (bd == null) {
+      return null;
+    }
+
+    return HiveDecimal.create(bd);
+  }
+
+  public static HiveDecimalWritable enforcePrecisionScale(HiveDecimalWritable writable,
+      DecimalTypeInfo typeInfo) {
+    if (writable == null) {
+      return null;
+    }
+
+    HiveDecimal dec = enforcePrecisionScale(writable.getHiveDecimal(), typeInfo);
+    return dec == null ? null : new HiveDecimalWritable(dec);
+  }
+
+  public static HiveDecimalWritable enforcePrecisionScale(HiveDecimalWritable writable,
+      int precision, int scale) {
+    if (writable == null) {
+      return null;
+    }
+
+    HiveDecimal dec = enforcePrecisionScale(writable.getHiveDecimal(), precision, scale);
+    return dec == null ? null : new HiveDecimalWritable(dec);
+  }
+
+  public static void validateParameter(int precision, int scale) {
+    if (precision < 1 || precision > HiveDecimal.MAX_PRECISION) {
+      throw new IllegalArgumentException("Decimal precision out of allowed range [1," +
+          HiveDecimal.MAX_PRECISION + "]");
+    }
+
+    if (scale < 0 || scale > HiveDecimal.MAX_SCALE) {
+      throw new IllegalArgumentException("Decimal scale out of allowed range [0," +
+          HiveDecimal.MAX_SCALE + "]");
+    }
+
+    if (precision < scale) {
+      throw new IllegalArgumentException("Decimal scale must be less than or equal to precision");
+    }
+  }
+
+  /**
+   * Getting the precision of the double type can be tricky. While a double may have more
+   * digits than a HiveDecimal can hold, in reality those numbers are of no practical use.
+   * Thus, we assume that a double can have at most HiveDecimal.MAX_PRECISION, which is
+   * generous enough. This implies that casting a double to a decimal type is always valid.
+   *
+   */
+  public static int getPrecisionForType(PrimitiveTypeInfo typeInfo) {
+    switch (typeInfo.getPrimitiveCategory()) {
+    case DECIMAL:
+      return ((DecimalTypeInfo)typeInfo).precision();
+    case FLOAT:
+      return 23;
+    case BYTE:
+      return 3;
+    case SHORT:
+      return 5;
+    case INT:
+      return 10;
+    case LONG:
+      return 19;
+    default:
+      return HiveDecimal.MAX_PRECISION;
+    }
+  }
+
+  /**
+   * Getting the scale of the double type can be tricky. While a double may have more decimal
+   * digits than HiveDecimal, in reality those numbers are of no practical use. Thus, we
+   * assume that a double can have at most HiveDecimal.MAX_SCALE, which is generous enough.
+   * This implies that casting a double to a decimal type is always valid.
+   *
+   */
+  public static int getScaleForType(PrimitiveTypeInfo typeInfo) {
+    switch (typeInfo.getPrimitiveCategory()) {
+    case DECIMAL:
+      return ((DecimalTypeInfo)typeInfo).scale();
+    case FLOAT:
+      return 7;
+    case BYTE:
+      return 0;
+    case SHORT:
+      return 0;
+    case INT:
+      return 0;
+    case LONG:
+      return 0;
+    default:
+      return HiveDecimal.MAX_SCALE;
+    }
+  }
+
+}

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java?rev=1536151&r1=1536150&r2=1536151&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java Sun Oct
27 15:34:01 2013
@@ -68,4 +68,9 @@ public abstract class TypeInfo implement
 
   @Override
   public abstract int hashCode();
+
+  public boolean accept(TypeInfo other) {
+    return this.equals(other);
+  }
+
 }

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java?rev=1536151&r1=1536150&r2=1536151&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
(original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
Sun Oct 27 15:34:01 2013
@@ -24,6 +24,7 @@ import java.util.concurrent.ConcurrentHa
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveTypeEntry;
@@ -54,7 +55,12 @@ public final class TypeInfoFactory {
   public static final PrimitiveTypeInfo dateTypeInfo = new PrimitiveTypeInfo(serdeConstants.DATE_TYPE_NAME);
   public static final PrimitiveTypeInfo timestampTypeInfo = new PrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME);
   public static final PrimitiveTypeInfo binaryTypeInfo = new PrimitiveTypeInfo(serdeConstants.BINARY_TYPE_NAME);
-  public static final PrimitiveTypeInfo decimalTypeInfo = new PrimitiveTypeInfo(serdeConstants.DECIMAL_TYPE_NAME);
+
+  /**
+   * A DecimalTypeInfo instance that has max precision and max scale.
+   */
+  public static final DecimalTypeInfo decimalTypeInfo = new DecimalTypeInfo(HiveDecimal.MAX_PRECISION,
+      HiveDecimal.MAX_SCALE);
 
   public static final PrimitiveTypeInfo unknownTypeInfo = new PrimitiveTypeInfo("unknown");
 
@@ -75,7 +81,7 @@ public final class TypeInfoFactory {
     cachedPrimitiveTypeInfo.put(serdeConstants.DATE_TYPE_NAME, dateTypeInfo);
     cachedPrimitiveTypeInfo.put(serdeConstants.TIMESTAMP_TYPE_NAME, timestampTypeInfo);
     cachedPrimitiveTypeInfo.put(serdeConstants.BINARY_TYPE_NAME, binaryTypeInfo);
-    cachedPrimitiveTypeInfo.put(serdeConstants.DECIMAL_TYPE_NAME, decimalTypeInfo);
+    cachedPrimitiveTypeInfo.put(decimalTypeInfo.getQualifiedName(), decimalTypeInfo);
     cachedPrimitiveTypeInfo.put("unknown", unknownTypeInfo);
   }
 
@@ -128,6 +134,12 @@ public final class TypeInfoFactory {
           return null;
         }
         return new VarcharTypeInfo(Integer.valueOf(parts.typeParams[0]));
+      case DECIMAL:
+        if (parts.typeParams.length != 2) {
+          return null;
+        }
+        return new DecimalTypeInfo(Integer.valueOf(parts.typeParams[0]),
+            Integer.valueOf(parts.typeParams[1]));
       default:
         return null;
     }
@@ -138,6 +150,11 @@ public final class TypeInfoFactory {
     return (VarcharTypeInfo) getPrimitiveTypeInfo(fullName);
   }
 
+  public static DecimalTypeInfo getDecimalTypeInfo(int precision, int scale) {
+    String fullName = DecimalTypeInfo.getQualifiedName(precision, scale);
+    return (DecimalTypeInfo) getPrimitiveTypeInfo(fullName);
+  }
+
   public static TypeInfo getPrimitiveTypeInfoFromPrimitiveWritable(
       Class<?> clazz) {
     String typeName = PrimitiveObjectInspectorUtils
@@ -207,6 +224,6 @@ public final class TypeInfoFactory {
       cachedMapTypeInfo.put(signature, result);
     }
     return result;
-  };
+  }
 
 }

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java?rev=1536151&r1=1536150&r2=1536151&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java Sun
Oct 27 15:34:01 2013
@@ -395,23 +395,39 @@ public final class TypeInfoUtils {
           PrimitiveObjectInspectorUtils.getTypeEntryFromTypeName(t.text);
       if (typeEntry != null && typeEntry.primitiveCategory != PrimitiveCategory.UNKNOWN
) {
         String qualifiedTypeName = typeEntry.typeName;
-        if (typeEntry.primitiveCategory == PrimitiveCategory.VARCHAR) {
-          int length = HiveVarchar.MAX_VARCHAR_LENGTH;
-          
-          String[] params = parseParams();
+        String[] params = parseParams();
+        switch (typeEntry.primitiveCategory) {
+        case VARCHAR:
           if (params == null || params.length == 0) {
-            throw new RuntimeException( "Varchar type is specified without length: " + typeInfoString);
+            throw new IllegalArgumentException( "Varchar type is specified without length:
" + typeInfoString);
           }
-          
+
           if (params.length == 1) {
-            length = Integer.valueOf(params[0]);
+            int length = Integer.valueOf(params[0]);
             VarcharUtils.validateParameter(length);
+            qualifiedTypeName = BaseCharTypeInfo.getQualifiedName(typeEntry.typeName, length);
           } else if (params.length > 1) {
-            throw new RuntimeException("Type varchar only takes one parameter, but " +
+            throw new IllegalArgumentException("Type varchar only takes one parameter, but
" +
                 params.length + " is seen");
-          } 
+          }
+
+          break;
+        case DECIMAL:
+          if (params == null || params.length == 0) {
+            throw new IllegalArgumentException("Decimal type is specified without precision and scale: "
+                + typeInfoString);
+          }
+
+          if (params.length == 2) {
+            int precision = Integer.valueOf(params[0]);
+            int scale = Integer.valueOf(params[1]);
+            HiveDecimalUtils.validateParameter(precision, scale);
+            qualifiedTypeName = DecimalTypeInfo.getQualifiedName(precision, scale);
+          } else {
+            throw new IllegalArgumentException("Type decimal only takes two parameters, but "
+                + params.length + " is seen");
+          }
 
-          qualifiedTypeName = BaseCharTypeInfo.getQualifiedName(typeEntry.typeName, length);
+          break;
         }
 
         return TypeInfoFactory.getPrimitiveTypeInfo(qualifiedTypeName);
@@ -679,7 +695,7 @@ public final class TypeInfoUtils {
     switch (oi.getCategory()) {
     case PRIMITIVE: {
       PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
-      result = TypeInfoFactory.getPrimitiveTypeInfo(poi.getTypeName());
+      result = poi.getTypeInfo();
       break;
     }
     case LIST: {

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/VarcharTypeInfo.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/VarcharTypeInfo.java?rev=1536151&r1=1536150&r2=1536151&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/VarcharTypeInfo.java
(original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/VarcharTypeInfo.java
Sun Oct 27 15:34:01 2013
@@ -45,7 +45,7 @@ public class VarcharTypeInfo extends Bas
 
     VarcharTypeInfo pti = (VarcharTypeInfo) other;
 
-    return this.typeName.equals(pti.typeName) && this.getLength() == pti.getLength();
+    return this.getLength() == pti.getLength();
   }
 
   /**
@@ -53,7 +53,7 @@ public class VarcharTypeInfo extends Bas
    */
   @Override
   public int hashCode() {
-    return getQualifiedName().hashCode();
+    return getLength();
   }
 
   @Override



Mime
View raw message