hadoop-hive-commits mailing list archives

From: athu...@apache.org
Subject: svn commit: r737291 [3/8] - in /hadoop/hive/trunk: ./ eclipse-templates/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/lib/ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ ql/src/java/org/apache/hadoop/hive/ql/parse...
Date: Sat, 24 Jan 2009 01:58:06 GMT
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java Sat Jan 24 01:58:01 2009
@@ -18,47 +18,286 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import java.sql.Date;
+
 import org.apache.hadoop.hive.ql.exec.UDAF;
+import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 
 
 
 public class UDAFMax extends UDAF {
 
-  private double mMax;
-  private boolean mEmpty;
-  
-  public UDAFMax() {
-    super();
-    init();
+  static public class MaxShortEvaluator implements UDAFEvaluator {
+    private short mMax;
+    private boolean mEmpty;
+
+    public MaxShortEvaluator() {
+      super();
+      init();
+    }
+
+    public void init() {
+      mMax = 0;
+      mEmpty = true;
+    }
+
+    public boolean iterate(Short o) {
+      if (o != null) {
+        if (mEmpty) {
+          mMax = o;
+          mEmpty = false;
+        } else {
+          mMax = (short) Math.max(mMax, o);
+        }
+      }
+      return true;
+    }
+
+    public Short terminatePartial() {
+      return mEmpty ? null : Short.valueOf(mMax);
+    }
+
+    public boolean merge(Short o) {
+      return iterate(o);
+    }
+
+    public Short terminate() {
+      return mEmpty ? null : Short.valueOf(mMax);
+    }
   }
 
-  public void init() {
-    mMax = 0;
-    mEmpty = true;
+  static public class MaxIntEvaluator implements UDAFEvaluator {
+    private int mMax;
+    private boolean mEmpty;
+
+    public MaxIntEvaluator() {
+      super();
+      init();
+    }
+
+    public void init() {
+      mMax = 0;
+      mEmpty = true;
+    }
+
+    public boolean iterate(Integer o) {
+      if (o != null) {
+        if (mEmpty) {
+          mMax = o;
+          mEmpty = false;
+        } else {
+          mMax = Math.max(mMax, o);
+        }
+      }
+      return true;
+    }
+
+    public Integer terminatePartial() {
+      return mEmpty ? null : Integer.valueOf(mMax);
+    }
+
+    public boolean merge(Integer o) {
+      return iterate(o);
+    }
+
+    public Integer terminate() {
+      return mEmpty ? null : Integer.valueOf(mMax);
+    }
   }
 
-  public boolean aggregate(Double o) {
-    if (o != null) {
-      if (mEmpty) {
-        mMax = o;
-        mEmpty = false;
-      } else {
-        mMax = Math.max(mMax, o);
+  static public class MaxLongEvaluator implements UDAFEvaluator {
+    private long mMax;
+    private boolean mEmpty;
+
+    public MaxLongEvaluator() {
+      super();
+      init();
+    }
+
+    public void init() {
+      mMax = 0;
+      mEmpty = true;
+    }
+
+    public boolean iterate(Long o) {
+      if (o != null) {
+        if (mEmpty) {
+          mMax = o;
+          mEmpty = false;
+        } else {
+          mMax = Math.max(mMax, o);
+        }
+      }
+      return true;
+    }
+
+    public Long terminatePartial() {
+      return mEmpty ? null : Long.valueOf(mMax);
+    }
+
+    public boolean merge(Long o) {
+      return iterate(o);
+    }
+
+    public Long terminate() {
+      return mEmpty ? null : Long.valueOf(mMax);
+    }
+  }
+
+  static public class MaxFloatEvaluator implements UDAFEvaluator {
+    private float mMax;
+    private boolean mEmpty;
+
+    public MaxFloatEvaluator() {
+      super();
+      init();
+    }
+
+    public void init() {
+      mMax = 0;
+      mEmpty = true;
+    }
+
+    public boolean iterate(Float o) {
+      if (o != null) {
+        if (mEmpty) {
+          mMax = o;
+          mEmpty = false;
+        } else {
+          mMax = Math.max(mMax, o);
+        }
       }
+      return true;
+    }
+
+    public Float terminatePartial() {
+      return mEmpty ? null : Float.valueOf(mMax);
+    }
+
+    public boolean merge(Float o) {
+      return iterate(o);
+    }
+
+    public Float terminate() {
+      return mEmpty ? null : Float.valueOf(mMax);
     }
-    return true;
   }
-  
-  public Double evaluatePartial() {
-    return mEmpty ? null : Double.valueOf(mMax);
+
+  static public class MaxDoubleEvaluator implements UDAFEvaluator {
+    private double mMax;
+    private boolean mEmpty;
+
+    public MaxDoubleEvaluator() {
+      super();
+      init();
+    }
+
+    public void init() {
+      mMax = 0;
+      mEmpty = true;
+    }
+
+    public boolean iterate(Double o) {
+      if (o != null) {
+        if (mEmpty) {
+          mMax = o;
+          mEmpty = false;
+        } else {
+          mMax = Math.max(mMax, o);
+        }
+      }
+      return true;
+    }
+
+    public Double terminatePartial() {
+      return mEmpty ? null : Double.valueOf(mMax);
+    }
+
+    public boolean merge(Double o) {
+      return iterate(o);
+    }
+
+    public Double terminate() {
+      return mEmpty ? null : Double.valueOf(mMax);
+    }
   }
 
-  public boolean aggregatePartial(Double o) {
-    return aggregate(o);
+  static public class MaxStringEvaluator implements UDAFEvaluator {
+    private String mMax;
+    private boolean mEmpty;
+
+    public MaxStringEvaluator() {
+      super();
+      init();
+    }
+
+    public void init() {
+      mMax = null;
+      mEmpty = true;
+    }
+
+    public boolean iterate(String o) {
+      if (o != null) {
+        if (mEmpty) {
+          mMax = o;
+          mEmpty = false;
+        } else if (mMax.compareTo(o) < 0) {
+          mMax = o;
+        }
+      }
+      return true;
+    }
+
+    public String terminatePartial() {
+      return mEmpty ? null : mMax;
+    }
+
+    public boolean merge(String o) {
+      return iterate(o);
+    }
+
+    public String terminate() {
+      return mEmpty ? null : mMax;
+    }
   }
 
-  public Double evaluate() {
-    return mEmpty ? null : Double.valueOf(mMax);
+  static public class MaxDateEvaluator implements UDAFEvaluator {
+    private Date mMax;
+    private boolean mEmpty;
+
+    public MaxDateEvaluator() {
+      super();
+      init();
+    }
+
+    public void init() {
+      mMax = null;
+      mEmpty = true;
+    }
+
+    public boolean iterate(Date o) {
+      if (o != null) {
+        if (mEmpty) {
+          mMax = o;
+          mEmpty = false;
+        } else if (mMax.compareTo(o) < 0) {
+          mMax = o;
+        }
+      }
+      return true;
+    }
+
+    public Date terminatePartial() {
+      return mEmpty ? null : mMax;
+    }
+
+    public boolean merge(Date o) {
+      return iterate(o);
+    }
+
+    public Date terminate() {
+      return mEmpty ? null : mMax;
+    }
   }
 
 }
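
The evaluators above all follow the new UDAFEvaluator lifecycle: iterate over raw values on the map side, terminatePartial to emit a partial aggregate, merge partials on the reduce side, and terminate for the final result. A minimal sketch of that flow using MaxIntEvaluator; the explicit driver class below is only illustrative (Hive invokes these methods through its query plan), and the class name MaxEvaluatorDemo is hypothetical:

    import org.apache.hadoop.hive.ql.udf.UDAFMax;

    public class MaxEvaluatorDemo {
      public static void main(String[] args) {
        // Map side: each evaluator consumes raw column values; nulls are skipped.
        UDAFMax.MaxIntEvaluator partial1 = new UDAFMax.MaxIntEvaluator();
        partial1.iterate(3);
        partial1.iterate(null);
        partial1.iterate(7);

        UDAFMax.MaxIntEvaluator partial2 = new UDAFMax.MaxIntEvaluator();
        partial2.iterate(42);

        // Reduce side: a fresh evaluator merges the partial results.
        UDAFMax.MaxIntEvaluator finalEval = new UDAFMax.MaxIntEvaluator();
        finalEval.merge(partial1.terminatePartial());
        finalEval.merge(partial2.terminatePartial());

        System.out.println(finalEval.terminate());   // 42
      }
    }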

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java Sat Jan 24 01:58:01 2009
@@ -18,47 +18,284 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import org.apache.hadoop.hive.ql.exec.UDAF;
+import java.sql.Date;
 
+import org.apache.hadoop.hive.ql.exec.UDAF;
+import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 
 
 public class UDAFMin extends UDAF {
 
-  private double mMin;
-  private boolean mEmpty;
-  
-  public UDAFMin() {
-    super();
-    init();
+  static public class MinShortEvaluator implements UDAFEvaluator {
+    private short mMin;
+    private boolean mEmpty;
+
+    public MinShortEvaluator() {
+      super();
+      init();
+    }
+
+    public void init() {
+      mMin = 0;
+      mEmpty = true;
+    }
+
+    public boolean iterate(Short o) {
+      if (o != null) {
+        if (mEmpty) {
+          mMin = o;
+          mEmpty = false;
+        } else {
+          mMin = (short) Math.min(mMin, o);
+        }
+      }
+      return true;
+    }
+
+    public Short terminatePartial() {
+      return mEmpty ? null : Short.valueOf(mMin);
+    }
+
+    public boolean merge(Short o) {
+      return iterate(o);
+    }
+
+    public Short terminate() {
+      return mEmpty ? null : Short.valueOf(mMin);
+    }
   }
 
-  public void init() {
-    mMin = 0;
-    mEmpty = true;
+  static public class MinIntEvaluator implements UDAFEvaluator {
+    private int mMin;
+    private boolean mEmpty;
+
+    public MinIntEvaluator() {
+      super();
+      init();
+    }
+
+    public void init() {
+      mMin = 0;
+      mEmpty = true;
+    }
+
+    public boolean iterate(Integer o) {
+      if (o != null) {
+        if (mEmpty) {
+          mMin = o;
+          mEmpty = false;
+        } else {
+          mMin = Math.min(mMin, o);
+        }
+      }
+      return true;
+    }
+
+    public Integer terminatePartial() {
+      return mEmpty ? null : Integer.valueOf(mMin);
+    }
+
+    public boolean merge(Integer o) {
+      return iterate(o);
+    }
+
+    public Integer terminate() {
+      return mEmpty ? null : Integer.valueOf(mMin);
+    }
   }
 
-  public boolean aggregate(Double o) {
-    if (o != null) {
-      if (mEmpty) {
-        mMin = o;
-        mEmpty = false;
-      } else {
-        mMin = Math.min(mMin, o);
+  static public class MinLongEvaluator implements UDAFEvaluator {
+    private long mMin;
+    private boolean mEmpty;
+
+    public MinLongEvaluator() {
+      super();
+      init();
+    }
+
+    public void init() {
+      mMin = 0;
+      mEmpty = true;
+    }
+
+    public boolean iterate(Long o) {
+      if (o != null) {
+        if (mEmpty) {
+          mMin = o;
+          mEmpty = false;
+        } else {
+          mMin = Math.min(mMin, o);
+        }
       }
+      return true;
+    }
+
+    public Long terminatePartial() {
+      return mEmpty ? null : Long.valueOf(mMin);
+    }
+
+    public boolean merge(Long o) {
+      return iterate(o);
+    }
+
+    public Long terminate() {
+      return mEmpty ? null : Long.valueOf(mMin);
     }
-    return true;
   }
-  
-  public Double evaluatePartial() {
-    return mEmpty ? null : Double.valueOf(mMin);
+
+  static public class MinFloatEvaluator implements UDAFEvaluator {
+    private float mMin;
+    private boolean mEmpty;
+
+    public MinFloatEvaluator() {
+      super();
+      init();
+    }
+
+    public void init() {
+      mMin = 0;
+      mEmpty = true;
+    }
+
+    public boolean iterate(Float o) {
+      if (o != null) {
+        if (mEmpty) {
+          mMin = o;
+          mEmpty = false;
+        } else {
+          mMin = Math.min(mMin, o);
+        }
+      }
+      return true;
+    }
+
+    public Float terminatePartial() {
+      return mEmpty ? null : Float.valueOf(mMin);
+    }
+
+    public boolean merge(Float o) {
+      return iterate(o);
+    }
+
+    public Float terminate() {
+      return mEmpty ? null : Float.valueOf(mMin);
+    }
   }
 
-  public boolean aggregatePartial(Double o) {
-    return aggregate(o);
+  static public class MinDoubleEvaluator implements UDAFEvaluator {
+    private double mMin;
+    private boolean mEmpty;
+
+    public MinDoubleEvaluator() {
+      super();
+      init();
+    }
+
+    public void init() {
+      mMin = 0;
+      mEmpty = true;
+    }
+
+    public boolean iterate(Double o) {
+      if (o != null) {
+        if (mEmpty) {
+          mMin = o;
+          mEmpty = false;
+        } else {
+          mMin = Math.min(mMin, o);
+        }
+      }
+      return true;
+    }
+
+    public Double terminatePartial() {
+      return mEmpty ? null : Double.valueOf(mMin);
+    }
+
+    public boolean merge(Double o) {
+      return iterate(o);
+    }
+
+    public Double terminate() {
+      return mEmpty ? null : Double.valueOf(mMin);
+    }
   }
 
-  public Double evaluate() {
-    return mEmpty ? null : Double.valueOf(mMin);
+  static public class MinStringEvaluator implements UDAFEvaluator {
+    private String mMin;
+    private boolean mEmpty;
+
+    public MinStringEvaluator() {
+      super();
+      init();
+    }
+
+    public void init() {
+      mMin = null;
+      mEmpty = true;
+    }
+
+    public boolean iterate(String o) {
+      if (o != null) {
+        if (mEmpty) {
+          mMin = o;
+          mEmpty = false;
+        } else if (mMin.compareTo(o) > 0) {
+          mMin = o;
+        }
+      }
+      return true;
+    }
+
+    public String terminatePartial() {
+      return mEmpty ? null : mMin;
+    }
+
+    public boolean merge(String o) {
+      return iterate(o);
+    }
+
+    public String terminate() {
+      return mEmpty ? null : mMin;
+    }
   }
 
+  static public class MinDateEvaluator implements UDAFEvaluator {
+    private Date mMin;
+    private boolean mEmpty;
+
+    public MinDateEvaluator() {
+      super();
+      init();
+    }
+
+    public void init() {
+      mMin = null;
+      mEmpty = true;
+    }
+
+    public boolean iterate(Date o) {
+      if (o != null) {
+        if (mEmpty) {
+          mMin = o;
+          mEmpty = false;
+        } else if (mMin.compareTo(o) > 0) {
+          mMin = o;
+        }
+      }
+      return true;
+    }
+
+    public Date terminatePartial() {
+      return mEmpty ? null : mMin;
+    }
+
+    public boolean merge(Date o) {
+      return iterate(o);
+    }
+
+    public Date terminate() {
+      return mEmpty ? null : mMin;
+    }
+  }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java Sat Jan 24 01:58:01 2009
@@ -18,11 +18,12 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import org.apache.hadoop.hive.ql.exec.UDAF;
+import org.apache.hadoop.hive.ql.exec.NumericUDAF;
+import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 
 
 
-public class UDAFSum extends UDAF {
+public class UDAFSum extends NumericUDAF implements UDAFEvaluator {
 
   private double mSum;
   private boolean mEmpty;
@@ -37,7 +38,7 @@
     mEmpty = true;
   }
 
-  public boolean aggregate(Double o) {
+  public boolean iterate(Double o) {
     if (o != null) {
       mSum += o;
       mEmpty = false;
@@ -45,12 +46,12 @@
     return true;
   }
   
-  public Double evaluatePartial() {
+  public Double terminatePartial() {
     // This is SQL standard - sum of zero items should be null.
     return mEmpty ? null : new Double(mSum);
   }
 
-  public boolean aggregatePartial(Double o) {
+  public boolean merge(Double o) {
     if (o != null) {
       mSum += o;
       mEmpty = false;
@@ -58,7 +59,7 @@
     return true;
   }
 
-  public Double evaluate() {
+  public Double terminate() {
     // This is SQL standard - sum of zero items should be null.
     return mEmpty ? null : new Double(mSum);
   }
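
UDAFSum adopts the same renamed lifecycle (iterate/terminatePartial/merge/terminate in place of aggregate/evaluatePartial/aggregatePartial/evaluate). A small sketch with hand-written calls standing in for Hive's map/reduce plumbing, showing the SQL-standard rule that a sum over zero rows is null; the class name SumEvaluatorDemo is hypothetical:

    import org.apache.hadoop.hive.ql.udf.UDAFSum;

    public class SumEvaluatorDemo {
      public static void main(String[] args) {
        UDAFSum partial = new UDAFSum();
        partial.init();
        System.out.println(partial.terminate());   // null: sum over zero rows

        partial.iterate(1.5);
        partial.iterate(null);                     // nulls are ignored
        partial.iterate(2.5);

        UDAFSum total = new UDAFSum();
        total.init();
        total.merge(partial.terminatePartial());   // combine a partial aggregate
        System.out.println(total.terminate());     // 4.0
      }
    }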

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java Sat Jan 24 01:58:01 2009
@@ -18,46 +18,18 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import java.sql.Date;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.ComparisonOpMethodResolver;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
+public abstract class UDFBaseCompare extends UDF {
 
-public abstract class UDFBaseCompare implements UDF {
-
-  private static Log LOG = LogFactory.getLog(UDFBaseCompare.class.getName());
-
-  public UDFBaseCompare() {
-  }
-
-  public abstract Boolean evaluate(Double a, Double b);
-  
-  /** If one of the argument is a String and the other is a Number, convert
-   *  String to double and the Number to double, and then compare.
+  /**
+   * This constructor sets the resolver to be used for comparison operators.
+   * See {@link UDFMethodResolver}
    */
-  public Boolean evaluate(String a, Number b)  {
-    Double aDouble = null;
-    try {
-      aDouble = Double.valueOf(a);
-    } catch (Exception e){
-      // do nothing: aDouble will be null.
-    }
-    return evaluate(aDouble, new Double(b.doubleValue()));
+  public UDFBaseCompare() {
+    setResolver(new ComparisonOpMethodResolver(this.getClass()));
   }
 
-  /** If one of the argument is a String and the other is a Number, convert
-   *  String to double and the Number to double, and then compare.
-   */
-  public Boolean evaluate(Number a, String b)  {
-    Double bDouble = null;
-    try {
-      bDouble = Double.valueOf(b);
-    } catch (Exception e){
-      // do nothing: bDouble will be null.
-    }
-    return evaluate(new Double(a.doubleValue()), bDouble);
-  }
-  
+  public abstract Boolean evaluate(Double a, Double b);  
 }
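
With the String/Number coercion overloads removed, overload selection for comparison operators now goes through ComparisonOpMethodResolver, which the base constructor installs. A sketch of the shape a concrete subclass takes; UDFExampleLessThan is a hypothetical name used only for illustration, not one of the committed operators:

    package org.apache.hadoop.hive.ql.udf;

    // Hypothetical subclass for illustration only; the real comparison operators
    // live in their own files and follow the same shape.
    public class UDFExampleLessThan extends UDFBaseCompare {

      // The Double, Double form required by the abstract base class.
      public Boolean evaluate(Double a, Double b) {
        if (a == null || b == null) {
          return null;
        }
        return Boolean.valueOf(a.doubleValue() < b.doubleValue());
      }

      // Extra overloads can be added per type; ComparisonOpMethodResolver,
      // installed by the UDFBaseCompare constructor, picks the overload that
      // matches the argument types at query compile time.
      public Boolean evaluate(String a, String b) {
        if (a == null || b == null) {
          return null;
        }
        return Boolean.valueOf(a.compareTo(b) < 0);
      }
    }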

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java?rev=737291&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseNumericOp.java Sat Jan 24 01:58:01 2009
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf;
+
+import org.apache.hadoop.hive.ql.exec.NumericOpMethodResolver;
+import org.apache.hadoop.hive.ql.exec.UDF;
+
+/**
+ * Base class for numeric operators like +, -, / etc. All these operators
+ * share a common method resolver (NumericOpMethodResolver).
+ */
+public abstract class UDFBaseNumericOp extends UDF {
+
+  /**
+   * Constructor.
+   * This constructor sets the resolver to be used for numeric operators.
+   * See {@link UDFMethodResolver}
+   */
+  public UDFBaseNumericOp() {
+    setResolver(new NumericOpMethodResolver(this.getClass()));
+  }
+
+  public abstract Byte evaluate(Byte a, Byte b);  
+  public abstract Integer evaluate(Integer a, Integer b);  
+  public abstract Long evaluate(Long a, Long b);  
+  public abstract Float evaluate(Float a, Float b);  
+  public abstract Double evaluate(Double a, Double b);  
+
+}
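
A sketch of a concrete operator built on this new base class; UDFExampleAdd is a hypothetical name for illustration, but the five overloads mirror the abstract methods declared above (the committed operators such as UDFOPPlus, modified later in this patch, follow the same shape):

    package org.apache.hadoop.hive.ql.udf;

    // Hypothetical operator for illustration; the committed operators
    // (UDFOPPlus, UDFOPMinus, ...) follow this same shape.
    public class UDFExampleAdd extends UDFBaseNumericOp {

      public Byte evaluate(Byte a, Byte b) {
        return (a == null || b == null) ? null : Byte.valueOf((byte) (a + b));
      }

      public Integer evaluate(Integer a, Integer b) {
        return (a == null || b == null) ? null : Integer.valueOf(a + b);
      }

      public Long evaluate(Long a, Long b) {
        return (a == null || b == null) ? null : Long.valueOf(a + b);
      }

      public Float evaluate(Float a, Float b) {
        return (a == null || b == null) ? null : Float.valueOf(a + b);
      }

      public Double evaluate(Double a, Double b) {
        return (a == null || b == null) ? null : Double.valueOf(a + b);
      }
    }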

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFCeil.java Sat Jan 24 01:58:01 2009
@@ -22,7 +22,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
-public class UDFCeil implements UDF {
+public class UDFCeil extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFCeil.class.getName());
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java Sat Jan 24 01:58:01 2009
@@ -21,7 +21,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFConcat implements UDF {
+public class UDFConcat extends UDF {
 
   public UDFConcat() {
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDate.java Sat Jan 24 01:58:01 2009
@@ -28,7 +28,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFDate implements UDF {
+public class UDFDate extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFDate.class.getName());
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java Sat Jan 24 01:58:01 2009
@@ -28,7 +28,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFDayOfMonth implements UDF {
+public class UDFDayOfMonth extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFDayOfMonth.class.getName());
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDefaultSampleHashFn.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDefaultSampleHashFn.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDefaultSampleHashFn.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDefaultSampleHashFn.java Sat Jan 24 01:58:01 2009
@@ -22,7 +22,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-public class UDFDefaultSampleHashFn implements UDF {
+public class UDFDefaultSampleHashFn extends UDF {
   protected final Log LOG;
 
   public UDFDefaultSampleHashFn() {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFloor.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFFloor implements UDF {
+public class UDFFloor extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFFloor.class.getName());
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java Sat Jan 24 01:58:01 2009
@@ -26,7 +26,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFFromUnixTime implements UDF {
+public class UDFFromUnixTime extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFFromUnixTime.class.getName());
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFJson.java Sat Jan 24 01:58:01 2009
@@ -33,7 +33,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFJson implements UDF {
+public class UDFJson extends UDF {
   private static Log LOG = LogFactory.getLog(UDFJson.class.getName());
   private Pattern pattern_key = Pattern.compile("^([a-zA-Z0-9_\\-]+).*");
   private Pattern pattern_index = Pattern.compile("\\[([0-9]+|\\*)\\]");

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java Sat Jan 24 01:58:01 2009
@@ -24,7 +24,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFLTrim implements UDF {
+public class UDFLTrim extends UDF {
 
   public UDFLTrim() {
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java Sat Jan 24 01:58:01 2009
@@ -24,7 +24,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFLike implements UDF {
+public class UDFLike extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFLike.class.getName());
   private String lastLikePattern = null;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java Sat Jan 24 01:58:01 2009
@@ -22,7 +22,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFLower implements UDF {
+public class UDFLower extends UDF {
 
   public UDFLower() {
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java Sat Jan 24 01:58:01 2009
@@ -28,7 +28,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFMonth implements UDF {
+public class UDFMonth extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFMonth.class.getName());
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPAnd implements UDF {
+public class UDFOPAnd extends UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPAnd");
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitAnd.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitAnd.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitAnd.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitAnd.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPBitAnd implements UDF {
+public class UDFOPBitAnd extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFOPBitAnd.class.getName());
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitNot.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitNot.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitNot.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitNot.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPBitNot implements UDF {
+public class UDFOPBitNot extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFOPBitNot.class.getName());
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitOr.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitOr.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitOr.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitOr.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPBitOr implements UDF {
+public class UDFOPBitOr extends UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPBitOr");
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitXor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitXor.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitXor.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitXor.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPBitXor implements UDF {
+public class UDFOPBitXor extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFOPBitXor.class.getName());
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPDivide implements UDF {
+public class UDFOPDivide extends UDFBaseNumericOp {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPDivide");
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPMinus implements UDF {
+public class UDFOPMinus extends UDFBaseNumericOp {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPMinus");
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPMod implements UDF {
+public class UDFOPMod extends UDFBaseNumericOp {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPMod");
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPMultiply implements UDF {
+public class UDFOPMultiply extends UDFBaseNumericOp {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPMultiply");
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNegative.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPNegative implements UDF {
+public class UDFOPNegative extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFOPNegative.class.getName());
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNot.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNot.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNot.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNot.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPNot implements UDF {
+public class UDFOPNot extends UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPNot");
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotNull.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotNull.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotNull.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotNull.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPNotNull implements UDF {
+public class UDFOPNotNull extends UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPNotNull");
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNull.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNull.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNull.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNull.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPNull implements UDF {
+public class UDFOPNull extends UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPNull");
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPOr implements UDF {
+public class UDFOPOr extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFOPOr.class.getName());
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java Sat Jan 24 01:58:01 2009
@@ -32,7 +32,7 @@
  * The case of int + double will be handled by implicit type casting using 
  * UDFRegistry.implicitConvertable method. 
  */
-public class UDFOPPlus implements UDF {
+public class UDFOPPlus extends UDFBaseNumericOp {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPPlus");
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPositive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPositive.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPositive.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPositive.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPPositive implements UDF {
+public class UDFOPPositive extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFOPPositive.class.getName());
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java Sat Jan 24 01:58:01 2009
@@ -24,7 +24,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFRTrim implements UDF {
+public class UDFRTrim extends UDF {
 
   public UDFRTrim() {
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRand.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 import java.util.Random;
 
-public class UDFRand implements UDF {
+public class UDFRand extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFRand.class.getName());
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java Sat Jan 24 01:58:01 2009
@@ -22,7 +22,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFRegExp implements UDF {
+public class UDFRegExp extends UDF {
 
   private String lastRegex = null;
   private Pattern p = null;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExpReplace.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExpReplace.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExpReplace.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExpReplace.java Sat Jan 24 01:58:01 2009
@@ -22,7 +22,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFRegExpReplace implements UDF {
+public class UDFRegExpReplace extends UDF {
 
   private String lastRegex = null;
   private Pattern p = null;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRound.java Sat Jan 24 01:58:01 2009
@@ -24,7 +24,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFRound implements UDF {
+public class UDFRound extends UDF {
 
   public UDFRound() {
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSize.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSize.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSize.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSize.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import java.util.Map;
 import java.util.List;
 
-public class UDFSize implements UDF {
+public class UDFSize extends UDF {
 
   public UDFSize() {
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java Sat Jan 24 01:58:01 2009
@@ -16,25 +16,25 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.udf;
-
-import org.apache.hadoop.hive.ql.exec.UDF;
-
-
-public class UDFSubstr implements UDF {
-
-  public UDFSubstr() {
-  }
-
-  public String evaluate(String a, int start, int len)  {
-    if (start >= a.length()) return "";
-    if (start + len > a.length()) len = a.length() - start;
-    return a.substring(start, start + len);
-  }
-  
-  public String evaluate(String a, int start)  {
-    if (start >= a.length()) return "";
-    return a.substring(start);
-  }
-
+package org.apache.hadoop.hive.ql.udf;
+
+import org.apache.hadoop.hive.ql.exec.UDF;
+
+
+public class UDFSubstr extends UDF {
+
+  public UDFSubstr() {
+  }
+
+  public String evaluate(String a, int start, int len)  {
+    if (start >= a.length()) return "";
+    if (start + len > a.length()) len = a.length() - start;
+    return a.substring(start, start + len);
+  }
+  
+  public String evaluate(String a, int start)  {
+    if (start >= a.length()) return "";
+    return a.substring(start);
+  }
+
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToBoolean implements UDF {
+public class UDFToBoolean extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToBoolean.class.getName());
 
@@ -31,6 +31,16 @@
   }
 
   /**
+   * Convert a void to boolean. This is called for CAST(... AS BOOLEAN)
+   *
+   * @param i The value of a void type
+   * @return Boolean
+   */
+  public Boolean evaluate(Void i)  {
+      return null;
+  }
+
+  /**
    * Convert from a byte to boolean. This is called for CAST(... AS BOOLEAN)
    *
    * @param i The byte value to convert
@@ -114,4 +124,18 @@
     }
   }
 
+  /**
+   * Convert from a string to boolean. This is called for CAST(... AS BOOLEAN)
+   *
+   * @param i The string value to convert
+   * @return Boolean
+   */
+  public Boolean evaluate(String i)  {
+    if (i == null) {
+      return null;
+    } else {
+      return Boolean.valueOf(i.length() != 0);
+    }
+  }
+
 }
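
The two new overloads define the cast edge cases: a void (NULL) input yields null, and a string casts to true exactly when it is non-empty. A tiny sketch exercising them directly; the class name ToBooleanDemo is hypothetical:

    import org.apache.hadoop.hive.ql.udf.UDFToBoolean;

    public class ToBooleanDemo {
      public static void main(String[] args) {
        UDFToBoolean cast = new UDFToBoolean();
        System.out.println(cast.evaluate((Void) null));  // null: CAST(NULL AS BOOLEAN)
        System.out.println(cast.evaluate(""));            // false: empty string
        System.out.println(cast.evaluate("false"));       // true: any non-empty string
      }
    }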

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToByte implements UDF {
+public class UDFToByte extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToByte.class.getName());
 
@@ -31,6 +31,16 @@
   }
 
   /**
+   * Convert from void to a byte. This is called for CAST(... AS TINYINT)
+   *
+   * @param i The void value to convert
+   * @return Byte
+   */
+  public Byte evaluate(Void i)  {
+    return null;
+  }  
+
+  /**
    * Convert from boolean to a byte. This is called for CAST(... AS TINYINT)
    *
    * @param i The boolean value to convert
@@ -135,4 +145,17 @@
     }
   }
   
+  /**
+   * Convert from date to a Byte. This is called for CAST(... AS TINYINT)
+   *
+   * @param i The date value to convert
+   * @return Byte
+   */
+  public Byte evaluate(java.sql.Date i)  {
+    if (i == null) {
+      return null;
+    } else {
+        return Long.valueOf(i.getTime()).byteValue();
+    }
+  }  
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToDate implements UDF {
+public class UDFToDate extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToDate.class.getName());
 
@@ -44,4 +44,62 @@
     }
   }
   
+  public java.sql.Date evaluate(Void i) {
+    return null;
+  }
+  
+  public java.sql.Date evaluate(Byte i) {
+    if (i == null) {
+      return null;
+    }
+    else {
+      return new java.sql.Date(i.longValue());
+    }
+  }
+  
+  public java.sql.Date evaluate(Short i) {
+    if (i == null) {
+      return null;
+    }
+    else {
+      return new java.sql.Date(i.longValue());
+    }
+  }
+  
+  public java.sql.Date evaluate(Integer i) {
+    if (i == null) {
+      return null;
+    }
+    else {
+      return new java.sql.Date(i.longValue());
+    }
+  }
+  
+  public java.sql.Date evaluate(Long i) {
+    if (i == null) {
+      return null;
+    }
+    else {
+      return new java.sql.Date(i.longValue());
+    }
+  }
+  
+  public java.sql.Date evaluate(Float i) {
+    if (i == null) {
+      return null;
+    }
+    else {
+      return new java.sql.Date(i.longValue());
+    }
+  }
+  
+  public java.sql.Date evaluate(Double i) {
+    if (i == null) {
+      return null;
+    }
+    else {
+      return new java.sql.Date(i.longValue());
+    }
+  }
+  
 }
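
Each new overload interprets its numeric argument as milliseconds since the epoch, per the java.sql.Date constructor. A tiny sketch calling the conversions directly; the class name ToDateDemo is hypothetical:

    import org.apache.hadoop.hive.ql.udf.UDFToDate;

    public class ToDateDemo {
      public static void main(String[] args) {
        UDFToDate cast = new UDFToDate();
        // 86400000 ms is one day after the epoch; the printed date depends on
        // the JVM's default time zone.
        System.out.println(cast.evaluate(Long.valueOf(86400000L)));
        System.out.println(cast.evaluate((Void) null));  // CAST(NULL AS DATE) -> null
      }
    }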

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToDouble implements UDF {
+public class UDFToDouble extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToDouble.class.getName());
 
@@ -31,6 +31,16 @@
   }
 
   /**
+   * Convert from void to a double. This is called for CAST(... AS DOUBLE)
+   *
+   * @param i The void value to convert
+   * @return Double
+   */
+  public Double evaluate(Void i)  {
+    return null;
+  }
+
+  /**
    * Convert from boolean to a double. This is called for CAST(... AS DOUBLE)
    *
    * @param i The boolean value to convert
@@ -135,4 +145,18 @@
     }
   }
   
+  /**
+   * Convert from date to a double. This is called for CAST(... AS DOUBLE)
+   *
+   * @param i The date value to convert
+   * @return Double
+   */
+  public Double evaluate(java.sql.Date i)  {
+    if (i == null) {
+      return null;
+    } else {
+        return Double.valueOf(i.getTime());
+    }
+  }
+  
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToFloat implements UDF {
+public class UDFToFloat extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToFloat.class.getName());
 
@@ -31,6 +31,16 @@
   }
 
   /**
+   * Convert from void to a float. This is called for CAST(... AS FLOAT)
+   *
+   * @param i The void value to convert
+   * @return Float
+   */
+  public Float evaluate(Void i)  {
+    return null;
+  }
+
+  /**
    * Convert from boolean to a float. This is called for CAST(... AS FLOAT)
    *
    * @param i The boolean value to convert
@@ -131,4 +141,17 @@
     }
   }
   
+  /**
+   * Convert from date to a float. This is called for CAST(... AS FLOAT)
+   *
+   * @param i The date value to convert
+   * @return Float
+   */
+  public Float evaluate(java.sql.Date i)  {
+    if (i == null) {
+      return null;
+    } else {
+        return Float.valueOf(i.getTime());
+    }
+  }  
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToInteger implements UDF {
+public class UDFToInteger extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToInteger.class.getName());
 
@@ -31,6 +31,16 @@
   }
 
   /**
+   * Convert from void to an integer. This is called for CAST(... AS INT)
+   *
+   * @param i The void value to convert
+   * @return Integer
+   */
+  public Integer evaluate(Void i)  {
+    return null;
+  }
+
+  /**
    * Convert from boolean to an integer. This is called for CAST(... AS INT)
    *
    * @param i The boolean value to convert
@@ -135,4 +145,17 @@
     }
   }
   
+  /**
+   * Convert from date to an integer. This is called for CAST(... AS INT)
+   *
+   * @param i The date value to convert
+   * @return Integer
+   */
+  public Integer evaluate(java.sql.Date i)  {
+    if (i == null) {
+      return null;
+    } else {
+        return Long.valueOf(i.getTime()).intValue();
+    }
+  }  
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToLong implements UDF {
+public class UDFToLong extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToLong.class.getName());
 
@@ -31,6 +31,16 @@
   }
 
   /**
+   * Convert from void to a long. This is called for CAST(... AS BIGINT)
+   *
+   * @param i The void value to convert
+   * @return Long
+   */
+  public Long evaluate(Void i)  {
+    return null;
+  }
+
+  /**
    * Convert from boolean to a long. This is called for CAST(... AS BIGINT)
    *
    * @param i The boolean value to convert
@@ -145,4 +155,17 @@
     }
   }
   
+  /**
+   * Convert from date to a long. This is called for CAST(... AS BIGINT)
+   *
+   * @param i The date value to convert
+   * @return Long
+   */
+  public Long evaluate(java.sql.Date i)  {
+    if (i == null) {
+      return null;
+    } else {
+        return Long.valueOf(i.getTime());
+    }
+  }  
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToShort implements UDF {
+public class UDFToShort extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToByte.class.getName());
 
@@ -31,6 +31,16 @@
   }
 
   /**
+   * Convert from void to a short. This is called for CAST(... AS SMALLINT)
+   *
+   * @param i The void value to convert
+   * @return Short
+   */
+  public Short evaluate(Void i)  {
+    return null;
+  }
+
+  /**
    * Convert from boolean to a short. This is called for CAST(... AS SMALLINT)
    *
    * @param i The boolean value to convert
@@ -135,4 +145,17 @@
     }
   }
   
+  /**
+   * Convert from date to a short. This is called for CAST(... AS SMALLINT)
+   *
+   * @param i The date value to convert
+   * @return Short
+   */
+  public Short evaluate(java.sql.Date i)  {
+    if (i == null) {
+      return null;
+    } else {
+        return Long.valueOf(i.getTime()).shortValue();
+    }
+  }  
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java Sat Jan 24 01:58:01 2009
@@ -23,13 +23,17 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToString implements UDF {
+public class UDFToString extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToString.class.getName());
 
   public UDFToString() {
   }
 
+  public String evaluate(Void i)  {
+    return null;
+  }
+
   public String evaluate(Boolean i)  {
     if (i == null) {
       return null;
@@ -86,4 +90,17 @@
     }
   }
   
+  /**
+   * Convert from date to a string. This is called for CAST(... AS STRING)
+   *
+   * @param i The date value to convert
+   * @return String
+   */
+  public String evaluate(java.sql.Date i)  {
+    if (i == null) {
+      return null;
+    } else {
+        return i.toString();
+    }
+  }  
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java Sat Jan 24 01:58:01 2009
@@ -24,7 +24,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFTrim implements UDF {
+public class UDFTrim extends UDF {
 
   public UDFTrim() {
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java Sat Jan 24 01:58:01 2009
@@ -22,7 +22,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFUpper implements UDF {
+public class UDFUpper extends UDF {
 
   public UDFUpper() {
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java Sat Jan 24 01:58:01 2009
@@ -28,7 +28,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFYear implements UDF {
+public class UDFYear extends UDF {
 
   private static Log LOG = LogFactory.getLog(UDFYear.class.getName());
 

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Sat Jan 24 01:58:01 2009
@@ -146,16 +146,32 @@
 
 
   private filterDesc getTestFilterDesc(String column) {
-    ArrayList<exprNodeDesc> children = new ArrayList<exprNodeDesc>();
-    children.add(new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), column));
-    children.add(new exprNodeConstantDesc(TypeInfoFactory.getPrimitiveTypeInfo(Number.class), Long.valueOf(100)));
-
+    ArrayList<exprNodeDesc> children1 = new ArrayList<exprNodeDesc>();
+    children1.add(new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), column));
+    exprNodeDesc lhs = new exprNodeFuncDesc(
+        TypeInfoFactory.getPrimitiveTypeInfo(Double.class),
+        FunctionRegistry.getUDFClass(Double.class.getName()),
+        FunctionRegistry.getUDFMethod(Double.class.getName(), String.class),
+        children1);
+    
+    ArrayList<exprNodeDesc> children2 = new ArrayList<exprNodeDesc>();
+    children2.add(new exprNodeConstantDesc(TypeInfoFactory.getPrimitiveTypeInfo(Long.class), Long.valueOf(100)));
+    exprNodeDesc rhs = new exprNodeFuncDesc(
+        TypeInfoFactory.getPrimitiveTypeInfo(Double.class),
+        FunctionRegistry.getUDFClass(Double.class.getName()),
+        FunctionRegistry.getUDFMethod(Double.class.getName(), Long.class),
+        children2);
+    
+    ArrayList<exprNodeDesc> children3 = new ArrayList<exprNodeDesc>();
+    children3.add(lhs);
+    children3.add(rhs);
+    
     exprNodeDesc desc = new exprNodeFuncDesc(
         TypeInfoFactory.getPrimitiveTypeInfo(Boolean.class),
         FunctionRegistry.getUDFClass("<"),
-        FunctionRegistry.getUDFMethod("<", true, String.class, Number.class),
-        children
-    );
+        FunctionRegistry.getUDFMethod("<", Double.class, Double.class),
+        children3);
+    
     return new filterDesc(desc);
   }
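
The rewritten getTestFilterDesc above no longer compares a string column directly to a numeric constant; it wraps both operands of "<" in an explicit double conversion, matching the implicit-cast behavior the rest of this commit introduces. In plain Java the constructed predicate behaves roughly like the sketch below (the names are illustrative, and Hive's UDFToDouble returns NULL where this sketch would throw on a non-numeric string):

    // Rough Java equivalent of the expression tree assembled above:
    // UDFToDouble(column) < UDFToDouble(100L).
    public class FilterSketch {
      static boolean keyLessThan100(String columnValue) {
        double lhs = Double.parseDouble(columnValue); // stands in for UDFToDouble on the string column
        double rhs = (double) 100L;                   // stands in for UDFToDouble on the long constant
        return lhs < rhs;
      }

      public static void main(String[] args) {
        System.out.println(keyLessThan100("86"));   // true:  86.0 < 100.0
        System.out.println(keyLessThan100("486"));  // false: 486.0 >= 100.0
      }
    }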
 

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java Sat Jan 24 01:58:01 2009
@@ -25,6 +25,7 @@
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
 import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
@@ -111,7 +112,7 @@
       exprNodeDesc coladesc = new exprNodeColumnDesc(colaType, "cola");
       exprNodeDesc col11desc = new exprNodeIndexDesc(col1desc, new exprNodeConstantDesc(new Integer(1)));
       exprNodeDesc cola0desc = new exprNodeIndexDesc(coladesc, new exprNodeConstantDesc(new Integer(0)));
-      exprNodeDesc func1 = SemanticAnalyzer.getFuncExprNodeDesc("concat", col11desc, cola0desc);
+      exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", col11desc, cola0desc);
       ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
 
       // evaluate on row
@@ -130,7 +131,7 @@
       // get a evaluator for a string concatenation expression
       exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1");
       exprNodeDesc col11desc = new exprNodeIndexDesc(col1desc, new exprNodeConstantDesc(new Integer(1)));
-      exprNodeDesc func1 = SemanticAnalyzer.getFuncExprNodeDesc(Double.class.getName(), col11desc);
+      exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(Double.class.getName(), col11desc);
       ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
 
       // evaluate on row
@@ -164,7 +165,7 @@
       measureSpeed("1 + 2",
           basetimes * 100,
           ExprNodeEvaluatorFactory.get(
-              SemanticAnalyzer.getFuncExprNodeDesc("+", 
+              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("+", 
                   new exprNodeConstantDesc(1), 
                   new exprNodeConstantDesc(2))),
           r,
@@ -172,8 +173,8 @@
       measureSpeed("1 + 2 - 3",
           basetimes * 100,
           ExprNodeEvaluatorFactory.get(
-              SemanticAnalyzer.getFuncExprNodeDesc("-", 
-                  SemanticAnalyzer.getFuncExprNodeDesc("+",
+              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("-", 
+                  TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("+",
                       new exprNodeConstantDesc(1), 
                       new exprNodeConstantDesc(2)),
                   new exprNodeConstantDesc(3))),
@@ -182,9 +183,9 @@
       measureSpeed("1 + 2 - 3 + 4",
           basetimes * 100,
           ExprNodeEvaluatorFactory.get(
-              SemanticAnalyzer.getFuncExprNodeDesc("+",
-                  SemanticAnalyzer.getFuncExprNodeDesc("-", 
-                      SemanticAnalyzer.getFuncExprNodeDesc("+",
+              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("+",
+                  TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("-", 
+                      TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("+",
                           new exprNodeConstantDesc(1), 
                           new exprNodeConstantDesc(2)),
                       new exprNodeConstantDesc(3)),
@@ -194,7 +195,7 @@
       measureSpeed("concat(\"1\", \"2\")",
           basetimes * 100,
           ExprNodeEvaluatorFactory.get(
-              SemanticAnalyzer.getFuncExprNodeDesc("concat", 
+              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
                   new exprNodeConstantDesc("1"), 
                   new exprNodeConstantDesc("2"))),
           r,
@@ -202,8 +203,8 @@
       measureSpeed("concat(concat(\"1\", \"2\"), \"3\")",
           basetimes * 100,
           ExprNodeEvaluatorFactory.get(
-              SemanticAnalyzer.getFuncExprNodeDesc("concat", 
-                  SemanticAnalyzer.getFuncExprNodeDesc("concat",
+              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
+                  TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
                       new exprNodeConstantDesc("1"), 
                       new exprNodeConstantDesc("2")),
                   new exprNodeConstantDesc("3"))),
@@ -212,9 +213,9 @@
       measureSpeed("concat(concat(concat(\"1\", \"2\"), \"3\"), \"4\")",
           basetimes * 100,
           ExprNodeEvaluatorFactory.get(
-              SemanticAnalyzer.getFuncExprNodeDesc("concat", 
-                SemanticAnalyzer.getFuncExprNodeDesc("concat", 
-                    SemanticAnalyzer.getFuncExprNodeDesc("concat",
+              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
+                TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
+                    TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
                         new exprNodeConstantDesc("1"), 
                         new exprNodeConstantDesc("2")),
                     new exprNodeConstantDesc("3")),
@@ -226,7 +227,7 @@
       measureSpeed("concat(col1[1], cola[1])", 
           basetimes * 10,
           ExprNodeEvaluatorFactory.get(
-              SemanticAnalyzer.getFuncExprNodeDesc("concat",
+              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
                   new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant1), 
                   new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant1))),
           r,
@@ -234,8 +235,8 @@
       measureSpeed("concat(concat(col1[1], cola[1]), col1[2])", 
           basetimes * 10,
           ExprNodeEvaluatorFactory.get(
-              SemanticAnalyzer.getFuncExprNodeDesc("concat", 
-                  SemanticAnalyzer.getFuncExprNodeDesc("concat", 
+              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
+                  TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
                       new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant1), 
                       new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant1)),
                   new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant2))),
@@ -244,9 +245,9 @@
       measureSpeed("concat(concat(concat(col1[1], cola[1]), col1[2]), cola[2])", 
           basetimes * 10,
           ExprNodeEvaluatorFactory.get(
-              SemanticAnalyzer.getFuncExprNodeDesc("concat", 
-                  SemanticAnalyzer.getFuncExprNodeDesc("concat", 
-                      SemanticAnalyzer.getFuncExprNodeDesc("concat", 
+              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
+                  TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
+                      TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", 
                           new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant1), 
                           new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant1)),
                       new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant2)),
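
Every change in this test file is the same mechanical substitution: the expression-building helper has moved from SemanticAnalyzer to TypeCheckProcFactory.DefaultExprProcessor, while its arguments and the evaluator they feed are unchanged. Condensed from the hunks above, the new call pattern is:

    // New entry point for building a function expression in these tests;
    // only the owning class of getFuncExprNodeDesc has changed.
    exprNodeDesc onePlusTwo =
        TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("+",
            new exprNodeConstantDesc(1),
            new exprNodeConstantDesc(2));
    ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(onePlusTwo);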

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Sat Jan 24 01:58:01 2009
@@ -29,6 +29,7 @@
 
 
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
 import org.apache.hadoop.hive.ql.plan.*;
 import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -74,10 +75,10 @@
       exprNodeDesc col0 = new exprNodeColumnDesc(String.class, "col0");
       exprNodeDesc col1 = new exprNodeColumnDesc(String.class, "col1");
       exprNodeDesc col2 = new exprNodeColumnDesc(String.class, "col2");
-      exprNodeDesc zero = new exprNodeConstantDesc(Number.class, Long.valueOf(0));
-      exprNodeDesc func1 = SemanticAnalyzer.getFuncExprNodeDesc(">", col2, col1);
-      exprNodeDesc func2 = SemanticAnalyzer.getFuncExprNodeDesc("==", col0, zero);
-      exprNodeDesc func3 = SemanticAnalyzer.getFuncExprNodeDesc("&&", func1, func2); 
+      exprNodeDesc zero = new exprNodeConstantDesc(String.class, "0");
+      exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(">", col2, col1);
+      exprNodeDesc func2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", col0, zero);
+      exprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("&&", func1, func2); 
       assert(func3 != null);
       filterDesc filterCtx = new filterDesc(func3);
 
@@ -122,7 +123,7 @@
       ArrayList<exprNodeDesc> exprDesc2children = new ArrayList<exprNodeDesc>();
       exprNodeDesc expr1 = new exprNodeColumnDesc(String.class, "col0");
       exprNodeDesc expr2 = new exprNodeConstantDesc("1");
-      exprNodeDesc exprDesc2 = SemanticAnalyzer.getFuncExprNodeDesc("concat", expr1, expr2);
+      exprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", expr1, expr2);
 
       // select operator to project these two columns
       ArrayList<exprNodeDesc> earr = new ArrayList<exprNodeDesc> ();
@@ -167,7 +168,7 @@
       // col2
       exprNodeDesc expr1 = new exprNodeColumnDesc(String.class, "col0");
       exprNodeDesc expr2 = new exprNodeConstantDesc("1");
-      exprNodeDesc exprDesc2 = SemanticAnalyzer.getFuncExprNodeDesc("concat", expr1, expr2);
+      exprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", expr1, expr2);
 
       // select operator to project these two columns
       ArrayList<exprNodeDesc> earr = new ArrayList<exprNodeDesc> ();

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java Sat Jan 24 01:58:01 2009
@@ -26,6 +26,7 @@
 import org.apache.hadoop.mapred.TextInputFormat;
 
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
 import org.apache.hadoop.hive.ql.plan.*;
 import org.apache.hadoop.hive.ql.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -44,7 +45,7 @@
       // initialize a complete map reduce configuration
       exprNodeDesc expr1 = new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), F1);
       exprNodeDesc expr2 = new exprNodeColumnDesc(TypeInfoFactory.getPrimitiveTypeInfo(String.class), F2);
-      exprNodeDesc filterExpr = SemanticAnalyzer.getFuncExprNodeDesc("==", expr1, expr2);
+      exprNodeDesc filterExpr = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("==", expr1, expr2);
 
       filterDesc filterCtx = new filterDesc(filterExpr);
       Operator<filterDesc> op = OperatorFactory.get(filterDesc.class);

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java Sat Jan 24 01:58:01 2009
@@ -23,7 +23,7 @@
 /**
  * A UDF for testing, which evaluates the length of a string.
  */
-public class UDFTestLength implements UDF {
+public class UDFTestLength extends UDF {
   public Integer evaluate(String s) {
     return s == null ? null : s.length();
   }

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/implicit_cast1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/implicit_cast1.q?rev=737291&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/implicit_cast1.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/implicit_cast1.q Sat Jan 24 01:58:01 2009
@@ -0,0 +1,13 @@
+CREATE TABLE implicit_test1(a BIGINT, b STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe' WITH SERDEPROPERTIES('serialization.format'= 'org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol') STORED AS TEXTFILE;
+
+EXPLAIN
+SELECT implicit_test1.*
+FROM implicit_test1
+WHERE implicit_test1.a <> 0;
+
+SELECT implicit_test1.*
+FROM implicit_test1
+WHERE implicit_test1.a <> 0;
+
+DROP TABLE implicit_test1;
+

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out Sat Jan 24 01:58:01 2009
@@ -16,7 +16,7 @@
                     type: string
               Filter Operator
                 predicate:
-                    expr: (0 = 86)
+                    expr: (UDFToDouble(0) = UDFToDouble(86))
                     type: boolean
                 Select Operator
                   expressions:

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out Sat Jan 24 01:58:01 2009
@@ -12,7 +12,7 @@
         x 
             Filter Operator
               predicate:
-                  expr: (key = 10)
+                  expr: (UDFToDouble(key) = UDFToDouble(10))
                   type: boolean
               Select Operator
                 expressions:
@@ -62,7 +62,7 @@
         x 
             Filter Operator
               predicate:
-                  expr: (key = 20)
+                  expr: (UDFToDouble(key) = UDFToDouble(20))
                   type: boolean
               Select Operator
                 expressions:
@@ -112,7 +112,7 @@
         x 
             Filter Operator
               predicate:
-                  expr: (key = 20)
+                  expr: (UDFToDouble(key) = UDFToDouble(20))
                   type: boolean
               Select Operator
                 expressions:
@@ -162,7 +162,7 @@
         x 
             Filter Operator
               predicate:
-                  expr: (key = 20)
+                  expr: (UDFToDouble(key) = UDFToDouble(20))
                   type: boolean
               Select Operator
                 expressions:
@@ -212,7 +212,7 @@
         x 
             Filter Operator
               predicate:
-                  expr: (key = 20)
+                  expr: (UDFToDouble(key) = UDFToDouble(20))
                   type: boolean
               Select Operator
                 expressions:
@@ -262,7 +262,7 @@
         x 
             Filter Operator
               predicate:
-                  expr: (key = 20)
+                  expr: (UDFToDouble(key) = UDFToDouble(20))
                   type: boolean
               Select Operator
                 expressions:
@@ -312,7 +312,7 @@
         x 
             Filter Operator
               predicate:
-                  expr: (key = 20)
+                  expr: (UDFToDouble(key) = UDFToDouble(20))
                   type: boolean
               Select Operator
                 expressions:
@@ -384,7 +384,7 @@
         Extract
           Filter Operator
             predicate:
-                expr: (0 = 20)
+                expr: (UDFToDouble(0) = UDFToDouble(20))
                 type: boolean
             Select Operator
               expressions:
@@ -457,7 +457,7 @@
             1 {VALUE.0}
           Filter Operator
             predicate:
-                expr: (0 = 20)
+                expr: (UDFToDouble(0) = UDFToDouble(20))
                 type: boolean
             Select Operator
               expressions:
@@ -477,7 +477,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/pchakka/workspace/oshive/build/ql/tmp/4095681/630596716.10002 
+        /data/users/athusoo/commits/hive_trunk_ws2/build/ql/tmp/268129150/10750666.10002 
           Reduce Output Operator
             key expressions:
                   expr: 1
@@ -559,7 +559,7 @@
             1 {VALUE.0} {VALUE.1}
           Filter Operator
             predicate:
-                expr: (0 = 20)
+                expr: (UDFToDouble(0) = UDFToDouble(20))
                 type: boolean
             Select Operator
               expressions:
@@ -581,7 +581,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/pchakka/workspace/oshive/build/ql/tmp/462292647/163669153.10002 
+        /data/users/athusoo/commits/hive_trunk_ws2/build/ql/tmp/223878920/107499083.10002 
           Reduce Output Operator
             key expressions:
                   expr: 1
@@ -665,7 +665,7 @@
             1 {VALUE.0} {VALUE.1}
           Filter Operator
             predicate:
-                expr: (0 = 20)
+                expr: (UDFToDouble(0) = UDFToDouble(20))
                 type: boolean
             Select Operator
               expressions:
@@ -687,7 +687,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/pchakka/workspace/oshive/build/ql/tmp/72781939/1364102870.10002 
+        /data/users/athusoo/commits/hive_trunk_ws2/build/ql/tmp/273045729/75938714.10002 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -773,7 +773,7 @@
             1 {VALUE.0}
           Filter Operator
             predicate:
-                expr: (0 = 20)
+                expr: (UDFToDouble(0) = UDFToDouble(20))
                 type: boolean
             Select Operator
               expressions:
@@ -793,7 +793,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /data/users/pchakka/workspace/oshive/build/ql/tmp/808635/840293573.10002 
+        /data/users/athusoo/commits/hive_trunk_ws2/build/ql/tmp/294539045/192652633.10002 
           Reduce Output Operator
             key expressions:
                   expr: 0
@@ -838,7 +838,7 @@
         null-subquery1:unioninput-subquery1:src 
             Filter Operator
               predicate:
-                  expr: (key < 100)
+                  expr: (UDFToDouble(key) < UDFToDouble(100))
                   type: boolean
               Select Operator
                 expressions:
@@ -869,7 +869,7 @@
         null-subquery2:unioninput-subquery2:src 
             Filter Operator
               predicate:
-                  expr: (key > 100)
+                  expr: (UDFToDouble(key) > UDFToDouble(100))
                   type: boolean
               Select Operator
                 expressions:
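
The recurring plan change in this file is that comparisons involving the string column key and an integer literal are now evaluated after converting both sides to double, for example UDFToDouble(key) = UDFToDouble(20). A small illustration of why the conversion matters for a string column (the values are made up for the example):

    // String order and numeric order disagree, so comparing the raw string key
    // against a literal would give surprising results; casting both sides to
    // double restores numeric semantics.
    public class OrderSketch {
      public static void main(String[] args) {
        System.out.println("9".compareTo("10") > 0);  // true: "9" sorts after "10" as strings
        System.out.println(9.0 < 10.0);               // true: numeric order after the double cast
      }
    }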

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out?rev=737291&r1=737290&r2=737291&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out Sat Jan 24 01:58:01 2009
@@ -30,8 +30,8 @@
                 expr: avg(DISTINCT UDFToDouble(KEY.0))
                 expr: sum(UDFToDouble(KEY.0))
                 expr: avg(UDFToDouble(KEY.0))
-                expr: min(UDFToDouble(KEY.0))
-                expr: max(UDFToDouble(KEY.0))
+                expr: min(KEY.0)
+                expr: max(KEY.0)
           mode: partial1
           File Output Operator
             compressed: false
@@ -43,7 +43,7 @@
   Stage: Stage-2
     Map Reduce
       Alias -> Map Operator Tree:
-        /tmp/hive-zshao/695280947/659390410.10001 
+        /data/users/athusoo/apacheprojects/hive_local_ws3/build/ql/tmp/191882138/597396542.10001 
           Reduce Output Operator
             sort order: 
             tag: -1
@@ -55,15 +55,15 @@
                   expr: 2
                   type: string
                   expr: 3
-                  type: double
+                  type: string
                   expr: 4
-                  type: double
+                  type: string
       Reduce Operator Tree:
         Group By Operator
           aggregations:
-                expr: avg(VALUE.0)
+                expr: avg(UDFToDouble(VALUE.0))
                 expr: sum(VALUE.1)
-                expr: avg(VALUE.2)
+                expr: avg(UDFToDouble(VALUE.2))
                 expr: min(VALUE.3)
                 expr: max(VALUE.4)
           mode: final
@@ -76,16 +76,28 @@
                   expr: 0
                   type: double
                   expr: 4
-                  type: double
+                  type: string
                   expr: 3
-                  type: double
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
-                  name: dest1
+                  type: string
+            Select Operator
+              expressions:
+                    expr: 0
+                    type: double
+                    expr: 1
+                    type: double
+                    expr: 2
+                    type: double
+                    expr: UDFToDouble(3)
+                    type: double
+                    expr: UDFToDouble(4)
+                    type: double
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+                    name: dest1
 
   Stage: Stage-0
     Move Operator
@@ -98,4 +110,4 @@
                 name: dest1
 
 
-130091.0	260.182	256.10355987055016	498.0	0.0
+130091.0	NULL	NULL	98.0	98.0


