hadoop-hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From na...@apache.org
Subject svn commit: r804106 [1/24] - in /hadoop/hive/trunk: ./ contrib/src/java/org/apache/hadoop/hive/contrib/udaf/ contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/ contrib/src/java/org/apache/hadoop/hive/contrib/udf/ contrib/src/java/org/apache/...
Date Fri, 14 Aug 2009 07:48:09 GMT
Author: namit
Date: Fri Aug 14 07:48:02 2009
New Revision: 804106

URL: http://svn.apache.org/viewvc?rev=804106&view=rev
Log:
HIVE-699. Functions with variable length arguments
(Zheng Shao via namit)


Added:
    hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/
    hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/
    hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java
    hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java
    hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/
    hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/
    hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java
    hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java
    hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java
    hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java
    hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java
    hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_avg.q
    hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q
    hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_add.q
    hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q
    hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_format.q
    hadoop/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_avg.q.out
    hadoop/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out
    hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_add.q.out
    hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_arraymapstruct.q.out
    hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_format.q.out
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/SettableListObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/SettableMapObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/SettableStructObjectInspector.java
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/NumericOpMethodResolver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentException.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTPartitionPruner.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/aggregationDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFuncDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeGenericFuncDesc.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFAvg.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBridge.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
    hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/cluster.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map_skew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_noskew.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby_ppr.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/implicit_cast1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input11.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input11_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input12.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input1_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input2_limit.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input30.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testxpath2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join0.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join11.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join12.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join13.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join14.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join16.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join20.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join21.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join23.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join25.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join26.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join27.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join28.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join29.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join30.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join31.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join32.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join33.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join34.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join35.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join36.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join_map_ppr.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/louter_join_ppr.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce6.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce8.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/no_hooks.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/noalias_subq1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/nullgroup4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/order2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/outer_join_ppr.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_clusterby.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_constant_expr.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_gby.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_gby2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_join2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_multi_insert.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_outer_join4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/ppd_transform.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/quote1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/regex_col.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/regexp_extract.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/router_join_ppr.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/subq.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/subq2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/transform_ppr1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/transform_ppr2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf7.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf9.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_10_trims.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_abs.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_like.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_lower.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_parse_url.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_repeat.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_size.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_space.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union20.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/union_ppr.q.out
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/case_sensitivity.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/cast1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby5.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input3.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input7.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input8.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input9.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_part1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input_testxpath2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join5.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join7.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/join8.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample2.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample3.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample5.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/sample7.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/subq.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf1.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf6.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf_case.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/udf_when.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/union.q.xml
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyNonPrimitive.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorFactory.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ReflectionStructObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardListObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardMapObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/StandardStructObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ThriftStructObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBooleanObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaByteObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDoubleObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaFloatObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaIntObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaLongObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaShortObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaStringObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableBooleanObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableByteObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDoubleObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableFloatObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableIntObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableLongObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableShortObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableStringObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableBooleanObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableByteObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDoubleObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableFloatObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableIntObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableLongObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableShortObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableStringObjectInspector.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java
    hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoUtils.java
    hadoop/hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=804106&r1=804105&r2=804106&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Fri Aug 14 07:48:02 2009
@@ -15,6 +15,9 @@
     HIVE-693. Add a AWS S3 log format deserializer
     (Zheng Shao and Andraz Tori via namit)
 
+    HIVE-699. Functions with variable length arguments
+    (Zheng Shao via namit)
+
   IMPROVEMENTS
 
   OPTIMIZATIONS

Added: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java (added)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java Fri Aug 14 07:48:02 2009
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.contrib.udaf.example;
+
+import org.apache.hadoop.hive.ql.exec.UDAF;
+import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
+
+
+/**
+ * This is a simple UDAF that calculates average.
+ * 
+ * It should be very easy to follow and can be used as an example
+ * for writing new UDAFs.
+ *  
+ * Note that Hive internally uses a different mechanism (called
+ * GenericUDAF) to implement built-in aggregation functions, which
+ * are harder to program but more efficient.
+ * 
+ */
+public class UDAFExampleAvg extends UDAF {
+
+  /**
+   * The internal state of an aggregation for average.
+   * 
+   * Note that this is only needed if the internal state cannot be
+   * represented by a primitive.
+   * 
+   * The internal state can also contain fields with types like
+   * ArrayList<String> and HashMap<String,Double> if needed. 
+   */
+  public static class UDAFAvgState {
+    private long mCount;   // number of non-null values aggregated so far
+    private double mSum;   // running sum of those values
+  }
+  
+  /**
+   * The actual class for doing the aggregation.
+   * Hive will automatically look for all internal classes of the UDAF
+   * that implements UDAFEvaluator.  
+   */
+  public static class UDAFExampleAvgEvaluator implements UDAFEvaluator {
+    
+    UDAFAvgState state;   // mutable aggregation state, reset by init()
+    
+    public UDAFExampleAvgEvaluator() {
+      super();
+      state = new UDAFAvgState();
+      init();
+    }
+    
+    /**
+     * Reset the state of the aggregation.
+     */
+    public void init() {
+      state.mSum = 0;
+      state.mCount = 0;
+    }
+  
+    /**
+     * Iterate through one row of original data.
+     * 
+     * The number and type of arguments need to be the same as when we call
+     * this UDAF from the Hive command line.
+     * 
+     * This function should always return true.
+     */
+    public boolean iterate(Double o) {
+      // Null inputs are skipped entirely: they contribute to neither
+      // the sum nor the count, matching SQL AVG semantics.
+      if (o != null) {
+        state.mSum += o;
+        state.mCount ++;
+      }
+      return true;
+    }
+    
+    /**
+     * Terminate a partial aggregation and return the state.
+     * If the state is a primitive, just return primitive Java classes
+     * like Integer or String.
+     */
+    public UDAFAvgState terminatePartial() {
+      // This is SQL standard - average of zero items should be null.
+      return state.mCount == 0 ? null : state;
+    }
+
+    /**
+     * Merge with a partial aggregation.
+     * 
+     * This function should always have a single argument which has
+     * the same type as the return value of terminatePartial().  
+     */
+    public boolean merge(UDAFAvgState o) {
+      // o may be null when the partial aggregation saw zero non-null rows
+      // (see terminatePartial); a null partial contributes nothing.
+      if (o != null) {
+        state.mSum += o.mSum;
+        state.mCount += o.mCount;
+      }
+      return true;
+    }
+  
+    /**
+     * Terminates the aggregation and return the final result.
+     */
+    public Double terminate() {
+      // This is SQL standard - average of zero items should be null.
+      return state.mCount == 0 ? null : Double.valueOf(state.mSum / state.mCount);
+    }
+  }
+  
+}

Added: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java (added)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java Fri Aug 14 07:48:02 2009
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.contrib.udaf.example;
+
+import java.util.ArrayList;
+import java.util.Collections;
+
+import org.apache.hadoop.hive.ql.exec.UDAF;
+import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
+
+
+/**
+ * This is a simple UDAF that concatenates all arguments from
+ * different rows into a single string.
+ * 
+ * It should be very easy to follow and can be used as an example
+ * for writing new UDAFs.
+ *  
+ * Note that Hive internally uses a different mechanism (called
+ * GenericUDAF) to implement built-in aggregation functions, which
+ * are harder to program but more efficient.
+ */
+public class UDAFExampleGroupConcat extends UDAF {
+  
+  /**
+   * The actual class for doing the aggregation.
+   * Hive will automatically look for all internal classes of the UDAF
+   * that implements UDAFEvaluator.
+   */
+  public static class UDAFExampleGroupConcatEvaluator implements UDAFEvaluator {
+    
+    // One entry per iterated row: the concatenation of that row's arguments.
+    ArrayList<String> data;
+    
+    public UDAFExampleGroupConcatEvaluator() {
+      super();
+      data = new ArrayList<String>();
+    }
+    
+    /**
+     * Reset the state of the aggregation.
+     */
+    public void init() {
+      data.clear();
+    }
+  
+    /**
+     * Iterate through one row of original data.
+     * 
+     * This UDF accepts an arbitrary number of String arguments, so we use
+     * String[].  If it only accepts a single String, then we should use
+     * a single String argument.
+     * 
+     * This function should always return true.
+     */
+    public boolean iterate(String[] o) {
+      if (o != null) {
+        StringBuilder sb = new StringBuilder();
+        for (int i = 0; i < o.length; i++) {
+          sb.append(o[i]);
+        }
+        data.add(sb.toString());
+      }
+      return true;
+    }
+    
+    /**
+     * Terminate a partial aggregation and return the state.
+     */
+    public ArrayList<String> terminatePartial() {
+      return data;
+    }
+
+    /**
+     * Merge with a partial aggregation.
+     * 
+     * This function should always have a single argument which has
+     * the same type as the return value of terminatePartial().
+     * 
+     * This function should always return true.
+     */
+    public boolean merge(ArrayList<String> o) {
+      if (o != null) {
+        data.addAll(o);
+      }
+      return true;
+    }
+  
+    /**
+     * Terminates the aggregation and return the final result.
+     */
+    public String terminate() {
+      // Sort first so the output is deterministic regardless of the
+      // order in which rows were iterated or partials were merged.
+      Collections.sort(data);
+      StringBuilder sb = new StringBuilder();
+      for (int i=0; i<data.size(); i++) {
+        sb.append(data.get(i));
+      }
+      return sb.toString();
+    }
+  }
+
+}

Added: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java (added)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java Fri Aug 14 07:48:02 2009
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.contrib.udf.example;
+
+import org.apache.hadoop.hive.ql.exec.UDF;
+
+/**
+ * Example UDF that sums a variable number of arguments, demonstrating
+ * Java varargs support in Hive UDFs (HIVE-699).  Hive picks the Integer
+ * or Double overload based on the argument types of the call.
+ */
+public class UDFExampleAdd extends UDF {
+
+  /**
+   * Sums the given Integer arguments, skipping nulls.
+   * Note: returns 0 (not null) when there are no arguments or all are null.
+   */
+  public Integer evaluate(Integer... a) {
+    int total = 0;
+    for (int i=0; i<a.length; i++) {
+      if (a[i] != null) total += a[i];
+    }
+    return total;
+  }
+  
+  /**
+   * Sums the given Double arguments, skipping nulls.
+   * Note: returns 0.0 (not null) when there are no arguments or all are null.
+   */
+  public Double evaluate(Double... a) {
+    double total = 0;
+    for (int i=0; i<a.length; i++) {
+      if (a[i] != null) total += a[i];
+    }
+    return total;
+  }
+  
+}

Added: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java (added)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java Fri Aug 14 07:48:02 2009
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.contrib.udf.example;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.UDF;
+
+public class UDFExampleArraySum extends UDF {
+
+  public Double evaluate(List<Double> a) {
+    if (a == null) {
+      return null;
+    }
+    double total = 0;
+    for (int i=0; i<a.size(); i++) {
+      Double e = a.get(i);
+      if (e != null) {
+        total += e;
+      }
+    }
+    return total;
+  }
+  
+}

Added: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java (added)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java Fri Aug 14 07:48:02 2009
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.contrib.udf.example;
+
+import org.apache.hadoop.hive.ql.exec.UDF;
+
+public class UDFExampleFormat extends UDF {
+
+  public String evaluate(String format, Object... args) {
+    return String.format(format, args);
+  }
+  
+}

Added: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java (added)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java Fri Aug 14 07:48:02 2009
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.contrib.udf.example;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.exec.UDF;
+
+public class UDFExampleMapConcat extends UDF {
+
+  public String evaluate(Map<String, String> a) {
+    if (a == null) {
+      return null;
+    }
+    ArrayList<String> r = new ArrayList<String>(a.size());
+    for (Map.Entry<String,String> entry: a.entrySet()) {
+      r.add("(" + entry.getKey() + ":" + entry.getValue() + ")");
+    }
+    Collections.sort(r);
+    
+    StringBuilder sb = new StringBuilder();
+    for (int i = 0; i < r.size(); i++) {
+      sb.append(r.get(i));
+    }
+    return sb.toString();
+  }
+  
+}

Added: hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java (added)
+++ hadoop/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java Fri Aug 14 07:48:02 2009
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.contrib.udf.example;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.UDF;
+
+public class UDFExampleStructPrint extends UDF {
+
+  public String evaluate(Object a) {
+    if (a == null) {
+      return null;
+    }
+    List<Object> s = (List<Object>)a;
+    
+    StringBuilder sb = new StringBuilder();
+    for (int i=0; i<s.size(); i++) {
+      sb.append("(" + i + ":" + s.get(i) + ")");
+    }
+    return sb.toString();
+  }
+  
+}

Added: hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_avg.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_avg.q?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_avg.q (added)
+++ hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_avg.q Fri Aug 14 07:48:02 2009
@@ -0,0 +1,14 @@
+add jar ../build/contrib/hive_contrib.jar;
+
+CREATE TEMPORARY FUNCTION example_avg AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleAvg';
+
+EXPLAIN
+SELECT example_avg(substr(value,5)),
+       example_avg(IF(substr(value,5) > 250, NULL, substr(value,5)))
+FROM src;
+
+SELECT example_avg(substr(value,5)),
+       example_avg(IF(substr(value,5) > 250, NULL, substr(value,5)))
+FROM src;
+
+DROP TEMPORARY FUNCTION example_avg;

Added: hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q (added)
+++ hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udaf_example_group_concat.q Fri Aug 14 07:48:02 2009
@@ -0,0 +1,15 @@
+add jar ../build/contrib/hive_contrib.jar;
+
+CREATE TEMPORARY FUNCTION example_group_concat AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleGroupConcat';
+
+EXPLAIN
+SELECT substr(value,5,1), example_group_concat("(", key, ":", value, ")")
+FROM src
+GROUP BY substr(value,5,1);
+
+SELECT substr(value,5,1), example_group_concat("(", key, ":", value, ")")
+FROM src
+GROUP BY substr(value,5,1);
+
+
+DROP TEMPORARY FUNCTION example_group_concat;

Added: hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_add.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_add.q?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_add.q (added)
+++ hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_add.q Fri Aug 14 07:48:02 2009
@@ -0,0 +1,24 @@
+add jar ../build/contrib/hive_contrib.jar;
+
+CREATE TEMPORARY FUNCTION example_add AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleAdd';
+
+EXPLAIN
+SELECT example_add(1, 2),
+       example_add(1, 2, 3),
+       example_add(1, 2, 3, 4),
+       example_add(1.1, 2.2),
+       example_add(1.1, 2.2, 3.3),
+       example_add(1.1, 2.2, 3.3, 4.4),
+       example_add(1, 2, 3, 4.4)
+FROM src LIMIT 1;
+
+SELECT example_add(1, 2),
+       example_add(1, 2, 3),
+       example_add(1, 2, 3, 4),
+       example_add(1.1, 2.2),
+       example_add(1.1, 2.2, 3.3),
+       example_add(1.1, 2.2, 3.3, 4.4),
+       example_add(1, 2, 3, 4.4)
+FROM src LIMIT 1;
+
+DROP TEMPORARY FUNCTION example_add;

Added: hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q (added)
+++ hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_arraymapstruct.q Fri Aug 14 07:48:02 2009
@@ -0,0 +1,16 @@
+add jar ../build/contrib/hive_contrib.jar;
+
+CREATE TEMPORARY FUNCTION example_arraysum    AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleArraySum';
+CREATE TEMPORARY FUNCTION example_mapconcat   AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleMapConcat';
+CREATE TEMPORARY FUNCTION example_structprint AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleStructPrint';
+
+EXPLAIN
+SELECT example_arraysum(lint), example_mapconcat(mstringstring), example_structprint(lintstring[0])
+FROM src_thrift;
+
+SELECT example_arraysum(lint), example_mapconcat(mstringstring), example_structprint(lintstring[0])
+FROM src_thrift;
+
+DROP TEMPORARY FUNCTION example_arraysum;
+DROP TEMPORARY FUNCTION example_mapconcat;
+DROP TEMPORARY FUNCTION example_structprint;

Added: hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_format.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_format.q?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_format.q (added)
+++ hadoop/hive/trunk/contrib/src/test/queries/clientpositive/udf_example_format.q Fri Aug 14 07:48:02 2009
@@ -0,0 +1,18 @@
+add jar ../build/contrib/hive_contrib.jar;
+
+CREATE TEMPORARY FUNCTION example_format AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleFormat';
+
+EXPLAIN
+SELECT example_format("abc"),
+       example_format("%1$s", 1.1),
+       example_format("%1$s %2$e", 1.1, 1.2),
+       example_format("%1$x %2$o %3$d", 10, 10, 10)
+FROM src LIMIT 1;
+
+SELECT example_format("abc"),
+       example_format("%1$s", 1.1),
+       example_format("%1$s %2$e", 1.1, 1.2),
+       example_format("%1$x %2$o %3$d", 10, 10, 10)
+FROM src LIMIT 1;
+
+DROP TEMPORARY FUNCTION example_format;

Added: hadoop/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_avg.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_avg.q.out?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_avg.q.out (added)
+++ hadoop/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_avg.q.out Fri Aug 14 07:48:02 2009
@@ -0,0 +1,71 @@
+query: CREATE TEMPORARY FUNCTION example_avg AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleAvg'
+query: EXPLAIN
+SELECT example_avg(substr(value,5)),
+       example_avg(IF(substr(value,5) > 250, NULL, substr(value,5)))
+FROM src
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION example_avg (TOK_FUNCTION substr (TOK_TABLE_OR_COL value) 5))) (TOK_SELEXPR (TOK_FUNCTION example_avg (TOK_FUNCTION IF (> (TOK_FUNCTION substr (TOK_TABLE_OR_COL value) 5) 250) TOK_NULL (TOK_FUNCTION substr (TOK_TABLE_OR_COL value) 5)))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: value
+                    type: string
+              outputColumnNames: value
+              Group By Operator
+                aggregations:
+                      expr: example_avg(substr(value, 5))
+                      expr: example_avg(if((substr(value, 5) > 250), null, substr(value, 5)))
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: struct<mcount:bigint,msum:double>
+                        expr: _col1
+                        type: struct<mcount:bigint,msum:double>
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: example_avg(VALUE._col0)
+                expr: example_avg(VALUE._col1)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: double
+                  expr: _col1
+                  type: double
+            outputColumnNames: _col0, _col1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+query: SELECT example_avg(substr(value,5)),
+       example_avg(IF(substr(value,5) > 250, NULL, substr(value,5)))
+FROM src
+Input: default/src
+Output: file:/data/users/zshao/tools/699-trunk-apache-hive/build/ql/tmp/532982360/10000
+260.182	134.82926829268294
+query: DROP TEMPORARY FUNCTION example_avg

Added: hadoop/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out (added)
+++ hadoop/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out Fri Aug 14 07:48:02 2009
@@ -0,0 +1,90 @@
+query: CREATE TEMPORARY FUNCTION example_group_concat AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleGroupConcat'
+query: EXPLAIN
+SELECT substr(value,5,1), example_group_concat("(", key, ":", value, ")")
+FROM src
+GROUP BY substr(value,5,1)
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION substr (TOK_TABLE_OR_COL value) 5 1)) (TOK_SELEXPR (TOK_FUNCTION example_group_concat "(" (TOK_TABLE_OR_COL key) ":" (TOK_TABLE_OR_COL value) ")"))) (TOK_GROUPBY (TOK_FUNCTION substr (TOK_TABLE_OR_COL value) 5 1))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              outputColumnNames: key, value
+              Group By Operator
+                aggregations:
+                      expr: example_group_concat('(', key, ':', value, ')')
+                keys:
+                      expr: substr(value, 5, 1)
+                      type: string
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: _col0
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: _col1
+                        type: array<string>
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: example_group_concat(VALUE._col0)
+          keys:
+                expr: KEY._col0
+                type: string
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col1
+                  type: string
+            outputColumnNames: _col0, _col1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+query: SELECT substr(value,5,1), example_group_concat("(", key, ":", value, ")")
+FROM src
+GROUP BY substr(value,5,1)
+Input: default/src
+Output: file:/data/users/zshao/tools/699-trunk-apache-hive/build/ql/tmp/148650144/10000
+0	(0:val_0)(0:val_0)(0:val_0)
+1	(100:val_100)(100:val_100)(103:val_103)(103:val_103)(104:val_104)(104:val_104)(105:val_105)(10:val_10)(111:val_111)(113:val_113)(113:val_113)(114:val_114)(116:val_116)(118:val_118)(118:val_118)(119:val_119)(119:val_119)(119:val_119)(11:val_11)(120:val_120)(120:val_120)(125:val_125)(125:val_125)(126:val_126)(128:val_128)(128:val_128)(128:val_128)(129:val_129)(129:val_129)(12:val_12)(12:val_12)(131:val_131)(133:val_133)(134:val_134)(134:val_134)(136:val_136)(137:val_137)(137:val_137)(138:val_138)(138:val_138)(138:val_138)(138:val_138)(143:val_143)(145:val_145)(146:val_146)(146:val_146)(149:val_149)(149:val_149)(150:val_150)(152:val_152)(152:val_152)(153:val_153)(155:val_155)(156:val_156)(157:val_157)(158:val_158)(15:val_15)(15:val_15)(160:val_160)(162:val_162)(163:val_163)(164:val_164)(164:val_164)(165:val_165)(165:val_165)(166:val_166)(167:val_167)(167:val_167)(167:val_167)(168:val_168)(169:val_169)(169:val_169)(169:val_169)(169:val_169)(170:val_170)(172:val_172)(172:val_17
 2)(174:val_174)(174:val_174)(175:val_175)(175:val_175)(176:val_176)(176:val_176)(177:val_177)(178:val_178)(179:val_179)(179:val_179)(17:val_17)(180:val_180)(181:val_181)(183:val_183)(186:val_186)(187:val_187)(187:val_187)(187:val_187)(189:val_189)(18:val_18)(18:val_18)(190:val_190)(191:val_191)(191:val_191)(192:val_192)(193:val_193)(193:val_193)(193:val_193)(194:val_194)(195:val_195)(195:val_195)(196:val_196)(197:val_197)(197:val_197)(199:val_199)(199:val_199)(199:val_199)(19:val_19)
+2	(200:val_200)(200:val_200)(201:val_201)(202:val_202)(203:val_203)(203:val_203)(205:val_205)(205:val_205)(207:val_207)(207:val_207)(208:val_208)(208:val_208)(208:val_208)(209:val_209)(209:val_209)(20:val_20)(213:val_213)(213:val_213)(214:val_214)(216:val_216)(216:val_216)(217:val_217)(217:val_217)(218:val_218)(219:val_219)(219:val_219)(221:val_221)(221:val_221)(222:val_222)(223:val_223)(223:val_223)(224:val_224)(224:val_224)(226:val_226)(228:val_228)(229:val_229)(229:val_229)(230:val_230)(230:val_230)(230:val_230)(230:val_230)(230:val_230)(233:val_233)(233:val_233)(235:val_235)(237:val_237)(237:val_237)(238:val_238)(238:val_238)(239:val_239)(239:val_239)(241:val_241)(242:val_242)(242:val_242)(244:val_244)(247:val_247)(248:val_248)(249:val_249)(24:val_24)(24:val_24)(252:val_252)(255:val_255)(255:val_255)(256:val_256)(256:val_256)(257:val_257)(258:val_258)(260:val_260)(262:val_262)(263:val_263)(265:val_265)(265:val_265)(266:val_266)(26:val_26)(26:val_26)(272:val_272)(272:val_
 272)(273:val_273)(273:val_273)(273:val_273)(274:val_274)(275:val_275)(277:val_277)(277:val_277)(277:val_277)(277:val_277)(278:val_278)(278:val_278)(27:val_27)(280:val_280)(280:val_280)(281:val_281)(281:val_281)(282:val_282)(282:val_282)(283:val_283)(284:val_284)(285:val_285)(286:val_286)(287:val_287)(288:val_288)(288:val_288)(289:val_289)(28:val_28)(291:val_291)(292:val_292)(296:val_296)(298:val_298)(298:val_298)(298:val_298)(2:val_2)
+3	(302:val_302)(305:val_305)(306:val_306)(307:val_307)(307:val_307)(308:val_308)(309:val_309)(309:val_309)(30:val_30)(310:val_310)(311:val_311)(311:val_311)(311:val_311)(315:val_315)(316:val_316)(316:val_316)(316:val_316)(317:val_317)(317:val_317)(318:val_318)(318:val_318)(318:val_318)(321:val_321)(321:val_321)(322:val_322)(322:val_322)(323:val_323)(325:val_325)(325:val_325)(327:val_327)(327:val_327)(327:val_327)(331:val_331)(331:val_331)(332:val_332)(333:val_333)(333:val_333)(335:val_335)(336:val_336)(338:val_338)(339:val_339)(33:val_33)(341:val_341)(342:val_342)(342:val_342)(344:val_344)(344:val_344)(345:val_345)(348:val_348)(348:val_348)(348:val_348)(348:val_348)(348:val_348)(34:val_34)(351:val_351)(353:val_353)(353:val_353)(356:val_356)(35:val_35)(35:val_35)(35:val_35)(360:val_360)(362:val_362)(364:val_364)(365:val_365)(366:val_366)(367:val_367)(367:val_367)(368:val_368)(369:val_369)(369:val_369)(369:val_369)(373:val_373)(374:val_374)(375:val_375)(377:val_377)(378:val_37
 8)(379:val_379)(37:val_37)(37:val_37)(382:val_382)(382:val_382)(384:val_384)(384:val_384)(384:val_384)(386:val_386)(389:val_389)(392:val_392)(393:val_393)(394:val_394)(395:val_395)(395:val_395)(396:val_396)(396:val_396)(396:val_396)(397:val_397)(397:val_397)(399:val_399)(399:val_399)
+4	(400:val_400)(401:val_401)(401:val_401)(401:val_401)(401:val_401)(401:val_401)(402:val_402)(403:val_403)(403:val_403)(403:val_403)(404:val_404)(404:val_404)(406:val_406)(406:val_406)(406:val_406)(406:val_406)(407:val_407)(409:val_409)(409:val_409)(409:val_409)(411:val_411)(413:val_413)(413:val_413)(414:val_414)(414:val_414)(417:val_417)(417:val_417)(417:val_417)(418:val_418)(419:val_419)(41:val_41)(421:val_421)(424:val_424)(424:val_424)(427:val_427)(429:val_429)(429:val_429)(42:val_42)(42:val_42)(430:val_430)(430:val_430)(430:val_430)(431:val_431)(431:val_431)(431:val_431)(432:val_432)(435:val_435)(436:val_436)(437:val_437)(438:val_438)(438:val_438)(438:val_438)(439:val_439)(439:val_439)(43:val_43)(443:val_443)(444:val_444)(446:val_446)(448:val_448)(449:val_449)(44:val_44)(452:val_452)(453:val_453)(454:val_454)(454:val_454)(454:val_454)(455:val_455)(457:val_457)(458:val_458)(458:val_458)(459:val_459)(459:val_459)(460:val_460)(462:val_462)(462:val_462)(463:val_463)(463:val_
 463)(466:val_466)(466:val_466)(466:val_466)(467:val_467)(468:val_468)(468:val_468)(468:val_468)(468:val_468)(469:val_469)(469:val_469)(469:val_469)(469:val_469)(469:val_469)(470:val_470)(472:val_472)(475:val_475)(477:val_477)(478:val_478)(478:val_478)(479:val_479)(47:val_47)(480:val_480)(480:val_480)(480:val_480)(481:val_481)(482:val_482)(483:val_483)(484:val_484)(485:val_485)(487:val_487)(489:val_489)(489:val_489)(489:val_489)(489:val_489)(490:val_490)(491:val_491)(492:val_492)(492:val_492)(493:val_493)(494:val_494)(495:val_495)(496:val_496)(497:val_497)(498:val_498)(498:val_498)(498:val_498)(4:val_4)
+5	(51:val_51)(51:val_51)(53:val_53)(54:val_54)(57:val_57)(58:val_58)(58:val_58)(5:val_5)(5:val_5)(5:val_5)
+6	(64:val_64)(65:val_65)(66:val_66)(67:val_67)(67:val_67)(69:val_69)
+7	(70:val_70)(70:val_70)(70:val_70)(72:val_72)(72:val_72)(74:val_74)(76:val_76)(76:val_76)(77:val_77)(78:val_78)
+8	(80:val_80)(82:val_82)(83:val_83)(83:val_83)(84:val_84)(84:val_84)(85:val_85)(86:val_86)(87:val_87)(8:val_8)
+9	(90:val_90)(90:val_90)(90:val_90)(92:val_92)(95:val_95)(95:val_95)(96:val_96)(97:val_97)(97:val_97)(98:val_98)(98:val_98)(9:val_9)
+query: DROP TEMPORARY FUNCTION example_group_concat

Added: hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_add.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_add.q.out?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_add.q.out (added)
+++ hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_add.q.out Fri Aug 14 07:48:02 2009
@@ -0,0 +1,66 @@
+query: CREATE TEMPORARY FUNCTION example_add AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleAdd'
+query: EXPLAIN
+SELECT example_add(1, 2),
+       example_add(1, 2, 3),
+       example_add(1, 2, 3, 4),
+       example_add(1.1, 2.2),
+       example_add(1.1, 2.2, 3.3),
+       example_add(1.1, 2.2, 3.3, 4.4),
+       example_add(1, 2, 3, 4.4)
+FROM src LIMIT 1
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION example_add 1 2)) (TOK_SELEXPR (TOK_FUNCTION example_add 1 2 3)) (TOK_SELEXPR (TOK_FUNCTION example_add 1 2 3 4)) (TOK_SELEXPR (TOK_FUNCTION example_add 1.1 2.2)) (TOK_SELEXPR (TOK_FUNCTION example_add 1.1 2.2 3.3)) (TOK_SELEXPR (TOK_FUNCTION example_add 1.1 2.2 3.3 4.4)) (TOK_SELEXPR (TOK_FUNCTION example_add 1 2 3 4.4))) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: example_add(1, 2)
+                    type: int
+                    expr: example_add(1, 2, 3)
+                    type: int
+                    expr: example_add(1, 2, 3, 4)
+                    type: int
+                    expr: example_add(1.1, 2.2)
+                    type: double
+                    expr: example_add(1.1, 2.2, 3.3)
+                    type: double
+                    expr: example_add(1.1, 2.2, 3.3, 4.4)
+                    type: double
+                    expr: example_add(1, 2, 3, 4.4)
+                    type: double
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+              Limit
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+query: SELECT example_add(1, 2),
+       example_add(1, 2, 3),
+       example_add(1, 2, 3, 4),
+       example_add(1.1, 2.2),
+       example_add(1.1, 2.2, 3.3),
+       example_add(1.1, 2.2, 3.3, 4.4),
+       example_add(1, 2, 3, 4.4)
+FROM src LIMIT 1
+Input: default/src
+Output: file:/data/users/zshao/tools/699-trunk-apache-hive/build/ql/tmp/221019346/10000
+3	6	10	3.3000000000000003	6.6	11.0	10.4
+query: DROP TEMPORARY FUNCTION example_add

Added: hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_arraymapstruct.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_arraymapstruct.q.out?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_arraymapstruct.q.out (added)
+++ hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_arraymapstruct.q.out Fri Aug 14 07:48:02 2009
@@ -0,0 +1,59 @@
+query: CREATE TEMPORARY FUNCTION example_arraysum    AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleArraySum'
+query: CREATE TEMPORARY FUNCTION example_mapconcat   AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleMapConcat'
+query: CREATE TEMPORARY FUNCTION example_structprint AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleStructPrint'
+query: EXPLAIN
+SELECT example_arraysum(lint), example_mapconcat(mstringstring), example_structprint(lintstring[0])
+FROM src_thrift
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src_thrift)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION example_arraysum (TOK_TABLE_OR_COL lint))) (TOK_SELEXPR (TOK_FUNCTION example_mapconcat (TOK_TABLE_OR_COL mstringstring))) (TOK_SELEXPR (TOK_FUNCTION example_structprint ([ (TOK_TABLE_OR_COL lintstring) 0))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src_thrift 
+          TableScan
+            alias: src_thrift
+            Select Operator
+              expressions:
+                    expr: example_arraysum(lint)
+                    type: double
+                    expr: example_mapconcat(mstringstring)
+                    type: string
+                    expr: example_structprint(lintstring[0])
+                    type: string
+              outputColumnNames: _col0, _col1, _col2
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+query: SELECT example_arraysum(lint), example_mapconcat(mstringstring), example_structprint(lintstring[0])
+FROM src_thrift
+Input: default/src_thrift
+Output: file:/data/users/zshao/tools/699-trunk-apache-hive/build/ql/tmp/1971586889/10000
+0.0	(key_0:value_0)	(0:0)(1:0)(2:0)
+6.0	(key_1:value_1)	(0:1)(1:1)(2:1)
+12.0	(key_2:value_2)	(0:4)(1:8)(2:2)
+18.0	(key_3:value_3)	(0:9)(1:27)(2:3)
+24.0	(key_4:value_4)	(0:16)(1:64)(2:4)
+30.0	(key_5:value_5)	(0:25)(1:125)(2:5)
+36.0	(key_6:value_6)	(0:36)(1:216)(2:6)
+42.0	(key_7:value_7)	(0:49)(1:343)(2:7)
+48.0	(key_8:value_8)	(0:64)(1:512)(2:8)
+54.0	(key_9:value_9)	(0:81)(1:729)(2:9)
+NULL	NULL	NULL
+query: DROP TEMPORARY FUNCTION example_arraysum
+query: DROP TEMPORARY FUNCTION example_mapconcat
+query: DROP TEMPORARY FUNCTION example_structprint

Added: hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_format.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_format.q.out?rev=804106&view=auto
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_format.q.out (added)
+++ hadoop/hive/trunk/contrib/src/test/results/clientpositive/udf_example_format.q.out Fri Aug 14 07:48:02 2009
@@ -0,0 +1,54 @@
+query: CREATE TEMPORARY FUNCTION example_format AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleFormat'
+query: EXPLAIN
+SELECT example_format("abc"),
+       example_format("%1$s", 1.1),
+       example_format("%1$s %2$e", 1.1, 1.2),
+       example_format("%1$x %2$o %3$d", 10, 10, 10)
+FROM src LIMIT 1
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION example_format "abc")) (TOK_SELEXPR (TOK_FUNCTION example_format "%1$s" 1.1)) (TOK_SELEXPR (TOK_FUNCTION example_format "%1$s %2$e" 1.1 1.2)) (TOK_SELEXPR (TOK_FUNCTION example_format "%1$x %2$o %3$d" 10 10 10))) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: example_format('abc')
+                    type: string
+                    expr: example_format('%1$s', 1.1)
+                    type: string
+                    expr: example_format('%1$s %2$e', 1.1, 1.2)
+                    type: string
+                    expr: example_format('%1$x %2$o %3$d', 10, 10, 10)
+                    type: string
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Limit
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+query: SELECT example_format("abc"),
+       example_format("%1$s", 1.1),
+       example_format("%1$s %2$e", 1.1, 1.2),
+       example_format("%1$x %2$o %3$d", 10, 10, 10)
+FROM src LIMIT 1
+Input: default/src
+Output: file:/data/users/zshao/tools/699-trunk-apache-hive/build/ql/tmp/857451543/10000
+abc	1.1	1.1 1.200000e+00	a 12 10
+query: DROP TEMPORARY FUNCTION example_format

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java?rev=804106&r1=804105&r2=804106&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java Fri Aug 14 07:48:02 2009
@@ -20,15 +20,13 @@
 
 import java.util.List;
 
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
 /**
  * Exception thrown by the UDF and UDAF method resolvers in case a unique method is not found.
  *
  */
-public class AmbiguousMethodException extends SemanticException {
+public class AmbiguousMethodException extends UDFArgumentException {
 
   /**
    * 
@@ -52,6 +50,7 @@
    * @param argTypeInfos The list of argument types that lead to an ambiguity.
    */
   public AmbiguousMethodException(Class<?> funcClass, List<TypeInfo> argTypeInfos) {
+    super("Ambiguous method for " + funcClass + " with " + argTypeInfos);
     this.funcClass = funcClass;
     this.argTypeInfos = argTypeInfos;
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java?rev=804106&r1=804105&r2=804106&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java Fri Aug 14 07:48:02 2009
@@ -79,8 +79,12 @@
     for(Method m: Arrays.asList(udfClass.getMethods())) {
       if (m.getName().equals("evaluate")) {
 
-        List<TypeInfo> acceptedTypeInfos = TypeInfoUtils.getParameterTypeInfos(m);
-
+        List<TypeInfo> acceptedTypeInfos = TypeInfoUtils.getParameterTypeInfos(m, pTypeInfos.size());
+        if (acceptedTypeInfos == null) {
+          // null means the method does not accept number of arguments passed.
+          continue;
+        }
+        
         boolean match = (acceptedTypeInfos.size() == pTypeInfos.size());
 
         for(int i=0; i<pTypeInfos.size() && match; i++) {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=804106&r1=804105&r2=804106&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Fri Aug 14 07:48:02 2009
@@ -64,6 +64,8 @@
 import org.apache.hadoop.hive.ql.plan.showFunctionsDesc;
 import org.apache.hadoop.hive.ql.plan.showPartitionsDesc;
 import org.apache.hadoop.hive.ql.plan.showTablesDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
@@ -437,12 +439,21 @@
       // get the function documentation
       description desc = null;
       FunctionInfo fi = FunctionRegistry.getFunctionInfo(name);
-      if(fi.getUDFClass() != null) {
-        desc = fi.getUDFClass().getAnnotation(description.class);
-      } else if(fi.getGenericUDFClass() != null) {
-        desc = fi.getGenericUDFClass().getAnnotation(description.class);
+      
+      Class<?> funcClass = null;
+      GenericUDF udf = fi.getGenericUDF();
+      if (udf != null) {
+        // If it's a GenericUDFBridge, then let's use the  
+        if (udf instanceof GenericUDFBridge) {
+          funcClass = ((GenericUDFBridge)udf).getUdfClass();
+        } else {
+          funcClass = udf.getClass();
+        }
       }
       
+      if (funcClass != null) {
+        desc = funcClass.getAnnotation(description.class);
+      }
       if (desc != null) {
         outStream.writeBytes(desc.value().replace("_FUNC_", name));
         if(descFunc.isExtended() && desc.extended().length()>0) {

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java?rev=804106&r1=804105&r2=804106&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java Fri Aug 14 07:48:02 2009
@@ -252,7 +252,7 @@
       if(!abort) {
         // signal new failure to map-reduce
         l4j.error("Hit error while closing operators - failing tree");
-        throw new RuntimeException ("Error while closing operators: " + e.getMessage());
+        throw new RuntimeException ("Error while closing operators: " + e.getMessage(), e);
       }
     }
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=804106&r1=804105&r2=804106&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Fri Aug 14 07:48:02 2009
@@ -96,11 +96,13 @@
       if (isPrintable(ent.getValue())) {
         out.print(ent.getValue());
         out.println();
-      }
-      else if (ent.getValue() instanceof Serializable) {
+      } else if (ent.getValue() instanceof List) {
+        out.print(ent.getValue().toString());
+        out.println();
+      } else if (ent.getValue() instanceof Serializable) {
         out.println();
         outputPlan((Serializable)ent.getValue(), out, extended, indent+2);
-      }
+      } 
     }
   }
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java?rev=804106&r1=804105&r2=804106&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java Fri Aug 14 07:48:02 2009
@@ -24,7 +24,6 @@
 import org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeNullDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeFieldDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc;
 
 public class ExprNodeEvaluatorFactory {
   
@@ -39,11 +38,7 @@
     if (desc instanceof exprNodeColumnDesc) {
       return new ExprNodeColumnEvaluator((exprNodeColumnDesc)desc);
     }
-    // Function node, e.g. an operator or a UDF node
-    if (desc instanceof exprNodeFuncDesc) {
-      return new ExprNodeFuncEvaluator((exprNodeFuncDesc)desc);
-    }
-    // Generic Function node, e.g. CASE
+    // Generic Function node, e.g. CASE, an operator or a UDF node
     if (desc instanceof exprNodeGenericFuncDesc) {
       return new ExprNodeGenericFuncEvaluator((exprNodeGenericFuncDesc)desc);
     }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java?rev=804106&r1=804105&r2=804106&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFuncEvaluator.java Fri Aug 14 07:48:02 2009
@@ -1,134 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec;
-
-import java.lang.reflect.Method;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc;
-import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.util.ReflectionUtils;
-
-public class ExprNodeFuncEvaluator extends ExprNodeEvaluator {
-
-  private static final Log LOG = LogFactory.getLog(ExprNodeFuncEvaluator.class.getName());
-  
-  protected exprNodeFuncDesc expr;
-  transient ExprNodeEvaluator[] paramEvaluators;
-  transient ObjectInspector[] paramInspectors;
-  transient boolean[] paramIsPrimitiveWritable;
-  transient Object[] paramValues;
-  transient UDF udf;
-  transient Method udfMethod;
-  
-  public ExprNodeFuncEvaluator(exprNodeFuncDesc expr) {
-    this.expr = expr;
-    assert(expr != null);
-    Class<?> c = expr.getUDFClass();
-    udfMethod = expr.getUDFMethod();
-    LOG.debug(c.toString());
-    LOG.debug(udfMethod.toString());
-    udf = (UDF)ReflectionUtils.newInstance(expr.getUDFClass(), null);
-    int paramNumber = expr.getChildren().size();
-    paramEvaluators = new ExprNodeEvaluator[paramNumber];
-    paramInspectors  = new ObjectInspector[paramNumber];
-    paramIsPrimitiveWritable = new boolean[paramNumber];
-    for(int i=0; i<paramNumber; i++) {
-      paramEvaluators[i] = ExprNodeEvaluatorFactory.get(expr.getChildExprs().get(i));
-      paramIsPrimitiveWritable[i] = PrimitiveObjectInspectorUtils
-          .isPrimitiveWritableClass(udfMethod.getParameterTypes()[i]);
-    }
-    paramValues = new Object[expr.getChildren().size()];
-  }
-
-  @Override
-  public ObjectInspector initialize(ObjectInspector rowInspector)
-    throws HiveException {
-    for (int i=0; i<paramEvaluators.length; i++) {
-      paramInspectors[i] = paramEvaluators[i].initialize(rowInspector);
-    }
-    
-    // The return type of a function can be either Java Primitive or Writable.
-    if (PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(
-        udfMethod.getReturnType())) {
-      PrimitiveCategory pc = PrimitiveObjectInspectorUtils
-          .getTypeEntryFromPrimitiveWritableClass(udfMethod.getReturnType())
-          .primitiveCategory;
-      return PrimitiveObjectInspectorFactory
-          .getPrimitiveWritableObjectInspector(pc);
-    } else {
-      PrimitiveCategory pc = PrimitiveObjectInspectorUtils
-          .getTypeEntryFromPrimitiveJavaClass(udfMethod.getReturnType())
-          .primitiveCategory;
-      return PrimitiveObjectInspectorFactory
-          .getPrimitiveJavaObjectInspector(pc);
-    }
-  }
-  
-  @Override
-  public Object evaluate(Object row) throws HiveException {
-
-    // Evaluate all children first
-    for(int i=0; i<paramEvaluators.length; i++) {
-      
-      Object thisParam = paramEvaluators[i].evaluate(row);
-      Category c = paramInspectors[i].getCategory();
-      
-      // TODO: Both getList and getMap are not very efficient.
-      // We should convert UDFSize and UDFIsNull to ExprNodeEvaluator. 
-      switch(c) {
-        case LIST: {
-          // Need to pass a Java List for List type
-          paramValues[i] = ((ListObjectInspector)paramInspectors[i])
-              .getList(thisParam);
-          break;
-        }
-        case MAP: {
-          // Need to pass a Java Map for Map type
-          paramValues[i] = ((MapObjectInspector)paramInspectors[i])
-              .getMap(thisParam);
-          break;
-        }
-        case PRIMITIVE: {
-          PrimitiveObjectInspector poi = (PrimitiveObjectInspector)paramInspectors[i];
-          paramValues[i] = (paramIsPrimitiveWritable[i]
-              ? poi.getPrimitiveWritableObject(thisParam)
-              : poi.getPrimitiveJavaObject(thisParam));
-          break;
-        }
-        default: {
-          // STRUCT
-          paramValues[i] = thisParam;
-        }
-      }
-    }
-    
-    return FunctionRegistry.invoke(udfMethod, udf, paramValues);
-  }
-
-}

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java?rev=804106&r1=804105&r2=804106&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java Fri Aug 14 07:48:02 2009
@@ -72,8 +72,7 @@
     for (int i=0; i<children.length; i++) {
       childrenOIs[i] = children[i].initialize(rowInspector);
     }
-    genericUDF = (GenericUDF) ReflectionUtils.newInstance(
-        expr.getGenericUDFClass(), null);
+    genericUDF = expr.getGenericUDF();
     return genericUDF.initialize(childrenOIs);
   }
   

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java?rev=804106&r1=804105&r2=804106&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java Fri Aug 14 07:48:02 2009
@@ -22,87 +22,58 @@
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 
 public class FunctionInfo {
+  
   private boolean isNative;
 
   private String displayName;
 
-  private OperatorType opType;
-
-  private boolean isOperator;
-  
-  private Class<? extends UDF> udfClass;
-  
-  private Class<? extends GenericUDF> genericUDFClass;
+  private GenericUDF genericUDF;
 
   private GenericUDAFResolver genericUDAFResolver;
 
-  public static enum OperatorType { NO_OP, PREFIX, INFIX, POSTFIX };
-
-  public FunctionInfo(String displayName, Class<? extends UDF> udfClass,
-      Class<? extends GenericUDF> genericUdfClass) {
-    this(true, displayName, udfClass, genericUdfClass);
-  }
-
-  public FunctionInfo(boolean isNative, String displayName, Class<? extends UDF> udfClass,
-      Class<? extends GenericUDF> genericUdfClass) {
+  public FunctionInfo(boolean isNative, String displayName, GenericUDF genericUDF) {
     this.isNative = isNative;
     this.displayName = displayName;
-    opType = OperatorType.NO_OP;
-    isOperator = false;
-    this.udfClass = udfClass;
-    this.genericUDFClass = genericUdfClass;
+    this.genericUDF = genericUDF;
     this.genericUDAFResolver = null;
   }
 
-  public FunctionInfo(String displayName, GenericUDAFResolver genericUDAFResolver) {
-    this(true, displayName, genericUDAFResolver);
-  }
-
   public FunctionInfo(boolean isNative, String displayName, GenericUDAFResolver genericUDAFResolver) {
     this.isNative = isNative;
     this.displayName = displayName;
-    this.opType = OperatorType.NO_OP;
-    this.udfClass = null;
-    this.genericUDFClass = null;
+    this.genericUDF = null;
     this.genericUDAFResolver = genericUDAFResolver;
   }
 
-  public boolean isAggFunction() {
-    return genericUDAFResolver != null;
-  }
-
-  public boolean isOperator() {
-    return isOperator;
-  }
-
-  public void setIsOperator(boolean val) {
-    isOperator = val;
-  }
-  
-  public void setOpType(OperatorType opt) {
-    opType = opt;
-  }
-  
-  public OperatorType getOpType() {
-    return opType;
-  }
-
-  public Class<? extends UDF> getUDFClass() {
-    return udfClass;
-  }
-
-  public Class<? extends GenericUDF> getGenericUDFClass() {
-    return genericUDFClass;
+  /**
+   * Get a new GenericUDF object for the function. 
+   */
+  public GenericUDF getGenericUDF() {
+    // GenericUDF is stateful - we have to make a copy here
+    return FunctionRegistry.cloneGenericUDF(genericUDF);
   }
   
+  /**
+   * Get the GenericUDAFResolver object for the function. 
+   */
   public GenericUDAFResolver getGenericUDAFResolver() {
     return genericUDAFResolver;
   }
   
+  /**
+   * Get the display name for this function.
+   * This should be transfered into exprNodeGenericUDFDesc, and will be 
+   * used as the first parameter to GenericUDF.getDisplayName() call, instead
+   * of hard-coding the function name.  This will solve the problem of 
+   * displaying only one name when a udf is registered under 2 names.
+   */
   public String getDisplayName() {
     return displayName;
   }
   
+  /**
+   * Native functions cannot be unregistered.
+   */
   public boolean isNative() {
     return isNative;
   }



Mime
View raw message