hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From hashut...@apache.org
Subject svn commit: r1441972 [1/8] - in /hive/branches/ptf-windowing: common/src/java/org/apache/hadoop/hive/conf/ data/files/ ql/if/ ql/src/gen/thrift/gen-cpp/ ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/ ql/src/gen/thrift/gen-php/ ql/sr...
Date Sun, 03 Feb 2013 21:43:12 GMT
Author: hashutosh
Date: Sun Feb  3 21:43:10 2013
New Revision: 1441972

URL: http://svn.apache.org/viewvc?rev=1441972&view=rev
Log:
Initial commit of ptf-windowing functionality. (Harish Butani and Prajakta Kalmegh via Ashutosh Chauhan)

Added:
    hive/branches/ptf-windowing/data/files/flights_tiny.txt
    hive/branches/ptf-windowing/data/files/part_tiny.txt
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFFunctionInfo.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPersistence.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFUtils.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PartitionTableFunctionDescription.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/WindowFunctionDescription.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/WindowFunctionInfo.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFSpec.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/WindowingExprNodeEvaluatorFactory.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDef.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDesc.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFCumeDist.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFDenseRank.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLeadLag.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NPath.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/Noop.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NoopWithMap.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java
    hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q
    hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_AmbiguousWindowDefn.q
    hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_DuplicateWindowAlias.q
    hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_HavingLead.q
    hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithPTF.q
    hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleDistributeClause.q
    hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleOrderInWindowDefs.q
    hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatiblePartitionInWindowDefs.q
    hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleSortClause.q
    hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_InvalidValueBoundary.q
    hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_JoinWithAmbigousAlias.q
    hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_NoSortNoDistByClause.q
    hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_WhereLead.q
    hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_WhereWithRankCond.q
    hive/branches/ptf-windowing/ql/src/test/queries/clientpositive/ptf_general_queries.q
    hive/branches/ptf-windowing/ql/src/test/results/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q.out
    hive/branches/ptf-windowing/ql/src/test/results/clientnegative/ptf_negative_AmbiguousWindowDefn.q.out
    hive/branches/ptf-windowing/ql/src/test/results/clientnegative/ptf_negative_DuplicateWindowAlias.q.out
    hive/branches/ptf-windowing/ql/src/test/results/clientnegative/ptf_negative_HavingLead.q.out
    hive/branches/ptf-windowing/ql/src/test/results/clientnegative/ptf_negative_HavingLeadWithPTF.q.out
    hive/branches/ptf-windowing/ql/src/test/results/clientnegative/ptf_negative_IncompatibleDistributeClause.q.out
    hive/branches/ptf-windowing/ql/src/test/results/clientnegative/ptf_negative_IncompatibleOrderInWindowDefs.q.out
    hive/branches/ptf-windowing/ql/src/test/results/clientnegative/ptf_negative_IncompatiblePartitionInWindowDefs.q.out
    hive/branches/ptf-windowing/ql/src/test/results/clientnegative/ptf_negative_IncompatibleSortClause.q.out
    hive/branches/ptf-windowing/ql/src/test/results/clientnegative/ptf_negative_InvalidValueBoundary.q.out
    hive/branches/ptf-windowing/ql/src/test/results/clientnegative/ptf_negative_JoinWithAmbigousAlias.q.out
    hive/branches/ptf-windowing/ql/src/test/results/clientnegative/ptf_negative_NoSortNoDistByClause.q.out
    hive/branches/ptf-windowing/ql/src/test/results/clientnegative/ptf_negative_WhereLead.q.out
    hive/branches/ptf-windowing/ql/src/test/results/clientnegative/ptf_negative_WhereWithRankCond.q.out
    hive/branches/ptf-windowing/ql/src/test/results/clientpositive/ptf_general_queries.q.out
Modified:
    hive/branches/ptf-windowing/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/branches/ptf-windowing/ql/if/queryplan.thrift
    hive/branches/ptf-windowing/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp
    hive/branches/ptf-windowing/ql/src/gen/thrift/gen-cpp/queryplan_types.h
    hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
    hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java
    hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java
    hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java
    hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java
    hive/branches/ptf-windowing/ql/src/gen/thrift/gen-php/Types.php
    hive/branches/ptf-windowing/ql/src/gen/thrift/gen-py/queryplan/ttypes.py
    hive/branches/ptf-windowing/ql/src/gen/thrift/gen-rb/queryplan_types.rb
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageInfo.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/Generator.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
    hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java

Modified: hive/branches/ptf-windowing/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/ptf-windowing/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Sun Feb  3 21:43:10 2013
@@ -707,6 +707,11 @@ public class HiveConf extends Configurat
 
     // Whether to show the unquoted partition names in query results.
     HIVE_DECODE_PARTITION_NAME("hive.decode.partition.name", false),
+
+    // ptf partition constants
+    HIVE_PTF_PARTITION_PERSISTENCE_CLASS("hive.ptf.partition.persistence",
+        "org.apache.hadoop.hive.ql.exec.PTFPersistence$PartitionedByteBasedList"),
+    HIVE_PTF_PARTITION_PERSISTENT_SIZE("hive.ptf.partition.persistence.memsize", (int) Math.pow(2, (6 + 10 + 10)) ), // 64MB
     ;
 
     public final String varname;

Added: hive/branches/ptf-windowing/data/files/flights_tiny.txt
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/data/files/flights_tiny.txt?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/data/files/flights_tiny.txt (added)
+++ hive/branches/ptf-windowing/data/files/flights_tiny.txt Sun Feb  3 21:43:10 2013
@@ -0,0 +1,137 @@
+BaltimoreNew York20101020-30.01064
+BaltimoreNew York2010102023.01142
+BaltimoreNew York201010206.01599
+ChicagoNew York2010102042.0361
+ChicagoNew York2010102024.0897
+ChicagoNew York2010102015.01531
+ChicagoNew York20101020-6.01610
+ChicagoNew York20101020-2.03198
+BaltimoreNew York2010102117.01064
+BaltimoreNew York20101021105.01142
+BaltimoreNew York2010102128.01599
+ChicagoNew York20101021142.0361
+ChicagoNew York2010102177.0897
+ChicagoNew York2010102153.01531
+ChicagoNew York20101021-5.01610
+ChicagoNew York2010102151.03198
+BaltimoreNew York20101022-12.01064
+BaltimoreNew York2010102254.01142
+BaltimoreNew York2010102218.01599
+ChicagoNew York201010222.0361
+ChicagoNew York2010102224.0897
+ChicagoNew York2010102216.01531
+ChicagoNew York20101022-6.01610
+ChicagoNew York20101022-11.03198
+BaltimoreNew York2010102318.0272
+BaltimoreNew York20101023-10.01805
+BaltimoreNew York201010236.03171
+ChicagoNew York201010233.0384
+ChicagoNew York2010102332.0426
+ChicagoNew York201010231.0650
+ChicagoNew York2010102311.03085
+BaltimoreNew York2010102412.01599
+BaltimoreNew York2010102420.02571
+ChicagoNew York2010102410.0361
+ChicagoNew York20101024113.0897
+ChicagoNew York20101024-5.01531
+ChicagoNew York20101024-17.01610
+ChicagoNew York20101024-3.03198
+BaltimoreNew York20101025-25.01064
+BaltimoreNew York2010102592.01142
+BaltimoreNew York20101025106.01599
+ChicagoNew York2010102531.0361
+ChicagoNew York20101025-1.0897
+ChicagoNew York2010102543.01531
+ChicagoNew York201010256.01610
+ChicagoNew York20101025-16.03198
+BaltimoreNew York20101026-22.01064
+BaltimoreNew York20101026123.01142
+BaltimoreNew York2010102690.01599
+ChicagoNew York2010102612.0361
+ChicagoNew York201010260.0897
+ChicagoNew York2010102629.01531
+ChicagoNew York20101026-17.01610
+ChicagoNew York201010266.03198
+BaltimoreNew York20101027-18.01064
+BaltimoreNew York2010102749.01142
+BaltimoreNew York2010102792.01599
+ChicagoNew York20101027148.0361
+ChicagoNew York20101027-11.0897
+ChicagoNew York2010102770.01531
+ChicagoNew York201010278.01610
+ChicagoNew York2010102721.03198
+BaltimoreNew York20101028-4.01064
+BaltimoreNew York20101028-14.01142
+BaltimoreNew York20101028-14.01599
+ChicagoNew York201010282.0361
+ChicagoNew York201010282.0897
+ChicagoNew York20101028-11.01531
+ChicagoNew York201010283.01610
+ChicagoNew York20101028-18.03198
+BaltimoreNew York20101029-24.01064
+BaltimoreNew York2010102921.01142
+BaltimoreNew York20101029-2.01599
+ChicagoNew York20101029-12.0361
+ChicagoNew York20101029-11.0897
+ChicagoNew York2010102915.01531
+ChicagoNew York20101029-18.01610
+ChicagoNew York20101029-4.03198
+BaltimoreNew York2010103014.0272
+BaltimoreNew York20101030-1.01805
+BaltimoreNew York201010305.03171
+ChicagoNew York20101030-6.0384
+ChicagoNew York20101030-10.0426
+ChicagoNew York20101030-5.0650
+ChicagoNew York20101030-5.03085
+BaltimoreNew York20101031-1.01599
+BaltimoreNew York20101031-14.02571
+ChicagoNew York20101031-25.0361
+ChicagoNew York20101031-18.0897
+ChicagoNew York20101031-4.01531
+ChicagoNew York20101031-22.01610
+ChicagoNew York20101031-15.03198
+ClevelandNew York20101030-23.02018
+ClevelandNew York20101030-12.02932
+ClevelandNew York20101029-4.02630
+ClevelandNew York20101029-19.02646
+ClevelandNew York20101029-12.03014
+ClevelandNew York201010283.02630
+ClevelandNew York20101028-6.02646
+ClevelandNew York201010281.03014
+ClevelandNew York2010102716.02630
+ClevelandNew York2010102727.03014
+ClevelandNew York201010264.02630
+ClevelandNew York20101026-27.02646
+ClevelandNew York20101026-11.02662
+ClevelandNew York2010102613.03014
+ClevelandNew York20101025-4.02630
+ClevelandNew York2010102581.02646
+ClevelandNew York2010102542.03014
+ClevelandNew York201010245.02254
+ClevelandNew York20101024-11.02630
+ClevelandNew York20101024-20.02646
+ClevelandNew York20101024-9.03014
+ClevelandNew York20101023-21.02932
+ClevelandNew York201010221.02630
+ClevelandNew York20101022-25.02646
+ClevelandNew York20101022-3.03014
+ClevelandNew York201010213.02630
+ClevelandNew York2010102129.02646
+ClevelandNew York2010102172.03014
+ClevelandNew York20101020-8.02630
+ClevelandNew York20101020-15.03014
+WashingtonNew York20101023-25.05832
+WashingtonNew York20101023-21.05904
+WashingtonNew York20101023-18.05917
+WashingtonNew York20101030-27.05904
+WashingtonNew York20101030-16.05917
+WashingtonNew York20101020-2.07291
+WashingtonNew York2010102122.07291
+WashingtonNew York20101023-16.07274
+WashingtonNew York20101024-26.07282
+WashingtonNew York201010259.07291
+WashingtonNew York201010264.07291
+WashingtonNew York2010102726.07291
+WashingtonNew York2010102845.07291
+WashingtonNew York201010291.07291
+WashingtonNew York20101031-18.07282

Added: hive/branches/ptf-windowing/data/files/part_tiny.txt
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/data/files/part_tiny.txt?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/data/files/part_tiny.txt (added)
+++ hive/branches/ptf-windowing/data/files/part_tiny.txt Sun Feb  3 21:43:10 2013
@@ -0,0 +1,26 @@
+121152almond antique burnished rose metallicManufacturer#1Brand#14PROMO PLATED TIN2JUMBO BOX1173.15e pinto beans h
+121152almond antique burnished rose metallicManufacturer#1Brand#14PROMO PLATED TIN2JUMBO BOX1173.15e pinto beans h
+85768almond antique chartreuse lavender yellowManufacturer#1Brand#12LARGE BRUSHED STEEL34SM BAG1753.76refull
+110592almond antique salmon chartreuse burlywoodManufacturer#1Brand#15PROMO BURNISHED NICKEL6JUMBO PKG1602.59 to the furiously
+86428almond aquamarine burnished black steelManufacturer#1Brand#12STANDARD ANODIZED STEEL28WRAP BAG1414.42arefully 
+65667almond aquamarine pink moccasin thistleManufacturer#1Brand#12LARGE BURNISHED STEEL42JUMBO CASE1632.66e across the expr
+105685almond antique violet chocolate turquoiseManufacturer#2Brand#22MEDIUM ANODIZED COPPER14MED CAN1690.68ly pending requ
+191709almond antique violet turquoise frostedManufacturer#2Brand#22ECONOMY POLISHED STEEL40MED BOX1800.7 haggle
+146985almond aquamarine midnight light salmonManufacturer#2Brand#23MEDIUM BURNISHED COPPER2SM CASE2031.98s cajole caref
+132666almond aquamarine rose maroon antiqueManufacturer#2Brand#24SMALL POLISHED NICKEL25MED BOX1698.66even 
+195606almond aquamarine sandy cyan gainsboroManufacturer#2Brand#25STANDARD PLATED TIN18SM PKG1701.6ic de
+90681almond antique chartreuse khaki whiteManufacturer#3Brand#31MEDIUM BURNISHED TIN17SM CASE1671.68are slyly after the sl
+17273almond antique forest lavender goldenrodManufacturer#3Brand#35PROMO ANODIZED TIN14JUMBO CASE1190.27along the
+112398almond antique metallic orange dimManufacturer#3Brand#32MEDIUM BURNISHED BRASS19JUMBO JAR1410.39ole car
+40982almond antique misty red oliveManufacturer#3Brand#32ECONOMY PLATED COPPER1LG PKG1922.98c foxes can s
+144293almond antique olive coral navajoManufacturer#3Brand#34STANDARD POLISHED STEEL45JUMBO CAN1337.29ag furiously about 
+49671almond antique gainsboro frosted violetManufacturer#4Brand#41SMALL BRUSHED BRASS10SM BOX1620.67ccounts run quick
+48427almond antique violet mint lemonManufacturer#4Brand#42PROMO POLISHED STEEL39SM CASE1375.42hely ironic i
+45261almond aquamarine floral ivory bisqueManufacturer#4Brand#42SMALL PLATED STEEL27WRAP CASE1206.26careful
+17927almond aquamarine yellow dodger mintManufacturer#4Brand#41ECONOMY BRUSHED COPPER7SM PKG1844.92ites. eve
+33357almond azure aquamarine papaya violetManufacturer#4Brand#41STANDARD ANODIZED TIN12WRAP CASE1290.35reful
+192697almond antique blue firebrick mintManufacturer#5Brand#52MEDIUM BURNISHED TIN31LG DRUM1789.69ickly ir
+42669almond antique medium spring khakiManufacturer#5Brand#51STANDARD BURNISHED TIN6MED CAN1611.66sits haggl
+155733almond antique sky peru orangeManufacturer#5Brand#53SMALL PLATED BRASS2WRAP DRUM1788.73furiously. bra
+15103almond aquamarine dodger light gainsboroManufacturer#5Brand#53ECONOMY BURNISHED STEEL46LG PACK1018.1packages hinder carefu
+78486almond azure blanched chiffon midnightManufacturer#5Brand#52LARGE BRUSHED BRASS23MED BAG1464.48hely blith

Modified: hive/branches/ptf-windowing/ql/if/queryplan.thrift
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/if/queryplan.thrift?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/if/queryplan.thrift (original)
+++ hive/branches/ptf-windowing/ql/if/queryplan.thrift Sun Feb  3 21:43:10 2013
@@ -53,6 +53,7 @@ enum OperatorType {
   LATERALVIEWFORWARD,
   HASHTABLESINK,
   HASHTABLEDUMMY,
+  PTF,
 }
 
 struct Operator {

Modified: hive/branches/ptf-windowing/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp (original)
+++ hive/branches/ptf-windowing/ql/src/gen/thrift/gen-cpp/queryplan_types.cpp Sun Feb  3 21:43:10 2013
@@ -48,7 +48,8 @@ int _kOperatorTypeValues[] = {
   OperatorType::LATERALVIEWJOIN,
   OperatorType::LATERALVIEWFORWARD,
   OperatorType::HASHTABLESINK,
-  OperatorType::HASHTABLEDUMMY
+  OperatorType::HASHTABLEDUMMY,
+  OperatorType::PTF
 };
 const char* _kOperatorTypeNames[] = {
   "JOIN",
@@ -68,9 +69,10 @@ const char* _kOperatorTypeNames[] = {
   "LATERALVIEWJOIN",
   "LATERALVIEWFORWARD",
   "HASHTABLESINK",
-  "HASHTABLEDUMMY"
+  "HASHTABLEDUMMY",
+  "PTF"
 };
-const std::map<int, const char*> _OperatorType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(18, _kOperatorTypeValues, _kOperatorTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
+const std::map<int, const char*> _OperatorType_VALUES_TO_NAMES(::apache::thrift::TEnumIterator(19, _kOperatorTypeValues, _kOperatorTypeNames), ::apache::thrift::TEnumIterator(-1, NULL, NULL));
 
 int _kTaskTypeValues[] = {
   TaskType::MAP,

Modified: hive/branches/ptf-windowing/ql/src/gen/thrift/gen-cpp/queryplan_types.h
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/gen/thrift/gen-cpp/queryplan_types.h?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/gen/thrift/gen-cpp/queryplan_types.h (original)
+++ hive/branches/ptf-windowing/ql/src/gen/thrift/gen-cpp/queryplan_types.h Sun Feb  3 21:43:10 2013
@@ -53,7 +53,8 @@ struct OperatorType {
     LATERALVIEWJOIN = 14,
     LATERALVIEWFORWARD = 15,
     HASHTABLESINK = 16,
-    HASHTABLEDUMMY = 17
+    HASHTABLEDUMMY = 17,
+    PTF = 18
   };
 };
 

Modified: hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java (original)
+++ hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java Sun Feb  3 21:43:10 2013
@@ -810,7 +810,7 @@ public class Operator implements org.apa
                 for (int _i25 = 0; _i25 < _map24.size; ++_i25)
                 {
                   String _key26; // required
-                  String _val27; // optional
+                  String _val27; // required
                   _key26 = iprot.readString();
                   _val27 = iprot.readString();
                   struct.operatorAttributes.put(_key26, _val27);
@@ -830,7 +830,7 @@ public class Operator implements org.apa
                 for (int _i29 = 0; _i29 < _map28.size; ++_i29)
                 {
                   String _key30; // required
-                  long _val31; // optional
+                  long _val31; // required
                   _key30 = iprot.readString();
                   _val31 = iprot.readI64();
                   struct.operatorCounters.put(_key30, _val31);
@@ -1003,7 +1003,7 @@ public class Operator implements org.apa
           for (int _i37 = 0; _i37 < _map36.size; ++_i37)
           {
             String _key38; // required
-            String _val39; // optional
+            String _val39; // required
             _key38 = iprot.readString();
             _val39 = iprot.readString();
             struct.operatorAttributes.put(_key38, _val39);
@@ -1018,7 +1018,7 @@ public class Operator implements org.apa
           for (int _i41 = 0; _i41 < _map40.size; ++_i41)
           {
             String _key42; // required
-            long _val43; // optional
+            long _val43; // required
             _key42 = iprot.readString();
             _val43 = iprot.readI64();
             struct.operatorCounters.put(_key42, _val43);

Modified: hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java (original)
+++ hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java Sun Feb  3 21:43:10 2013
@@ -29,7 +29,8 @@ public enum OperatorType implements org.
   LATERALVIEWJOIN(14),
   LATERALVIEWFORWARD(15),
   HASHTABLESINK(16),
-  HASHTABLEDUMMY(17);
+  HASHTABLEDUMMY(17),
+  PTF(18);
 
   private final int value;
 
@@ -86,6 +87,8 @@ public enum OperatorType implements org.
         return HASHTABLESINK;
       case 17:
         return HASHTABLEDUMMY;
+      case 18:
+        return PTF;
       default:
         return null;
     }

Modified: hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java (original)
+++ hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java Sun Feb  3 21:43:10 2013
@@ -983,7 +983,7 @@ public class Query implements org.apache
                 for (int _i101 = 0; _i101 < _map100.size; ++_i101)
                 {
                   String _key102; // required
-                  String _val103; // optional
+                  String _val103; // required
                   _key102 = iprot.readString();
                   _val103 = iprot.readString();
                   struct.queryAttributes.put(_key102, _val103);
@@ -1003,7 +1003,7 @@ public class Query implements org.apache
                 for (int _i105 = 0; _i105 < _map104.size; ++_i105)
                 {
                   String _key106; // required
-                  long _val107; // optional
+                  long _val107; // required
                   _key106 = iprot.readString();
                   _val107 = iprot.readI64();
                   struct.queryCounters.put(_key106, _val107);
@@ -1239,7 +1239,7 @@ public class Query implements org.apache
           for (int _i118 = 0; _i118 < _map117.size; ++_i118)
           {
             String _key119; // required
-            String _val120; // optional
+            String _val120; // required
             _key119 = iprot.readString();
             _val120 = iprot.readString();
             struct.queryAttributes.put(_key119, _val120);
@@ -1254,7 +1254,7 @@ public class Query implements org.apache
           for (int _i122 = 0; _i122 < _map121.size; ++_i122)
           {
             String _key123; // required
-            long _val124; // optional
+            long _val124; // required
             _key123 = iprot.readString();
             _val124 = iprot.readI64();
             struct.queryCounters.put(_key123, _val124);

Modified: hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java (original)
+++ hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java Sun Feb  3 21:43:10 2013
@@ -911,7 +911,7 @@ public class Stage implements org.apache
                 for (int _i73 = 0; _i73 < _map72.size; ++_i73)
                 {
                   String _key74; // required
-                  String _val75; // optional
+                  String _val75; // required
                   _key74 = iprot.readString();
                   _val75 = iprot.readString();
                   struct.stageAttributes.put(_key74, _val75);
@@ -931,7 +931,7 @@ public class Stage implements org.apache
                 for (int _i77 = 0; _i77 < _map76.size; ++_i77)
                 {
                   String _key78; // required
-                  long _val79; // optional
+                  long _val79; // required
                   _key78 = iprot.readString();
                   _val79 = iprot.readI64();
                   struct.stageCounters.put(_key78, _val79);
@@ -1147,7 +1147,7 @@ public class Stage implements org.apache
           for (int _i90 = 0; _i90 < _map89.size; ++_i90)
           {
             String _key91; // required
-            String _val92; // optional
+            String _val92; // required
             _key91 = iprot.readString();
             _val92 = iprot.readString();
             struct.stageAttributes.put(_key91, _val92);
@@ -1162,7 +1162,7 @@ public class Stage implements org.apache
           for (int _i94 = 0; _i94 < _map93.size; ++_i94)
           {
             String _key95; // required
-            long _val96; // optional
+            long _val96; // required
             _key95 = iprot.readString();
             _val96 = iprot.readI64();
             struct.stageCounters.put(_key95, _val96);

Modified: hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java (original)
+++ hive/branches/ptf-windowing/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java Sun Feb  3 21:43:10 2013
@@ -996,7 +996,7 @@ public class Task implements org.apache.
                 for (int _i45 = 0; _i45 < _map44.size; ++_i45)
                 {
                   String _key46; // required
-                  String _val47; // optional
+                  String _val47; // required
                   _key46 = iprot.readString();
                   _val47 = iprot.readString();
                   struct.taskAttributes.put(_key46, _val47);
@@ -1016,7 +1016,7 @@ public class Task implements org.apache.
                 for (int _i49 = 0; _i49 < _map48.size; ++_i49)
                 {
                   String _key50; // required
-                  long _val51; // optional
+                  long _val51; // required
                   _key50 = iprot.readString();
                   _val51 = iprot.readI64();
                   struct.taskCounters.put(_key50, _val51);
@@ -1256,7 +1256,7 @@ public class Task implements org.apache.
           for (int _i62 = 0; _i62 < _map61.size; ++_i62)
           {
             String _key63; // required
-            String _val64; // optional
+            String _val64; // required
             _key63 = iprot.readString();
             _val64 = iprot.readString();
             struct.taskAttributes.put(_key63, _val64);
@@ -1271,7 +1271,7 @@ public class Task implements org.apache.
           for (int _i66 = 0; _i66 < _map65.size; ++_i66)
           {
             String _key67; // required
-            long _val68; // optional
+            long _val68; // required
             _key67 = iprot.readString();
             _val68 = iprot.readI64();
             struct.taskCounters.put(_key67, _val68);

Modified: hive/branches/ptf-windowing/ql/src/gen/thrift/gen-php/Types.php
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/gen/thrift/gen-php/Types.php?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/gen/thrift/gen-php/Types.php (original)
+++ hive/branches/ptf-windowing/ql/src/gen/thrift/gen-php/Types.php Sun Feb  3 21:43:10 2013
@@ -53,6 +53,7 @@ final class OperatorType {
   const LATERALVIEWFORWARD = 15;
   const HASHTABLESINK = 16;
   const HASHTABLEDUMMY = 17;
+  const PTF = 18;
   static public $__names = array(
     0 => 'JOIN',
     1 => 'MAPJOIN',
@@ -72,6 +73,7 @@ final class OperatorType {
     15 => 'LATERALVIEWFORWARD',
     16 => 'HASHTABLESINK',
     17 => 'HASHTABLEDUMMY',
+    18 => 'PTF',
   );
 }
 

Modified: hive/branches/ptf-windowing/ql/src/gen/thrift/gen-py/queryplan/ttypes.py
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/gen/thrift/gen-py/queryplan/ttypes.py?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/gen/thrift/gen-py/queryplan/ttypes.py (original)
+++ hive/branches/ptf-windowing/ql/src/gen/thrift/gen-py/queryplan/ttypes.py Sun Feb  3 21:43:10 2013
@@ -63,6 +63,7 @@ class OperatorType:
   LATERALVIEWFORWARD = 15
   HASHTABLESINK = 16
   HASHTABLEDUMMY = 17
+  PTF = 18
 
   _VALUES_TO_NAMES = {
     0: "JOIN",
@@ -83,6 +84,7 @@ class OperatorType:
     15: "LATERALVIEWFORWARD",
     16: "HASHTABLESINK",
     17: "HASHTABLEDUMMY",
+    18: "PTF",
   }
 
   _NAMES_TO_VALUES = {
@@ -104,6 +106,7 @@ class OperatorType:
     "LATERALVIEWFORWARD": 15,
     "HASHTABLESINK": 16,
     "HASHTABLEDUMMY": 17,
+    "PTF": 18,
   }
 
 class TaskType:

Modified: hive/branches/ptf-windowing/ql/src/gen/thrift/gen-rb/queryplan_types.rb
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/gen/thrift/gen-rb/queryplan_types.rb?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/gen/thrift/gen-rb/queryplan_types.rb (original)
+++ hive/branches/ptf-windowing/ql/src/gen/thrift/gen-rb/queryplan_types.rb Sun Feb  3 21:43:10 2013
@@ -39,8 +39,9 @@ module OperatorType
   LATERALVIEWFORWARD = 15
   HASHTABLESINK = 16
   HASHTABLEDUMMY = 17
-  VALUE_MAP = {0 => "JOIN", 1 => "MAPJOIN", 2 => "EXTRACT", 3 => "FILTER", 4 => "FORWARD", 5 => "GROUPBY", 6 => "LIMIT", 7 => "SCRIPT", 8 => "SELECT", 9 => "TABLESCAN", 10 => "FILESINK", 11 => "REDUCESINK", 12 => "UNION", 13 => "UDTF", 14 => "LATERALVIEWJOIN", 15 => "LATERALVIEWFORWARD", 16 => "HASHTABLESINK", 17 => "HASHTABLEDUMMY"}
-  VALID_VALUES = Set.new([JOIN, MAPJOIN, EXTRACT, FILTER, FORWARD, GROUPBY, LIMIT, SCRIPT, SELECT, TABLESCAN, FILESINK, REDUCESINK, UNION, UDTF, LATERALVIEWJOIN, LATERALVIEWFORWARD, HASHTABLESINK, HASHTABLEDUMMY]).freeze
+  PTF = 18
+  VALUE_MAP = {0 => "JOIN", 1 => "MAPJOIN", 2 => "EXTRACT", 3 => "FILTER", 4 => "FORWARD", 5 => "GROUPBY", 6 => "LIMIT", 7 => "SCRIPT", 8 => "SELECT", 9 => "TABLESCAN", 10 => "FILESINK", 11 => "REDUCESINK", 12 => "UNION", 13 => "UDTF", 14 => "LATERALVIEWJOIN", 15 => "LATERALVIEWFORWARD", 16 => "HASHTABLESINK", 17 => "HASHTABLEDUMMY", 18 => "PTF"}
+  VALID_VALUES = Set.new([JOIN, MAPJOIN, EXTRACT, FILTER, FORWARD, GROUPBY, LIMIT, SCRIPT, SELECT, TABLESCAN, FILESINK, REDUCESINK, UNION, UDTF, LATERALVIEWJOIN, LATERALVIEWFORWARD, HASHTABLESINK, HASHTABLEDUMMY, PTF]).freeze
 end
 
 module TaskType

Modified: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java (original)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java Sun Feb  3 21:43:10 2013
@@ -37,6 +37,7 @@ public class QueryProperties {
   boolean hasOrderBy = false;
   boolean hasSortBy = false;
   boolean hasJoinFollowedByGroupBy = false;
+  boolean hasPTF = false;
 
   // does the query have a using clause
   boolean usesScript = false;
@@ -107,4 +108,12 @@ public class QueryProperties {
   public void setHasClusterBy(boolean hasClusterBy) {
     this.hasClusterBy = hasClusterBy;
   }
+
+  /** @return whether this query was flagged as containing a PTF (partitioned table function). */
+  public boolean hasPTF() {
+    return hasPTF;
+  }
+
+  /** Mark the query as containing (or not containing) a PTF. */
+  public void setHasPTF(boolean hasPTF) {
+    this.hasPTF = hasPTF;
+  }
 }

Modified: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Sun Feb  3 21:43:10 2013
@@ -135,15 +135,23 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCount;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCovariance;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCovarianceSample;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCumeDist;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFDenseRank;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEWAHBitmap;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFFirstValue;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFHistogramNumeric;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFLastValue;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMax;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMin;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFNTile;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFParameterInfo;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentRank;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFPercentileApprox;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFRank;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver2;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFRowNumber;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFStd;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFStdSample;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum;
@@ -172,6 +180,8 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFInFile;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIndex;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFInstr;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLeadLag.GenericUDFLag;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLeadLag.GenericUDFLead;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLocate;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMap;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMapKeys;
@@ -214,6 +224,11 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFParseUrlTuple;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFStack;
 import org.apache.hadoop.hive.ql.udf.generic.SimpleGenericUDAFParameterInfo;
+import org.apache.hadoop.hive.ql.udf.ptf.NPath.NPathResolver;
+import org.apache.hadoop.hive.ql.udf.ptf.Noop.NoopResolver;
+import org.apache.hadoop.hive.ql.udf.ptf.NoopWithMap.NoopWithMapResolver;
+import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver;
+import org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver;
 import org.apache.hadoop.hive.ql.udf.xml.GenericUDFXPath;
 import org.apache.hadoop.hive.ql.udf.xml.UDFXPathBoolean;
 import org.apache.hadoop.hive.ql.udf.xml.UDFXPathDouble;
@@ -236,6 +251,7 @@ import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 import org.w3c.dom.NodeList;
 
+
 /**
  * FunctionRegistry.
  */
@@ -247,6 +263,24 @@ public final class FunctionRegistry {
    * The mapping from expression function names to expression classes.
    */
   static Map<String, FunctionInfo> mFunctions = Collections.synchronizedMap(new LinkedHashMap<String, FunctionInfo>());
+
+  /*
+   * PTF variables
+   * */
+
+  public static final String LEAD_FUNC_NAME = "lead";
+  public static final String LAG_FUNC_NAME = "lag";
+
+  public static final String WINDOWING_TABLE_FUNCTION = "windowingtablefunction";
+  public static final String NOOP_TABLE_FUNCTION = "noop";
+  public static final String NOOP_MAP_TABLE_FUNCTION = "noopwithmap";
+
+  static Map<String, PTFFunctionInfo> tableFunctions = Collections.synchronizedMap(new LinkedHashMap<String, PTFFunctionInfo>());
+  static Map<String, WindowFunctionInfo> windowFunctions = Collections.synchronizedMap(new LinkedHashMap<String, WindowFunctionInfo>());
+
+  public static final ArrayList<String> RANKING_FUNCTIONS = new  ArrayList<String>();
+  public static final ArrayList<String> NAVIGATION_FUNCTIONS = new  ArrayList<String>();
+
   static {
     registerUDF("concat", UDFConcat.class, false);
     registerUDF("substr", UDFSubstr.class, false);
@@ -480,6 +514,34 @@ public final class FunctionRegistry {
     registerGenericUDTF("json_tuple", GenericUDTFJSONTuple.class);
     registerGenericUDTF("parse_url_tuple", GenericUDTFParseUrlTuple.class);
     registerGenericUDTF("stack", GenericUDTFStack.class);
+
+    //PTF declarations
+    registerGenericUDF(true, LEAD_FUNC_NAME, GenericUDFLead.class);
+    registerGenericUDF(true, LAG_FUNC_NAME, GenericUDFLag.class);
+
+    registerHiveUDAFsAsWindowFunctions();
+    registerWindowFunction("rownumber", new GenericUDAFRowNumber());
+    registerWindowFunction("rank", new GenericUDAFRank());
+    registerWindowFunction("denserank", new GenericUDAFDenseRank());
+    registerWindowFunction("percentrank", new GenericUDAFPercentRank());
+    registerWindowFunction("cumedist", new GenericUDAFCumeDist());
+    registerWindowFunction("ntile", new GenericUDAFNTile());
+    registerWindowFunction("first_value", new GenericUDAFFirstValue());
+    registerWindowFunction("last_value", new GenericUDAFLastValue());
+
+    RANKING_FUNCTIONS.add("rank");
+    RANKING_FUNCTIONS.add("denserank");
+    RANKING_FUNCTIONS.add("percentrank");
+
+    NAVIGATION_FUNCTIONS.add(LEAD_FUNC_NAME);
+    NAVIGATION_FUNCTIONS.add(LAG_FUNC_NAME);
+    NAVIGATION_FUNCTIONS.add("first_value");
+    NAVIGATION_FUNCTIONS.add("last_value");
+
+    registerTableFunction(NOOP_TABLE_FUNCTION, NoopResolver.class);
+    registerTableFunction(NOOP_MAP_TABLE_FUNCTION, NoopWithMapResolver.class);
+    registerTableFunction(WINDOWING_TABLE_FUNCTION,  WindowingTableFunctionResolver.class);
+    registerTableFunction("npath", NPathResolver.class);
   }
 
   public static void registerTemporaryUDF(String functionName,
@@ -1349,4 +1411,69 @@ public final class FunctionRegistry {
   private FunctionRegistry() {
     // prevent instantiation
   }
+
+
+  //---------PTF functions------------
+
+  /**
+   * Register a UDAF resolver as a window function: it is added to the regular
+   * function registry via registerGenericUDAF and then indexed in the
+   * window-function table under the lowercased name.
+   *
+   * @param name function name as it will be referenced in queries
+   * @param wFn  UDAF resolver implementing the window function
+   */
+  public static void registerWindowFunction(String name, GenericUDAFResolver wFn)
+  {
+    registerGenericUDAF(true, name, wFn);
+    FunctionInfo fInfo = getFunctionInfo(name);
+    WindowFunctionInfo wInfo = new WindowFunctionInfo(fInfo);
+    windowFunctions.put(name.toLowerCase(), wInfo);
+  }
+
+  /** @return true if a window function is registered under the given name (case-insensitive). */
+  public static boolean isWindowFunction(String name)
+  {
+     WindowFunctionInfo wFInfo = windowFunctions.get(name.toLowerCase());
+     return wFInfo != null;
+  }
+
+  /** Look up window-function metadata by name (case-insensitive); null if not registered. */
+  public static WindowFunctionInfo getWindowFunctionInfo(String name)
+  {
+    return windowFunctions.get(name.toLowerCase());
+  }
+
+  /**
+   * Expose every already-registered generic UDAF as a window function.
+   * NOTE(review): keys are stored exactly as returned by getFunctionNames(),
+   * while all lookups lowercase the name — confirm registered names are
+   * already lowercase, otherwise these entries would be unreachable.
+   */
+  static void registerHiveUDAFsAsWindowFunctions()
+  {
+    Set<String> fNames = getFunctionNames();
+    for(String fName : fNames)
+    {
+      FunctionInfo fInfo = getFunctionInfo(fName);
+      if ( fInfo.isGenericUDAF())
+      {
+        WindowFunctionInfo wInfo = new WindowFunctionInfo(fInfo);
+        windowFunctions.put(fName, wInfo);
+      }
+    }
+  }
+
+  /**
+   * @return true if a table function is registered under the given name
+   *         (case-insensitive) and is not marked internal.
+   */
+  public static boolean isTableFunction(String name)
+  {
+    PTFFunctionInfo tFInfo = tableFunctions.get(name.toLowerCase());
+     return tFInfo != null && !tFInfo.isInternal();
+  }
+
+  /**
+   * Instantiate (via reflection) the resolver for the named table function.
+   * NOTE(review): no null check on the registry lookup — an unregistered
+   * name results in a NullPointerException here.
+   */
+  public static TableFunctionResolver getTableFunctionResolver(String name)
+  {
+    PTFFunctionInfo tfInfo = tableFunctions.get(name.toLowerCase());
+    return (TableFunctionResolver) ReflectionUtils.newInstance(tfInfo.getFunctionResolver(), null);
+  }
+
+  /** Convenience accessor: a fresh resolver for the built-in windowing table function. */
+  public static TableFunctionResolver getWindowingTableFunction()
+  {
+    return getTableFunctionResolver(WINDOWING_TABLE_FUNCTION);
+  }
+
+  /** Convenience accessor: a fresh resolver for the built-in noop table function. */
+  public static TableFunctionResolver getNoopTableFunction()
+  {
+    return getTableFunctionResolver(NOOP_TABLE_FUNCTION);
+  }
+
+  /** Register a table-function resolver class, indexed under the lowercased name. */
+  public static void registerTableFunction(String name, Class<? extends TableFunctionResolver> tFnCls)
+  {
+    PTFFunctionInfo tInfo = new PTFFunctionInfo(name, tFnCls);
+    tableFunctions.put(name.toLowerCase(), tInfo);
+  }
+
 }

Modified: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java?rev=1441972&r1=1441971&r2=1441972&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java (original)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java Sun Feb  3 21:43:10 2013
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.ql.plan.La
 import org.apache.hadoop.hive.ql.plan.LimitDesc;
 import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hadoop.hive.ql.plan.PTFDesc;
 import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
 import org.apache.hadoop.hive.ql.plan.SMBJoinDesc;
 import org.apache.hadoop.hive.ql.plan.ScriptDesc;
@@ -74,6 +75,7 @@ public final class OperatorFactory {
     opvec.add(new OpTuple<FileSinkDesc>(FileSinkDesc.class, FileSinkOperator.class));
     opvec.add(new OpTuple<CollectDesc>(CollectDesc.class, CollectOperator.class));
     opvec.add(new OpTuple<ScriptDesc>(ScriptDesc.class, ScriptOperator.class));
+    opvec.add(new OpTuple<PTFDesc>(PTFDesc.class, PTFOperator.class));
     opvec.add(new OpTuple<ReduceSinkDesc>(ReduceSinkDesc.class, ReduceSinkOperator.class));
     opvec.add(new OpTuple<ExtractDesc>(ExtractDesc.class, ExtractOperator.class));
     opvec.add(new OpTuple<GroupByDesc>(GroupByDesc.class, GroupByOperator.class));

Added: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFFunctionInfo.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFFunctionInfo.java?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFFunctionInfo.java (added)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFFunctionInfo.java Sun Feb  3 21:43:10 2013
@@ -0,0 +1,40 @@
+package org.apache.hadoop.hive.ql.exec;
+
+import org.apache.hadoop.hive.ql.exec.PartitionTableFunctionDescription;
+import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver;
+
+/**
+ * Registry entry for a Partitioned Table Function (PTF): pairs the function's
+ * display name with the {@link TableFunctionResolver} class used to
+ * instantiate it, and records whether the function is internal-only.
+ */
+class PTFFunctionInfo
+{
+	// name under which the function was registered (stored as given)
+	String displayName;
+	// resolver class, instantiated reflectively when the function is invoked
+	Class<? extends TableFunctionResolver>  functionResolver;
+	// true when the resolver's annotation marks it internal; internal
+	// functions are hidden from FunctionRegistry.isTableFunction()
+	boolean isInternal;
+
+	/**
+	 * @param displayName user-visible function name
+	 * @param tFnCls      resolver class; its optional
+	 *                    {@link PartitionTableFunctionDescription} annotation
+	 *                    supplies the isInternal flag (defaults to false)
+	 */
+	public PTFFunctionInfo(String displayName, Class<? extends TableFunctionResolver> tFnCls)
+	{
+		super();
+		this.displayName = displayName;
+		this.functionResolver = tFnCls;
+		isInternal = false;
+		PartitionTableFunctionDescription def = functionResolver.getAnnotation(PartitionTableFunctionDescription.class);
+		if ( def != null)
+		{
+			isInternal = def.isInternal();
+		}
+	}
+
+	public String getDisplayName()
+	{
+		return displayName;
+	}
+
+	public Class<? extends TableFunctionResolver> getFunctionResolver()
+	{
+		return functionResolver;
+	}
+
+	public boolean isInternal()
+	{
+		return isInternal;
+	}
+
+}
\ No newline at end of file

Added: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java (added)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java Sun Feb  3 21:43:10 2013
@@ -0,0 +1,396 @@
+package org.apache.hadoop.hive.ql.exec;
+
+import java.io.ByteArrayInputStream;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Stack;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.PTFPartition.PTFPartitionIterator;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.PTFTranslator;
+import org.apache.hadoop.hive.ql.parse.PTFTranslator.PTFDefDeserializer;
+import org.apache.hadoop.hive.ql.parse.PTFTranslator.PTFTranslationInfo;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hadoop.hive.ql.plan.PTFDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.ColumnDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.PTFInputDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.PartitionDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.TableFuncDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.WhereDef;
+import org.apache.hadoop.hive.ql.plan.PTFDesc;
+import org.apache.hadoop.hive.ql.plan.api.OperatorType;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLeadLag;
+import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.Writable;
+
+/**
+ * Operator that evaluates Partitioned Table Functions (PTFs) and windowing.
+ * Incoming rows are accumulated into a {@link PTFPartition}; when the
+ * partition key changes (reduce-side) or at operator close, the chain of
+ * table functions is executed over the accumulated partition and the results
+ * are forwarded, optionally filtered by a where clause and projected through
+ * the select list.
+ */
+public class PTFOperator extends Operator<PTFDesc> implements Serializable
+{
+
+	private static final long serialVersionUID = 1L;
+	// deserialized query definition, reconstructed in initializeOp()
+	PTFDef qDef;
+	// partition currently being accumulated from incoming rows
+	PTFPartition inputPart;
+	// true when this operator runs map-side (parent is not an ExtractOperator)
+	boolean isMapOperator;
+
+	transient KeyWrapperFactory keyWrapperFactory;
+	// partition key of the rows accumulated so far (null until the first row)
+	protected transient KeyWrapper currentKeys;
+	// scratch key, recomputed for each incoming row
+	protected transient KeyWrapper newKeys;
+	transient HiveConf hiveConf;
+
+	/*
+	 * 1. Find out if the operator is invoked at Map-Side or Reduce-side
+	 * 2. Get the deserialized QueryDef
+	 * 3. Reconstruct the transient variables in QueryDef
+	 * 4. Create input partition to store rows coming from previous operator
+	 */
+	@Override
+	protected void initializeOp(Configuration jobConf) throws HiveException
+	{
+		hiveConf = new HiveConf(jobConf, PTFOperator.class);
+		// if the parent is ExtractOperator, this invocation is from reduce-side
+		Operator<? extends OperatorDesc> parentOp = getParentOperators().get(0);
+		if (parentOp instanceof ExtractOperator)
+		{
+			isMapOperator = false;
+		}
+		else
+		{
+			isMapOperator = true;
+		}
+
+		// use the string from PTFDesc to get deserialized QueryDef
+		qDef = (PTFDef) PTFUtils
+				.deserialize(new ByteArrayInputStream(conf.getQueryDefStr()
+						.getBytes()));
+		reconstructQueryDef(hiveConf);
+    inputPart = PTFOperator.createFirstPartitionForChain(qDef,
+        inputObjInspectors[0], hiveConf, isMapOperator);
+
+		// OI for FileSinkOperator is taken from select-list (reduce-side)
+		// OI for ReduceSinkOperator is taken from TODO
+		if (isMapOperator)
+		{
+			TableFuncDef tDef = PTFTranslator.getFirstTableFunction(qDef);
+			outputObjInspector = tDef.getRawInputOI();
+		}
+		else
+		{
+			outputObjInspector = qDef.getSelectList().getOI();
+		}
+
+		setupKeysWrapper(inputObjInspectors[0]);
+
+		super.initializeOp(jobConf);
+	}
+
+	@Override
+	protected void closeOp(boolean abort) throws HiveException
+	{
+		super.closeOp(abort);
+    // flush the final accumulated partition, if any rows arrived
+    if(inputPart.size() != 0){
+      if (isMapOperator)
+      {
+        processMapFunction();
+      }
+      else
+      {
+        processInputPartition();
+      }
+    }
+	}
+
+	@Override
+	public void processOp(Object row, int tag) throws HiveException
+	{
+	  if (!isMapOperator )
+    {
+      /*
+       * check if current row belongs to the current accumulated Partition:
+       * - If not:
+       *  - process the current Partition
+       *  - reset input Partition
+       * - set currentKey to the newKey if it is null or has changed.
+       */
+      newKeys.getNewKey(row, inputPart.getOI());
+      boolean keysAreEqual = (currentKeys != null && newKeys != null)?
+              newKeys.equals(currentKeys) : false;
+
+      if (currentKeys != null && !keysAreEqual)
+      {
+        processInputPartition();
+        inputPart = PTFOperator.createFirstPartitionForChain(qDef, inputObjInspectors[0], hiveConf, isMapOperator);
+      }
+
+      if (currentKeys == null || !keysAreEqual)
+      {
+        if (currentKeys == null)
+        {
+          currentKeys = newKeys.copyKey();
+        }
+        else
+        {
+          currentKeys.copyKey(newKeys);
+        }
+      }
+    }
+
+    // add row to current Partition.
+    inputPart.append(row);
+	}
+
+	/**
+	 * Initialize the visitor to use the QueryDefDeserializer Use the order
+	 * defined in QueryDefWalker to visit the QueryDef
+	 *
+	 * @param hiveConf
+	 * @throws HiveException
+	 */
+	protected void reconstructQueryDef(HiveConf hiveConf) throws HiveException
+	{
+
+	  PTFDefDeserializer qdd = new PTFDefDeserializer(hiveConf,
+				inputObjInspectors[0]);
+	  PTFTranslator.PTFDefWalker qdw = new PTFTranslator.PTFDefWalker(qdd);
+		qdw.walk(qDef);
+	}
+
+	/**
+	 * Build the KeyWrapperFactory over the partition-by columns of the first
+	 * table function, evaluating the key expressions against the given input OI.
+	 */
+	protected void setupKeysWrapper(ObjectInspector inputOI) throws HiveException
+	{
+		PartitionDef pDef = PTFTranslator.getFirstTableFunction(qDef).getWindow().getPartDef();
+		ArrayList<ColumnDef> cols = pDef.getColumns();
+		int numCols = cols.size();
+		ExprNodeEvaluator[] keyFields = new ExprNodeEvaluator[numCols];
+		ObjectInspector[] keyOIs = new ObjectInspector[numCols];
+		ObjectInspector[] currentKeyOIs = new ObjectInspector[numCols];
+
+		for(int i=0; i<numCols; i++)
+		{
+			ColumnDef cDef = cols.get(i);
+			/*
+			 * Why cannot we just use the ExprNodeEvaluator on the column?
+			 * - because on the reduce-side it is initialized based on the rowOI of the HiveTable
+			 *   and not the OI of the ExtractOp ( the parent of this Operator on the reduce-side)
+			 */
+			keyFields[i] = ExprNodeEvaluatorFactory.get(cDef.getExprNode());
+			keyOIs[i] = keyFields[i].initialize(inputOI);
+			currentKeyOIs[i] = ObjectInspectorUtils.getStandardObjectInspector(keyOIs[i], ObjectInspectorCopyOption.WRITABLE);
+		}
+
+		keyWrapperFactory = new KeyWrapperFactory(keyFields, keyOIs, currentKeyOIs);
+
+	    newKeys = keyWrapperFactory.getKeyWrapper();
+	}
+
+	/** Run the table-function chain on the accumulated partition, then the select list. */
+	protected void processInputPartition() throws HiveException
+	{
+	  PTFPartition outPart = PTFOperator.executeChain(qDef, inputPart);
+    PTFOperator.executeSelectList(qDef, outPart, this);
+	}
+
+	/** Apply the first table function's raw-input transform and forward each resulting row. */
+	protected void processMapFunction() throws HiveException
+	{
+	  TableFuncDef tDef = PTFTranslator.getFirstTableFunction(qDef);
+    PTFPartition outPart = tDef.getFunction().transformRawInput(inputPart);
+    PTFPartitionIterator<Object> pItr = outPart.iterator();
+    while (pItr.hasNext())
+    {
+      Object oRow = pItr.next();
+      forward(oRow, outputObjInspector);
+    }
+	}
+
+	/**
+	 * @return the name of the operator
+	 */
+  @Override
+  public String getName() {
+    return getOperatorName();
+  }
+
+  static public String getOperatorName() {
+    return "PTF";
+  }
+
+
+	@Override
+	public OperatorType getType()
+	{
+		return OperatorType.PTF;
+	}
+
+	 /**
+   * For all the table functions to be applied to the input
+   * hive table or query, push them on a stack.
+   * For each table function popped out of the stack,
+   * execute the function on the input partition
+   * and return an output partition.
+   * @param qDef
+   * @param part
+   * @return
+   * @throws HiveException
+   */
+  private static PTFPartition executeChain(PTFDef qDef, PTFPartition part)
+      throws HiveException
+  {
+    // unwind the input chain onto a stack so functions execute innermost-first
+    Stack<TableFuncDef> fnDefs = new Stack<TableFuncDef>();
+    PTFInputDef iDef = qDef.getInput();
+    while (true)
+    {
+      if (iDef instanceof TableFuncDef)
+      {
+        fnDefs.push((TableFuncDef) iDef);
+        iDef = ((TableFuncDef) iDef).getInput();
+      }
+      else
+      {
+        break;
+      }
+    }
+
+    TableFuncDef currFnDef;
+    while (!fnDefs.isEmpty())
+    {
+      currFnDef = fnDefs.pop();
+      part = currFnDef.getFunction().execute(part);
+    }
+    return part;
+  }
+
+  /**
+   * For each row in the partition:
+   * 1. evaluate the where condition if applicable.
+   * 2. evaluate the value for each column retrieved
+   *    from the select list
+   * 3. Forward the writable value or object based on the
+   *    implementation of the ForwardSink
+   * @param qDef
+   * @param oPart
+   * @param rS
+   * @throws HiveException
+   */
+  @SuppressWarnings(
+  { "rawtypes", "unchecked" })
+  private static void executeSelectList(PTFDef qDef, PTFPartition oPart, PTFOperator op)
+      throws HiveException
+  {
+    StructObjectInspector selectOI = qDef.getSelectList().getOI();
+    StructObjectInspector inputOI = qDef.getInput().getOI();
+    int numCols = selectOI.getAllStructFieldRefs().size();
+    ArrayList<ColumnDef> cols = qDef.getSelectList().getColumns();
+    int numSelCols = cols == null ? 0 : cols.size();
+    Object[] output = new Object[numCols];
+
+
+    WhereDef whDef = qDef.getWhere();
+    boolean applyWhere = whDef != null;
+    Converter whConverter = !applyWhere ? null
+        : ObjectInspectorConverters
+            .getConverter(
+                whDef.getOI(),
+                PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
+    ExprNodeEvaluator whCondEval = !applyWhere ? null : whDef
+        .getExprEvaluator();
+
+    // NOTE(review): 'value' is never used in this method
+    Writable value = null;
+    PTFPartitionIterator<Object> pItr = oPart.iterator();
+    PTFOperator.connectLeadLagFunctionsToPartition(qDef, pItr);
+    while (pItr.hasNext())
+    {
+      int colCnt = 0;
+      Object oRow = pItr.next();
+
+      if (applyWhere)
+      {
+        Object whCond = null;
+        whCond = whCondEval.evaluate(oRow);
+        whCond = whConverter.convert(whCond);
+        // skip the row if the where condition is null or false
+        if (whCond == null || !((Boolean) whCond).booleanValue())
+        {
+          continue;
+        }
+      }
+
+      /*
+       * Setup the output row columns in the following order
+       * - the columns in the SelectList processed by the PTF (ie the Select Exprs that have navigation expressions)
+       * - the columns from the final PTF.
+       */
+
+      if ( cols != null ) {
+        for (ColumnDef cDef : cols)
+        {
+          Object newCol = cDef.getExprEvaluator().evaluate(oRow);
+          output[colCnt++] = newCol;
+        }
+      }
+
+      // remaining output columns are copied straight from the input row
+      for(; colCnt < numCols; ) {
+        StructField field = inputOI.getAllStructFieldRefs().get(colCnt - numSelCols);
+        output[colCnt++] = ObjectInspectorUtils.copyToStandardObject(inputOI.getStructFieldData(oRow, field),
+            field.getFieldObjectInspector());
+      }
+
+      op.forward(output, op.outputObjInspector);
+    }
+  }
+
+  /**
+   * Create a new partition.
+   * The input OI is used to evaluate rows appended to the partition.
+   * The serde is determined based on whether the query has a map-phase
+   * or not. The OI on the serde is used by PTFs to evaluate output of the
+   * partition.
+   * @param qDef
+   * @param oi
+   * @param hiveConf
+   * @param isMapSide NOTE(review): currently unused in this method
+   * @return
+   * @throws HiveException
+   */
+  public static PTFPartition createFirstPartitionForChain(PTFDef qDef, ObjectInspector oi,
+      HiveConf hiveConf, boolean isMapSide) throws HiveException
+  {
+    TableFuncDef tabDef = PTFTranslator.getFirstTableFunction(qDef);
+    TableFunctionEvaluator tEval = tabDef.getFunction();
+    String partClassName = tEval.getPartitionClass();
+    int partMemSize = tEval.getPartitionMemSize();
+
+    PTFPartition part = null;
+    SerDe serde = tabDef.getInput().getSerde();
+    part = new PTFPartition(partClassName, partMemSize, serde,
+        (StructObjectInspector) oi);
+    return part;
+
+  }
+
+  /**
+   * Hand the partition iterator to every lead/lag UDF recorded during
+   * translation, so the UDFs can reach other rows of the partition while
+   * being evaluated.
+   */
+  public static void connectLeadLagFunctionsToPartition(PTFDef qDef,
+      PTFPartitionIterator<Object> pItr) throws HiveException
+  {
+    PTFTranslationInfo tInfo = qDef.getTranslationInfo();
+    List<ExprNodeGenericFuncDesc> llFnDescs = tInfo.getLLInfo()
+        .getLeadLagExprs();
+    if (llFnDescs == null) {
+      return;
+    }
+    for (ExprNodeGenericFuncDesc llFnDesc : llFnDescs)
+    {
+      GenericUDFLeadLag llFn = (GenericUDFLeadLag) llFnDesc
+          .getGenericUDF();
+      llFn.setpItr(pItr);
+    }
+  }
+
+
+
+}

Added: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java (added)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java Sun Feb  3 21:43:10 2013
@@ -0,0 +1,262 @@
+package org.apache.hadoop.hive.ql.exec;
+
+import java.util.ConcurrentModificationException;
+import java.util.Iterator;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.exec.PTFPersistence.ByteBasedList;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.io.Writable;
+
+/*
+ * Represents a collection of rows that is acted upon by a TableFunction or a
+ * WindowFunction. Rows are kept serialized in a ByteBasedList; the SerDe and
+ * StructObjectInspector pair serializes rows on append and deserializes them
+ * on access.
+ *
+ * NOTE(review): not thread-safe — a single scratch Writable (wRow) is reused
+ * by getAt/getWritableAt, so a row obtained from one call is overwritten by
+ * the next.
+ */
+public class PTFPartition
+{
+  SerDe serDe;
+  StructObjectInspector OI;
+  private ByteBasedList elems; // serialized rows
+  private Writable wRow;       // scratch Writable reused for every read
+  private int sz;              // number of rows appended so far
+
+  /**
+   * Builds a partition whose persistence class and memory size are read from
+   * the HiveConf (HIVE_PTF_PARTITION_PERSISTENCE_CLASS /
+   * HIVE_PTF_PARTITION_PERSISTENT_SIZE).
+   */
+  public PTFPartition(HiveConf cfg, SerDe serDe, StructObjectInspector oI) throws HiveException
+  {
+    String partitionClass = HiveConf.getVar(cfg, ConfVars.HIVE_PTF_PARTITION_PERSISTENCE_CLASS);
+    int partitionMemSize = HiveConf.getIntVar(cfg, ConfVars.HIVE_PTF_PARTITION_PERSISTENT_SIZE);
+    init(partitionClass, partitionMemSize, serDe, oI);
+  }
+
+  public PTFPartition(String partitionClass, int partitionMemSize, SerDe serDe, StructObjectInspector oI) throws HiveException
+  {
+    init(partitionClass, partitionMemSize, serDe, oI);
+  }
+
+  /** Shared constructor body: sets up the backing list and the scratch row. */
+  private void init(String partitionClass, int partitionMemSize, SerDe serDe, StructObjectInspector oI) throws HiveException
+  {
+    this.serDe = serDe;
+    OI = oI;
+    elems = PTFPersistence.createList(partitionClass, partitionMemSize);
+    sz = 0;
+    wRow = createWritable();
+  }
+
+  public SerDe getSerDe()
+  {
+    return serDe;
+  }
+  public void setSerDe(SerDe serDe)
+  {
+    this.serDe = serDe;
+  }
+  public StructObjectInspector getOI()
+  {
+    return OI;
+  }
+  public void setOI(StructObjectInspector oI)
+  {
+    OI = oI;
+  }
+
+  /** Instantiates the SerDe's Writable class to use as the scratch row. */
+  private Writable createWritable() throws HiveException
+  {
+    try
+    {
+      return serDe.getSerializedClass().newInstance();
+    }
+    catch(Throwable t)
+    {
+      throw new HiveException(t);
+    }
+  }
+
+  /**
+   * Deserializes and returns the row at index i. The returned Object may be
+   * backed by the shared scratch Writable; it is only valid until the next
+   * getAt/getWritableAt call.
+   */
+  public Object getAt(int i) throws HiveException
+  {
+    try
+    {
+      elems.get(i, wRow);
+      Object o = serDe.deserialize(wRow);
+      return o;
+    }
+    catch(SerDeException  se)
+    {
+      throw new HiveException(se);
+    }
+  }
+
+  /**
+   * Returns the serialized row at index i, without deserializing. The result
+   * is the shared scratch Writable and is overwritten by the next read.
+   */
+  public Object getWritableAt(int i) throws HiveException
+  {
+    elems.get(i, wRow);
+    return wRow;
+  }
+
+  /** Appends an already-serialized row. */
+  public void append(Writable o) throws HiveException
+  {
+    elems.append(o);
+    sz++;
+  }
+
+  /** Serializes o with this partition's SerDe/OI and appends it. */
+  public void append(Object o) throws HiveException
+  {
+    try
+    {
+      append(serDe.serialize(o, OI));
+    }
+    catch(SerDeException e)
+    {
+      throw new HiveException(e);
+    }
+  }
+
+  public int size()
+  {
+    return sz;
+  }
+
+  /** Iterator over all rows currently in the partition. */
+  public PTFPartitionIterator<Object> iterator()
+  {
+    return new PItr(0, size());
+  }
+
+  /**
+   * Iterator over the half-open row range [start, end).
+   */
+  public PTFPartitionIterator<Object> range(int start, int end)
+  {
+    assert(start >= 0);
+    // end is exclusive (hasNext uses idx < end); iterator() itself builds
+    // PItr(0, size()), so the bound is end <= size(), not end < size().
+    assert(end <= size());
+    assert(start <= end);
+    return new PItr(start, end);
+  }
+
+  /**
+   * Fail-fast iterator over [start, end): records the partition size at
+   * creation time and throws ConcurrentModificationException if rows are
+   * appended while iterating.
+   */
+  class PItr implements PTFPartitionIterator<Object>
+  {
+    int idx;
+    final int start;
+    final int end;
+    final int createTimeSz; // partition size when this iterator was created
+
+    PItr(int start, int end)
+    {
+      this.idx = start;
+      this.start = start;
+      this.end = end;
+      createTimeSz = PTFPartition.this.size();
+    }
+
+    public boolean hasNext()
+    {
+      checkForComodification() ;
+      return idx < end;
+    }
+
+    public Object next()
+    {
+      checkForComodification();
+      try
+      {
+        return PTFPartition.this.getAt(idx++);
+      }
+      catch(HiveException e)
+      {
+        throw new RuntimeException(e);
+      }
+    }
+
+    public void remove()
+    {
+      throw new UnsupportedOperationException();
+    }
+
+    final void checkForComodification()
+    {
+        if (createTimeSz != PTFPartition.this.size()) {
+          throw new ConcurrentModificationException();
+        }
+    }
+
+    @Override
+    public int getIndex()
+    {
+      return idx;
+    }
+
+    /** getAt with HiveException wrapped as RuntimeException, for use by lead/lag. */
+    private Object getAt(int i)
+    {
+      try
+      {
+        return PTFPartition.this.getAt(i);
+      }
+      catch(HiveException e)
+      {
+        throw new RuntimeException(e);
+      }
+    }
+
+    /** Row amt positions ahead of the next location, clamped to the last row. */
+    @Override
+    public Object lead(int amt)
+    {
+      int i = idx + amt;
+      i = i >= end ? end - 1 : i;
+      return getAt(i);
+    }
+
+    /** Row amt positions behind the next location, clamped to the first row. */
+    @Override
+    public Object lag(int amt)
+    {
+      int i = idx - amt;
+      i = i < start ? start : i;
+      return getAt(i);
+    }
+
+    /**
+     * Repositions the iterator so the row at idx is the one just consumed;
+     * returns that row, or null when idx is outside [start, end).
+     */
+    @Override
+    public Object resetToIndex(int idx)
+    {
+      if ( idx < start || idx >= end )
+      {
+        return null;
+      }
+      Object o = getAt(idx);
+      this.idx = idx + 1;
+      return o;
+    }
+
+    @Override
+    public PTFPartition getPartition()
+    {
+      return PTFPartition.this;
+    }
+
+    @Override
+    public void reset()
+    {
+      idx = start;
+    }
+  };
+
+  /*
+   * provide an Iterator on the rows in a Partition.
+   * Iterator exposes the index of the next location.
+   * Client can invoke lead/lag relative to the next location.
+   */
+  public static interface PTFPartitionIterator<T> extends Iterator<T>
+  {
+    int getIndex();
+
+    T lead(int amt);
+
+    T lag(int amt);
+
+    /*
+     * after a lead and lag call, allow Object associated with SerDe and writable associated with partition to be reset
+     * to the value for the current Index.
+     */
+    Object resetToIndex(int idx);
+
+    PTFPartition getPartition();
+
+    void reset();
+  }
+
+
+}



Mime
View raw message