hive-commits mailing list archives

From hashut...@apache.org
Subject svn commit: r1441972 [6/8] - in /hive/branches/ptf-windowing: common/src/java/org/apache/hadoop/hive/conf/ data/files/ ql/if/ ql/src/gen/thrift/gen-cpp/ ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/ ql/src/gen/thrift/gen-php/ ql/sr...
Date Sun, 03 Feb 2013 21:43:12 GMT
Added: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NPath.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NPath.java?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NPath.java (added)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NPath.java Sun Feb  3 21:43:10 2013
@@ -0,0 +1,820 @@
+package org.apache.hadoop.hive.ql.udf.ptf;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
+import org.apache.hadoop.hive.ql.exec.PTFPartition;
+import org.apache.hadoop.hive.ql.exec.PTFPartition.PTFPartitionIterator;
+import org.apache.hadoop.hive.ql.exec.PTFUtils;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.PTFSpec.SelectSpec;
+import org.apache.hadoop.hive.ql.parse.PTFTranslator;
+import org.apache.hadoop.hive.ql.parse.RowResolver;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.TypeCheckCtx;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.PTFDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.ArgDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.PTFInputDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.TableFuncDef;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+/**
+ * Returns the rows that match a specified pattern. Symbols name boolean expressions used to
+ * match rows; the Pattern strings symbols together to specify a Path. The result select list
+ * can contain expressions based on the input columns and also on the matched Path.
+ * <ol>
+ * <li><b>pattern:</b> the pattern for the Path, a 'dot'-separated list of symbols.
+ * Each element is treated as a symbol. Elements that end in '*' or '+' carry the usual
+ * meaning of zero or more, and one or more, respectively. For example, "LATE.EARLY*.ONTIMEOREARLY"
+ * implies a sequence of flights where the first was LATE, followed by zero or more EARLY
+ * flights, followed by an ONTIME or EARLY flight.
+ * <li><b>symbols:</b> a list of name, expression pairs, e.g.
+ * 'LATE', arrival_delay > 0, 'EARLY', arrival_delay < 0, 'ONTIME', arrival_delay == 0.
+ * These symbols can then be used in the Pattern defined above.
+ * <li><b>resultSelectList:</b> specified as a select list.
+ * The expressions in the select list are evaluated in a context where all the input columns
+ * are available, plus the attribute "tpath": a collection of rows that represents the matched Path.
+ * </ol>
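+ * <p>
+ * For example, a hypothetical invocation (the table, column names, and delay threshold are
+ * illustrative only, and the exact PTF invocation syntax may differ) could look like:
+ * <pre>
+ *   select origin_city, fl_num, sz
+ *   from npath(flights,
+ *          'LATE.LATE+',
+ *          'LATE', arrival_delay &gt; 15,
+ *          'origin_city, fl_num, size(tpath) as sz')
+ * </pre>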
+ */
+public class NPath extends TableFunctionEvaluator
+{
+  private transient String patternStr;
+  private transient SymbolsInfo symInfo;
+  private transient String resultExprStr;
+  private transient SymbolFunction syFn;
+  private ResultExprInfo resultExprInfo;
+  /*
+   * the names of the columns of the input to NPath; used to set up the tpath Struct column.
+   */
+  private ArrayList<String> inputColumnNames;
+
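+  /*
+   * For each input row: attempt to match the symbol chain starting at that row. On a match,
+   * evaluate the result select list (with the matched rows exposed as 'tpath') and append one
+   * output row to the output partition.
+   */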
+  @Override
+  public void execute(PTFPartitionIterator<Object> pItr, PTFPartition outP) throws HiveException
+  {
+    while (pItr.hasNext())
+    {
+      Object iRow = pItr.next();
+
+      SymbolFunctionResult syFnRes = SymbolFunction.match(syFn, iRow, pItr);
+      if (syFnRes.matches )
+      {
+        int sz = syFnRes.nextRow - (pItr.getIndex() - 1);
+        Object selectListInput = NPath.getSelectListInput(iRow, tDef.getInput().getOI(), pItr, sz);
+        ArrayList<Object> oRow = new ArrayList<Object>();
+        for(ExprNodeEvaluator resExprEval : resultExprInfo.resultExprEvals)
+        {
+          oRow.add(resExprEval.evaluate(selectListInput));
+        }
+        outP.append(oRow);
+      }
+    }
+  }
+
+  static void throwErrorWithSignature(String message) throws SemanticException
+  {
+    throw new SemanticException(PTFUtils.sprintf(
+        "NPath signature is: SymbolPattern, one or more SymbolName, expression pairs, the result expression as a select list. Error %s",
+        message));
+  }
+  public static class NPathResolver extends TableFunctionResolver
+  {
+
+    @Override
+    protected TableFunctionEvaluator createEvaluator(PTFDef qDef, TableFuncDef tDef)
+    {
+
+      return new NPath();
+    }
+
+    /**
+     * <ul>
+     * <li> check structure of Arguments:
+     * <ol>
+     * <li> First arg should be a String
+     * <li> then there should be an even number of Arguments: String, expression; expression should be Convertible to Boolean.
+     * <li> finally there should be a String.
+     * </ol>
+     * <li> convert the pattern into a SymbolFunction chain.
+     * <li> convert the symbol args into a Symbol Map.
+     * <li> parse the selectList into a SelectSpec struct. The inputOI used to translate these expressions is based on the
+     * columns in the Input, plus the 'tpath' attribute.
+     * </ul>
+     */
+    @Override
+    public void setupOutputOI() throws SemanticException
+    {
+      NPath evaluator = (NPath) getEvaluator();
+      TableFuncDef tDef = evaluator.getTableDef();
+
+      ArrayList<ArgDef> args = tDef.getArgs();
+      int argsNum = args == null ? 0 : args.size();
+
+      if ( argsNum < 4 )
+      {
+        throwErrorWithSignature("at least 4 arguments required");
+      }
+
+      validateAndSetupPatternStr(evaluator, args);
+      validateAndSetupSymbolInfo(evaluator, args, argsNum);
+      validateAndSetupResultExprStr(evaluator, args, argsNum);
+      setupSymbolFunctionChain(evaluator);
+
+      /*
+       * setup OI for input to resultExpr select list
+       */
+      RowResolver selectListInputRR = NPath.createSelectListRR(evaluator, tDef.getInput());
+
+      /*
+       * parse ResultExpr Str and setup OI.
+       */
+      ResultExpressionParser resultExprParser = new ResultExpressionParser(evaluator.resultExprStr, selectListInputRR);
+      try {
+        resultExprParser.translate();
+      }
+      catch(HiveException he) {
+        throw new SemanticException(he);
+      }
+      evaluator.resultExprInfo = resultExprParser.getResultExprInfo();
+      StructObjectInspector OI = evaluator.resultExprInfo.resultOI;
+      setOutputOI(OI);
+    }
+    /*
+     * validate and setup patternStr
+     */
+    private void validateAndSetupPatternStr(NPath evaluator, ArrayList<ArgDef> args) throws SemanticException {
+      ArgDef symbolPatternArg = args.get(0);
+      ObjectInspector symbolPatternArgOI = symbolPatternArg.getOI();
+
+      if ( !ObjectInspectorUtils.isConstantObjectInspector(symbolPatternArgOI) ||
+          (symbolPatternArgOI.getCategory() != ObjectInspector.Category.PRIMITIVE) ||
+          ((PrimitiveObjectInspector)symbolPatternArgOI).getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.STRING )
+      {
+        throwErrorWithSignature("Currently the symbol Pattern must be a Constant String.");
+      }
+
+      evaluator.patternStr = ((ConstantObjectInspector)symbolPatternArgOI).getWritableConstantValue().toString();
+    }
+
+    /*
+     * validate and setup SymbolInfo
+     */
+    private void validateAndSetupSymbolInfo(NPath evaluator, ArrayList<ArgDef> args, int argsNum) throws SemanticException {
+      int symbolArgsSz = argsNum - 2;
+      if ( symbolArgsSz % 2 != 0)
+      {
+        throwErrorWithSignature("Symbol Name, Expression need to be specified in pairs: there are odd number of symbol args");
+      }
+
+      evaluator.symInfo = new SymbolsInfo(symbolArgsSz/2);
+      for(int i=1; i <= symbolArgsSz; i += 2)
+      {
+        ArgDef symbolNameArg = args.get(i);
+        ObjectInspector symbolNameArgOI = symbolNameArg.getOI();
+
+        if ( !ObjectInspectorUtils.isConstantObjectInspector(symbolNameArgOI) ||
+            (symbolNameArgOI.getCategory() != ObjectInspector.Category.PRIMITIVE) ||
+            ((PrimitiveObjectInspector)symbolNameArgOI).getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.STRING )
+        {
+          throwErrorWithSignature(PTFUtils.sprintf("Currently a Symbol Name(%s) must be a Constant String", symbolNameArg.getExpression().toStringTree()));
+        }
+        String symbolName = ((ConstantObjectInspector)symbolNameArgOI).getWritableConstantValue().toString();
+
+        ArgDef symbolExprArg = args.get(i+1);
+        ObjectInspector symbolExprArgOI = symbolExprArg.getOI();
+        if ( (symbolExprArgOI.getCategory() != ObjectInspector.Category.PRIMITIVE) ||
+              ((PrimitiveObjectInspector)symbolExprArgOI).getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.BOOLEAN )
+        {
+          throwErrorWithSignature(PTFUtils.sprintf("Currently a Symbol Expression(%s) must be a boolean expression", symbolExprArg.getExpression().toStringTree()));
+        }
+        evaluator.symInfo.add(symbolName, symbolExprArg);
+      }
+    }
+
+    /*
+     * validate and setup resultExprStr
+     */
+    private void validateAndSetupResultExprStr(NPath evaluator, ArrayList<ArgDef> args, int argsNum) throws SemanticException {
+      ArgDef resultExprArg = args.get(argsNum - 1);
+      ObjectInspector resultExprArgOI = resultExprArg.getOI();
+
+      if ( !ObjectInspectorUtils.isConstantObjectInspector(resultExprArgOI) ||
+            (resultExprArgOI.getCategory() != ObjectInspector.Category.PRIMITIVE) ||
+            ((PrimitiveObjectInspector)resultExprArgOI).getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.STRING )
+      {
+        throwErrorWithSignature("Currently the result Expr parameter must be a Constant String.");
+      }
+
+      evaluator.resultExprStr = ((ConstantObjectInspector)resultExprArgOI).getWritableConstantValue().toString();
+    }
+
+    /*
+     * setup SymbolFunction chain.
+     */
+    private void setupSymbolFunctionChain(NPath evaluator) throws SemanticException {
+      SymbolParser syP = new SymbolParser(evaluator.patternStr,
+          evaluator.symInfo.symbolExprsNames,
+          evaluator.symInfo.symbolExprsEvaluators, evaluator.symInfo.symbolExprsOIs);
+      syP.parse();
+      evaluator.syFn = syP.getSymbolFunction();
+    }
+
+    @Override
+    public boolean transformsRawInput()
+    {
+      return false;
+    }
+
+    @Override
+    public void initializeOutputOI() throws HiveException {
+      try {
+        NPath evaluator = (NPath) getEvaluator();
+        TableFuncDef tDef = evaluator.getTableDef();
+
+        ArrayList<ArgDef> args = tDef.getArgs();
+        int argsNum = args.size();
+
+        validateAndSetupPatternStr(evaluator, args);
+        validateAndSetupSymbolInfo(evaluator, args, argsNum);
+        validateAndSetupResultExprStr(evaluator, args, argsNum);
+        setupSymbolFunctionChain(evaluator);
+
+        /*
+         * setup OI for input to resultExpr select list
+         */
+        RowResolver selectListInputRR = NPath.createSelectListRR(evaluator, tDef.getInput());
+        StructObjectInspector selectListInputOI = (StructObjectInspector) PTFTranslator.getInputOI(selectListInputRR);
+        ResultExprInfo resultExprInfo = evaluator.resultExprInfo;
+        ArrayList<ObjectInspector> selectListExprOIs = new ArrayList<ObjectInspector>();
+        resultExprInfo.resultExprEvals = new ArrayList<ExprNodeEvaluator>();
+
+        for(int i=0 ; i < resultExprInfo.resultExprNodes.size(); i++) {
+          ExprNodeDesc selectColumnExprNode = resultExprInfo.resultExprNodes.get(i);
+          ExprNodeEvaluator selectColumnExprEval = ExprNodeEvaluatorFactory.get(selectColumnExprNode);
+          ObjectInspector selectColumnOI = selectColumnExprEval.initialize(selectListInputOI);
+          resultExprInfo.resultExprEvals.add(selectColumnExprEval);
+          selectListExprOIs.add(selectColumnOI);
+        }
+
+        resultExprInfo.resultOI = ObjectInspectorFactory.getStandardStructObjectInspector(
+            resultExprInfo.resultExprNames, selectListExprOIs);
+        setOutputOI(resultExprInfo.resultOI);
+      }
+      catch(SemanticException se) {
+        throw new HiveException(se);
+      }
+    }
+
+    @Override
+    public ArrayList<String> getOutputColumnNames() {
+      NPath evaluator = (NPath) getEvaluator();
+      return evaluator.resultExprInfo.getResultExprNames();
+    }
+
+  }
+
+  public ResultExprInfo getResultExprInfo() {
+    return resultExprInfo;
+  }
+
+  public void setResultExprInfo(ResultExprInfo resultExprInfo) {
+    this.resultExprInfo = resultExprInfo;
+  }
+
+  static class SymbolsInfo {
+    int sz;
+    ArrayList<ExprNodeEvaluator> symbolExprsEvaluators;
+    ArrayList<ObjectInspector> symbolExprsOIs;
+    ArrayList<String> symbolExprsNames;
+
+    SymbolsInfo(int sz)
+    {
+      this.sz = sz;
+      symbolExprsEvaluators = new ArrayList<ExprNodeEvaluator>(sz);
+      symbolExprsOIs = new ArrayList<ObjectInspector>(sz);
+      symbolExprsNames = new ArrayList<String>(sz);
+    }
+
+    void add(String name, ArgDef arg)
+    {
+      symbolExprsNames.add(name);
+      symbolExprsEvaluators.add(arg.getExprEvaluator());
+      symbolExprsOIs.add(arg.getOI());
+    }
+  }
+
+  public static class ResultExprInfo {
+    ArrayList<String> resultExprNames;
+    ArrayList<ExprNodeDesc> resultExprNodes;
+    private transient ArrayList<ExprNodeEvaluator> resultExprEvals;
+    private transient StructObjectInspector resultOI;
+
+    public ArrayList<String> getResultExprNames() {
+      return resultExprNames;
+    }
+    public void setResultExprNames(ArrayList<String> resultExprNames) {
+      this.resultExprNames = resultExprNames;
+    }
+    public ArrayList<ExprNodeDesc> getResultExprNodes() {
+      return resultExprNodes;
+    }
+    public void setResultExprNodes(ArrayList<ExprNodeDesc> resultExprNodes) {
+      this.resultExprNodes = resultExprNodes;
+    }
+  }
+
+  public static abstract class SymbolFunction
+  {
+    SymbolFunctionResult result;
+
+    public SymbolFunction()
+    {
+      result = new SymbolFunctionResult();
+    }
+
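+    /*
+     * Try to match syFn starting at the current row; the iterator position is restored in the
+     * finally block so the caller can resume scanning from where it left off.
+     */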
+    public static SymbolFunctionResult match(SymbolFunction syFn, Object row,
+        PTFPartitionIterator<Object> pItr) throws HiveException
+    {
+      int resetToIdx = pItr.getIndex() - 1;
+      try
+      {
+        return syFn.match(row, pItr);
+      } finally
+      {
+        pItr.resetToIndex(resetToIdx);
+      }
+    }
+
+    protected abstract SymbolFunctionResult match(Object row, PTFPartitionIterator<Object> pItr)
+        throws HiveException;
+
+    protected abstract boolean isOptional();
+  }
+
+  public static class Symbol extends SymbolFunction {
+    ExprNodeEvaluator symbolExprEval;
+    Converter converter;
+
+    public Symbol(ExprNodeEvaluator symbolExprEval, ObjectInspector symbolOI)
+    {
+      this.symbolExprEval = symbolExprEval;
+      converter = ObjectInspectorConverters.getConverter(
+          symbolOI,
+          PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
+    }
+
+    @Override
+    protected SymbolFunctionResult match(Object row, PTFPartitionIterator<Object> pItr)
+        throws HiveException
+    {
+      Object val = symbolExprEval.evaluate(row);
+      val = converter.convert(val);
+      result.matches = ((Boolean) val).booleanValue();
+      result.nextRow = pItr.getIndex();
+
+      return result;
+    }
+
+    @Override
+    protected boolean isOptional()
+    {
+      return false;
+    }
+  }
+
+  public static class Star extends SymbolFunction {
+    SymbolFunction symbolFn;
+
+    public Star(SymbolFunction symbolFn)
+    {
+      this.symbolFn = symbolFn;
+    }
+
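+    /*
+     * Zero or more occurrences: always matches, greedily consuming rows for as long as the
+     * underlying symbol keeps matching.
+     */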
+    @Override
+    protected SymbolFunctionResult match(Object row, PTFPartitionIterator<Object> pItr)
+        throws HiveException
+    {
+      result.matches = true;
+      SymbolFunctionResult rowResult = symbolFn.match(row, pItr);
+
+      while (rowResult.matches && pItr.hasNext())
+      {
+        row = pItr.next();
+        rowResult = symbolFn.match(row, pItr);
+      }
+
+      result.nextRow = pItr.getIndex() - 1;
+      return result;
+    }
+
+    @Override
+    protected boolean isOptional()
+    {
+      return true;
+    }
+  }
+
+  public static class Plus extends SymbolFunction {
+    SymbolFunction symbolFn;
+
+    public Plus(SymbolFunction symbolFn)
+    {
+      this.symbolFn = symbolFn;
+    }
+
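+    /*
+     * One or more occurrences: fails if the first row does not match; otherwise greedily
+     * consumes rows for as long as the underlying symbol keeps matching.
+     */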
+    @Override
+    protected SymbolFunctionResult match(Object row, PTFPartitionIterator<Object> pItr)
+        throws HiveException
+    {
+      SymbolFunctionResult rowResult = symbolFn.match(row, pItr);
+
+      if (!rowResult.matches)
+      {
+        result.matches = false;
+        result.nextRow = pItr.getIndex() - 1;
+        return result;
+      }
+
+      result.matches = true;
+      while (rowResult.matches && pItr.hasNext())
+      {
+        row = pItr.next();
+        rowResult = symbolFn.match(row, pItr);
+      }
+
+      result.nextRow = pItr.getIndex() - 1;
+      return result;
+    }
+
+    @Override
+    protected boolean isOptional()
+    {
+      return false;
+    }
+  }
+
+  public static class Chain extends SymbolFunction
+  {
+    ArrayList<SymbolFunction> components;
+
+    public Chain(ArrayList<SymbolFunction> components)
+    {
+      this.components = components;
+    }
+
+    /*
+     * Iterate over the Symbol Functions in the Chain:
+     * - if we are not at the end of the Iterator (i.e. row != null):
+     *   - match the current componentFn; if it returns false, return false;
+     *   - otherwise set row to the next row from the Iterator.
+     * - if we are at the end of the Iterator:
+     *   - skip any optional Symbol Fns (star patterns) at the end;
+     *   - but if we come to a non-optional Symbol Fn, return false.
+     * - if we match all Fns in the chain, return true.
+     */
+    @Override
+    protected SymbolFunctionResult match(Object row, PTFPartitionIterator<Object> pItr)
+        throws HiveException
+    {
+      SymbolFunctionResult componentResult = null;
+      for (SymbolFunction sFn : components)
+      {
+        if (row != null)
+        {
+          componentResult = sFn.match(row, pItr);
+          if (!componentResult.matches)
+          {
+            result.matches = false;
+            result.nextRow = componentResult.nextRow;
+            return result;
+          }
+          row = pItr.resetToIndex(componentResult.nextRow);
+        }
+        else
+        {
+          if (!sFn.isOptional())
+          {
+            result.matches = false;
+            result.nextRow = componentResult.nextRow;
+            return result;
+          }
+        }
+      }
+
+      result.matches = true;
+      result.nextRow = componentResult.nextRow;
+      return result;
+    }
+
+    @Override
+    protected boolean isOptional()
+    {
+      return false;
+    }
+  }
+
+
+  public static class SymbolFunctionResult
+  {
+    /*
+     * does the row match the pattern represented by this SymbolFunction
+     */
+    public boolean matches;
+    /*
+     * the index of the first row beyond the set of rows that match this pattern.
+     */
+    public int nextRow;
+  }
+
+  public static class SymbolParser
+  {
+    String patternStr;
+    String[] symbols;
+    HashMap<String, Object[]> symbolExprEvalMap;
+    ArrayList<SymbolFunction> symbolFunctions;
+    Chain symbolFnChain;
+
+
+    public SymbolParser(String patternStr, ArrayList<String> symbolNames,
+        ArrayList<ExprNodeEvaluator> symbolExprEvals, ArrayList<ObjectInspector> symbolExprOIs)
+    {
+      super();
+      this.patternStr = patternStr;
+      symbolExprEvalMap = new HashMap<String, Object[]>();
+      int sz = symbolNames.size();
+      for(int i=0; i < sz; i++)
+      {
+        String symbolName = symbolNames.get(i);
+        ExprNodeEvaluator symbolExprEval = symbolExprEvals.get(i);
+        ObjectInspector symbolExprOI = symbolExprOIs.get(i);
+        symbolExprEvalMap.put(symbolName.toLowerCase(), new Object[] {symbolExprEval, symbolExprOI});
+      }
+    }
+
+    public SymbolFunction getSymbolFunction()
+    {
+      return symbolFnChain;
+    }
+
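+    /*
+     * e.g. the pattern "LATE.EARLY*.ONTIME" is split on '.' into the symbols
+     * [LATE, EARLY*, ONTIME] and is compiled into the chain
+     * Chain[Symbol(LATE), Star(Symbol(EARLY)), Symbol(ONTIME)].
+     */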
+    public void parse() throws SemanticException
+    {
+      symbols = patternStr.split("\\.");
+      symbolFunctions = new ArrayList<SymbolFunction>();
+
+      for(String symbol : symbols)
+      {
+        boolean isStar = symbol.endsWith("*");
+        boolean isPlus = symbol.endsWith("+");
+
+        symbol = (isStar || isPlus) ? symbol.substring(0, symbol.length() - 1) : symbol;
+        Object[] symbolDetails = symbolExprEvalMap.get(symbol.toLowerCase());
+        if ( symbolDetails == null )
+        {
+          throw new SemanticException(PTFUtils.sprintf("Unknown Symbol %s", symbol));
+        }
+
+        ExprNodeEvaluator symbolExprEval = (ExprNodeEvaluator) symbolDetails[0];
+        ObjectInspector symbolExprOI = (ObjectInspector) symbolDetails[1];
+        SymbolFunction sFn = new Symbol(symbolExprEval, symbolExprOI);
+
+        if ( isStar )
+        {
+          sFn = new Star(sFn);
+        }
+        else if ( isPlus )
+        {
+          sFn = new Plus(sFn);
+        }
+        symbolFunctions.add(sFn);
+      }
+      symbolFnChain = new Chain(symbolFunctions);
+    }
+  }
+
+  /*
+   * ResultExpression is a Select List with the following variations:
+   * - the select keyword is optional; the parser prefixes 'select' if the expression doesn't
+   *   already start with it.
+   * - Window Fn clauses are not permitted.
+   * - expressions can operate on the input columns plus the pseudo column 'tpath', which is an
+   *   array of structs whose shape is the same as the input row.
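+   * e.g. the result expression string "origin_city, size(tpath) as sz" (column names are
+   * illustrative) would be parsed as the select list "select origin_city, size(tpath) as sz".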
+   */
+  public static class ResultExpressionParser {
+    String resultExprString;
+
+    RowResolver selectListInputRowResolver;
+    TypeCheckCtx selectListInputTypeCheckCtx;
+    StructObjectInspector selectListInputOI;
+
+    SelectSpec selectSpec;
+
+    ResultExprInfo resultExprInfo;
+
+    public ResultExpressionParser(String resultExprString, RowResolver selectListInputRowResolver)
+    {
+      this.resultExprString = resultExprString;
+      this.selectListInputRowResolver = selectListInputRowResolver;
+    }
+
+    public void translate() throws SemanticException, HiveException
+    {
+      setupSelectListInputInfo();
+      fixResultExprString();
+      parse();
+      validateSelectExpr();
+      buildSelectListEvaluators();
+    }
+
+    public ResultExprInfo getResultExprInfo() {
+      return resultExprInfo;
+    }
+
+    private void buildSelectListEvaluators() throws SemanticException, HiveException
+    {
+      resultExprInfo = new ResultExprInfo();
+      resultExprInfo.resultExprEvals = new ArrayList<ExprNodeEvaluator>();
+      resultExprInfo.resultExprNames = new ArrayList<String>();
+      resultExprInfo.resultExprNodes = new ArrayList<ExprNodeDesc>();
+      //result
+      ArrayList<ObjectInspector> selectListExprOIs = new ArrayList<ObjectInspector>();
+      int i = 0;
+      Iterator<Object> it = selectSpec.getColumnListAndAlias();
+      while (it.hasNext())
+      {
+        Object[] selectColDetails = (Object[]) it.next();
+        String selectColName = (String) selectColDetails[1];
+        ASTNode selectColumnNode = (ASTNode) selectColDetails[2];
+        ExprNodeDesc selectColumnExprNode = PTFTranslator.buildExprNode(selectColumnNode,
+            selectListInputTypeCheckCtx);
+        ExprNodeEvaluator selectColumnExprEval = ExprNodeEvaluatorFactory.get(selectColumnExprNode);
+        ObjectInspector selectColumnOI = selectColumnExprEval.initialize(selectListInputOI);
+
+        selectColName = getColumnName(selectColName, selectColumnExprNode, i);
+
+        resultExprInfo.resultExprEvals.add(selectColumnExprEval);
+        selectListExprOIs.add(selectColumnOI);
+        resultExprInfo.resultExprNodes.add(selectColumnExprNode);
+        resultExprInfo.resultExprNames.add(selectColName);
+        i++;
+      }
+
+      resultExprInfo.resultOI = ObjectInspectorFactory.getStandardStructObjectInspector(
+          resultExprInfo.resultExprNames, selectListExprOIs);
+    }
+
+    private void setupSelectListInputInfo() throws SemanticException
+    {
+      selectListInputTypeCheckCtx = new TypeCheckCtx(selectListInputRowResolver);
+      selectListInputTypeCheckCtx.setUnparseTranslator(null);
+      /*
+       * create SelectListOI
+       */
+      selectListInputOI = (StructObjectInspector) PTFTranslator.getInputOI(selectListInputRowResolver);
+    }
+
+    private void fixResultExprString()
+    {
+      String r = resultExprString.trim();
+      // guard on length so a short expression doesn't throw StringIndexOutOfBoundsException
+      if (r.length() < 6 || !r.substring(0, 6).toLowerCase().equals("select"))
+      {
+        r = "select " + r;
+      }
+      resultExprString = r;
+    }
+
+    private void parse() throws SemanticException
+    {
+      selectSpec = SemanticAnalyzer.parseSelect(resultExprString);
+    }
+
+    private void validateSelectExpr() throws SemanticException
+    {
+      if (selectSpec.getWindowFuncs() != null)
+      {
+        throw new SemanticException(
+            "NPath Result Expression cannot have Windowing Function expressions");
+      }
+
+      for (ASTNode node : selectSpec.getExpressions())
+      {
+        PTFTranslator.validateNoLeadLagInValueBoundarySpec(node,
+            "Lead/Lag not allowed in NPath Result Expression");
+      }
+    }
+
+    private String getColumnName(String alias, ExprNodeDesc exprNode, int colIdx)
+    {
+      if (alias != null)
+      {
+        return alias;
+      }
+      else if (exprNode instanceof ExprNodeColumnDesc)
+      {
+        ExprNodeColumnDesc colDesc = (ExprNodeColumnDesc) exprNode;
+        return colDesc.getColumn();
+      }
+      return "npath_col_" + colIdx;
+    }
+  }
+
+  public static final String PATHATTR_NAME = "tpath";
+
+  /*
+   * add the 'tpath' array<struct> column to the list of input columns
+   */
+  protected static RowResolver createSelectListRR(NPath evaluator, PTFInputDef inpDef) throws SemanticException {
+    RowResolver rr = new RowResolver();
+    RowResolver inputRR = inpDef.getInputInfo().getRowResolver();
+    boolean inputColNamesKnown = evaluator.inputColumnNames != null;
+
+    if ( !inputColNamesKnown ) {
+      evaluator.inputColumnNames = new ArrayList<String>();
+    }
+
+    ArrayList<ObjectInspector> inpColOIs = new ArrayList<ObjectInspector>();
+
+    for (ColumnInfo inpCInfo : inputRR.getColumnInfos()) {
+      ColumnInfo cInfo = new ColumnInfo(inpCInfo);
+      String colAlias = cInfo.getAlias();
+
+      String[] tabColAlias = inputRR.reverseLookup(inpCInfo.getInternalName());
+      if (tabColAlias != null) {
+        colAlias = tabColAlias[1];
+      }
+      ASTNode inExpr = PTFTranslator.getASTNode(inpCInfo, inputRR);
+      if ( inExpr != null ) {
+        rr.putExpression(inExpr, cInfo);
+      }
+      else {
+        colAlias = colAlias == null ? cInfo.getInternalName() : colAlias;
+        rr.put(cInfo.getTabAlias(), colAlias, cInfo);
+      }
+
+      if ( !inputColNamesKnown ) {
+        evaluator.inputColumnNames.add(colAlias);
+      }
+      inpColOIs.add(cInfo.getObjectInspector());
+    }
+
+    StandardListObjectInspector pathAttrOI = ObjectInspectorFactory.getStandardListObjectInspector(
+        ObjectInspectorFactory.getStandardStructObjectInspector(evaluator.inputColumnNames, inpColOIs));
+
+    ColumnInfo pathColumn = new ColumnInfo(PATHATTR_NAME,
+        TypeInfoUtils.getTypeInfoFromObjectInspector(pathAttrOI),
+        null,
+        false, false);
+    rr.put(null, PATHATTR_NAME, pathColumn);
+
+    return rr;
+  }
+
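+  /*
+   * Build the row handed to the result select list: the current row's columns (converted to
+   * standard Java objects) followed by the 'tpath' attribute, the list of rows that matched
+   * the pattern.
+   */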
+  public static Object getSelectListInput(Object currRow, ObjectInspector rowOI,
+      PTFPartitionIterator<Object> pItr, int sz) {
+    ArrayList<Object> oRow = new ArrayList<Object>();
+    List<?> currRowAsStdObject = (List<?>) ObjectInspectorUtils
+        .copyToStandardObject(currRow, rowOI);
+    oRow.addAll(currRowAsStdObject);
+    oRow.add(getPath(currRow, rowOI, pItr, sz));
+    return oRow;
+  }
+
+  public static ArrayList<Object> getPath(Object currRow, ObjectInspector rowOI,
+      PTFPartitionIterator<Object> pItr, int sz) {
+    int idx = pItr.getIndex() - 1;
+    ArrayList<Object> path = new ArrayList<Object>();
+    path.add(ObjectInspectorUtils.copyToStandardObject(currRow, rowOI));
+    int pSz = 1;
+
+    while (pSz < sz && pItr.hasNext())
+    {
+      currRow = pItr.next();
+      path.add(ObjectInspectorUtils.copyToStandardObject(currRow, rowOI));
+      pSz++;
+    }
+    pItr.resetToIndex(idx);
+    return path;
+  }
+}

Added: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/Noop.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/Noop.java?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/Noop.java (added)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/Noop.java Sun Feb  3 21:43:10 2013
@@ -0,0 +1,78 @@
+package org.apache.hadoop.hive.ql.udf.ptf;
+
+import java.util.ArrayList;
+
+import org.apache.hadoop.hive.ql.exec.PTFPartition;
+import org.apache.hadoop.hive.ql.exec.PTFPartition.PTFPartitionIterator;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.PTFDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.TableFuncDef;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+
+public class Noop extends TableFunctionEvaluator
+{
+
+  @Override
+  public PTFPartition execute(PTFPartition iPart) throws HiveException
+  {
+    return iPart;
+  }
+
+  @Override
+  protected void execute(PTFPartitionIterator<Object> pItr, PTFPartition oPart)
+  {
+    throw new UnsupportedOperationException();
+  }
+
+  public static class NoopResolver extends TableFunctionResolver
+  {
+
+    @Override
+    protected TableFunctionEvaluator createEvaluator(PTFDef qDef, TableFuncDef tDef)
+    {
+      return new Noop();
+    }
+
+    @Override
+    public void setupOutputOI() throws SemanticException
+    {
+      StructObjectInspector OI = getEvaluator().getTableDef().getInput().getOI();
+      setOutputOI(OI);
+    }
+
+    /*
+     * (non-Javadoc)
+     * @see org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver#carryForwardNames()
+     * Setting to true is correct only for special internal Functions.
+     */
+    @Override
+    public boolean carryForwardNames() {
+      return true;
+    }
+
+    /*
+     * (non-Javadoc)
+     * @see org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver#getOutputColumnNames()
+     * Set to null only because carryForwardNames is true.
+     */
+    @Override
+    public ArrayList<String> getOutputColumnNames() {
+      return null;
+    }
+
+    @Override
+    public boolean transformsRawInput()
+    {
+      return false;
+    }
+
+    @Override
+    public void initializeOutputOI() throws HiveException {
+      setupOutputOI();
+
+    }
+
+  }
+
+}
\ No newline at end of file

Added: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NoopWithMap.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NoopWithMap.java?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NoopWithMap.java (added)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/NoopWithMap.java Sun Feb  3 21:43:10 2013
@@ -0,0 +1,98 @@
+package org.apache.hadoop.hive.ql.udf.ptf;
+
+import java.util.ArrayList;
+
+import org.apache.hadoop.hive.ql.exec.PTFPartition;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.PTFDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.TableFuncDef;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+
+public class NoopWithMap extends Noop
+{
+  @Override
+  public PTFPartition execute(PTFPartition iPart) throws HiveException
+  {
+    return iPart;
+  }
+
+  @Override
+  protected PTFPartition _transformRawInput(PTFPartition iPart) throws HiveException
+  {
+    return iPart;
+  }
+
+  public static class NoopWithMapResolver extends TableFunctionResolver
+  {
+
+    @Override
+    protected TableFunctionEvaluator createEvaluator(PTFDef qDef, TableFuncDef tDef)
+    {
+      return new NoopWithMap();
+    }
+
+    @Override
+    public void setupOutputOI() throws SemanticException
+    {
+      StructObjectInspector OI = getEvaluator().getTableDef().getInput().getOI();
+      setOutputOI(OI);
+    }
+
+    /*
+     * (non-Javadoc)
+     * @see org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver#carryForwardNames()
+     * Setting to true is correct only for special internal Functions.
+     */
+    @Override
+    public boolean carryForwardNames() {
+      return true;
+    }
+
+    /*
+     * (non-Javadoc)
+     * @see org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver#getOutputColumnNames()
+     * Set to null only because carryForwardNames is true.
+     */
+    @Override
+    public ArrayList<String> getOutputColumnNames() {
+      return null;
+    }
+
+    @Override
+    public void setupRawInputOI() throws SemanticException
+    {
+      StructObjectInspector OI = getEvaluator().getTableDef().getInput().getOI();
+      setRawInputOI(OI);
+    }
+
+    /*
+     * (non-Javadoc)
+     * @see org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver#getRawInputColumnNames()
+     * Set to null only because carryForwardNames is true.
+     */
+    @Override
+    public ArrayList<String> getRawInputColumnNames() throws SemanticException {
+      return null;
+    }
+
+    @Override
+    public boolean transformsRawInput()
+    {
+      return true;
+    }
+
+    @Override
+    public void initializeOutputOI() throws HiveException {
+      setupOutputOI();
+    }
+
+    @Override
+    public void initializeRawInputOI() throws HiveException {
+      setupRawInputOI();
+    }
+
+  }
+
+
+}

Added: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java (added)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionEvaluator.java Sun Feb  3 21:43:10 2013
@@ -0,0 +1,143 @@
+package org.apache.hadoop.hive.ql.udf.ptf;
+
+import static org.apache.hadoop.hive.ql.exec.PTFUtils.sprintf;
+
+import org.apache.hadoop.hive.ql.exec.PTFOperator;
+import org.apache.hadoop.hive.ql.exec.PTFPartition;
+import org.apache.hadoop.hive.ql.exec.PTFPartition.PTFPartitionIterator;
+import org.apache.hadoop.hive.ql.exec.PTFUtils;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.PTFDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.TableFuncDef;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+
+/**
+ * Based on Hive {@link GenericUDAFEvaluator}. Breaks up the responsibility of the old
+ * AbstractTableFunction class into a Resolver and an Evaluator.
+ * <p>
+ * The Evaluator also holds onto the {@link TableFuncDef}. This provides information
+ * about the arguments to the function, the shape of the Input partition and the Partitioning details.
+ * The Evaluator is responsible for providing the 2 execute methods:
+ * <ol>
+ * <li><b>execute:</b> invoked after the input is partitioned; the contract is, it is given an
+ * input Partition and must return an output Partition. The shape of the output Partition is
+ * obtained from the getOutputOI call.
+ * <li><b>transformRawInput:</b> invoked when this function indicates that it will transform the
+ * raw input before it is fed through the partitioning mechanics. Again the contract is, it is
+ * given an input Partition and must return a Partition. The shape of that Partition is obtained
+ * from the getRawInputOI() call.
+ * </ol>
+ *
+ */
+public abstract class TableFunctionEvaluator
+{
+  transient protected StructObjectInspector OI;
+  transient protected StructObjectInspector rawInputOI;
+  protected TableFuncDef tDef;
+  protected PTFDef qDef;
+  String partitionClass;
+  int partitionMemSize;
+  boolean transformsRawInput;
+
+  static{
+    PTFUtils.makeTransient(TableFunctionEvaluator.class, "OI");
+    PTFUtils.makeTransient(TableFunctionEvaluator.class, "rawInputOI");
+  }
+
+
+  public StructObjectInspector getOutputOI()
+  {
+    return OI;
+  }
+
+  protected void setOutputOI(StructObjectInspector outputOI)
+  {
+    OI = outputOI;
+  }
+
+  public TableFuncDef getTableDef()
+  {
+    return tDef;
+  }
+
+  public void setTableDef(TableFuncDef tDef)
+  {
+    this.tDef = tDef;
+  }
+
+  protected PTFDef getQueryDef()
+  {
+    return qDef;
+  }
+
+  protected void setQueryDef(PTFDef qDef)
+  {
+    this.qDef = qDef;
+  }
+
+  public String getPartitionClass()
+  {
+    return partitionClass;
+  }
+
+  public void setPartitionClass(String partitionClass)
+  {
+    this.partitionClass = partitionClass;
+  }
+
+  public int getPartitionMemSize()
+  {
+    return partitionMemSize;
+  }
+
+  public void setPartitionMemSize(int partitionMemSize)
+  {
+    this.partitionMemSize = partitionMemSize;
+  }
+
+  public StructObjectInspector getRawInputOI()
+  {
+    return rawInputOI;
+  }
+
+  protected void setRawInputOI(StructObjectInspector rawInputOI)
+  {
+    this.rawInputOI = rawInputOI;
+  }
+
+  public boolean isTransformsRawInput() {
+    return transformsRawInput;
+  }
+
+  public void setTransformsRawInput(boolean transformsRawInput) {
+    this.transformsRawInput = transformsRawInput;
+  }
+
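+  /*
+   * Template method: connect any lead/lag functions to the partition iterator, create the
+   * output partition with this function's output shape, and delegate the row processing to
+   * the subclass's execute(pItr, outP).
+   */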
+  public PTFPartition execute(PTFPartition iPart)
+      throws HiveException
+  {
+    PTFPartitionIterator<Object> pItr = iPart.iterator();
+    PTFOperator.connectLeadLagFunctionsToPartition(qDef, pItr);
+    PTFPartition outP = new PTFPartition(getPartitionClass(),
+        getPartitionMemSize(), tDef.getSerde(), OI);
+    execute(pItr, outP);
+    return outP;
+  }
+
+  protected abstract void execute(PTFPartitionIterator<Object> pItr, PTFPartition oPart) throws HiveException;
+
+  public PTFPartition transformRawInput(PTFPartition iPart) throws HiveException
+  {
+    if ( !isTransformsRawInput())
+    {
+      throw new HiveException(sprintf("Internal Error: mapExecute called on function (%s)that has no Map Phase", tDef.getName()));
+    }
+    return _transformRawInput(iPart);
+  }
+
+  protected PTFPartition _transformRawInput(PTFPartition iPart) throws HiveException
+  {
+    return null;
+  }
+}

Added: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java (added)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/TableFunctionResolver.java Sun Feb  3 21:43:10 2013
@@ -0,0 +1,198 @@
+package org.apache.hadoop.hive.ql.udf.ptf;
+
+import java.util.ArrayList;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.PTFDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.TableFuncDef;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+
+/**
+ * Based on Hive {@link GenericUDAFResolver}. Breaks up the responsibility of the
+ * old AbstractTableFunction class into a Resolver and an Evaluator.
+ * The Resolver is responsible for:
+ * <ol>
+ * <li> setting up the {@link TableFunctionEvaluator}
+ * <li> setting up the raw and output ObjectInspectors of the Evaluator.
+ * <li> The Evaluator also holds onto the {@link TableFuncDef}. This provides information
+ * about the arguments to the function, the shape of the Input partition and the Partitioning details.
+ * </ol>
+ * The Resolver for a function is obtained from the {@link FunctionRegistry}. The Resolver is initialized
+ * by the following 4-step process:
+ * <ol>
+ * <li> The initialize method is called, passing in the {@link PTFDef} and the {@link TableFuncDef}.
+ * <li> The resolver is then asked to set up the Raw ObjectInspector. This is only required if the Function reshapes
+ * the raw input.
+ * <li> Once the Resolver has had a chance to compute the shape of the Raw Input that is fed to the partitioning
+ * machinery, the translator sets up the partitioning details on the tableFuncDef.
+ * <li> Finally the resolver is asked to set up the output ObjectInspector.
+ * </ol>
+ */
+@SuppressWarnings("deprecation")
+public abstract class TableFunctionResolver
+{
+  TableFunctionEvaluator evaluator;
+  PTFDef qDef;
+
+  /*
+   * - called during translation.
+   * - invokes createEvaluator which must be implemented by a subclass
+   * - sets up the evaluator with references to the TableDef, PartitionClass, PartitionMemSize and
+   *   the transformsRawInput boolean.
+   */
+  public void initialize(PTFDef qDef, TableFuncDef tDef)
+      throws SemanticException
+  {
+    this.qDef = qDef;
+    HiveConf cfg = qDef.getTranslationInfo().getHiveCfg();
+    String partitionClass = HiveConf.getVar(cfg, ConfVars.HIVE_PTF_PARTITION_PERSISTENCE_CLASS);
+    int partitionMemSize = HiveConf.getIntVar(cfg, ConfVars.HIVE_PTF_PARTITION_PERSISTENT_SIZE);
+
+    evaluator = createEvaluator(qDef, tDef);
+    evaluator.setTransformsRawInput(transformsRawInput());
+    evaluator.setTableDef(tDef);
+    evaluator.setQueryDef(qDef);
+    evaluator.setPartitionClass(partitionClass);
+    evaluator.setPartitionMemSize(partitionMemSize);
+
+  }
+
+  /*
+   * called during deserialization of a QueryDef during runtime.
+   */
+  public void initialize(PTFDef qDef, TableFuncDef tDef, TableFunctionEvaluator evaluator)
+      throws HiveException
+  {
+    this.evaluator = evaluator;
+    this.qDef = qDef;
+    evaluator.setTableDef(tDef);
+    evaluator.setQueryDef(qDef);
+  }
+
+  public TableFunctionEvaluator getEvaluator()
+  {
+    return evaluator;
+  }
+
+  /*
+   * - a subclass must provide this method.
+   * - this method is invoked during translation and also when the Operator is initialized during runtime.
+   * - a subclass must use this call to set up the shape of its output.
+   * - subsequent to this call, a call to getOutputOI on the {@link TableFunctionEvaluator} must return the OI
+   * of the output of this function.
+   */
+  public abstract void setupOutputOI() throws SemanticException;
+
+  /*
+   * A PTF Function must provide the 'external' names of the columns in its Output.
+   *
+   */
+  public abstract ArrayList<String> getOutputColumnNames() throws SemanticException;
+
+
+  /**
+   * This method is invoked at runtime (during deserialization of the QueryDef).
+   * At this point the TableFunction can assume that the {@link ExprNodeDesc Expression Nodes}
+   * exist for all the Defs (ArgDef, ColumnDef, WindowDef...). It is the responsibility of
+   * the TableFunction to construct the {@link ExprNodeEvaluator evaluators} and set up the OI.
+   *
+   * @throws HiveException
+   */
+  public abstract void initializeOutputOI() throws HiveException;
+
+  /*
+   * - Called on functions that transform the raw input.
+   * - this method is invoked during translation and also when the Operator is initialized during runtime.
+   * - a subclass must use this call to set up the shape of the raw input that is fed to the partitioning mechanics.
+   * - subsequent to this call, a call to getRawInputOI on the {@link TableFunctionEvaluator} must return the OI
+   *   of the transformed raw input.
+   */
+  public void setupRawInputOI() throws SemanticException
+  {
+    if (!transformsRawInput())
+    {
+      return;
+    }
+    throw new SemanticException(
+        "Function has map phase, must extend setupMapOI");
+  }
+
+  /*
+   * A PTF Function must provide the 'external' names of the columns in the transformed Raw Input.
+   *
+   */
+  public ArrayList<String> getRawInputColumnNames() throws SemanticException {
+    if (!transformsRawInput())
+    {
+      return null;
+    }
+    throw new SemanticException(
+        "Function transforms Raw Input; must extend getRawColumnInputNames");
+  }
+
+  /*
+   * Same responsibility as initializeOutputOI, but for the RawInput.
+   */
+  public void initializeRawInputOI() throws HiveException
+  {
+    if (!transformsRawInput())
+    {
+      return;
+    }
+    throw new HiveException(
+        "Function has map phase, must extend initializeRawInputOI");
+  }
+
+  /*
+   * callback method used by subclasses to set the RawInputOI on the Evaluator.
+   */
+  protected void setRawInputOI(StructObjectInspector rawInputOI)
+  {
+    evaluator.setRawInputOI(rawInputOI);
+  }
+
+  /*
+   * callback method used by subclasses to set the OutputOI on the Evaluator.
+   */
+  protected void setOutputOI(StructObjectInspector outputOI)
+  {
+    evaluator.setOutputOI(outputOI);
+  }
+
+  public PTFDef getQueryDef()
+  {
+    return qDef;
+  }
+
+  /*
+   * This is used during translation to decide if the internalName -> alias mapping from the Input to the PTF is carried
+   * forward when building the Output RR for this PTF.
+   * It is used by the internal PTFs NOOP and WindowingTableFunction to make the names in their input available in the Output.
+   * In general this should be false, and the names used for the Output Columns must be provided by the PTF writer in the
+   * function getOutputColumnNames.
+   */
+  public boolean carryForwardNames() {
+    return false;
+  }
+
+  /*
+   * a subclass must indicate whether it will transform the raw input before it is fed through the
+   * partitioning mechanics.
+   */
+  public abstract boolean transformsRawInput();
+
+  /*
+   * a subclass must provide the {@link TableFunctionEvaluator} instance.
+   */
+  protected abstract TableFunctionEvaluator createEvaluator(PTFDef qDef,
+      TableFuncDef tDef);
+}

Added: hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java (added)
+++ hive/branches/ptf-windowing/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java Sun Feb  3 21:43:10 2013
@@ -0,0 +1,505 @@
+package org.apache.hadoop.hive.ql.udf.ptf;
+
+import java.util.AbstractList;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.PTFOperator;
+import org.apache.hadoop.hive.ql.exec.PTFPartition;
+import org.apache.hadoop.hive.ql.exec.PTFPartition.PTFPartitionIterator;
+import org.apache.hadoop.hive.ql.exec.WindowFunctionInfo;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.PTFSpec.WindowFrameSpec.BoundarySpec;
+import org.apache.hadoop.hive.ql.parse.PTFSpec.WindowFrameSpec.Direction;
+import org.apache.hadoop.hive.ql.parse.PTFSpec.WindowFunctionSpec;
+import org.apache.hadoop.hive.ql.parse.PTFTranslator;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.PTFDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.ArgDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.SelectDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.TableFuncDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.WindowFrameDef.BoundaryDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.WindowFrameDef.CurrentRowDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.WindowFrameDef.RangeBoundaryDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.WindowFrameDef.ValueBoundaryDef;
+import org.apache.hadoop.hive.ql.plan.PTFDef.WindowFunctionDef;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+
+public class WindowingTableFunction extends TableFunctionEvaluator
+{
+  ArrayList<WindowFunctionDef> wFnDefs;
+
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  @Override
+  public void execute(PTFPartitionIterator<Object> pItr, PTFPartition outP) throws HiveException
+  {
+    ArrayList<List<?>> oColumns = new ArrayList<List<?>>();
+    PTFPartition iPart = pItr.getPartition();
+    StructObjectInspector inputOI;
+    try {
+      inputOI = (StructObjectInspector) iPart.getSerDe().getObjectInspector();
+    } catch (SerDeException se) {
+      throw new HiveException(se);
+    }
+
+    for(WindowFunctionDef wFn : wFnDefs)
+    {
+      boolean processWindow = wFn.getWindow() != null && wFn.getWindow().getWindow() != null;
+      pItr.reset();
+      if ( !processWindow )
+      {
+        GenericUDAFEvaluator fEval = wFn.getEvaluator();
+        Object[] args = new Object[wFn.getArgs() == null ? 0 : wFn.getArgs().size()];
+        AggregationBuffer aggBuffer = fEval.getNewAggregationBuffer();
+        while(pItr.hasNext())
+        {
+          Object row = pItr.next();
+          int i =0;
+          if ( wFn.getArgs() != null ) {
+            for(ArgDef arg : wFn.getArgs())
+            {
+              args[i++] = arg.getExprEvaluator().evaluate(row);
+            }
+          }
+          fEval.aggregate(aggBuffer, args);
+        }
+        Object out = fEval.evaluate(aggBuffer);
+        WindowFunctionInfo wFnInfo = FunctionRegistry.getWindowFunctionInfo(wFn.getSpec().getName());
+        if ( !wFnInfo.isPivotResult())
+        {
+          out = new SameList(iPart.size(), out);
+        }
+        oColumns.add((List<?>)out);
+      }
+      else
+      {
+        oColumns.add(executeFnwithWindow(getQueryDef(), wFn, iPart));
+      }
+    }
+
+    /*
+     * Output Columns in the following order
+     * - the columns representing the output from Window Fns
+     * - the input Rows columns
+     */
+
+    for(int i=0; i < iPart.size(); i++)
+    {
+      ArrayList oRow = new ArrayList();
+      Object iRow = iPart.getAt(i);
+
+      for(int j=0; j < oColumns.size(); j++)
+      {
+        oRow.add(oColumns.get(j).get(i));
+      }
+
+      for(StructField f : inputOI.getAllStructFieldRefs())
+      {
+        oRow.add(inputOI.getStructFieldData(iRow, f));
+      }
+
+      outP.append(oRow);
+    }
+  }
+
+  public static class WindowingTableFunctionResolver extends TableFunctionResolver
+  {
+
+    @Override
+    protected TableFunctionEvaluator createEvaluator(PTFDef qDef, TableFuncDef tDef)
+    {
+
+      return new WindowingTableFunction();
+    }
+
+    @Override
+    public void setupOutputOI() throws SemanticException
+    {
+      ArrayList<WindowFunctionDef> wFnDefs = new ArrayList<WindowFunctionDef>();
+      PTFDef qDef = getQueryDef();
+      SelectDef select = qDef.getSelectList();
+      ArrayList<WindowFunctionSpec> wFnSpecs = qDef.getSpec().getSelectList().getWindowFuncs();
+      ArrayList<String> aliases = new ArrayList<String>();
+      ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
+
+      /*
+       * Setup the columns in the OI in the following order
+       * - the columns representing the Window Fns
+       * - the columns from the input
+       * Why?
+       * - during translation the input contains Virtual columns that are not present during runtime
+       * - this messes with the Column Numbers (and hence internal Names) if we add the columns in a different order.
+       */
+
+      for(WindowFunctionSpec wFnS : wFnSpecs)
+      {
+          WindowFunctionDef wFnDef = PTFTranslator.translate(qDef, getEvaluator().getTableDef(), wFnS);
+          WindowFunctionInfo wFnInfo = FunctionRegistry.getWindowFunctionInfo(wFnS.getName());
+          wFnDefs.add(wFnDef);
+          aliases.add(wFnS.getAlias());
+          if ( wFnInfo.isPivotResult())
+          {
+            ListObjectInspector lOI = (ListObjectInspector) wFnDef.getOI();
+            fieldOIs.add(lOI.getListElementObjectInspector());
+          }
+          else
+          {
+            fieldOIs.add(wFnDef.getOI());
+          }
+      }
+
+      PTFTranslator.addInputColumnsToList(qDef, getEvaluator().getTableDef(), aliases, fieldOIs);
+
+      select.setWindowFuncs(wFnDefs);
+      WindowingTableFunction wTFn = (WindowingTableFunction) getEvaluator();
+      wTFn.wFnDefs = wFnDefs;
+
+      StructObjectInspector OI = ObjectInspectorFactory.getStandardStructObjectInspector(aliases, fieldOIs);
+      setOutputOI(OI);
+    }
+
+    /*
+     * Setup the OI based on the:
+     * - Input TableDef's columns
+     * - the Window Functions.
+     */
+    @Override
+    public void initializeOutputOI() throws HiveException
+    {
+      PTFDef qDef = getQueryDef();
+      TableFuncDef tblFuncDef = evaluator.getTableDef();
+      WindowingTableFunction wTFn = (WindowingTableFunction) tblFuncDef.getFunction();
+      ArrayList<WindowFunctionDef> wFnDefs = qDef.getSelectList().getWindowFuncs();
+      ArrayList<String> aliases = new ArrayList<String>();
+      ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
+
+      /*
+       * Setup the columns in the OI in the following order
+       * - the columns representing the Window Fns
+       * - the columns from the input
+       * Why?
+       * - during translation the input contains Virtual columns that are not present during runtime
+       * - this messes with the Column Numbers (and hence internal Names) if we add the columns in a different order.
+       */
+
+      for (WindowFunctionDef wFnDef : wFnDefs) {
+        WindowFunctionSpec wFnS = wFnDef.getSpec();
+        WindowFunctionInfo wFnInfo = FunctionRegistry.getWindowFunctionInfo(wFnS.getName());
+        aliases.add(wFnS.getAlias());
+        if ( wFnInfo.isPivotResult())
+        {
+          ListObjectInspector lOI = (ListObjectInspector) wFnDef.getOI();
+          fieldOIs.add(lOI.getListElementObjectInspector());
+        }
+        else
+        {
+          fieldOIs.add(wFnDef.getOI());
+        }
+
+      }
+      PTFTranslator.addInputColumnsToList(qDef, getEvaluator().getTableDef(), aliases, fieldOIs);
+
+      wTFn.wFnDefs = wFnDefs;
+      StructObjectInspector OI = ObjectInspectorFactory.getStandardStructObjectInspector(aliases, fieldOIs);
+      setOutputOI(OI);
+    }
+
+
+    @Override
+    public boolean transformsRawInput()
+    {
+      return false;
+    }
+
+    /*
+     * (non-Javadoc)
+     * @see org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver#carryForwardNames()
+     * Setting to true is correct only for special internal Functions.
+     */
+    @Override
+    public boolean carryForwardNames() {
+      return true;
+    }
+
+    /*
+     * (non-Javadoc)
+     * @see org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver#getOutputNames()
+     * Set to null only because carryForwardNames is true.
+     */
+    @Override
+    public ArrayList<String> getOutputColumnNames() {
+      return null;
+    }
+
+  }
+
+  static ArrayList<Object> executeFnwithWindow(PTFDef qDef, WindowFunctionDef wFnDef, PTFPartition iPart)
+    throws HiveException
+  {
+    ArrayList<Object> vals = new ArrayList<Object>();
+
+    GenericUDAFEvaluator fEval = wFnDef.getEvaluator();
+    Object[] args = new Object[wFnDef.getArgs().size()];
+    for(int i=0; i < iPart.size(); i++)
+    {
+      AggregationBuffer aggBuffer = fEval.getNewAggregationBuffer();
+      Range rng = getRange(wFnDef, i, iPart);
+      PTFPartitionIterator<Object> rItr = rng.iterator();
+      PTFOperator.connectLeadLagFunctionsToPartition(qDef, rItr);
+      while(rItr.hasNext())
+      {
+        Object row = rItr.next();
+        int j = 0;
+        for(ArgDef arg : wFnDef.getArgs())
+        {
+          args[j++] = arg.getExprEvaluator().evaluate(row);
+        }
+        fEval.aggregate(aggBuffer, args);
+      }
+      Object out = fEval.evaluate(aggBuffer);
+      out = ObjectInspectorUtils.copyToStandardObject(out, wFnDef.getOI());
+      vals.add(out);
+    }
+    return vals;
+  }
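+  /*
+   * Sketch of the loop above (illustrative): for
+   *   sum(p_size) over (rows between 2 preceding and 2 following)
+   * each row i fills a fresh AggregationBuffer from the rows returned by
+   * getRange(wFnDef, i, iPart) - at most 5 rows here - and the evaluated
+   * result becomes vals.get(i).
+   */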
+
+  static Range getRange(WindowFunctionDef wFnDef, int currRow, PTFPartition p) throws HiveException
+  {
+    BoundaryDef startB = wFnDef.getWindow().getWindow().getStart();
+    BoundaryDef endB = wFnDef.getWindow().getWindow().getEnd();
+
+    int start = getIndex(startB, currRow, p, false);
+    int end = getIndex(endB, currRow, p, true);
+
+    return new Range(start, end, p);
+  }
+
+  static int getIndex(BoundaryDef bDef, int currRow, PTFPartition p, boolean end) throws HiveException
+  {
+    if ( bDef instanceof CurrentRowDef)
+    {
+      return currRow + (end ? 1 : 0);
+    }
+    else if ( bDef instanceof RangeBoundaryDef)
+    {
+      RangeBoundaryDef rbDef = (RangeBoundaryDef) bDef;
+      int amt = rbDef.getAmt();
+
+      if ( amt == BoundarySpec.UNBOUNDED_AMOUNT )
+      {
+        return rbDef.getDirection() == Direction.PRECEDING ? 0 : p.size();
+      }
+
+      amt = rbDef.getDirection() == Direction.PRECEDING ?  -amt : amt;
+      int idx = currRow + amt;
+      idx = idx < 0 ? 0 : (idx > p.size() ? p.size() : idx);
+      return idx + (end && idx < p.size() ? 1 : 0);
+    }
+    else
+    {
+      ValueBoundaryScanner vbs = ValueBoundaryScanner.getScanner((ValueBoundaryDef)bDef);
+      return vbs.computeBoundaryRange(currRow, p);
+    }
+  }
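+  /*
+   * Worked example (illustrative): for ROWS BETWEEN 2 PRECEDING AND 2 FOLLOWING
+   * at currRow = 5 in a partition of size 10:
+   *   start = getIndex(2 PRECEDING, 5, p, false) = 5 - 2     = 3
+   *   end   = getIndex(2 FOLLOWING, 5, p, true)  = 5 + 2 + 1 = 8
+   * so Range(3, 8) covers rows 3..7: the current row plus two on each side,
+   * with the end index exclusive.
+   */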
+
+  static class Range
+  {
+    int start;
+    int end;
+    PTFPartition p;
+
+    public Range(int start, int end, PTFPartition p)
+    {
+      super();
+      this.start = start;
+      this.end = end;
+      this.p = p;
+    }
+
+    public PTFPartitionIterator<Object> iterator()
+    {
+      return p.range(start, end);
+    }
+  }
+
+  /*
+   * - starting from the given rowIdx scan in the given direction until a row's expr
+   * evaluates to an amt that crosses the 'amt' threshold specified in the ValueBoundaryDef.
+   */
+  static abstract class ValueBoundaryScanner
+  {
+    ValueBoundaryDef bndDef;
+
+    public ValueBoundaryScanner(ValueBoundaryDef bndDef)
+    {
+      this.bndDef = bndDef;
+    }
+
+    /*
+     * return the other end of the Boundary
+     * - when scanning backwards: go back until you reach a row where the
+     * startingValue - rowValue >= amt
+     * - when scanning forward: go forward until you reach a row where the
+     * rowValue - startingValue >= amt
+     */
+    public int computeBoundaryRange(int rowIdx, PTFPartition p) throws HiveException
+    {
+      int r = rowIdx;
+      Object rowValue = computeValue(p.getAt(r));
+      int amt = bndDef.getAmt();
+
+      if ( amt == BoundarySpec.UNBOUNDED_AMOUNT )
+      {
+        return bndDef.getDirection() == Direction.PRECEDING ? 0 : p.size();
+      }
+
+      Direction d = bndDef.getDirection();
+      boolean scanNext = rowValue != null;
+      while ( scanNext )
+      {
+        if ( d == Direction.PRECEDING ) {
+          r = r - 1;
+        }
+        else {
+          r = r + 1;
+        }
+
+        if ( r < 0 || r >= p.size() )
+        {
+          scanNext = false;
+          break;
+        }
+
+        Object currVal = computeValue(p.getAt(r));
+        if ( currVal == null )
+        {
+          scanNext = false;
+          break;
+        }
+
+        switch(d)
+        {
+        case PRECEDING:
+          scanNext = !isGreater(rowValue, currVal, amt);
+          break;
+        case FOLLOWING:
+          scanNext = !isGreater(currVal, rowValue, amt);
+          break;
+        case CURRENT:
+        default:
+          break;
+        }
+      }
+      /*
+       * if moving backwards, r ends on a row that failed the range test; so increment r
+       * so that the Range starts from a row where the test succeeds.
+       * When moving forward, leave r as is: the Range's end value should be the
+       * first row idx not in the Range (the end is exclusive).
+       */
+      if ( d == Direction.PRECEDING ) {
+        r = r + 1;
+      }
+      r = r < 0 ? 0 : (r >= p.size() ? p.size() : r);
+      return r;
+    }
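+    /*
+     * Worked example (illustrative): for RANGE p_size 5 PRECEDING at a row with
+     * p_size = 20, the loop steps backwards while 20 - p_size(curr) < 5, stops on
+     * the first row where the gap is >= 5 (or on a null value / partition start),
+     * then increments r so the returned index is the first row inside the window.
+     */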
+
+    public Object computeValue(Object row) throws HiveException
+    {
+      Object o = bndDef.getExprEvaluator().evaluate(row);
+      return ObjectInspectorUtils.copyToStandardObject(o, bndDef.getOI());
+    }
+
+    public abstract boolean isGreater(Object v1, Object v2, int amt);
+
+
+    public static ValueBoundaryScanner getScanner(ValueBoundaryDef vbDef)
+    {
+      PrimitiveObjectInspector pOI = (PrimitiveObjectInspector) vbDef.getOI();
+      switch(pOI.getPrimitiveCategory())
+      {
+      case BYTE:
+      case INT:
+      case LONG:
+      case SHORT:
+      case TIMESTAMP:
+        return new LongValueBoundaryScanner(vbDef);
+      case DOUBLE:
+      case FLOAT:
+        return new DoubleValueBoundaryScanner(vbDef);
+      }
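+      // Unsupported categories (e.g. STRING, DECIMAL) yield null here;
+      // value boundaries presuppose a numeric or timestamp sort key.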
+      return null;
+    }
+  }
+
+  public static class LongValueBoundaryScanner extends ValueBoundaryScanner
+  {
+    public LongValueBoundaryScanner(ValueBoundaryDef bndDef)
+    {
+      super(bndDef);
+    }
+
+    @Override
+    public boolean isGreater(Object v1, Object v2, int amt)
+    {
+      long l1 = PrimitiveObjectInspectorUtils.getLong(v1,
+          (PrimitiveObjectInspector) bndDef.getOI());
+      long l2 = PrimitiveObjectInspectorUtils.getLong(v2,
+          (PrimitiveObjectInspector) bndDef.getOI());
+      return (l1 - l2) >= amt;
+    }
+  }
+
+  public static class DoubleValueBoundaryScanner extends ValueBoundaryScanner
+  {
+    public DoubleValueBoundaryScanner(ValueBoundaryDef bndDef)
+    {
+      super(bndDef);
+    }
+
+    @Override
+    public boolean isGreater(Object v1, Object v2, int amt)
+    {
+      double d1 = PrimitiveObjectInspectorUtils.getDouble(v1,
+          (PrimitiveObjectInspector) bndDef.getOI());
+      double d2 = PrimitiveObjectInspectorUtils.getDouble(v2,
+          (PrimitiveObjectInspector) bndDef.getOI());
+      return (d1 - d2) >= amt;
+    }
+  }
+
+  public static class SameList<E> extends AbstractList<E>
+  {
+    int sz;
+    E val;
+
+    public SameList(int sz, E val)
+    {
+      this.sz = sz;
+      this.val = val;
+    }
+
+    @Override
+    public E get(int index)
+    {
+      return val;
+    }
+
+    @Override
+    public int size()
+    {
+      return sz;
+    }
+
+  }
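+  /*
+   * Usage sketch (illustrative): a per-partition constant can be exposed as a
+   * column without materializing N copies, e.g.
+   *   List<Object> col = new SameList<Object>(iPart.size(), constVal);
+   * get(i) returns the same value for every index.
+   */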
+
+}

Added: hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q (added)
+++ hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_AggrFuncsWithNoGBYNoPartDef.q Sun Feb  3 21:43:10 2013
@@ -0,0 +1,20 @@
+DROP TABLE part;
+
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+
+-- testAggrFuncsWithNoGBYNoPartDef
+select p_mfgr, 
+sum(p_retailprice) as s1  
+from part;
\ No newline at end of file

Added: hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_AmbiguousWindowDefn.q
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_AmbiguousWindowDefn.q?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_AmbiguousWindowDefn.q (added)
+++ hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_AmbiguousWindowDefn.q Sun Feb  3 21:43:10 2013
@@ -0,0 +1,28 @@
+DROP TABLE part;
+
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+
+-- testAmbiguousWindowDefn
+select p_mfgr, p_name, p_size, 
+sum(p_size) as s1 over (w1), 
+sum(p_size) as s2 over (w2),
+sum(p_size) as s3 over (w3) 
+from part 
+distribute by p_mfgr 
+sort by p_mfgr 
+window w1 as rows between 2 preceding and 2 following, 
+       w2 as rows between unbounded preceding and current row, 
+       w3 as w3;
+

Added: hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_DuplicateWindowAlias.q
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_DuplicateWindowAlias.q?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_DuplicateWindowAlias.q (added)
+++ hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_DuplicateWindowAlias.q Sun Feb  3 21:43:10 2013
@@ -0,0 +1,24 @@
+DROP TABLE part;
+
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+
+-- testDuplicateWindowAlias
+select p_mfgr, p_name, p_size, 
+sum(p_size) as s1 over (w1), 
+sum(p_size) as s2 over (w2) 
+from part 
+window w1 as distribute by p_mfgr  sort by p_mfgr rows between 2 preceding and 2 following, 
+       w2 as w1, 
+       w2 as rows between unbounded preceding and current row; 

Added: hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_HavingLead.q
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_HavingLead.q?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_HavingLead.q (added)
+++ hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_HavingLead.q Sun Feb  3 21:43:10 2013
@@ -0,0 +1,22 @@
+DROP TABLE part;
+
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+
+-- testHavingLead
+select  p_mfgr,p_name, p_size 
+from part 
+having lead(p_size, 1) <= p_size 
+distribute by p_mfgr 
+sort by p_name;
\ No newline at end of file

Added: hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithPTF.q
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithPTF.q?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithPTF.q (added)
+++ hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_HavingLeadWithPTF.q Sun Feb  3 21:43:10 2013
@@ -0,0 +1,24 @@
+DROP TABLE part;
+
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+
+-- testHavingLeadWithPTF
+select  p_mfgr,p_name, p_size 
+from noop(part 
+distribute by p_mfgr 
+sort by p_name) 
+having lead(p_size, 1) <= p_size 
+distribute by p_mfgr 
+sort by p_name;   
\ No newline at end of file

Added: hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleDistributeClause.q
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleDistributeClause.q?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleDistributeClause.q (added)
+++ hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleDistributeClause.q Sun Feb  3 21:43:10 2013
@@ -0,0 +1,23 @@
+DROP TABLE part;
+
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+
+-- testIncompatibleDistributeClause 
+select  p_mfgr,p_name, p_size,  
+rank() as r, denserank() as dr,  
+sum(p_size) as s over (w1)  
+from part  
+distribute by p_mfgr  
+window w1 as distribute by p_name rows between 2 preceding and 2 following;
\ No newline at end of file

Added: hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleOrderInWindowDefs.q
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleOrderInWindowDefs.q?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleOrderInWindowDefs.q (added)
+++ hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleOrderInWindowDefs.q Sun Feb  3 21:43:10 2013
@@ -0,0 +1,25 @@
+DROP TABLE part;
+
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+
+-- testIncompatibleOrderInWindowDefs
+select p_mfgr, p_name, p_size, 
+sum(p_size) as s1 over (w1), 
+sum(p_size) as s2 over (w2) 
+from part 
+distribute by p_mfgr 
+sort by p_mfgr 
+window w1 as distribute by p_mfgr sort by p_mfgr rows between 2 preceding and 2 following, 
+       w2 as distribute by p_mfgr sort by p_name rows between unbounded preceding and current row; 

Added: hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatiblePartitionInWindowDefs.q
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatiblePartitionInWindowDefs.q?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatiblePartitionInWindowDefs.q (added)
+++ hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatiblePartitionInWindowDefs.q Sun Feb  3 21:43:10 2013
@@ -0,0 +1,25 @@
+DROP TABLE part;
+
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+
+-- testIncompatiblePartitionInWindowDefs
+select p_mfgr, p_name, p_size, 
+sum(p_size) as s1 over (w1), 
+sum(p_size) as s2 over (w2) 
+from part 
+distribute by p_mfgr 
+sort by p_mfgr 
+window w1 as distribute by p_mfgr sort by p_mfgr rows between 2 preceding and 2 following, 
+       w2 as distribute by p_name sort by p_name rows between unbounded preceding and current row; 

Added: hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleSortClause.q
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleSortClause.q?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleSortClause.q (added)
+++ hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_IncompatibleSortClause.q Sun Feb  3 21:43:10 2013
@@ -0,0 +1,23 @@
+DROP TABLE part;
+
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+
+-- testIncompatibleSortClause 
+select  p_mfgr,p_name, p_size,  
+rank() as r, denserank() as dr,  
+sum(p_size) as s over (w1)  
+from part  
+distribute by p_mfgr  
+window w1 as distribute by p_mfgr sort by p_name  rows between 2 preceding and 2 following;
\ No newline at end of file

Added: hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_InvalidValueBoundary.q
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_InvalidValueBoundary.q?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_InvalidValueBoundary.q (added)
+++ hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_InvalidValueBoundary.q Sun Feb  3 21:43:10 2013
@@ -0,0 +1,24 @@
+DROP TABLE part;
+
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+
+-- testInvalidValueBoundary
+select  p_mfgr,p_name, p_size,   
+sum(p_size) as s over (w1) ,    
+denserank() as dr  
+from part  
+distribute by p_mfgr  
+sort by p_name  
+window w1 as range between p_name 2 less and current row;
\ No newline at end of file

Added: hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_JoinWithAmbigousAlias.q
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_JoinWithAmbigousAlias.q?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_JoinWithAmbigousAlias.q (added)
+++ hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_JoinWithAmbigousAlias.q Sun Feb  3 21:43:10 2013
@@ -0,0 +1,22 @@
+DROP TABLE part;
+
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+
+-- testJoinWithAmbigousAlias
+select abc.* 
+from noop(part 
+distribute by p_mfgr 
+sort by p_name 
+) abc join part on abc.p_partkey = p1.p_partkey;
\ No newline at end of file

Added: hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_NoSortNoDistByClause.q
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_NoSortNoDistByClause.q?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_NoSortNoDistByClause.q (added)
+++ hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_NoSortNoDistByClause.q Sun Feb  3 21:43:10 2013
@@ -0,0 +1,21 @@
+DROP TABLE part;
+
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+
+-- testNoSortNoDistByClause 
+select  p_mfgr,p_name, p_size,  
+rank() as r, denserank() as dr  
+from part  
+window w1 as rows between 2 preceding and 2 following;
\ No newline at end of file

Added: hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_WhereLead.q
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_WhereLead.q?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_WhereLead.q (added)
+++ hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_WhereLead.q Sun Feb  3 21:43:10 2013
@@ -0,0 +1,22 @@
+DROP TABLE part;
+
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+
+-- testWhereLead
+select  p_mfgr,p_name, p_size 
+from part 
+where lead(p_size, 1) <= p_size 
+distribute by p_mfgr 
+sort by p_name;

Added: hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_WhereWithRankCond.q
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_WhereWithRankCond.q?rev=1441972&view=auto
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_WhereWithRankCond.q (added)
+++ hive/branches/ptf-windowing/ql/src/test/queries/clientnegative/ptf_negative_WhereWithRankCond.q Sun Feb  3 21:43:10 2013
@@ -0,0 +1,23 @@
+DROP TABLE part;
+
+CREATE TABLE part( 
+    p_partkey INT,
+    p_name STRING,
+    p_mfgr STRING,
+    p_brand STRING,
+    p_type STRING,
+    p_size INT,
+    p_container STRING,
+    p_retailprice DOUBLE,
+    p_comment STRING
+);
+
+LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+
+-- testWhereWithRankCond
+select  p_mfgr,p_name, p_size, 
+rank() as r 
+from part 
+where r < 4 
+distribute by p_mfgr 
+sort by p_mfgr;


