hadoop-hive-commits mailing list archives

From na...@apache.org
Subject svn commit: r770837 - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test/queries/clientnegative/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientnegative/ ql/src/test/results/clientpositive/ ql/src/test/res...
Date Fri, 01 May 2009 22:06:30 GMT
Author: namit
Date: Fri May  1 22:06:30 2009
New Revision: 770837

URL: http://svn.apache.org/viewvc?rev=770837&view=rev
Log:
HIVE-420. Support regular expressions for column names
(Zheng Shao via namit)
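
With this change, a backquoted column name that contains anything other than letters, digits, and '_' is treated as a Java regular expression and expanded to all matching columns. A minimal usage sketch, using queries drawn from the regex_col.q test added in this commit (it assumes the standard srcpart test table with columns key, value, ds, hr):

    -- expands to the two-character columns ds and hr
    SELECT `..` FROM srcpart;

    -- expands to every column except ds and hr
    SELECT `(ds|hr)?+.+` FROM srcpart;

Matching is case-insensitive and must cover the whole column name; a pattern that matches no column is rejected during semantic analysis (see the new clientnegative tests below).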


Added:
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_1.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_2.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_groupby.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/input22.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/regex_col.q
    hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_groupby.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/input22.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/regex_col.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/test/results/clientpositive/join22.q.out
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml
    hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=770837&r1=770836&r2=770837&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Fri May  1 22:06:30 2009
@@ -24,6 +24,9 @@
     HIVE-352. Column-based storage format RCFile.
     (Yongqiang He via zshao)
 
+    HIVE-420. Support regular expressions for column names
+    (Zheng Shao via namit)
+
   IMPROVEMENTS
     HIVE-389. Option to build without ivy (jssarma)
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=770837&r1=770836&r2=770837&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Fri May  1 22:06:30 2009
@@ -1199,6 +1199,8 @@
 RPAREN : ')' ;
 LSQUARE : '[' ;
 RSQUARE : ']' ;
+LCURLY : '{';
+RCURLY : '}';
 
 EQUAL : '=';
 NOTEQUAL : '<>';
@@ -1217,6 +1219,8 @@
 TILDE : '~';
 BITWISEOR : '|';
 BITWISEXOR : '^';
+QUESTION : '?';
+DOLLAR : '$';
 
 // LITERALS
 fragment
@@ -1241,6 +1245,14 @@
     'e' ( PLUS|MINUS )? (Digit)+
     ;
 
+fragment
+RegexComponent
+    : 'a'..'z' | 'A'..'Z' | '0'..'9' | '_'
+    | PLUS | STAR | QUESTION | MINUS | DOT
+    | LPAREN | RPAREN | LSQUARE | RSQUARE | LCURLY | RCURLY
+    | BITWISEXOR | BITWISEOR | DOLLAR
+    ;
+
 StringLiteral
     :
     ( '\'' (~'\'')* '\'' | '\"' (~'\"')* '\"' )+
@@ -1260,7 +1272,7 @@
 Identifier
     :
     (Letter | Digit) (Letter | Digit | '_')*
-    | '`' (Letter | Digit) (Letter | Digit | '_')* '`'
+    | '`' RegexComponent+ '`'
     ;
 
 CharSetName

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=770837&r1=770836&r2=770837&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri May  1 22:06:30 2009
@@ -33,6 +33,8 @@
 import java.util.Set;
 import java.util.TreeSet;
 import java.util.Vector;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.Path;
@@ -1012,7 +1014,7 @@
   }
 
   @SuppressWarnings("nls")
-  private Integer genColList(String tabAlias, String alias, ASTNode sel,
+  private Integer genColListRegex(String colRegex, String tabAlias, String alias, ASTNode sel,
     ArrayList<exprNodeDesc> col_list, RowResolver input, Integer pos,
     RowResolver output) throws SemanticException {
 
@@ -1021,7 +1023,14 @@
       throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(sel));
     
     // TODO: Have to put in the support for AS clause
-
+    Pattern regex = null;
+    try {
+      regex = Pattern.compile(colRegex, Pattern.CASE_INSENSITIVE);
+    } catch (PatternSyntaxException e) {
+      throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(sel, e.getMessage()));
+    }
+    
+    int matched = 0;
     // This is the tab.* case
     // In this case add all the columns to the fieldList
     // from the input schema
@@ -1034,14 +1043,23 @@
         continue;
       }
  
+      // Not matching the regex?
+      if (!regex.matcher(tmp[1]).matches()) {
+        continue;
+      }
+      
       exprNodeColumnDesc expr = new exprNodeColumnDesc(colInfo.getType(), name);
       col_list.add(expr);
       output.put(tmp[0], tmp[1], new ColumnInfo(pos.toString(), colInfo.getType()));
       pos = Integer.valueOf(pos.intValue() + 1);
+      matched ++;
+    }
+    if (matched == 0) {
+      throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(sel));
     }
     return pos;
   }
-
+  
   /**
    * If the user script command needs any modifications - do it here
    */
@@ -1194,6 +1212,20 @@
     return colRef;
   }
   
+  /**
+   * Returns whether the pattern is a regex expression (instead of a normal string).
+   * Normal string is a string with all alphabets/digits and "_".
+   */
+  private static boolean isRegex(String pattern) {
+    for(int i=0; i<pattern.length(); i++) {
+      if (!Character.isLetterOrDigit(pattern.charAt(i))
+          && pattern.charAt(i) != '_') {
+        return true;
+      }
+    }
+    return false;    
+  }
+  
   @SuppressWarnings("nls")
   private Operator genSelectPlan(String dest, QB qb,
     Operator input) throws SemanticException {
@@ -1206,72 +1238,88 @@
     String alias = qb.getParseInfo().getAlias();
     Integer pos = Integer.valueOf(0);
     RowResolver inputRR = opParseCtx.get(input).getRR();
+    // SELECT * or SELECT TRANSFORM(*)
     boolean selectStar = false;
+
+    boolean isInTransform = (selExprList.getChild(0).getChild(0).getType() 
+        == HiveParser.TOK_TRANSFORM);
+    if (isInTransform) {
+      trfm = (ASTNode) selExprList.getChild(0).getChild(0);
+    }
     
+    // The list of expressions after SELECT or SELECT TRANSFORM.
+    ASTNode exprList = (isInTransform ? (ASTNode) trfm.getChild(0) : selExprList);
+
     LOG.debug("genSelectPlan: input = " + inputRR.toString());
-    // Iterate over the selects
-    for (int i = 0; i < selExprList.getChildCount(); ++i) {
+    // Iterate over all expression (either after SELECT, or in SELECT TRANSFORM)
+    for (int i = 0; i < exprList.getChildCount(); ++i) {
 
-      // list of the columns
-      ASTNode selExpr = (ASTNode) selExprList.getChild(i);
-      String[] colRef = getColAlias(selExpr, "_C" + i, inputRR);
-      String colAlias = colRef[1];
-      String tabAlias = colRef[0];
-      ASTNode sel = (ASTNode)selExpr.getChild(0);
+      // child can be EXPR AS ALIAS, or EXPR.
+      ASTNode child = (ASTNode) exprList.getChild(i);
+      boolean hasAsClause = (!isInTransform) && (child.getChildCount() == 2);
+      // The real expression
+      ASTNode expr;
+      String tabAlias;
+      String colAlias;
       
-      if (sel.getToken().getType() == HiveParser.TOK_ALLCOLREF) {
+      if (isInTransform) {
         tabAlias = null;
-        if (sel.getChildCount() == 1)
-          tabAlias = unescapeIdentifier(sel.getChild(0).getText().toLowerCase());
-        pos = genColList(tabAlias, alias, sel, col_list, inputRR, pos, out_rwsch);
+        colAlias = "_C" + i;
+        expr = child;
+      } else {
+        String[] colRef = getColAlias(child, "_C" + i, inputRR);
+        tabAlias = colRef[0];
+        colAlias = colRef[1];
+        // Get rid of TOK_SELEXPR
+        expr = (ASTNode)child.getChild(0);
+      }
+       
+      if (expr.getType() == HiveParser.TOK_ALLCOLREF) {
+        pos = genColListRegex(".*", 
+            expr.getChildCount() == 0 ? null : unescapeIdentifier(expr.getChild(0).getText().toLowerCase()),
+            alias, expr, col_list, inputRR, pos, out_rwsch);
         selectStar = true;
-      } else if (sel.getToken().getType() == HiveParser.TOK_TRANSFORM) {
-        if (i > 0) {
-          throw new SemanticException(ErrorMsg.INVALID_TRANSFORM.getMsg(sel));
-        }
-        trfm = sel;
-        ASTNode cols = (ASTNode) trfm.getChild(0);
-        for (int j = 0; j < cols.getChildCount(); ++j) {
-          ASTNode expr = (ASTNode) cols.getChild(j);
-          if (expr.getToken().getType() == HiveParser.TOK_ALLCOLREF) {
-            tabAlias = null;
-            if (sel.getChildCount() == 1)
-              tabAlias = unescapeIdentifier(sel.getChild(0).getText().toLowerCase());
-
-            pos = genColList(tabAlias, alias, expr, col_list, inputRR, pos, out_rwsch);
-            selectStar = true;
-          } else {
-            exprNodeDesc exp = genExprNodeDesc(expr, inputRR);
-            col_list.add(exp);
-            if (!StringUtils.isEmpty(alias) &&
-                (out_rwsch.get(null, colAlias) != null)) {
-              throw new SemanticException(ErrorMsg.AMBIGUOUS_COLUMN.getMsg(expr.getChild(1)));
-            }
-
-            out_rwsch.put(tabAlias, unescapeIdentifier(expr.getText()),
-                          new ColumnInfo((Integer.valueOf(pos)).toString(),
-                                         exp.getTypeInfo()));
-          }
-        }
+      } else if (expr.getType() == HiveParser.TOK_TABLE_OR_COL
+          && !hasAsClause
+          && !inputRR.getIsExprResolver()
+          && isRegex(unescapeIdentifier(expr.getChild(0).getText()))) {
+        // In case the expression is a regex COL.
+        // This can only happen without AS clause
+        // We don't allow this for ExprResolver - the Group By case
+        pos = genColListRegex(unescapeIdentifier(expr.getChild(0).getText()),
+            null, alias, expr, col_list, inputRR, pos, out_rwsch);
+      } else if (expr.getType() == HiveParser.DOT
+          && expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL
+          && inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0).getChild(0).getText().toLowerCase()))
+          && !hasAsClause
+          && !inputRR.getIsExprResolver()
+          && isRegex(unescapeIdentifier(expr.getChild(1).getText()))) {
+        // In case the expression is TABLE.COL (col can be regex).
+        // This can only happen without AS clause
+        // We don't allow this for ExprResolver - the Group By case
+        pos = genColListRegex(unescapeIdentifier(expr.getChild(1).getText()), 
+            unescapeIdentifier(expr.getChild(0).getChild(0).getText().toLowerCase()),
+            alias, expr, col_list, inputRR, pos, out_rwsch);
       } else {
         // Case when this is an expression
-        exprNodeDesc exp = genExprNodeDesc(sel, inputRR);
+        exprNodeDesc exp = genExprNodeDesc(expr, inputRR);
         col_list.add(exp);
         if (!StringUtils.isEmpty(alias) &&
             (out_rwsch.get(null, colAlias) != null)) {
-          throw new SemanticException(ErrorMsg.AMBIGUOUS_COLUMN.getMsg(sel.getChild(1)));
+          throw new SemanticException(ErrorMsg.AMBIGUOUS_COLUMN.getMsg(expr.getChild(1)));
         }
-        // Since the as clause is lacking we just use the text representation
-        // of the expression as the column name
         out_rwsch.put(tabAlias, colAlias,
                       new ColumnInfo((Integer.valueOf(pos)).toString(),
                                      exp.getTypeInfo()));
+        pos = Integer.valueOf(pos.intValue() + 1);
       }
-      pos = Integer.valueOf(pos.intValue() + 1);
     }
+    selectStar = selectStar && exprList.getChildCount() == 1;
 
+    
     Map<String, exprNodeDesc> colExprMap = new HashMap<String, exprNodeDesc>();
     for (int i=0; i<col_list.size(); i++) {
+      // Replace NULL with CAST(NULL AS STRING)
       if (col_list.get(i) instanceof exprNodeNullDesc) {
         col_list.set(i, new exprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, null));
       }
@@ -1279,11 +1327,11 @@
     }
     
     Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
-        new selectDesc(col_list, (selExprList.getChildCount() == 1) && selectStar), new RowSchema(out_rwsch.getColumnInfos()),
+        new selectDesc(col_list, selectStar), new RowSchema(out_rwsch.getColumnInfos()),
         input), out_rwsch);
 
     output.setColumnExprMap(colExprMap);
-    if (trfm != null) {
+    if (isInTransform) {
       output = genScriptPlan(trfm, qb, output);
     }
 

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_1.q?rev=770837&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_1.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_1.q Fri May  1 22:06:30 2009
@@ -0,0 +1,2 @@
+EXPLAIN
+SELECT `+++` FROM srcpart;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_2.q?rev=770837&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_2.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_2.q Fri May  1 22:06:30 2009
@@ -0,0 +1,2 @@
+EXPLAIN
+SELECT `.a.` FROM srcpart;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_groupby.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_groupby.q?rev=770837&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_groupby.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/regex_col_groupby.q Fri May  1 22:06:30 2009
@@ -0,0 +1,2 @@
+EXPLAIN
+SELECT `..`, count(1) FROM srcpart GROUP BY `..`;

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input22.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input22.q?rev=770837&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input22.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input22.q Fri May  1 22:06:30 2009
@@ -0,0 +1,15 @@
+CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE;
+LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE INPUT4;
+
+EXPLAIN
+SELECT a.KEY2
+FROM (SELECT INPUT4.*, INPUT4.KEY as KEY2
+      FROM INPUT4) a
+ORDER BY KEY2 LIMIT 10;
+
+SELECT a.KEY2
+FROM (SELECT INPUT4.*, INPUT4.KEY as KEY2
+      FROM INPUT4) a
+ORDER BY KEY2 LIMIT 10;
+
+DROP TABLE INPUT4;

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/regex_col.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/regex_col.q?rev=770837&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/regex_col.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/regex_col.q Fri May  1 22:06:30 2009
@@ -0,0 +1,35 @@
+EXPLAIN
+SELECT * FROM srcpart;
+
+EXPLAIN
+SELECT `..` FROM srcpart;
+
+EXPLAIN
+SELECT srcpart.`..` FROM srcpart;
+
+EXPLAIN
+SELECT `..` FROM srcpart a JOIN srcpart b
+ON a.key = b.key AND a.value = b.value;
+
+EXPLAIN
+SELECT b.`..` FROM srcpart a JOIN srcpart b
+ON a.key = b.key AND a.hr = b.hr AND a.ds = b.ds AND a.key = 103
+ORDER BY ds, hr;
+
+SELECT b.`..` FROM srcpart a JOIN srcpart b
+ON a.key = b.key AND a.hr = b.hr AND a.ds = b.ds AND a.key = 103
+ORDER BY ds, hr;
+
+EXPLAIN
+SELECT `.e.` FROM srcpart;
+
+EXPLAIN
+SELECT `d.*` FROM srcpart;
+
+EXPLAIN
+SELECT `(ds)?+.+` FROM srcpart;
+
+EXPLAIN
+SELECT `(ds|hr)?+.+` FROM srcpart ORDER BY key, value LIMIT 10;
+
+SELECT `(ds|hr)?+.+` FROM srcpart ORDER BY key, value LIMIT 10;

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_1.q.out?rev=770837&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_1.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_1.q.out Fri May  1 22:06:30 2009
@@ -0,0 +1,3 @@
+FAILED: Error in semantic analysis: line 2:7 Invalid Column Reference `+++`: Dangling meta character '+' near index 0
++++
+^

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_2.q.out?rev=770837&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_2.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_2.q.out Fri May  1 22:06:30 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 2:7 Invalid Column Reference `.a.`

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_groupby.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_groupby.q.out?rev=770837&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_groupby.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/regex_col_groupby.q.out Fri May  1 22:06:30 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: line 2:44 Invalid Table Alias or Column Reference `..`

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/input22.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input22.q.out?rev=770837&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input22.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input22.q.out Fri May  1 22:06:30 2009
@@ -0,0 +1,58 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF INPUT4)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF INPUT4)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL INPUT4) KEY) KEY2)))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) KEY2))) (TOK_ORDERBY (TOK_TABLE_OR_COL KEY2)) (TOK_LIMIT 10)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        a:input4 
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+                    expr: key
+                    type: string
+              Select Operator
+                expressions:
+                      expr: 2
+                      type: string
+                Reduce Output Operator
+                  key expressions:
+                        expr: 0
+                        type: string
+                  sort order: +
+                  tag: -1
+                  value expressions:
+                        expr: 0
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          Limit
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+
+
+0
+0
+0
+10
+100
+100
+103
+103
+104
+104

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/join22.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/join22.q.out?rev=770837&r1=770836&r2=770837&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/join22.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/join22.q.out Fri May  1 22:06:30 2009
@@ -71,11 +71,11 @@
         $INTNAME 
           Reduce Output Operator
             key expressions:
-                  expr: 3
+                  expr: 2
                   type: string
             sort order: +
             Map-reduce partition columns:
-                  expr: 3
+                  expr: 2
                   type: string
             tag: 1
             value expressions:
@@ -83,9 +83,9 @@
                   type: string
                   expr: 1
                   type: string
-                  expr: 3
+                  expr: 2
                   type: string
-                  expr: 4
+                  expr: 3
                   type: string
         src5:src4 
             Reduce Output Operator

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/regex_col.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/regex_col.q.out?rev=770837&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/regex_col.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/regex_col.q.out Fri May  1 22:06:30 2009
@@ -0,0 +1,486 @@
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL `..`)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        srcpart 
+            Select Operator
+              expressions:
+                    expr: ds
+                    type: string
+                    expr: hr
+                    type: string
+              Select Operator
+                expressions:
+                      expr: 0
+                      type: string
+                      expr: 1
+                      type: string
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL srcpart) `..`)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        srcpart 
+            Select Operator
+              expressions:
+                    expr: ds
+                    type: string
+                    expr: hr
+                    type: string
+              Select Operator
+                expressions:
+                      expr: 0
+                      type: string
+                      expr: 1
+                      type: string
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF srcpart a) (TOK_TABREF srcpart b) (AND (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (= (. (TOK_TABLE_OR_COL a) value) (. (TOK_TABLE_OR_COL b) value))))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL `..`)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        b 
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              sort order: ++
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              tag: 1
+              value expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+                    expr: ds
+                    type: string
+                    expr: hr
+                    type: string
+        a 
+            Reduce Output Operator
+              key expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              sort order: ++
+              Map-reduce partition columns:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              tag: 0
+              value expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+                    expr: ds
+                    type: string
+                    expr: hr
+                    type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE.0} {VALUE.1} {VALUE.2} {VALUE.3}
+            1 {VALUE.0} {VALUE.1} {VALUE.2} {VALUE.3}
+          Select Operator
+            expressions:
+                  expr: 2
+                  type: string
+                  expr: 3
+                  type: string
+                  expr: 6
+                  type: string
+                  expr: 7
+                  type: string
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF srcpart a) (TOK_TABREF srcpart b) (AND (AND (AND (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)) (= (. (TOK_TABLE_OR_COL a) hr) (. (TOK_TABLE_OR_COL b) hr))) (= (. (TOK_TABLE_OR_COL a) ds) (. (TOK_TABLE_OR_COL b) ds))) (= (. (TOK_TABLE_OR_COL a) key) 103)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) `..`))) (TOK_ORDERBY (TOK_TABLE_OR_COL ds) (TOK_TABLE_OR_COL hr))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        b 
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: hr
+                    type: string
+                    expr: ds
+                    type: string
+              Reduce Output Operator
+                key expressions:
+                      expr: 0
+                      type: string
+                      expr: 1
+                      type: string
+                      expr: 2
+                      type: string
+                sort order: +++
+                Map-reduce partition columns:
+                      expr: 0
+                      type: string
+                      expr: 1
+                      type: string
+                      expr: 2
+                      type: string
+                tag: 1
+                value expressions:
+                      expr: 0
+                      type: string
+                      expr: 1
+                      type: string
+                      expr: 2
+                      type: string
+        a 
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: hr
+                    type: string
+                    expr: ds
+                    type: string
+              Filter Operator
+                predicate:
+                    expr: (UDFToDouble(0) = UDFToDouble(103))
+                    type: boolean
+                Reduce Output Operator
+                  key expressions:
+                        expr: 0
+                        type: string
+                        expr: 1
+                        type: string
+                        expr: 2
+                        type: string
+                  sort order: +++
+                  Map-reduce partition columns:
+                        expr: 0
+                        type: string
+                        expr: 1
+                        type: string
+                        expr: 2
+                        type: string
+                  tag: 0
+                  value expressions:
+                        expr: 0
+                        type: string
+                        expr: 1
+                        type: string
+                        expr: 2
+                        type: string
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 {VALUE.0} {VALUE.1} {VALUE.2}
+            1 {VALUE.0} {VALUE.1} {VALUE.2}
+          Select Operator
+            expressions:
+                  expr: 4
+                  type: string
+                  expr: 5
+                  type: string
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  name: binary_table
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        /data/users/zshao/tools/420-trunk-apache-hive/build/ql/tmp/53871021/461403503.10002 
+          Reduce Output Operator
+            key expressions:
+                  expr: 1
+                  type: string
+                  expr: 0
+                  type: string
+            sort order: ++
+            tag: -1
+            value expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: string
+      Reduce Operator Tree:
+        Extract
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+11	2008-04-08
+11	2008-04-08
+11	2008-04-08
+11	2008-04-08
+12	2008-04-08
+12	2008-04-08
+12	2008-04-08
+12	2008-04-08
+11	2008-04-09
+11	2008-04-09
+11	2008-04-09
+11	2008-04-09
+12	2008-04-09
+12	2008-04-09
+12	2008-04-09
+12	2008-04-09
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL `.e.`)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        srcpart 
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+              Select Operator
+                expressions:
+                      expr: 0
+                      type: string
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL `d.*`)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        srcpart 
+            Select Operator
+              expressions:
+                    expr: ds
+                    type: string
+              Select Operator
+                expressions:
+                      expr: 0
+                      type: string
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL `(ds)?+.+`)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        srcpart 
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+                    expr: hr
+                    type: string
+              Select Operator
+                expressions:
+                      expr: 0
+                      type: string
+                      expr: 1
+                      type: string
+                      expr: 2
+                      type: string
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL `(ds|hr)?+.+`))) (TOK_ORDERBY (TOK_TABLE_OR_COL key) (TOK_TABLE_OR_COL value)) (TOK_LIMIT 10)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        srcpart 
+            Select Operator
+              expressions:
+                    expr: key
+                    type: string
+                    expr: value
+                    type: string
+              Select Operator
+                expressions:
+                      expr: 0
+                      type: string
+                      expr: 1
+                      type: string
+                Reduce Output Operator
+                  key expressions:
+                        expr: 0
+                        type: string
+                        expr: 1
+                        type: string
+                  sort order: ++
+                  tag: -1
+                  value expressions:
+                        expr: 0
+                        type: string
+                        expr: 1
+                        type: string
+      Reduce Operator Tree:
+        Extract
+          Limit
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+
+
+0	val_0
+0	val_0
+0	val_0
+0	val_0
+0	val_0
+0	val_0
+0	val_0
+0	val_0
+0	val_0
+0	val_0

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml?rev=770837&r1=770836&r2=770837&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml Fri May  1 22:06:30 2009
@@ -523,7 +523,7 @@
                     <void method="add"> 
                      <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                       <void property="internalName"> 
-                       <string>0</string> 
+                       <string>1</string> 
                       </void> 
                       <void property="type"> 
                        <object idref="PrimitiveTypeInfo1"/> 
@@ -626,7 +626,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
+       <string>file:/data/users/zshao/tools/420-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>tmap:src</string> 
@@ -638,7 +638,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
+       <string>file:/data/users/zshao/tools/420-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object class="java.util.LinkedHashMap"/> 
@@ -694,7 +694,7 @@
             </void> 
             <void method="put"> 
              <string>location</string> 
-             <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
+             <string>file:/data/users/zshao/tools/420-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
             </void> 
            </object> 
           </void> 
@@ -724,7 +724,7 @@
                   <void property="conf"> 
                    <object class="org.apache.hadoop.hive.ql.plan.fileSinkDesc"> 
                     <void property="dirName"> 
-                     <string>/data/users/zshao/tools/deploy-trunk-apache-hive/ql/../build/ql/tmp/681046781.10001.insclause-0</string> 
+                     <string>/data/users/zshao/tools/420-trunk-apache-hive/ql/../build/ql/tmp/130550747.10001.insclause-0</string> 
                     </void> 
                     <void property="tableInfo"> 
                      <object class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -931,7 +931,7 @@
               <void method="add"> 
                <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                 <void property="internalName"> 
-                 <string>0</string> 
+                 <string>1</string> 
                 </void> 
                 <void property="type"> 
                  <object idref="PrimitiveTypeInfo0"/> 

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml?rev=770837&r1=770836&r2=770837&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml Fri May  1 22:06:30 2009
@@ -31,7 +31,7 @@
              <boolean>true</boolean> 
             </void> 
             <void property="sourceDir"> 
-             <string>/data/users/zshao/tools/deploy-trunk-apache-hive/ql/../build/ql/tmp/295555796/271523094.10000.insclause-0</string> 
+             <string>/data/users/zshao/tools/420-trunk-apache-hive/ql/../build/ql/tmp/578168945/1186078520.10000.insclause-0</string> 
             </void> 
             <void property="table"> 
              <object id="tableDesc0" class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -84,7 +84,7 @@
                 </void> 
                 <void method="put"> 
                  <string>location</string> 
-                 <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/dest1</string> 
+                 <string>file:/data/users/zshao/tools/420-trunk-apache-hive/build/ql/test/data/warehouse/dest1</string> 
                 </void> 
                </object> 
               </void> 
@@ -94,7 +94,7 @@
              </object> 
             </void> 
             <void property="tmpDir"> 
-             <string>/data/users/athusoo/commits/hive_trunk_ws9/ql/../build/ql/tmp/258804439/588812485.10001</string> 
+             <string>/data/users/zshao/tools/420-trunk-apache-hive/ql/../build/ql/tmp/578168945/1186078520.10001</string> 
             </void> 
            </object> 
           </void> 
@@ -442,7 +442,7 @@
                 <void method="add"> 
                  <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                   <void property="internalName"> 
-                   <string>0</string> 
+                   <string>1</string> 
                   </void> 
                   <void property="type"> 
                    <object idref="PrimitiveTypeInfo0"/> 
@@ -498,7 +498,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
+       <string>file:/data/users/zshao/tools/420-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>tmap:src</string> 
@@ -510,7 +510,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
+       <string>file:/data/users/zshao/tools/420-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object class="java.util.LinkedHashMap"/> 
@@ -566,7 +566,7 @@
             </void> 
             <void method="put"> 
              <string>location</string> 
-             <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
+             <string>file:/data/users/zshao/tools/420-trunk-apache-hive/build/ql/test/data/warehouse/src</string> 
             </void> 
            </object> 
           </void> 
@@ -599,7 +599,7 @@
                      <int>1</int> 
                     </void> 
                     <void property="dirName"> 
-                     <string>/data/users/zshao/tools/deploy-trunk-apache-hive/ql/../build/ql/tmp/295555796/271523094.10000.insclause-0</string> 
+                     <string>/data/users/zshao/tools/420-trunk-apache-hive/ql/../build/ql/tmp/578168945/1186078520.10000.insclause-0</string> 
                     </void> 
                     <void property="tableInfo"> 
                      <object idref="tableDesc0"/> 

Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml?rev=770837&r1=770836&r2=770837&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml Fri May  1 22:06:30 2009
@@ -31,7 +31,7 @@
              <boolean>true</boolean> 
             </void> 
             <void property="sourceDir"> 
-             <string>/data/users/zshao/tools/deploy-trunk-apache-hive/ql/../build/ql/tmp/173509724.10000.insclause-0</string> 
+             <string>/data/users/zshao/tools/420-trunk-apache-hive/ql/../build/ql/tmp/1158416232.10000.insclause-0</string> 
             </void> 
             <void property="table"> 
              <object id="tableDesc0" class="org.apache.hadoop.hive.ql.plan.tableDesc"> 
@@ -84,7 +84,7 @@
                 </void> 
                 <void method="put"> 
                  <string>location</string> 
-                 <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/dest1</string> 
+                 <string>file:/data/users/zshao/tools/420-trunk-apache-hive/build/ql/test/data/warehouse/dest1</string> 
                 </void> 
                </object> 
               </void> 
@@ -94,7 +94,7 @@
              </object> 
             </void> 
             <void property="tmpDir"> 
-             <string>/data/users/athusoo/commits/hive_trunk_ws9/ql/../build/ql/tmp/142551517.10001</string> 
+             <string>/data/users/zshao/tools/420-trunk-apache-hive/ql/../build/ql/tmp/1158416232.10001</string> 
             </void> 
            </object> 
           </void> 
@@ -458,7 +458,7 @@
                     <void method="add"> 
                      <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
                       <void property="internalName"> 
-                       <string>0</string> 
+                       <string>1</string> 
                       </void> 
                       <void property="type"> 
                        <object idref="ListTypeInfo0"/> 
@@ -632,7 +632,7 @@
     <void property="pathToAliases"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
+       <string>file:/data/users/zshao/tools/420-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
        <object class="java.util.ArrayList"> 
         <void method="add"> 
          <string>tmap:src_thrift</string> 
@@ -644,7 +644,7 @@
     <void property="pathToPartitionInfo"> 
      <object class="java.util.LinkedHashMap"> 
       <void method="put"> 
-       <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
+       <string>file:/data/users/zshao/tools/420-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
        <object class="org.apache.hadoop.hive.ql.plan.partitionDesc"> 
         <void property="partSpec"> 
          <object class="java.util.LinkedHashMap"/> 
@@ -704,7 +704,7 @@
             </void> 
             <void method="put"> 
              <string>location</string> 
-             <string>file:/data/users/zshao/tools/deploy-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
+             <string>file:/data/users/zshao/tools/420-trunk-apache-hive/build/ql/test/data/warehouse/src_thrift</string> 
             </void> 
            </object> 
           </void> 
@@ -733,7 +733,7 @@
                  <int>1</int> 
                 </void> 
                 <void property="dirName"> 
-                 <string>/data/users/zshao/tools/deploy-trunk-apache-hive/ql/../build/ql/tmp/173509724.10000.insclause-0</string> 
+                 <string>/data/users/zshao/tools/420-trunk-apache-hive/ql/../build/ql/tmp/1158416232.10000.insclause-0</string> 
                 </void> 
                 <void property="tableInfo"> 
                  <object idref="tableDesc0"/> 


