hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From na...@apache.org
Subject svn commit: r813545 - in /hadoop/hive/trunk: ./ data/files/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/apache/hadoop/hive/ql/plan/ ql/src/test/queries/clientnegative/ ql/src/test/queries/cli...
Date Thu, 10 Sep 2009 18:59:05 GMT
Author: namit
Date: Thu Sep 10 18:59:04 2009
New Revision: 813545

URL: http://svn.apache.org/viewvc?rev=813545&view=rev
Log:
HIVE-591. Add Unique Join. (Emil Ibrishimov via namit)


Added:
    hadoop/hive/trunk/data/files/T1.txt
    hadoop/hive/trunk/data/files/T2.txt
    hadoop/hive/trunk/data/files/T3.txt
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin2.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin3.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/uniquejoin.q
    hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/uniquejoin.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinCond.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinType.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=813545&r1=813544&r2=813545&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Thu Sep 10 18:59:04 2009
@@ -37,6 +37,8 @@
 
     HIVE-687. Add UDF unhex. (Mihir Kedia via namit)
 
+    HIVE-591. Add Unique Join. (Emil Ibrishimov via namit)
+
   IMPROVEMENTS
 
     HIVE-760. Add version info to META-INF/MANIFEST.MF.

Added: hadoop/hive/trunk/data/files/T1.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/data/files/T1.txt?rev=813545&view=auto
==============================================================================
--- hadoop/hive/trunk/data/files/T1.txt (added)
+++ hadoop/hive/trunk/data/files/T1.txt Thu Sep 10 18:59:04 2009
@@ -0,0 +1,6 @@
+111
+212
+313
+717
+818
+828

Added: hadoop/hive/trunk/data/files/T2.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/data/files/T2.txt?rev=813545&view=auto
==============================================================================
--- hadoop/hive/trunk/data/files/T2.txt (added)
+++ hadoop/hive/trunk/data/files/T2.txt Thu Sep 10 18:59:04 2009
@@ -0,0 +1,6 @@
+222
+313
+414
+515
+818
+818

Added: hadoop/hive/trunk/data/files/T3.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/data/files/T3.txt?rev=813545&view=auto
==============================================================================
--- hadoop/hive/trunk/data/files/T3.txt (added)
+++ hadoop/hive/trunk/data/files/T3.txt Thu Sep 10 18:59:04 2009
@@ -0,0 +1,4 @@
+212
+414
+616
+717

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java?rev=813545&r1=813544&r2=813545&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java Thu Sep 10 18:59:04 2009
@@ -20,6 +20,7 @@
 
 import java.io.Serializable;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
@@ -542,23 +543,77 @@
     checkAndGenObject();
   }
 
+  private void genUniqueJoinObject(int aliasNum, IntermediateObject intObj)
+               throws HiveException {
+    if (aliasNum == numAliases) {
+      int p = 0;
+      for (int i = 0; i < numAliases; i++) {
+        int sz = joinValues.get(order[i]).size();
+        ArrayList<Object> obj = intObj.getObjs()[i];
+        for (int j = 0; j < sz; j++) {
+          forwardCache[p++] = obj.get(j);
+        }
+      }
+      
+      forward(forwardCache, outputObjInspector);
+      return;
+    }
+    
+    Iterator<ArrayList<Object>> alias = storage.get(order[aliasNum]).iterator();
+    while (alias.hasNext()) {
+      intObj.pushObj(alias.next());
+      genUniqueJoinObject(aliasNum+1, intObj);
+      intObj.popObj();
+    }
+  }
+  
   protected void checkAndGenObject() throws HiveException {
-    // does any result need to be emitted
-    for (int i = 0; i < numAliases; i++) {
-      Byte alias = order[i];
-      if (storage.get(alias).iterator().hasNext() == false) {
-        if (noOuterJoin) {
-          LOG.trace("No data for alias=" + i);
-          return;
-        } else {
+    if (condn[0].getType() == joinDesc.UNIQUE_JOIN) {
+      IntermediateObject intObj = 
+                          new IntermediateObject(new ArrayList[numAliases], 0);
+      
+      // Check if results need to be emitted.
+      // Results only need to be emitted if there is a non-null entry in a table
+      // that is preserved or if there are no non-null entries
+      boolean preserve = false; // Will be true if there is a non-null entry
+                                // in a preserved table
+      boolean hasNulls = false; // Will be true if there are null entries
+      for (int i = 0; i < numAliases; i++) {
+        Byte alias = order[i];
+        Iterator<ArrayList<Object>> aliasRes = storage.get(alias).iterator();
+        if (aliasRes.hasNext() == false) {
           storage.put(alias, dummyObjVectors[i]);
+          hasNulls = true;
+        } else if(condn[i].getPreserved()) {
+          preserve = true;
         }
       }
-    }
+      
+      if (hasNulls && !preserve) {
+        return;
+      }
 
-    LOG.trace("calling genObject");
-    genObject(null, 0, new IntermediateObject(new ArrayList[numAliases], 0), true);
-    LOG.trace("called genObject");
+      LOG.trace("calling genUniqueJoinObject");
+      genUniqueJoinObject(0, new IntermediateObject(new ArrayList[numAliases], 0));
+      LOG.trace("called genUniqueJoinObject");
+    } else {
+      // does any result need to be emitted
+      for (int i = 0; i < numAliases; i++) {
+        Byte alias = order[i];
+        if (storage.get(alias).iterator().hasNext() == false) {
+          if (noOuterJoin) {
+            LOG.trace("No data for alias=" + i);
+            return;
+          } else {
+            storage.put(alias, dummyObjVectors[i]);
+          }
+        }
+      }
+      
+      LOG.trace("calling genObject");
+      genObject(null, 0, new IntermediateObject(new ArrayList[numAliases], 0), true);
+      LOG.trace("called genObject");
+    }
   }
 
   /**

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=813545&r1=813544&r2=813545&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Thu Sep 10 18:59:04 2009
@@ -65,6 +65,7 @@
 TOK_LEFTOUTERJOIN;
 TOK_RIGHTOUTERJOIN;
 TOK_FULLOUTERJOIN;
+TOK_UNIQUEJOIN;
 TOK_LOAD;
 TOK_NULL;
 TOK_ISNULL;
@@ -833,11 +834,28 @@
 joinSource
 @init { msgs.push("join source"); }
 @after { msgs.pop(); }
-    :
-    fromSource
-    ( joinToken^ fromSource (KW_ON! expression)? )*
+    : fromSource ( joinToken^ fromSource (KW_ON! expression)? )*
+    | uniqueJoinToken^ uniqueJoinSource (COMMA! uniqueJoinSource)+
+    ;
+
+uniqueJoinSource
+@init { msgs.push("join source"); }
+@after { msgs.pop(); }
+    : KW_PRESERVE? fromSource uniqueJoinExpr 
     ;
 
+uniqueJoinExpr
+@init { msgs.push("unique join expression list"); }
+@after { msgs.pop(); }
+    : LPAREN e1+=expression (COMMA e1+=expression)* RPAREN 
+      -> ^(TOK_EXPLIST $e1*)
+    ;
+
+uniqueJoinToken
+@init { msgs.push("unique join"); }
+@after { msgs.pop(); }
+    : KW_UNIQUEJOIN -> TOK_UNIQUEJOIN;
+
 joinToken
 @init { msgs.push("join type specifier"); }
 @after { msgs.pop(); }
@@ -1246,6 +1264,8 @@
 KW_INSERT : 'INSERT';
 KW_OVERWRITE : 'OVERWRITE';
 KW_OUTER : 'OUTER';
+KW_UNIQUEJOIN : 'UNIQUEJOIN';
+KW_PRESERVE : 'PRESERVE';
 KW_JOIN : 'JOIN';
 KW_LEFT : 'LEFT';
 KW_RIGHT : 'RIGHT';

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=813545&r1=813544&r2=813545&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Thu Sep 10 18:59:04 2009
@@ -20,6 +20,7 @@
 
 import java.io.Serializable;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Formatter;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -38,6 +39,7 @@
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -412,7 +414,8 @@
     if ((node.getToken().getType() == HiveParser.TOK_JOIN) ||
         (node.getToken().getType() == HiveParser.TOK_LEFTOUTERJOIN) ||
         (node.getToken().getType() == HiveParser.TOK_RIGHTOUTERJOIN) ||
-        (node.getToken().getType() == HiveParser.TOK_FULLOUTERJOIN))
+        (node.getToken().getType() == HiveParser.TOK_FULLOUTERJOIN) ||
+        (node.getToken().getType() == HiveParser.TOK_UNIQUEJOIN))
       return true;
 
     return false;
@@ -421,7 +424,8 @@
   @SuppressWarnings("nls")
   private void processJoin(QB qb, ASTNode join) throws SemanticException {
     int numChildren = join.getChildCount();
-    if ((numChildren != 2) && (numChildren != 3))
+    if ((numChildren != 2) && (numChildren != 3)
+        && join.getToken().getType() != HiveParser.TOK_UNIQUEJOIN)
       throw new SemanticException("Join with multiple children");
 
     for (int num = 0; num < numChildren; num++) {
@@ -3265,6 +3269,92 @@
     return cols;
   }
   
+  private QBJoinTree genUniqueJoinTree(QB qb, ASTNode joinParseTree)
+  throws SemanticException {
+    QBJoinTree joinTree = new QBJoinTree();
+    joinTree.setNoOuterJoin(false);
+    
+    joinTree.setExpressions(new Vector<Vector<ASTNode>>());
+    joinTree.setFilters(new Vector<Vector<ASTNode>>());
+    
+    // Create joinTree structures to fill them up later
+    Vector<String> rightAliases = new Vector<String>();
+    Vector<String> leftAliases  = new Vector<String>();
+    Vector<String> baseSrc      = new Vector<String>();
+    Vector<Boolean> preserved   = new Vector<Boolean>();
+
+    boolean lastPreserved = false;
+    int cols = -1;
+    
+    for(int i = 0; i < joinParseTree.getChildCount(); i++) { 
+      ASTNode child = (ASTNode) joinParseTree.getChild(i);
+      
+      switch(child.getToken().getType()) {
+        case HiveParser.TOK_TABREF:
+          // Handle a table - populate aliases appropriately:
+          // leftAliases should contain the first table, rightAliases should
+          // contain all other tables and baseSrc should contain all tables
+          
+          String table_name = unescapeIdentifier(child.getChild(0).getText());
+          String alias = child.getChildCount() == 1 ? table_name : 
+            unescapeIdentifier(child.getChild(child.getChildCount()-1).getText().toLowerCase());
+          
+          if (i == 0) {
+            leftAliases.add(alias);
+            joinTree.setLeftAlias(alias);
+          } else {
+            rightAliases.add(alias);
+          }
+          baseSrc.add(alias);
+          
+          preserved.add(lastPreserved);
+          lastPreserved = false;
+          break;
+          
+        case HiveParser.TOK_EXPLIST:
+          if (cols == -1 && child.getChildCount() != 0) {
+            cols = child.getChildCount();
+          } else if(child.getChildCount() != cols) {
+            throw new SemanticException("Tables with different or invalid " +
+            		"number of keys in UNIQUEJOIN");
+          }
+          
+          Vector<ASTNode> expressions = new Vector<ASTNode>();
+          Vector<ASTNode> filt = new Vector<ASTNode>();
+
+          for (Node exp: child.getChildren()) {
+            expressions.add((ASTNode)exp);
+          }
+          
+          joinTree.getExpressions().add(expressions);
+          joinTree.getFilters().add(filt);
+          break;
+          
+        case HiveParser.KW_PRESERVE:
+          lastPreserved = true;
+          break;
+          
+        case HiveParser.TOK_SUBQUERY:
+          throw new SemanticException("Subqueries are not supported in UNIQUEJOIN");
+          
+        default:
+          throw new SemanticException("Unexpected UNIQUEJOIN structure");
+      }
+    }
+    
+    joinTree.setBaseSrc(baseSrc.toArray(new String[0]));
+    joinTree.setLeftAliases(leftAliases.toArray(new String[0]));
+    joinTree.setRightAliases(rightAliases.toArray(new String[0]));
+    
+    joinCond[] condn = new joinCond[preserved.size()];
+    for (int i = 0; i < condn.length; i++) {
+      condn[i] = new joinCond(preserved.get(i));
+    }
+    joinTree.setJoinCond(condn);
+    
+    return joinTree;
+  }
+  
   private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree)
       throws SemanticException {
     QBJoinTree joinTree = new QBJoinTree();
@@ -4188,10 +4278,16 @@
     // process join
     if (qb.getParseInfo().getJoinExpr() != null) {
       ASTNode joinExpr = qb.getParseInfo().getJoinExpr();
-      QBJoinTree joinTree = genJoinTree(qb, joinExpr);
-      qb.setQbJoinTree(joinTree);
-      mergeJoinTree(qb);
 
+      if (joinExpr.getToken().getType() == HiveParser.TOK_UNIQUEJOIN) {
+        QBJoinTree joinTree = genUniqueJoinTree(qb, joinExpr);
+        qb.setQbJoinTree(joinTree);
+      } else {
+        QBJoinTree joinTree = genJoinTree(qb, joinExpr);
+        qb.setQbJoinTree(joinTree);
+        mergeJoinTree(qb);
+      }
+      
       // if any filters are present in the join tree, push them on top of the table
       pushJoinFilters(qb, qb.getQbJoinTree(), aliasToOpInfo);
       srcOpInfo = genJoinPlan(qb, aliasToOpInfo);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinCond.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinCond.java?rev=813545&r1=813544&r2=813545&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinCond.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinCond.java Thu Sep 10 18:59:04 2009
@@ -18,6 +18,10 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
+import java.util.Vector;
+
+import org.apache.hadoop.hive.ql.plan.joinDesc;
+
 /**
  * Join conditions Descriptor implementation.
  * 
@@ -26,6 +30,7 @@
   private int left;
   private int right;
   private joinType joinType;
+  private boolean preserved;
 
   public joinCond() {  }
 
@@ -34,6 +39,23 @@
     this.right = right;
     this.joinType = joinType;
   }
+  
+  /**
+   * Constructor for a UNIQUEJOIN cond
+   * 
+   * @param p true if table is preserved, false otherwise
+   */
+  public joinCond(boolean p) {
+    this.joinType = org.apache.hadoop.hive.ql.parse.joinType.UNIQUE;
+    this.preserved = p;
+  }
+  
+  /**
+   * @return the true if table is preserved, false otherwise
+   */
+  public boolean getPreserved() {
+    return preserved;
+  }
 
   public int getLeft() {
     return left;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinType.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinType.java?rev=813545&r1=813544&r2=813545&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinType.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/joinType.java Thu Sep 10 18:59:04 2009
@@ -18,4 +18,4 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-public enum joinType {INNER, LEFTOUTER, RIGHTOUTER, FULLOUTER};
+public enum joinType {INNER, LEFTOUTER, RIGHTOUTER, FULLOUTER, UNIQUE};

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java?rev=813545&r1=813544&r2=813545&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinCond.java Thu Sep 10 18:59:04 2009
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import java.util.Vector;
 
 /**
  * Join conditions Descriptor implementation.
@@ -29,6 +30,7 @@
   private int left;
   private int right;
   private int type;
+  private boolean preserved;
 
   public joinCond() {}
 
@@ -39,8 +41,9 @@
   }
 
   public joinCond(org.apache.hadoop.hive.ql.parse.joinCond condn) {
-    this.left     = condn.getLeft();
-    this.right    = condn.getRight();
+    this.left       = condn.getLeft();
+    this.right      = condn.getRight();
+    this.preserved  = condn.getPreserved();
     org.apache.hadoop.hive.ql.parse.joinType itype = condn.getJoinType();
     if (itype == org.apache.hadoop.hive.ql.parse.joinType.INNER)
       this.type = joinDesc.INNER_JOIN;
@@ -50,10 +53,26 @@
       this.type = joinDesc.RIGHT_OUTER_JOIN;
     else if (itype == org.apache.hadoop.hive.ql.parse.joinType.FULLOUTER)
       this.type = joinDesc.FULL_OUTER_JOIN;
+    else if (itype == org.apache.hadoop.hive.ql.parse.joinType.UNIQUE)
+      this.type = joinDesc.UNIQUE_JOIN;
     else
       assert false;
   }
   
+  /**
+   * @return true if table is preserved, false otherwise
+   */
+  public boolean getPreserved() {
+    return this.preserved;
+  }
+  
+  /**
+   * @param preserved if table is preserved, false otherwise
+   */
+  public void setPreserved(final boolean preserved) {
+    this.preserved = preserved;
+  }
+  
   public int getLeft() {
     return this.left;
   }
@@ -95,6 +114,9 @@
     case joinDesc.RIGHT_OUTER_JOIN:
       sb.append("Right Outer Join");
       break;
+    case joinDesc.UNIQUE_JOIN:
+      sb.append("Unique Join");
+      break;
     default:
       sb.append("Unknow Join");
       break;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java?rev=813545&r1=813544&r2=813545&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/joinDesc.java Thu Sep 10 18:59:04 2009
@@ -41,6 +41,7 @@
   public static final int LEFT_OUTER_JOIN = 1;
   public static final int RIGHT_OUTER_JOIN = 2;
   public static final int FULL_OUTER_JOIN = 3;
+  public static final int UNIQUE_JOIN = 4;
 
   // alias to key mapping
   private Map<Byte, List<exprNodeDesc>> exprs;

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin.q?rev=813545&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin.q Thu Sep 10 18:59:04 2009
@@ -0,0 +1,3 @@
+FROM UNIQUEJOIN (SELECT src.key from src WHERE src.key<4) a (a.key), PRESERVE  src b(b.key)
+SELECT a.key, b.key;
+

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin2.q?rev=813545&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin2.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin2.q Thu Sep 10 18:59:04 2009
@@ -0,0 +1,3 @@
+FROM UNIQUEJOIN src a (a.key), PRESERVE src b (b.key, b.val)
+SELECT a.key, b.key;
+

Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin3.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin3.q?rev=813545&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin3.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/uniquejoin3.q Thu Sep 10 18:59:04 2009
@@ -0,0 +1,3 @@
+FROM UNIQUEJOIN src a (a.key), PRESERVE src b (b.key) JOIN src c ON c.key
+SELECT a.key;
+

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/uniquejoin.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/uniquejoin.q?rev=813545&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/uniquejoin.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/uniquejoin.q Thu Sep 10 18:59:04 2009
@@ -0,0 +1,29 @@
+CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE;
+CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE;
+CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1;
+LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2;
+LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3;
+
+FROM UNIQUEJOIN PRESERVE T1 a (a.key), PRESERVE T2 b (b.key), PRESERVE T3 c (c.key)
+SELECT a.key, b.key, c.key;
+
+FROM UNIQUEJOIN T1 a (a.key), T2 b (b.key), T3 c (c.key)
+SELECT a.key, b.key, c.key;
+
+FROM UNIQUEJOIN T1 a (a.key), T2 b (b.key-1), T3 c (c.key)
+SELECT a.key, b.key, c.key;
+
+FROM UNIQUEJOIN PRESERVE T1 a (a.key, a.val), PRESERVE T2 b (b.key, b.val), PRESERVE T3 c (c.key, c.val)
+SELECT a.key, a.val, b.key, b.val, c.key, c.val;
+
+FROM UNIQUEJOIN PRESERVE T1 a (a.key), T2 b (b.key), PRESERVE T3 c (c.key)
+SELECT a.key, b.key, c.key;
+
+FROM UNIQUEJOIN PRESERVE T1 a (a.key), T2 b(b.key)
+SELECT a.key, b.key;
+
+DROP TABLE T1;
+DROP TABLE T2;
+DROP TABLE T3;

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin.q.out?rev=813545&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin.q.out Thu Sep 10 18:59:04 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Subqueries are not supported in UNIQUEJOIN

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin2.q.out?rev=813545&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin2.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin2.q.out Thu Sep 10 18:59:04 2009
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Tables with different or invalid number of keys in UNIQUEJOIN

Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin3.q.out?rev=813545&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin3.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/uniquejoin3.q.out Thu Sep 10 18:59:04 2009
@@ -0,0 +1,2 @@
+FAILED: Parse Error: line 1:54 required (...)+ loop did not match anything at input 'JOIN' in statement
+

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/uniquejoin.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/uniquejoin.q.out?rev=813545&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/uniquejoin.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/uniquejoin.q.out Thu Sep 10 18:59:04 2009
@@ -0,0 +1,88 @@
+query: CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE
+query: CREATE TABLE T2(key STRING, val STRING) STORED AS TEXTFILE
+query: CREATE TABLE T3(key STRING, val STRING) STORED AS TEXTFILE
+query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' INTO TABLE T1
+query: LOAD DATA LOCAL INPATH '../data/files/T2.txt' INTO TABLE T2
+query: LOAD DATA LOCAL INPATH '../data/files/T3.txt' INTO TABLE T3
+query: FROM UNIQUEJOIN PRESERVE T1 a (a.key), PRESERVE T2 b (b.key), PRESERVE T3 c (c.key)
+SELECT a.key, b.key, c.key
+Input: default/t2
+Input: default/t3
+Input: default/t1
+Output: file:/data/users/emil/hive1/hive1/build/ql/tmp/278324945/10000
+1	NULL	NULL
+2	2	2
+3	3	NULL
+NULL	4	4
+NULL	5	NULL
+NULL	NULL	6
+7	NULL	7
+8	8	NULL
+8	8	NULL
+8	8	NULL
+8	8	NULL
+query: FROM UNIQUEJOIN T1 a (a.key), T2 b (b.key), T3 c (c.key)
+SELECT a.key, b.key, c.key
+Input: default/t2
+Input: default/t3
+Input: default/t1
+Output: file:/data/users/emil/hive1/hive1/build/ql/tmp/1532561203/10000
+2	2	2
+query: FROM UNIQUEJOIN T1 a (a.key), T2 b (b.key-1), T3 c (c.key)
+SELECT a.key, b.key, c.key
+Input: default/t2
+Input: default/t3
+Input: default/t1
+Output: file:/data/users/emil/hive1/hive1/build/ql/tmp/397418687/10000
+2	3	2
+7	8	7
+7	8	7
+query: FROM UNIQUEJOIN PRESERVE T1 a (a.key, a.val), PRESERVE T2 b (b.key, b.val), PRESERVE T3 c (c.key, c.val)
+SELECT a.key, a.val, b.key, b.val, c.key, c.val
+Input: default/t2
+Input: default/t3
+Input: default/t1
+Output: file:/data/users/emil/hive1/hive1/build/ql/tmp/502838517/10000
+1	11	NULL	NULL	NULL	NULL
+2	12	NULL	NULL	2	12
+NULL	NULL	2	22	NULL	NULL
+3	13	3	13	NULL	NULL
+NULL	NULL	4	14	4	14
+NULL	NULL	5	15	NULL	NULL
+NULL	NULL	NULL	NULL	6	16
+7	17	NULL	NULL	7	17
+8	18	8	18	NULL	NULL
+8	18	8	18	NULL	NULL
+8	28	NULL	NULL	NULL	NULL
+query: FROM UNIQUEJOIN PRESERVE T1 a (a.key), T2 b (b.key), PRESERVE T3 c (c.key)
+SELECT a.key, b.key, c.key
+Input: default/t2
+Input: default/t3
+Input: default/t1
+Output: file:/data/users/emil/hive1/hive1/build/ql/tmp/1705226842/10000
+1	NULL	NULL
+2	2	2
+3	3	NULL
+NULL	4	4
+NULL	NULL	6
+7	NULL	7
+8	8	NULL
+8	8	NULL
+8	8	NULL
+8	8	NULL
+query: FROM UNIQUEJOIN PRESERVE T1 a (a.key), T2 b(b.key)
+SELECT a.key, b.key
+Input: default/t2
+Input: default/t1
+Output: file:/data/users/emil/hive1/hive1/build/ql/tmp/1034075791/10000
+1	NULL
+2	2
+3	3
+7	NULL
+8	8
+8	8
+8	8
+8	8
+query: DROP TABLE T1
+query: DROP TABLE T2
+query: DROP TABLE T3



Mime
View raw message