hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From st...@apache.org
Subject svn commit: r599162 [1/4] - in /lucene/hadoop/trunk/src/contrib/hbase: ./ src/java/org/apache/hadoop/hbase/mapred/ src/java/org/apache/hadoop/hbase/shell/ src/java/org/apache/hadoop/hbase/shell/algebra/ src/java/org/apache/hadoop/hbase/shell/algebra/ge...
Date Wed, 28 Nov 2007 22:10:27 GMT
Author: stack
Date: Wed Nov 28 14:10:21 2007
New Revision: 599162

URL: http://svn.apache.org/viewvc?rev=599162&view=rev
Log:
HADOOP-1608 Relational Algebra Operators

Added:
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SaveCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SubstituteCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/VariableRef.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/VariablesPool.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Constants.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/DuplicateTable.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/ExpressionParser.jj
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/GroupingFilterMap.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IdentityFilterMap.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoin.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinMap.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinReduce.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Operation.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/OperationEvaluator.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Projection.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/RelationalOperation.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Selection.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParser.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParserConstants.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParserTokenManager.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ParseException.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/SimpleCharStream.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/Token.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/TokenMgrError.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/TestSubstitutionVariables.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestBooleanCondition.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestBooleanTermFilter.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestJoinCondition.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/algebra/TestTableJoinMapReduce.java
Modified:
    lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
    lucene/hadoop/trunk/src/contrib/hbase/build.xml
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/Parser.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParserConstants.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParserTokenManager.java

Modified: lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt?rev=599162&r1=599161&r2=599162&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt Wed Nov 28 14:10:21 2007
@@ -48,13 +48,18 @@
     HADOOP-2176 Htable.deleteAll documentation is ambiguous
     HADOOP-2139 (phase 1) Increase parallelism in region servers.
     HADOOP-2267 [Hbase Shell] Change the prompt's title from 'hbase' to 'hql'.
+                (Edward Yoon via Stack)
     HADOOP-2139 (phase 2) Make region server more event driven
     HADOOP-2289 Useless efforts of looking for the non-existant table in select
                 command.
+                (Edward Yoon via Stack)
     HADOOP-2257 Show a total of all requests and regions on the web ui
+                (Paul Saab via Stack)
     HADOOP-2261 HTable.abort no longer throws exception if there is no active update.
     HADOOP-2287 Make hbase unit tests take less time to complete.
     HADOOP-2262 Retry n times instead of n**2 times.
+    HADOOP-1608 Relational Algrebra Operators
+                (Edward Yoon via Stack)
 
 Release 0.15.1
 Branch 0.15

Modified: lucene/hadoop/trunk/src/contrib/hbase/build.xml
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/build.xml?rev=599162&r1=599161&r2=599162&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/build.xml (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/build.xml Wed Nov 28 14:10:21 2007
@@ -69,9 +69,15 @@
   <property name="hbaseshell.src.dir" 
        value="${src.dir}/org/apache/hadoop/hbase/shell" /> 
      <mkdir dir="${hbaseshell.src.dir}/generated" />
+     <mkdir dir="${hbaseshell.src.dir}/algebra/generated"/>
      <javacc
        target="${hbaseshell.src.dir}/HBaseShell.jj"
        outputdirectory="${hbaseshell.src.dir}/generated"
+       javacchome="${javacc.home}"
+     />
+     <javacc
+       target="${hbaseshell.src.dir}/algebra/ExpressionParser.jj"
+       outputdirectory="${hbaseshell.src.dir}/algebra/generated"
        javacchome="${javacc.home}"
      />
   </target>

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java?rev=599162&r1=599161&r2=599162&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java Wed Nov 28 14:10:21 2007
@@ -47,7 +47,7 @@
   public static final String GROUP_COLUMNS =
     "hbase.mapred.groupingtablemap.columns";
   
-  private Text[] m_columns;
+  protected Text[] m_columns;
 
   /** default constructor */
   public GroupingTableMap() {

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java?rev=599162&r1=599161&r2=599162&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java Wed Nov 28 14:10:21 2007
@@ -69,7 +69,7 @@
    * at the end of the column name.
    */
   protected String appendDelimiter(String column) {
-    return (!column.endsWith(FAMILY_INDICATOR))?
+    return (!column.endsWith(FAMILY_INDICATOR) && column.indexOf(FAMILY_INDICATOR) == -1)?
       column + FAMILY_INDICATOR: column;
   }
 

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj?rev=599162&r1=599161&r2=599162&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj Wed Nov 28 14:10:21 2007
@@ -33,6 +33,8 @@
 import java.io.StringReader;
 import java.io.Reader;
 import java.io.Writer;
+import java.net.URLEncoder;
+import java.io.UnsupportedEncodingException;
 
 import org.apache.hadoop.hbase.shell.*;
 
@@ -43,6 +45,7 @@
   private String QueryString;
   private TableFormatter formatter;
   private Writer out;
+  private String secondR;
   
   public Parser(final String query, final Writer o, final TableFormatter f) {
     this((Reader)(new StringReader(query)));
@@ -102,7 +105,11 @@
    | <LPAREN: "(">
    | <RPAREN: ")">
    | <EQUALS: "=">
-   | <NOTEQUAL: "<>">
+   | <LCOMP: ">">
+   | <RCOMP: "<">
+   | <NOT: "not">
+   | <IN: "in">
+   | <NOTEQUAL: "!=">
    | <ASTERISK: "*">
    | <MAX_VERSIONS: "max_versions">
    | <MAX_LENGTH: "max_length">
@@ -121,9 +128,16 @@
    | <CHANGE: "change">
 }
 
+TOKEN : 
+{
+     <SAVE: "save">
+   | <GROUP: "group">
+   | <BY: "by">
+}
+
 TOKEN : /** Literals */
 {
-   <ID: ["A"-"Z","a"-"z","_","-",".",":","/"] (["A"-"Z","a"-"z","0"-"9","_","-",".",":","/"])* >
+   <ID: ["A"-"Z","a"-"z","_","-",":","/"] (["A"-"Z","a"-"z","0"-"9","_","-",":","/"])* >
  | <INTEGER_LITERAL: (["0"-"9"])+ >
  | <FLOATING_POINT_LITERAL:
    (["0"-"9"])+ "." (["0"-"9"])+ (<EXPONENT>)?
@@ -144,7 +158,10 @@
     Command statement = null;
 }
 {
-    ([statement = cmdStatement()] ";" | <EOF>)
+    (
+      [statement = cmdStatement()] ";" | <EOF>
+    )
+  
     {
         return statement;
     }
@@ -171,6 +188,8 @@
       | cmd = clearCommand()
       | cmd = fsCommand()
       | cmd = jarCommand()
+      | cmd = substituteCommand()
+      | cmd = saveCommand()
   )
    {
        return cmd;
@@ -245,6 +264,8 @@
        | t=<CLEAR>
        | t=<FS>
        | t=<JAR>
+       | t=<GROUP>
+       | t=<SAVE>
        | t=<ID>
      ) { argument = t.image.toString(); }
    ]
@@ -263,7 +284,7 @@
 {
    <SHOW>
    [
-       argument = Identifier()
+       argument = identifier()
    ]
   { 
       show.setArgument(argument);
@@ -278,7 +299,7 @@
 }
 {
   ( <DESCRIBE> | <DESC> )
-  argument = Identifier()
+  argument = identifier()
   { 
     desc.setArgument(argument);
     return desc;
@@ -360,14 +381,14 @@
 {
   <CREATE> 
   <TABLE> 
-  table = Identifier() 
+  table = identifier() 
   { 
     createCommand.setTable(table); 
   }
   
   <LPAREN>
   
-  column = Identifier() 
+  column = identifier() 
   columnSpec = ColumnSpec()
   { 
     createCommand.addColumnSpec(column, columnSpec); 
@@ -375,7 +396,7 @@
   
   (
      <COMMA> 
-      column = Identifier() 
+      column = identifier() 
       columnSpec = ColumnSpec() 
       { 
         createCommand.addColumnSpec(column, columnSpec); 
@@ -395,12 +416,12 @@
 }
 {
   <ALTER> 
-  <TABLE> table = Identifier() 
+  <TABLE> table = identifier() 
   { alterCommand.setTable(table); }
 
   (
     LOOKAHEAD(2)
-      <ADD> column = Identifier() columnSpec = ColumnSpec() 
+      <ADD> column = identifier() columnSpec = ColumnSpec() 
       { 
          alterCommand.setOperationType(AlterCommand.OperationType.ADD);
          alterCommand.addColumnSpec(column, columnSpec); 
@@ -412,14 +433,14 @@
       alterCommand.setOperationType(AlterCommand.OperationType.ADD); 
     }
       
-    column = Identifier() columnSpec = ColumnSpec() 
+    column = identifier() columnSpec = ColumnSpec() 
     { 
       alterCommand.addColumnSpec(column, columnSpec); 
     }
       
     ( 
         <COMMA> 
-        column = Identifier()
+        column = identifier()
         columnSpec = ColumnSpec() 
         { 
           alterCommand.addColumnSpec(column, columnSpec); 
@@ -427,13 +448,13 @@
     )*
     <RPAREN>
   |
-    <DROP> column = Identifier()
+    <DROP> column = identifier()
     { 
       alterCommand.setOperationType(AlterCommand.OperationType.DROP);
       alterCommand.setColumn(column); 
     }
   |
-    <CHANGE> column = Identifier() columnSpec = ColumnSpec()  
+    <CHANGE> column = identifier() columnSpec = ColumnSpec()  
     { 
       alterCommand.setOperationType(AlterCommand.OperationType.CHANGE);
       alterCommand.addColumnSpec(column, columnSpec);
@@ -450,7 +471,7 @@
 {
    <DROP>
    <TABLE>
-   tableList = TableList()
+   tableList = tableList()
    { 
      drop.setTableList(tableList);
      return drop;
@@ -468,16 +489,15 @@
 {
   <INSERT>
   <INTO>
-  table = Identifier()
+  table = identifier()
   { 
      in.setTable(table); 
   }
-    
   columnfamilies = getColumns()
   {
      in.setColumnfamilies(columnfamilies);
   }
-    
+  
   <VALUES> values = getLiteralValues()
   {
      in.setValues(values);
@@ -502,13 +522,13 @@
 }
 {
   <DELETE>
-  columnList = ColumnList()
+  columnList = columnList()
   { 
     deleteCommand.setColumnList(columnList); 
   }
 
   <FROM>
-  table = Identifier()
+  table = identifier()
   { 
     deleteCommand.setTable(table); 
   }
@@ -534,9 +554,9 @@
 }
 {
   <SELECT>
-  columns = ColumnList()
+  columns = columnList()
   <FROM>
-  tableName = Identifier()
+  tableName = identifier()
   { 
      select.setColumns(columns);
      select.setTable(tableName);
@@ -583,7 +603,7 @@
 }
 {
   <ENABLE> 
-  table = Identifier()
+  table = identifier()
   { 
     enableCommand.setTable(table);
     return enableCommand;
@@ -597,7 +617,7 @@
 }
 {
   <DISABLE> 
-  table = Identifier()
+  table = identifier()
   { 
     disableCommand.setTable(table); 
     return disableCommand;
@@ -615,8 +635,95 @@
    }
 }
 
-////////////////////////////////////////////////
-// Utility expansion units...
+SubstituteCommand substituteCommand() :
+{
+  Token key = null;
+  Token chainKey = null;
+  Token operation = null;
+  String tableName = null;
+  String condition = "";
+  List<String> notInList = new ArrayList<String>();
+  SubstituteCommand substitute = new SubstituteCommand(this.out);
+  Token extendedKey = null;
+}
+{
+  key=<ID><EQUALS>
+  { substitute.setKey(key.image.toString()); }
+  (
+      chainKey=<ID><DOT>
+    { substitute.setChainKey(chainKey.image.toString()); }
+      operation = <ID> 
+    { substitute.setOperation(operation.image.toString()); }
+    <LPAREN>
+    {
+      String operationType = operation.image.toLowerCase();
+      if(operationType.equals("projection")) {
+        List<String> columnList = columnList();
+        for (int i = 0; i < columnList.size(); i++) {
+          condition += appendIndicator(columnList.get(i)) + " ";
+        }
+    
+      } else {
+        condition = booleanTerm();
+      }
+      
+      substitute.setCondition(condition);
+    }
+    <RPAREN>
+    [
+      <AND> extendedKey=<ID>
+      {
+        String eKey = extendedKey.image.toString();
+        String cKey = chainKey.image.toString();
+        substitute.setCondition(condition);
+        if (secondR.equals(eKey)) {
+          substitute.resetVariableRelation(eKey, cKey);
+        } else {
+          substitute.resetVariableRelation(cKey, eKey);
+        }
+      }
+    ]
+  | <TABLE><LPAREN>
+    tableName = identifier() 
+    { substitute.setInput(tableName); }
+    <RPAREN>
+  | operation=<GROUP>chainKey=<ID><BY>
+    <LPAREN>
+      { 
+        List<String> columnList = columnList(); 
+        for (int i = 0; i < columnList.size(); i++) {
+          condition += appendIndicator(columnList.get(i));
+        }  
+      }
+    <RPAREN>
+    { 
+      substitute.setChainKey(chainKey.image.toString());
+      substitute.setOperation(operation.image.toString());
+      substitute.setCondition(condition);
+    }
+  )
+  
+  {
+    return substitute;
+  }
+}
+
+SaveCommand saveCommand() :
+{
+  Token t = null;
+  String tableName;
+  SaveCommand save = new SaveCommand(this.out);
+}
+{
+  <SAVE> t=<ID>
+  { save.setStatement(t.image.toString()); }
+  <INTO><TABLE><LPAREN>
+  tableName = identifier() { save.setOutput(tableName); }
+  <RPAREN>
+  {
+    return save;
+  }
+}
 
 List<String> getLiteralValues() :
 {
@@ -657,6 +764,19 @@
  }
 }
 
+String getColumn() :
+{
+  Token col;
+}
+{
+  (
+    ( col=<ID> | col=<INTEGER_LITERAL> | col=<ASTERISK> )
+      { return col.image.toString(); }
+    | (col=<QUOTED_IDENTIFIER> | col=<STRING_LITERAL> )
+      { return col.image.substring(1,col.image.toString().length() - 1); }
+  )
+}
+
 List<String> getColumns() : // return parenthesized column list
 {
   List<String> values = new ArrayList<String>();
@@ -680,34 +800,21 @@
     }
 }
 
-String getColumn() :
-{
-  Token col;
-}
-{
-  (
-    ( col=<ID> | col=<ASTERISK> )
-      { return col.image.toString(); }
-    | (col=<QUOTED_IDENTIFIER> | col=<STRING_LITERAL> )
-      { return col.image.substring(1,col.image.toString().length() - 1); }
-  )
-}
-
-List<String> TableList() :
+List<String> tableList() :
 {
   List<String> tableList = new ArrayList<String>();
   String table = null;
 }
 {
-  table = Identifier() { tableList.add(table); }
-  ( <COMMA> table = Identifier()
+  table = identifier() { tableList.add(table); }
+  ( <COMMA> table = identifier()
     { tableList.add(table); } 
   )*
   
   { return tableList; }
 }
 
-List<String> ColumnList() :
+List<String> columnList() :
 {
   List<String> columnList = new ArrayList<String>();  
   String column = null;
@@ -737,7 +844,7 @@
   { return Integer.parseInt(t.image.toString()); }
 }
 
-String Identifier() :
+String identifier() :
 {
   Token t = null;
 }
@@ -748,4 +855,155 @@
    | ( t=<QUOTED_IDENTIFIER> | t=<STRING_LITERAL> )
      { return t.image.substring(1,t.image.toString().length() - 1); }
   )
+}
+
+String booleanTerm() :
+{
+  String query = "";
+  String tmp = null;
+  String joinTerm = null;
+  String connector = null;
+}
+{
+  query = booleanTerms()
+  {
+    if(query.endsWith(".ROW")) {
+      joinTerm = query;
+      query = "";
+    }
+  }
+  (
+      (
+       <AND> { connector = " AND ";  }
+       | <OR> { connector = " OR "; }
+      ) tmp = booleanTerms() { 
+     if(tmp.endsWith(".ROW")) {
+      joinTerm = tmp; 
+     } else {
+       if(!query.equals(""))
+         query += connector;
+        query += tmp; 
+     }
+      }
+  )*
+  { 
+    if(joinTerm != null) {
+      return joinTerm + " BOOL " + query;
+    } else {
+      return query;
+    }
+  }
+}
+
+String booleanTerms() : 
+{
+  Token tSearchName, tComparand;
+  List<String> inList = new ArrayList<String>();
+  String searchName=null,comparator=null,comparand=null;
+  Token joinColumn = null;
+  Token joinKey = null;
+  String result = null;
+}
+{
+  ( 
+    tSearchName=<ID> { searchName = tSearchName.image.toString(); }
+    [
+      <DOT> 
+      (  
+        joinColumn=<ID>
+        { searchName += "." + joinColumn.image.toString(); }
+        | <ROW>
+        { 
+          secondR = searchName;
+          searchName += ".ROW";
+        }
+      )
+    ]
+  )
+  comparator = getComparator()
+  (
+     tComparand=<INTEGER_LITERAL> 
+      { comparand = tComparand.image.toString(); }
+    | tComparand=<STRING_LITERAL> 
+      { comparand = tComparand.image.substring(1,tComparand.image.length() - 1); }
+    | tComparand=<ID>
+     { comparand = tComparand.image.toString(); }
+      [
+        <DOT>
+      (
+          <ROW>
+          { 
+            secondR = comparand;
+            comparand += ".ROW"; 
+            }
+          | joinColumn=<ID>
+          { comparand += "." + joinColumn.image.toString(); }
+      )
+      ]
+    | inList = getColumns()
+    { 
+      if(comparator == null) {
+        comparator = "==";
+      }
+      comparand = "";
+      try{
+        for(int i=0; i<inList.size(); i++) {     
+          comparand += URLEncoder.encode(inList.get(i), "UTF-8");
+          if(inList.size() != (i+1)) { comparand += "|"; }
+        }
+      } catch (UnsupportedEncodingException e) {
+          e.printStackTrace();
+      }
+    }
+  )
+   { 
+    if(searchName.endsWith(".ROW")) {
+      result = appendIndicator(comparand) 
+      + " " + comparator + " " + searchName;
+    } else {
+      result = appendIndicator(searchName) 
+      + " " + comparator + " " + comparand;
+    }
+    
+     return result; 
+   }
+}
+
+String getComparator() :
+{
+  Token t = null;
+  String comparator = null;
+}
+{
+  (  
+      t=<LCOMP> 
+        { comparator = t.image.toString(); } 
+      [<EQUALS> { comparator += "="; }]
+    | t=<RCOMP> 
+        { comparator = t.image.toString(); } 
+      [<EQUALS> { comparator += "="; }]
+    | t=<EQUALS> 
+        { comparator = t.image.toString(); }
+      [<LCOMP> { comparator = ">" + comparator; } ]
+      [<RCOMP> { comparator = "<" + comparator; } ]
+    | t=<NOTEQUAL>
+        { comparator = t.image.toString(); }
+    | <NOT><IN>
+        { comparator = "!!"; }
+    | <IN>
+        { comparator = "=="; }
+  )
+  
+  { return comparator; }
+}
+
+String appendIndicator(String columnName) :
+{
+  String column = columnName;
+}
+{
+  {
+     return (!column.endsWith(":") && column.indexOf(":") == -1) 
+     ? column + ":" : column;
+  }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java?rev=599162&r1=599161&r2=599162&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java Wed Nov 28 14:10:21 2007
@@ -26,14 +26,13 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 
 public class HelpCommand extends BasicCommand {
   private String argument;
-  private static final String [] HEADER =
-    new String [] {"Command", "Description", "Example"};
-  
+  private static final String[] HEADER = new String[] { "Command",
+      "Description", "Example" };
+
   /** application name */
   public static final String APP_NAME = "Hbase Shell";
 
@@ -41,18 +40,18 @@
   public static final String APP_VERSION = "0.0.2";
 
   /** help contents map */
-  public final Map<String, String[]> help =
-    new HashMap<String, String[]>();
-    
+  public final Map<String, String[]> help = new HashMap<String, String[]>();
+
   private final TableFormatter formatter;
-  
+
   public HelpCommand(final Writer o, final TableFormatter f) {
     super(o);
     this.help.putAll(load());
     this.formatter = f;
   }
 
-  public ReturnMsg execute(@SuppressWarnings("unused") HBaseConfiguration conf) {
+  public ReturnMsg execute(@SuppressWarnings("unused")
+  HBaseConfiguration conf) {
     try {
       printHelp(this.argument);
     } catch (IOException e) {
@@ -66,73 +65,104 @@
   }
 
   /**
-   * add help contents 
+   * add help contents
    */
   private Map<? extends String, ? extends String[]> load() {
     Map<String, String[]> load = new HashMap<String, String[]>();
-    load.put("SHOW", new String[] {"List all user tables", "SHOW TABLES;"});
-    
-    load.put("FS", new String[] { "Hadoop FsShell; entering a lone 'FS;' " +
-      "will emit usage",
-      "FS -copyFromLocal /home/user/backup.dat fs/user/backup;"});
-    
-    load.put("JAR", new String[] { "Hadoop RunJar util", 
-      "JAR ./build/hadoop-examples.jar pi 10 10;"});
-    load.put("CLEAR", new String[] {"Clear the screen", "CLEAR;"} );
-    
+    load.put("SHOW", new String[] { "Show information about selected title",
+        "SHOW TABLES[or substitution variable name];" });
+
+    load.put("FS", new String[] {
+        "Hadoop FsShell; entering a lone 'FS;' " + "will emit usage",
+        "FS -copyFromLocal /home/user/backup.dat fs/user/backup;" });
+
+    load.put("JAR", new String[] { "Hadoop RunJar util",
+        "JAR ./build/hadoop-examples.jar pi 10 10;" });
+    load.put("CLEAR", new String[] { "Clear the screen", "CLEAR;" });
+
     load.put("DESCRIBE", new String[] { "Print table information",
-      "[DESCRIBE|DESC] table_name;"});
-    
-    load.put("CREATE", new String[] {
-        "Create tables",
-        "CREATE TABLE table_name (column_family_name [MAX_VERSIONS=n] " +
-        "[MAX_LENGTH=n] [COMPRESSION=NONE|RECORD|BLOCK] [IN_MEMORY] " +
-        "[BLOOMFILTER=NONE|BLOOM|COUNTING|RETOUCHED VECTOR_SIZE=n NUM_HASH=n], " +
-        "...)"
-    });
-    load.put("DROP", new String[] {
-        "Drop tables",
-        "DROP TABLE table_name [, table_name] ...;"});
-    
+        "[DESCRIBE|DESC] table_name;" });
+
+    load
+        .put(
+            "CREATE",
+            new String[] {
+                "Create tables",
+                "CREATE TABLE table_name (column_family_name [MAX_VERSIONS=n] "
+                    + "[MAX_LENGTH=n] [COMPRESSION=NONE|RECORD|BLOCK] [IN_MEMORY] "
+                    + "[BLOOMFILTER=NONE|BLOOM|COUNTING|RETOUCHED VECTOR_SIZE=n NUM_HASH=n], "
+                    + "...)" });
+    load.put("DROP", new String[] { "Drop tables",
+        "DROP TABLE table_name [, table_name] ...;" });
+
     load.put("INSERT", new String[] {
         "Insert values into table",
-        "INSERT INTO table_name (column_name, ...) " +
-        "VALUES ('value', ...) WHERE row='row_key';"
-    });
-    
+        "INSERT INTO table_name (column_name, ...) "
+            + "VALUES ('value', ...) WHERE row='row_key';" });
+
     load.put("DELETE", new String[] {
         "Delete table data",
-        "DELETE {column_name, [, column_name] ... | *} FROM table_name " +
-        "WHERE row='row-key';"
-    });
-    
-    load.put("SELECT",
-        new String[] {
-            "Select values from table",
-            "SELECT {column_name, [, column_name] ... | *} FROM table_name " +
-            "[WHERE row='row_key' | STARTING FROM 'row-key'] " +
-            "[NUM_VERSIONS = version_count] " +
-            "[TIMESTAMP 'timestamp'] " +
-            "[LIMIT = row_count] " +
-            "[INTO FILE 'file_name'];"
-    });
-                
-    load.put("ALTER",
-        new String[] {
-            "Alter structure of table",
-            "ALTER TABLE table_name ADD column_spec | " +
-            "ADD (column_spec, column_spec, ...) | " +
-            "DROP column_family_name | " +
-            "CHANGE column_spec;"
-    });
+        "DELETE {column_name, [, column_name] ... | *} FROM table_name "
+            + "WHERE row='row-key';" });
 
-    load.put("EXIT", new String[] { "Exit shell", "EXIT;"});
+    load.put("SELECT", new String[] {
+        "Select values from table",
+        "SELECT {column_name, [, column_name] ... | *} FROM table_name "
+            + "[WHERE row='row_key' | STARTING FROM 'row-key'] "
+            + "[NUM_VERSIONS = version_count] " + "[TIMESTAMP 'timestamp'] "
+            + "[LIMIT = row_count] " + "[INTO FILE 'file_name'];" });
+
+    load.put("ALTER", new String[] {
+        "Alter structure of table",
+        "ALTER TABLE table_name ADD column_spec | "
+            + "ADD (column_spec, column_spec, ...) | "
+            + "DROP column_family_name | " + "CHANGE column_spec;" });
+
+    load.put("EXIT", new String[] { "Exit shell", "EXIT;" });
+
+    // A Algebraic Query Commands
+    // this is a tentative query language based on a hbase which uses relational
+    // model of
+    // data.
+
+    load.put("TABLE",
+        new String[] { "Load a table", "A = table('table_name');" });
+    load.put("SUBSTITUTE", new String[] { "Substitute expression to [A~Z]",
+        "D = A.projection('cf_name1'[, 'cf_name2']);" });
+    load.put("SAVE", new String[] { "Save results into specified table (It runs a mapreduce job)",
+        "SAVE A INTO table('table_name');" });
+
+    // Relational Operations
+    load.put("PROJECTION", new String[] {
+        "Selects a subset of the columnfamilies of a relation",
+        "A = TABLE('table_name');"
+            + " B = A.Projection('cf_name1'[, 'cf_name2']);" });
+    load
+        .put(
+            "SELECTION",
+            new String[] {
+                "Selects a subset of the rows in a relation that satisfy a selection condition (>, <, AND, OR, etc.)",
+                "A = Table('table_name');"
+                    + " B = A.Selection(cf_name1 > 100 [AND cf_name2 = 'string_value']);" });
+
+    // Aggregation Functions
+    //TODO : and apply aggregate function independently to each group of rows 
+    load
+        .put(
+            "GROUP",
+            new String[] {
+                "Group rows by value of an attribute",
+                "A = Table('table_name');"
+                    + " B = Group A by ('cf_name1'[, 'cf_name2']);" });
 
     return load;
   }
 
-  /** Print out the program version. 
-   * @throws IOException */
+  /**
+   * Print out the program version.
+   * 
+   * @throws IOException
+   */
   public void printVersion() throws IOException {
     println(APP_NAME + ", " + APP_VERSION + " version.\n"
         + "Copyright (c) 2007 by udanax, "
@@ -146,7 +176,7 @@
       printHelp(this.help);
     } else {
       if (this.help.containsKey(cmd.toUpperCase())) {
-        final Map<String, String []> m = new HashMap<String, String []>();
+        final Map<String, String[]> m = new HashMap<String, String[]>();
         m.put(cmd.toUpperCase(), this.help.get(cmd.toUpperCase()));
         printHelp(m);
       } else {
@@ -154,14 +184,14 @@
       }
     }
   }
-  
-  private void printHelp(final Map<String, String []> m) throws IOException {
+
+  private void printHelp(final Map<String, String[]> m) throws IOException {
     this.formatter.header(HEADER);
-    for (Map.Entry<String, String []> e: m.entrySet()) {
-      String [] value = e.getValue();
+    for (Map.Entry<String, String[]> e : m.entrySet()) {
+      String[] value = e.getValue();
       if (value.length == 2) {
-        this.formatter.row(new String [] {e.getKey().toUpperCase(), value[0],
-          value[1]});
+        this.formatter.row(new String[] { e.getKey().toUpperCase(), value[0],
+            value[1] });
       } else {
         throw new IOException("Value has too many elements:" + value);
       }

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SaveCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SaveCommand.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SaveCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SaveCommand.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,114 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.io.IOException;
+import java.io.Writer;
+
+import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConnection;
+import org.apache.hadoop.hbase.HConnectionManager;
+import org.apache.hadoop.hbase.shell.algebra.OperationEvaluator;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RunningJob;
+
+/**
+ * Shell command that materializes the result of a stored relational-algebra
+ * operation chain into a new output table by building and running a
+ * MapReduce job.
+ */
+public class SaveCommand extends BasicCommand {
+  /** Key of the substitution-variable chain whose result is to be saved. */
+  private String chainKey;
+  /** Name of the table the job writes its results into. */
+  private String output;
+
+  public SaveCommand(Writer o) {
+    super(o);
+  }
+
+  /**
+   * Builds the job configuration for the stored operation chain and submits
+   * it. On job failure the output table (created for the job) is deleted
+   * again so a retry does not hit the "table already exists" guard.
+   *
+   * @param conf HBase configuration used to reach the cluster
+   * @return message describing the outcome; never null
+   */
+  public ReturnMsg execute(HBaseConfiguration conf) {
+    try {
+      HConnection conn = HConnectionManager.getConnection(conf);
+
+      if (conn.tableExists(new Text(output))) {
+        return new ReturnMsg(0, "'" + output + "' table already exists.");
+      }
+
+      OperationEvaluator eval = new OperationEvaluator(conf, chainKey, output);
+      JobConf jobConf = eval.getJobConf();
+      if (submitJob(jobConf)) {
+        return new ReturnMsg(0, "Successfully complete.");
+      }
+      // Clean up the partially-written output table left by the failed job.
+      HBaseAdmin admin = new HBaseAdmin(conf);
+      admin.deleteTable(new Text(output));
+      return new ReturnMsg(0, "Job failed.");
+    } catch (IOException e) {
+      return new ReturnMsg(0, e.toString());
+    }
+  }
+
+  /**
+   * Submit a job to the job tracker and poll every five seconds until it
+   * completes. If this method exits without a successful finish, the job
+   * is killed.
+   *
+   * @param job job configuration to run
+   * @return true if the job completed successfully
+   * @throws IOException if the job cannot be submitted or monitored
+   */
+  public boolean submitJob(JobConf job) throws IOException {
+    JobClient jc = new JobClient(job);
+    boolean success = true;
+    RunningJob running = null;
+    try {
+      running = jc.submitJob(job);
+      String jobId = running.getJobID();
+
+      while (!running.isComplete()) {
+        try {
+          Thread.sleep(5000);
+        } catch (InterruptedException e) {
+          // Restore the interrupt status instead of swallowing it so the
+          // caller thread can observe the interruption.
+          Thread.currentThread().interrupt();
+        }
+        running = jc.getJob(jobId);
+      }
+      success = running.isSuccessful();
+    } finally {
+      if (!success && (running != null)) {
+        running.killJob();
+      }
+      jc.close();
+    }
+    return success;
+  }
+
+  public void setOutput(String output) {
+    this.output = output;
+  }
+
+  public void setStatement(String chainKey) {
+    this.chainKey = chainKey;
+  }
+
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.SHELL;
+  }
+}

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java?rev=599162&r1=599161&r2=599162&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java Wed Nov 28 14:10:21 2007
@@ -21,6 +21,7 @@
 
 import java.io.IOException;
 import java.io.Writer;
+import java.util.Map;
 
 import org.apache.hadoop.hbase.HBaseAdmin;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -30,16 +31,16 @@
  * Shows all available tables.
  */
 public class ShowCommand extends BasicCommand {
-  private static final String [] HEADER = new String [] {"Name", "Descriptor"};
+  private static final String[] HEADER = new String[] { "Name", "Descriptor" };
   private String command;
   private final TableFormatter formatter;
-  
+
   // Not instantiable
   @SuppressWarnings("unused")
   private ShowCommand() {
     this(null, null);
   }
-  
+
   public ShowCommand(final Writer o, final TableFormatter f) {
     this(o, f, null);
   }
@@ -67,12 +68,37 @@
         formatter.header(HEADER);
         for (int i = 0; i < tableLength; i++) {
           String tableName = tables[i].getName().toString();
-          formatter.row(new String [] {tableName, tables[i].toString()});
+          formatter.row(new String[] { tableName, tables[i].toString() });
         }
         formatter.footer();
         return new ReturnMsg(1, tableLength + " table(s) in set");
+      } else {
+        Map<String, VariableRef> refer = VariablesPool.get(command);
+        if (refer == null) {
+          return new ReturnMsg(0, "Unknown arguments.");
+        }
+
+        String msg = null;
+        for (Map.Entry<String, VariableRef> e : refer.entrySet()) {
+          msg = command + " = ";
+          if (e.getKey() != null) {
+            msg += e.getKey() + ".";
+          }
+          msg += e.getValue().getOperation() + "(";
+          if (e.getValue().getOperation().equals("projection")) {
+            String[] proj = e.getValue().getArgument().split(" ");
+            for (int i = 0; i < proj.length; i++) {
+              msg += "'" + proj[i] + "'";
+              if (i + 1 != proj.length)
+                msg += ", ";
+            }
+          } else {
+            msg += e.getValue().getArgument().replace(" BOOL ", " and ");
+          }
+          msg += ");\n";
+        }
+        return new ReturnMsg(0, msg);
       }
-      return new ReturnMsg(0, "Missing parameters. Please check 'Show' syntax");
     } catch (IOException e) {
       return new ReturnMsg(0, "error msg : " + e.toString());
     }
@@ -81,4 +107,4 @@
   public void setArgument(String argument) {
     this.command = argument;
   }
-}
\ No newline at end of file
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SubstituteCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SubstituteCommand.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SubstituteCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SubstituteCommand.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,78 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.io.Writer;
+
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.shell.algebra.Constants;
+
+/**
+ * This class represents a substitute command: it records a named
+ * relational-algebra step (operation + condition) into the shared
+ * {@link VariablesPool} under a variable key, to be evaluated later
+ * (e.g. by the SAVE command).
+ */
+public class SubstituteCommand extends BasicCommand {
+  // Variable name under which the step is stored.
+  private String key;
+  // Key of the preceding step in the operation chain (may be null).
+  private String chainKey;
+  // Operation name, e.g. "table" or one of the Constants.RELATIONAL_* ops.
+  private String operation;
+  // Argument/condition string for the operation.
+  private String condition;
+
+  public SubstituteCommand(Writer o) {
+    super(o);
+  }
+
+  /**
+   * Stores the configured (operation, condition) pair in the variables
+   * pool. Returns null: substitution produces no shell output.
+   */
+  public ReturnMsg execute(HBaseConfiguration conf) {
+    VariableRef formula = new VariableRef(operation, condition);
+    VariablesPool.put(key, chainKey, formula);
+    return null;
+  }
+
+  /** Treats {@code input} as a plain table reference ("table" operation). */
+  public void setInput(String input) {
+    this.operation = "table";
+    this.condition = input;
+  }
+
+  public void setKey(String key) {
+    this.key = key;
+  }
+
+  public void setChainKey(String chainKey) {
+    this.chainKey = chainKey;
+  }
+
+  public void setOperation(String operation) {
+    this.operation = operation;
+  }
+
+  public void setCondition(String condition) {
+    this.condition = condition;
+  }
+
+  /**
+   * Links variable r1 in as the second relation of a join chained under r2.
+   * NOTE(review): VariablesPool.get(r1).get(null) throws NPE if r1 was
+   * never stored, or had a non-null parent key — verify callers guarantee
+   * r1 is a plain table variable.
+   */
+  public void resetVariableRelation(String r1, String r2) {
+    setChainKey(r1);
+    String tableName = VariablesPool.get(r1).get(null).getArgument();
+    VariableRef formula = new VariableRef(Constants.JOIN_SECOND_RELATION, tableName);
+    VariablesPool.put(r1, r2, formula);
+  }
+
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.SHELL;
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/VariableRef.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/VariableRef.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/VariableRef.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/VariableRef.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,50 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+/**
+ * VariableRef is used to reference declared Variables: a simple
+ * immutable-in-practice pair of an operation name (e.g. "table",
+ * "projection", "selection") and its argument string.
+ */
+public class VariableRef {
+  // Operation name; package-private, set once in the constructor.
+  String operation;
+  // Argument (condition) string for the operation.
+  String argument;
+
+  /** Constructor */
+  public VariableRef(String operation, String condition) {
+    this.operation = operation;
+    this.argument = condition;
+  }
+
+  /**
+   * Return argument of an operation
+   * @return argument
+   */
+  public String getArgument() {
+    return argument;
+  }
+
+  /**
+   * Return operation
+   * @return operation
+   */
+  public String getOperation() {
+    return operation;
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/VariablesPool.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/VariablesPool.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/VariablesPool.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/VariablesPool.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,52 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.util.HashMap;
+
+/**
+ * Variable pool is a collection of substitution variables, shared
+ * process-wide through static state.
+ *
+ * NOTE(review): the backing HashMap is not synchronized — safe only if
+ * the shell is single-threaded; confirm before reusing elsewhere.
+ */
+public class VariablesPool {
+  // key -> (parentKey -> statement); static, shared by all commands.
+  static HashMap<String, HashMap<String, VariableRef>> variables = new HashMap<String, HashMap<String, VariableRef>>();
+
+  /**
+   * puts the date in the substitution variable.
+   *
+   * Note that a fresh single-entry map is stored each time, so any
+   * previously stored (parentKey, statement) pairs under the same key
+   * are discarded, not merged.
+   *
+   * @param key variable name
+   * @param parentKey key of the chained parent step (may be null)
+   * @param statement operation/argument pair to store
+   */
+  public static void put(String key, String parentKey, VariableRef statement) {
+    HashMap<String, VariableRef> value = new HashMap<String, VariableRef>();
+    value.put(parentKey, statement);
+    variables.put(key, value);
+  }
+
+  /**
+   * returns the substitution variable's value.
+   *
+   * @param key variable name
+   * @return HashMap<String, VariableRef> mapping parentKey to statement,
+   *         or null if the variable was never stored
+   */
+  public static HashMap<String, VariableRef> get(String key) {
+    return variables.get(key);
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Constants.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Constants.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Constants.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Constants.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,36 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.algebra;
+
+/**
+ * List of access control algebraic operations constants.
+ * Keys used in JobConf configuration and in VariableRef operation names.
+ */
+public class Constants {
+  // JobConf key: name of the pre-existing output table, if any.
+  public static final String OUTPUT_TABLE_EXIST = "job.config.output.table";
+  // JobConf keys for the input and output table names.
+  public static final String CONFIG_INPUT = "input";
+  public static final String CONFIG_OUTPUT = "output";
+  // JobConf key carrying the serialized filter expression list.
+  public static final String EXPRESSION_FILTER_LIST = "expression.filter.list";
+  
+  // Operation names stored in VariableRef for relational operators.
+  public static final String RELATIONAL_PROJECTION = "projection";
+  public static final String RELATIONAL_SELECTION = "selection";
+  public static final String RELATIONAL_GROUP = "group";
+  public static final String RELATIONAL_JOIN = "join";
+  // Marks a variable as the second relation of an index join.
+  public static final String JOIN_SECOND_RELATION = "secondR";
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/DuplicateTable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/DuplicateTable.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/DuplicateTable.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/DuplicateTable.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,50 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.algebra;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.mapred.IdentityTableMap;
+import org.apache.hadoop.hbase.mapred.IdentityTableReduce;
+import org.apache.hadoop.mapred.JobConf;
+
+/**
+ * Duplicates Table. R1 to R3 in O(N): configures an identity map/reduce
+ * job that copies every visible column of the input table into a newly
+ * created output table.
+ */
+public class DuplicateTable extends RelationalOperation {
+  public DuplicateTable(HBaseConfiguration conf, Map<String, String> condition) {
+    super(conf, condition);
+  }
+
+  /**
+   * Creates the output table with the input table's column families and
+   * wires up identity map and reduce classes.
+   *
+   * @return the configured JobConf, ready for submission
+   * @throws IOException if table metadata cannot be read or created
+   */
+  @Override
+  public JobConf getConf() throws IOException, RuntimeException {
+    HColumnDescriptor[] columns = getInputColumnDescriptor();
+    outputTableCreate(columns, null);
+
+    IdentityTableMap.initJob(input, getColumnStringArray(columns),
+        IdentityTableMap.class, jobConf);
+    IdentityTableReduce.initJob(output, IdentityTableReduce.class, jobConf);
+
+    return jobConf;
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/ExpressionParser.jj
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/ExpressionParser.jj?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/ExpressionParser.jj (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/ExpressionParser.jj Wed Nov 28 14:10:21 2007
@@ -0,0 +1,559 @@
+// JavaCC grammar for the shell's relational-algebra filter expressions.
+options {
+  STATIC = false;
+  IGNORE_CASE = true;
+}
+
+PARSER_BEGIN(ExpressionParser)
+package org.apache.hadoop.hbase.shell.algebra.generated;
+
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.io.Reader;
+import java.io.StringReader;
+import java.io.UnsupportedEncodingException;
+import java.net.URLDecoder;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.SortedMap;
+
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HTable;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+
+/**
+ * Parsing command line.
+ */
+public class ExpressionParser {
+  HBaseConfiguration conf = new HBaseConfiguration();
+  // Raw expression string this parser was built from.
+  private String expression;
+  // Set by indexJoinCondition(): join key column and second relation name.
+  private String joinKeyColumn;
+  private String secondRelation;
+  // OR-combined predicates: column -> list of "comparator value" strings.
+  private Map<String, List<String>> unionSet = new HashMap<String, List<String>>();
+  // AND-combined predicates, same layout as unionSet.
+  private Map<String, List<String>> intersectionSet = new HashMap<String, List<String>>();
+  // Row fetched from the second relation by getJoinColumns().
+  private SortedMap<Text, byte[]> secondValue = null;
+
+  public ExpressionParser(final String expression) {
+    this((Reader)(new StringReader(expression)));
+    this.expression = expression;
+  }
+  
+  public String getExpression() {
+    return this.expression;
+  }
+}
+
+PARSER_END(ExpressionParser)
+
+// Whitespace is skipped between tokens.
+SKIP :                             
+{
+    " "
+ | "\t"
+ | "\r"
+ | "\n"
+}
+
+TOKEN : /** Literals */
+{
+   <ROW: "row">
+ | <AND: "and">
+ | <OR: "or">
+ | <DOT: ".">
+ | <EQUALS: "=">
+ | <LCOMP: ">">
+ | <RCOMP: "<">
+ | <LCOMPE: ">=">
+ | <RCOMPE: "<=">
+ | <IN: "==">
+ | <NOTIN: "!!">
+ | <BAR: "|">
+ | <BOOL: "bool">
+ | <ID: ["A"-"Z","a"-"z","_","-",":","/"] (["A"-"Z","a"-"z","0"-"9","_","-",":","/"])* >
+ | <INTEGER_LITERAL: (["0"-"9"])+ >
+ | <FLOATING_POINT_LITERAL:
+   (["0"-"9"])+ "." (["0"-"9"])+ (<EXPONENT>)?
+  | "." (["0"-"9"])+ (<EXPONENT>)?
+  | (["0"-"9"])+ <EXPONENT>
+  | (["0"-"9"])+ (<EXPONENT>)?
+   >
+ | <#EXPONENT: ["e","E"] (["+","-"])? (["0"-"9"])+ >
+ | <QUOTED_IDENTIFIER: "\"" (~["\""])+ "\"" >
+ | <STRING_LITERAL: "'" (~["'"])* ( "''" (~["'"])* )* "'" >
+}
+
+// Parses a boolean expression of terms joined by AND/OR. Terms before an
+// AND are merged into intersectionSet, terms before an OR into unionSet;
+// a trailing unconsumed term falls into unionSet (final action block).
+// NOTE(review): the three merge blocks below are near-identical copies —
+// a shared helper in the parser class would remove the duplication.
+void booleanExpressionParse() :
+{
+  Map<String, List<String>> temp = new HashMap<String, List<String>>();
+}
+{
+  temp = booleanTerm()
+  (
+    (
+        <AND>
+        { 
+          if(temp != null) {
+            for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+              List<String> newList = intersectionSet.get(e.getKey());
+              if(newList != null) {
+                newList.addAll(e.getValue());
+              } else {
+                newList = e.getValue();
+              }
+              intersectionSet.put(e.getKey(), newList);
+            }
+          }
+          temp = booleanTerm();
+          for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+            List<String> newList = intersectionSet.get(e.getKey());
+            if(newList != null) {
+              newList.addAll(e.getValue());
+            } else {
+              newList = e.getValue();
+            }
+            intersectionSet.put(e.getKey(), newList);
+          }
+          temp = null;
+          }
+      | <OR>
+        {
+        if(temp != null) {
+          for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+            List<String> newList = unionSet.get(e.getKey());
+            if(newList != null) {
+              newList.addAll(e.getValue());
+            } else {
+              newList = e.getValue();
+            }
+            unionSet.put(e.getKey(), newList);
+          }    
+        }
+        temp = booleanTerm();
+        for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+          List<String> newList = unionSet.get(e.getKey());
+          if(newList != null) {
+            newList.addAll(e.getValue());
+          } else {
+            newList = e.getValue();
+          }
+          unionSet.put(e.getKey(), newList);
+        }
+        temp = null;
+        }
+      ) 
+  )* 
+  {
+    if(temp != null) {
+      for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+        List<String> newList = unionSet.get(e.getKey());
+        if(newList != null) {
+          newList.addAll(e.getValue());
+        } else {
+          newList = e.getValue();
+        }
+        unionSet.put(e.getKey(), newList);
+      }
+    }
+  }
+}
+
+// Parses a join expression: an index-join condition, the BOOL keyword,
+// then an optional boolean filter identical in structure to
+// booleanExpressionParse() (same duplicated merge logic — see note there).
+void joinExpressionParse() :
+{
+  Map<String, List<String>> temp = new HashMap<String, List<String>>();
+}
+{
+  indexJoinCondition()
+  <BOOL>
+  [
+    temp = booleanTerm()
+      (
+      (
+       <AND>
+       { 
+         if(temp != null) {
+           for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+             List<String> newList = intersectionSet.get(e.getKey());
+             if(newList != null) {
+               newList.addAll(e.getValue());
+             } else {
+               newList = e.getValue();
+             }
+             intersectionSet.put(e.getKey(), newList);
+           }
+         }
+         temp = booleanTerm();
+         for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+           List<String> newList = intersectionSet.get(e.getKey());
+           if(newList != null) {
+             newList.addAll(e.getValue());
+           } else {
+             newList = e.getValue();
+           }
+           intersectionSet.put(e.getKey(), newList);
+         }
+         temp = null;
+         }
+      | <OR>
+        {
+        if(temp != null) {
+          for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+            List<String> newList = unionSet.get(e.getKey());
+            if(newList != null) {
+              newList.addAll(e.getValue());
+            } else {
+              newList = e.getValue();
+            }
+            unionSet.put(e.getKey(), newList);
+          }
+        }
+        temp = booleanTerm();
+        for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+          List<String> newList = unionSet.get(e.getKey());
+          if(newList != null) {
+            newList.addAll(e.getValue());
+          } else {
+            newList = e.getValue();
+          }
+          unionSet.put(e.getKey(), newList);
+        }
+        temp = null;
+        }
+      ) 
+    )* 
+    {
+      if(temp != null) {
+        for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+          List<String> newList = unionSet.get(e.getKey());
+          if(newList != null) {
+            newList.addAll(e.getValue());
+          } else {
+            newList = e.getValue();
+          }
+          unionSet.put(e.getKey(), newList);
+        }
+        }
+    }
+  ]
+}
+
+// Parses "firstR.joinKey = secondR.row" and records the join key column
+// and second relation name in the parser's fields.
+// NOTE(review): firstR is captured but never used.
+void indexJoinCondition() :
+{
+ Token firstR = null; 
+  Token joinKey = null;
+  Token secondR = null;
+}
+{
+  firstR=<ID><DOT>joinKey=<ID>
+  <EQUALS>
+  secondR=<ID><DOT><ROW>
+  {
+    joinKeyColumn = joinKey.image.toString();
+    secondRelation = secondR.image.toString();
+  }
+}
+
+// Parses one comparison term "name[.col] <op> value[.col]" and returns a
+// single-entry map: column -> ["<op> <value>"].
+Map<String, List<String>> booleanTerm() :
+{
+  Token tSearchName = null;
+  Token tComparator = null;
+  Token tFirst = null;
+  Token tSecond = null;
+  Map<String, List<String>> result = new HashMap<String, List<String>>();
+  List<String> valueList = new ArrayList<String>();
+  String comparand = null;
+}
+{
+  (
+      tSearchName=<ID>
+    | tSearchName=<INTEGER_LITERAL>
+  ) [<DOT> ( tFirst=<ID> | tFirst=<INTEGER_LITERAL> ) ]
+  ( tComparator=<EQUALS> | tComparator=<LCOMP> | tComparator=<LCOMPE> | tComparator=<RCOMPE>
+  | tComparator=<RCOMP> | tComparator=<IN> | tComparator=<NOTIN> )
+  comparand = getValueList()
+  [<DOT>  ( tSecond=<ID> | tSecond=<INTEGER_LITERAL> )]
+
+  {
+    if(tFirst == null && tSecond == null) {
+      // Simple form: "column <op> value".
+      valueList.add(tComparator.image.toString() + " " +comparand);
+      result.put(tSearchName.image.toString(), valueList);
+    } else if (tFirst != null && tSecond != null ){
+      if(tSearchName.image.toString().equals(secondRelation)) {
+        // NOTE(review): concatenating secondValue.get(...) (a byte[])
+        // yields the array's identity string, not the cell contents —
+        // likely needs new String(...); verify intended behavior.
+        valueList.add(tComparator.image.toString() + " " 
+            + secondValue.get(new Text(tSearchName.image.toString())));
+        result.put(tFirst.image.toString(), valueList);
+      } else {
+        valueList.add(tComparator.image.toString() + " " + tSecond.image.toString());
+        result.put(tFirst.image.toString(), valueList);
+      }
+    }
+    return result;
+  }
+}
+
+// Parses a comparand, optionally followed by |-separated alternatives,
+// and returns them joined with '|' (e.g. "a|b|c").
+String getValueList() :
+{
+  Token tComparand = null;
+  Token tList = null;
+  String result = "";
+}
+{
+  ( tComparand=<ID> | tComparand=<INTEGER_LITERAL> )
+  {
+    result = tComparand.image.toString();
+  }
+  [
+    <BAR>
+    (
+      (tList=<ID> | tList=<INTEGER_LITERAL> )
+      {
+        result += "|" + tList.image.toString();
+      }
+      [ <BAR> ]
+    )*
+  ]
+  { return result; }
+}
+
+// Evaluates the parsed constraints against one record. Returns true when
+// no constraints were parsed, false for null/empty input, otherwise
+// delegates to booleanCompare / getCompareResult.
+// Note: the statements in the declaration block (including early returns)
+// are emitted ahead of the action block in the generated method.
+boolean checkConstraints(MapWritable data) :
+{
+  Map<String, String> record = getComparisonObject(data);
+  boolean result = false;
+  if(intersectionSet.size() == 0 && unionSet.size() == 0) {
+    return true;
+  }
+  if (data == null || record.size() == 0) {
+    return result; // return false if data is null.
+  } 
+}
+{
+  {
+      if (intersectionSet.size() == record.size()) {
+        result = booleanCompare(intersectionSet, record, true);
+      } else if (unionSet.size() == record.size()) {
+        result = booleanCompare(unionSet, record, false);
+      } else {
+        result = getCompareResult(record);
+      }
+    return result;
+  }
+}
+
+// Extracts from the record only the columns that appear in a parsed
+// constraint, as column-name -> string-value.
+Map<String, String> getComparisonObject(MapWritable data) :
+{
+  Map<String, String> result = new HashMap<String, String>();
+}
+{
+  {
+    for (Map.Entry<Writable, Writable> e : data.entrySet()) {
+      String cKey = e.getKey().toString();
+      String val = new String(((ImmutableBytesWritable) e.getValue()).get());
+
+      if (intersectionSet.containsKey(cKey) || unionSet.containsKey(cKey)) {
+        result.put(cKey, val);
+      }
+    }
+
+    return result;
+  }
+}
+
+// Looks up the join-key value in the first-relation record, then fetches
+// the matching row from the second relation and returns its columns.
+// NOTE(review): the inner for(i < numCols) loop repeats the identical
+// comparison numCols times — a single equals() check would suffice.
+MapWritable getJoinColumns(MapWritable value, int numCols, String secondRelation) :
+{
+  MapWritable appendValue = new MapWritable();
+  String joinKey = null;
+  this.secondRelation = secondRelation;
+}
+{
+  {
+    if (numCols > 0) {
+      for (Map.Entry<Writable, Writable> e : value.entrySet()) {
+        Text column = (Text) e.getKey();
+        for (int i = 0; i < numCols; i++) {
+          if (column.equals(new Text(joinKeyColumn))) {
+            joinKey = new String(((ImmutableBytesWritable) e.getValue()).get());
+            break;
+          }
+        }
+      }
+    }
+    
+    //If joinKey is null, just return.
+    if(joinKey == null) {
+      return appendValue;
+    }
+    
+    try {
+      HTable table = new HTable(conf, new Text(secondRelation));
+      secondValue = table.getRow(new Text(joinKey));
+      for (Map.Entry<Text, byte[]> e : secondValue.entrySet()) {
+        appendValue.put(e.getKey(), new ImmutableBytesWritable(e.getValue()));
+      }
+    } catch (IOException e) {
+      // NOTE(review): failure is only printed; the caller sees an empty
+      // result — consider logging/propagating instead.
+      e.printStackTrace();
+    }
+
+    return appendValue;
+  }
+}
+
+// A record matches only if it satisfies all AND constraints and at least
+// one OR constraint. (The "? true : false" is redundant.)
+boolean getCompareResult(Map<String, String> record) :
+{
+}
+{
+  {
+      return (booleanCompare(intersectionSet, record, true) && 
+          booleanCompare(unionSet, record, false)) ? true : false;
+  }
+}
+
+// Evaluates every "comparator comparand" pair against the record values.
+// 'negative' stays true only while no AND-style check fails; 'positive'
+// becomes true once any OR-style check succeeds. The caller picks which
+// flag to return via isIntersection.
+// NOTE(review): Integer.parseInt on non-numeric values throws an uncaught
+// NumberFormatException here; also the <, > branches compare
+// comparand-vs-record (reversed reading order) — verify intended direction.
+boolean booleanCompare(Map<String, List<String>> expression,
+    Map<String, String> record, boolean isIntersection) :
+{
+  boolean negative = true;
+  boolean positive = false;
+}
+{
+  {
+    try{
+      for (Map.Entry<String, List<String>> e : expression.entrySet()) {
+        String key = e.getKey();
+        List<String> valueList = e.getValue();
+        String recordValueList = record.get(key);
+
+        for (int i = 0; i < valueList.size(); i++) {
+          // Each entry is "<comparator> <comparand>" as built by booleanTerm().
+          String[] term = valueList.get(i).split(" ");
+          String comparator = term[0];
+          String comparand = term[1];
+
+          switch (comparator.charAt(0)) {
+            case '>':
+              if (isSecond(comparator, "=")) {
+                if (Integer.parseInt(comparand) > Integer
+                    .parseInt(recordValueList)) {
+                  negative = false;
+                } else {
+                  positive = true;
+                }
+              } else {
+                if (Integer.parseInt(comparand) > Integer
+                    .parseInt(recordValueList)
+                    || comparand.equals(recordValueList)) {
+                  negative = false;
+                } else {
+                  positive = true;
+                }
+              }
+              break;
+            case '<':
+              if (isSecond(comparator, "=")) {
+                if (Integer.parseInt(comparand) < Integer
+                    .parseInt(recordValueList))
+                  negative = false;
+                else
+                  positive = true;
+              } else {
+                if (Integer.parseInt(comparand) < Integer
+                    .parseInt(recordValueList)
+                    || comparand.equals(recordValueList))
+                  negative = false;
+                else
+                  positive = true;
+              }
+              break;
+
+            case '!':
+              // "!!" (NOTIN): fails if any |-alternative equals the value.
+              if (isSecond(comparator, "!")) {
+                boolean checkBool = true;
+                String[] coms = comparand.split("[|]");
+                for (int j = 0; j < coms.length; j++) {
+                  if (URLDecoder.decode(coms[j], "UTF-8").equals(recordValueList)) {
+                    checkBool = false;
+                  }
+                }
+
+                if (!checkBool) {
+                  negative = false;
+                } else {
+                  positive = true;
+                }
+
+              } else {
+                if (comparand.equals(recordValueList))
+                  negative = false;
+                else
+                  positive = true;
+              }
+              break;
+            case '=':
+              // "==" (IN): succeeds if any |-alternative equals the value.
+              if (isSecond(comparator, "=")) {
+
+                boolean checkBool = true;
+                String[] coms = comparand.split("[|]");
+                for (int j = 0; j < coms.length; j++) {
+                  if (URLDecoder.decode(coms[j], "UTF-8").equals(recordValueList)) {
+                    checkBool = false;
+                  }
+                }
+
+                if (checkBool) {
+                  negative = false;
+                } else {
+                  positive = true;
+                }
+
+              } else {
+                if (!comparand.equals(recordValueList))
+                  negative = false;
+                else
+                  positive = true;
+              }
+              break;
+          }
+        }
+      }
+    } catch (UnsupportedEncodingException e) {
+      e.printStackTrace();
+    }
+
+    boolean result = false;
+    if (isIntersection) {
+      result = negative;
+    } else {
+      result = positive;
+    }
+
+    return result;
+  }
+}
+
+// True when the two-character comparator's second char matches the given
+// marker (e.g. ">=" with "=", "!!" with "!"). Ternary is redundant.
+boolean isSecond(String comparator, String string) :
+{
+}
+{
+  {
+    return (comparator.length() == 2 && string.charAt(0) == comparator.charAt(1)) 
+    ? true : false;
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/GroupingFilterMap.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/GroupingFilterMap.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/GroupingFilterMap.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/GroupingFilterMap.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,83 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.algebra;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.mapred.GroupingTableMap;
+import org.apache.hadoop.hbase.mapred.TableMap;
+import org.apache.hadoop.hbase.mapred.TableOutputCollector;
+import org.apache.hadoop.hbase.shell.algebra.generated.ExpressionParser;
+import org.apache.hadoop.hbase.shell.algebra.generated.ParseException;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Reporter;
+
+/**
+ * Extract grouping columns from filtered records.
+ *
+ * Combines {@link GroupingTableMap}'s group-key extraction with a boolean
+ * filter expression that is evaluated against each row before it is emitted.
+ */
+public class GroupingFilterMap extends GroupingTableMap {
+  ExpressionParser expressionParser;
+  /** Job configuration key holding the boolean filter expression. */
+  public static final String EXPRESSION = "shell.mapred.filtertablemap.exps";
+
+  /**
+   * Sets up a job configured to run this map class.
+   *
+   * @param table input table name
+   * @param columns space-separated columns to scan
+   * @param groupColumns space-separated columns forming the group key
+   * @param expression boolean filter expression
+   * @param mapper mapper class
+   * @param job job configuration to update
+   */
+  public static void initJob(String table, String columns, String groupColumns,
+      String expression, Class<? extends TableMap> mapper, JobConf job) {
+    initJob(table, columns, mapper, job);
+    job.set(GROUP_COLUMNS, groupColumns);
+    job.set(EXPRESSION, expression);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void configure(JobConf job) {
+    super.configure(job);
+    String[] cols = job.get(GROUP_COLUMNS, "").split(" ");
+    m_columns = new Text[cols.length];
+    for (int i = 0; i < cols.length; i++) {
+      m_columns[i] = new Text(cols[i]);
+    }
+    expressionParser = new ExpressionParser(job.get(EXPRESSION, ""));
+    try {
+      expressionParser.booleanExpressionParse();
+    } catch (ParseException e) {
+      // Fail fast: a malformed filter expression makes the job unrunnable.
+      throw new RuntimeException(e);
+    }
+  }
+
+  /**
+   * Builds the group key for each row and emits the row only when it
+   * satisfies the configured filter expression.
+   */
+  @Override
+  public void map(@SuppressWarnings("unused")
+  HStoreKey key, MapWritable value, TableOutputCollector output,
+      @SuppressWarnings("unused")
+      Reporter reporter) throws IOException {
+    byte[][] keyVals = extractKeyValues(value);
+    if (keyVals != null) {
+      Text tKey = createGroupKey(keyVals);
+
+      try {
+        if (expressionParser.checkConstraints(value)) {
+          output.collect(tKey, value);
+        }
+      } catch (ParseException e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IdentityFilterMap.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IdentityFilterMap.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IdentityFilterMap.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IdentityFilterMap.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,78 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.algebra;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.mapred.IdentityTableMap;
+import org.apache.hadoop.hbase.mapred.TableMap;
+import org.apache.hadoop.hbase.mapred.TableOutputCollector;
+import org.apache.hadoop.hbase.shell.algebra.generated.ExpressionParser;
+import org.apache.hadoop.hbase.shell.algebra.generated.ParseException;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Reporter;
+
+/**
+ * Extract filtered records.
+ *
+ * Identity map that evaluates a boolean filter expression against each row
+ * and emits only the rows that satisfy it.
+ */
+public class IdentityFilterMap extends IdentityTableMap {
+  ExpressionParser expressionParser;
+  /** Job configuration key holding the boolean filter expression. */
+  public static final String EXPRESSION = "shell.mapred.filtertablemap.exps";
+
+  /**
+   * Sets up a job configured to run this map class.
+   *
+   * @param table input table name
+   * @param columns space-separated columns to scan
+   * @param expression boolean filter expression
+   * @param mapper mapper class
+   * @param job job configuration to update
+   */
+  @SuppressWarnings("deprecation")
+  public static void initJob(String table, String columns, String expression,
+      Class<? extends TableMap> mapper, JobConf job) {
+    initJob(table, columns, mapper, job);
+    job.set(EXPRESSION, expression);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void configure(JobConf job) {
+    super.configure(job);
+    expressionParser = new ExpressionParser(job.get(EXPRESSION, ""));
+    try {
+      expressionParser.booleanExpressionParse();
+    } catch (ParseException e) {
+      // Fail fast: a malformed filter expression makes the job unrunnable.
+      throw new RuntimeException(e);
+    }
+  }
+
+  /**
+   * Filter the value for each specified column family.
+   */
+  @Override
+  public void map(HStoreKey key, MapWritable value,
+      TableOutputCollector output, Reporter reporter) throws IOException {
+    Text tKey = key.getRow();
+    try {
+      if (expressionParser.checkConstraints(value)) {
+        output.collect(tKey, value);
+      }
+    } catch (ParseException e) {
+      throw new RuntimeException(e);
+    }
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoin.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoin.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoin.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoin.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,76 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.algebra;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.JobConf;
+
+/**
+ * Perform an index join using MapReduce.
+ */
+public class IndexJoin extends RelationalOperation {
+  /**
+   * @param conf HBase configuration
+   * @param condition parsed join condition parameters
+   */
+  public IndexJoin(HBaseConfiguration conf, Map<String, String> condition) {
+    super(conf, condition);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public JobConf getConf() throws IOException, RuntimeException {
+    String secondRelation = condition.get(Constants.JOIN_SECOND_RELATION);
+
+    // Hoist Text construction out of the loop; both names are loop-invariant.
+    Text firstName = new Text(input);
+    Text secondName = new Text(secondRelation);
+    HColumnDescriptor[] firstColumns = null;
+    HColumnDescriptor[] secondColumns = null;
+    for (int i = 0; i < tables.length; i++) {
+      if (tables[i].getName().equals(firstName)) {
+        firstColumns = tables[i].getFamilies().values().toArray(
+            new HColumnDescriptor[] {});
+      } else if (tables[i].getName().equals(secondName)) {
+        secondColumns = tables[i].getFamilies().values().toArray(
+            new HColumnDescriptor[] {});
+      }
+    }
+
+    // Fail with a clear message instead of an NPE below when either
+    // relation is missing from the table list.
+    if (firstColumns == null) {
+      throw new IOException("table not found: " + input);
+    }
+    if (secondColumns == null) {
+      throw new IOException("table not found: " + secondRelation);
+    }
+
+    StringBuilder firstColumnsStr = new StringBuilder();
+    StringBuilder secondColumnsStr = new StringBuilder();
+
+    for (int i = 0; i < firstColumns.length; i++) {
+      desc.addFamily(firstColumns[i]);
+      firstColumnsStr.append(firstColumns[i].getName()).append(" ");
+    }
+
+    for (int i = 0; i < secondColumns.length; i++) {
+      desc.addFamily(secondColumns[i]);
+      secondColumnsStr.append(secondColumns[i].getName()).append(" ");
+    }
+
+    admin.createTable(desc); // create output table.
+
+    IndexJoinMap.initJob(input, secondRelation, firstColumnsStr.toString(),
+        secondColumnsStr.toString(), condition.get(Constants.RELATIONAL_JOIN),
+        IndexJoinMap.class, jobConf);
+    IndexJoinReduce.initJob(output, IndexJoinReduce.class, jobConf);
+
+    return jobConf;
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinMap.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinMap.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinMap.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinMap.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,108 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.algebra;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.mapred.TableMap;
+import org.apache.hadoop.hbase.mapred.TableOutputCollector;
+import org.apache.hadoop.hbase.shell.algebra.generated.ExpressionParser;
+import org.apache.hadoop.hbase.shell.algebra.generated.ParseException;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Reporter;
+
+/**
+ * An index join exploits the existence of a row index for one of the
+ * relations used in the join to find matching rows more quickly.
+ * 
+ * Index join (using R2 row index) takes time O(i+m)/map function number.
+ */
+public class IndexJoinMap extends TableMap {
+  ExpressionParser expressionParser;
+  private String secondRelation;
+  /** Job configuration key holding the join condition expression. */
+  public static final String JOIN_EXPRESSION = "shell.mapred.join.expression";
+  /** Job configuration key naming the second (indexed) relation. */
+  public static final String SECOND_RELATION = "shell.mapred.join.second.relation";
+  /** Job configuration key listing the first relation's columns. */
+  public static final String FIRST_COLUMNS = "shell.mapred.first.columns";
+  private Text[] first_columns;
+
+  /** constructor */
+  public IndexJoinMap() {
+    super();
+  }
+
+  /**
+   * @param firstRelation R1
+   * @param secondRelation R2
+   * @param firstColumns (A 1,A 2,...,A n)
+   * @param secondColumns (B~1~,B~2~,...,B~m~)
+   * @param joinExpression join condition expression
+   * @param mapper mapper class
+   * @param job jobConf
+   */
+  public static void initJob(String firstRelation, String secondRelation,
+      String firstColumns, String secondColumns, String joinExpression,
+      Class<? extends TableMap> mapper, JobConf job) {
+    initJob(firstRelation, firstColumns, mapper, job);
+    job.set(JOIN_EXPRESSION, joinExpression);
+    job.set(SECOND_RELATION, secondRelation);
+    job.set(FIRST_COLUMNS, firstColumns);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void configure(JobConf job) {
+    super.configure(job);
+    secondRelation = job.get(SECOND_RELATION, "");
+    String[] names = job.get(FIRST_COLUMNS, "").split(" ");
+    first_columns = new Text[names.length];
+    int idx = 0;
+    for (String name : names) {
+      first_columns[idx++] = new Text(name);
+    }
+
+    expressionParser = new ExpressionParser(job.get(JOIN_EXPRESSION, ""));
+    try {
+      expressionParser.joinExpressionParse();
+    } catch (ParseException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  /**
+   * Looks up the second relation's columns for this row, merges them into
+   * the row, and emits it when the join condition holds.
+   */
+  @Override
+  public void map(HStoreKey key, MapWritable value,
+      TableOutputCollector output, Reporter reporter) throws IOException {
+    Text rowKey = key.getRow();
+    try {
+      MapWritable joined = expressionParser.getJoinColumns(value,
+          first_columns.length, secondRelation);
+      if (joined.size() == 0) {
+        // No matching row in R2: nothing to join, drop this record.
+        return;
+      }
+      value.putAll(joined);
+      if (expressionParser.checkConstraints(value)) {
+        output.collect(rowKey, value);
+      }
+    } catch (ParseException e) {
+      throw new RuntimeException(e);
+    }
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinReduce.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinReduce.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinReduce.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/IndexJoinReduce.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,49 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.algebra;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.hbase.mapred.TableOutputCollector;
+import org.apache.hadoop.hbase.mapred.TableReduce;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.Reporter;
+
+/**
+ * Table join Reduce class: passes every joined row straight through to the
+ * output table under its key.
+ */
+public class IndexJoinReduce extends TableReduce {
+  /** constructor */
+  public IndexJoinReduce() {
+    super();
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void reduce(Text key, @SuppressWarnings("unchecked")
+  Iterator values, TableOutputCollector output, Reporter reporter)
+      throws IOException {
+    // Identity reduce: emit each value unchanged.
+    while (values.hasNext()) {
+      output.collect(key, (MapWritable) values.next());
+    }
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Operation.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Operation.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Operation.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Operation.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,41 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.algebra;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapred.JobConf;
+
+/**
+ * Common interface for algebra operations such as relational algebra,
+ * matrix algebra, linear algebra, and topological algebra operations.
+ */
+public interface Operation {
+
+  /**
+   * Returns the Map/Reduce job configuration for performing this operation.
+   * 
+   * @return the configured {@code JobConf} ready to submit
+   * @throws IOException if the configuration cannot be built
+   * @throws RuntimeException declared so implementations can surface
+   *           unrecoverable setup errors to the caller
+   */
+  JobConf getConf() throws IOException, RuntimeException;
+}



Mime
View raw message