hadoop-common-commits mailing list archives

From st...@apache.org
Subject svn commit: r599162 [2/4] - in /lucene/hadoop/trunk/src/contrib/hbase: ./ src/java/org/apache/hadoop/hbase/mapred/ src/java/org/apache/hadoop/hbase/shell/ src/java/org/apache/hadoop/hbase/shell/algebra/ src/java/org/apache/hadoop/hbase/shell/algebra/ge...
Date Wed, 28 Nov 2007 22:10:27 GMT
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/OperationEvaluator.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/OperationEvaluator.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/OperationEvaluator.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/OperationEvaluator.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,88 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.algebra;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.shell.VariableRef;
+import org.apache.hadoop.hbase.shell.VariablesPool;
+import org.apache.hadoop.mapred.JobConf;
+
+/**
+ * Each algebra operation can be evaluated by one of several different algorithms.
+ * 
+ * This class should therefore evolve into a query executor/optimizer, and become
+ * the core module that governs the query performance of the HBase Shell.
+ * 
+ * @see <a
+ *      href="http://wiki.apache.org/lucene-hadoop/Hbase/HbaseShell/Executor">Integrated
+ *      query executor architecture</a>
+ */
+public class OperationEvaluator {
+  private HBaseConfiguration conf;
+  Map<String, String> condition = new HashMap<String, String>();
+
+  /** Constructor */
+  public OperationEvaluator(HBaseConfiguration conf, String chainKey, String output) {
+    this.conf = conf;
+    String chain = chainKey;
+    String input = null;
+
+    while (chain != null) {
+      for (Map.Entry<String, VariableRef> e : VariablesPool.get(chain).entrySet()) {
+        if (e.getKey() == null) {
+          input = e.getValue().getArgument();
+        } else {
+          condition.put(e.getValue().getOperation(), e.getValue()
+              .getArgument());
+        }
+        chain = e.getKey();
+      }
+    }
+    condition.put(Constants.CONFIG_INPUT, input);
+    condition.put(Constants.CONFIG_OUTPUT, output);
+  }
+
+
+  /**
+   * Returns the job configuration object for the statement type
+   * 
+   * @return JobConf
+   * @throws IOException
+   * @throws RuntimeException
+   */
+  public JobConf getJobConf() throws IOException, RuntimeException {
+    RelationalOperation operation;
+    if (condition.containsKey(Constants.RELATIONAL_SELECTION)) {
+      operation = new Selection(conf, condition);
+    } else if (condition.containsKey(Constants.RELATIONAL_PROJECTION)) {
+      operation = new Projection(conf, condition);
+    } else if (condition.containsKey(Constants.RELATIONAL_JOIN)) {
+      operation = new IndexJoin(conf, condition);
+    } else {
+      operation = new DuplicateTable(conf, condition);
+    }
+
+    return operation.getOperation().getConf();
+  }
+}

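For context, a minimal driver sketch (not part of this commit) showing how the class above is meant to be used. It assumes the shell has already registered an operation chain in VariablesPool under the key passed as chainKey; the chain key "A" and the table name are placeholders.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.shell.algebra.OperationEvaluator;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

public class EvaluatorSketch {
  public static void main(String[] args) throws Exception {
    HBaseConfiguration conf = new HBaseConfiguration();
    // "A" is a placeholder chain key; the shell must already have stored the
    // parsed statements for it in VariablesPool before this point.
    OperationEvaluator eval = new OperationEvaluator(conf, "A", "result_table");
    // Picks Selection, Projection, IndexJoin or DuplicateTable from the
    // accumulated conditions and returns the configured MapReduce job.
    JobConf job = eval.getJobConf();
    JobClient.runJob(job);
  }
}
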
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Projection.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Projection.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Projection.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Projection.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,54 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.algebra;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.mapred.GroupingTableMap;
+import org.apache.hadoop.hbase.mapred.IdentityTableMap;
+import org.apache.hadoop.hbase.mapred.IdentityTableReduce;
+import org.apache.hadoop.mapred.JobConf;
+
+/**
+ * Perform a relational projection using MapReduce.
+ */
+public class Projection extends RelationalOperation {
+  public Projection(HBaseConfiguration conf, Map<String, String> condition) {
+    super(conf, condition);
+  }
+
+  @Override
+  public JobConf getConf() throws IOException, RuntimeException {
+    outputTableCreate(null, condition.get(Constants.RELATIONAL_PROJECTION));
+
+    if (condition.containsKey(Constants.RELATIONAL_GROUP)) {
+      GroupingTableMap.initJob(input, getProjColumns(), getGroupColumns(),
+          GroupingTableMap.class, jobConf);
+    } else {
+      IdentityTableMap.initJob(input, getProjColumns(), IdentityTableMap.class,
+          jobConf);
+    }
+
+    IdentityTableReduce.initJob(output, IdentityTableReduce.class, jobConf);
+    return jobConf;
+  }
+}

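A hedged sketch of driving Projection directly with a hand-built condition map. It assumes the Constants keys used above (CONFIG_INPUT, CONFIG_OUTPUT, RELATIONAL_PROJECTION), which are defined elsewhere in this commit; the table and column family names are placeholders, and the projection value follows the space-separated format that outputTableCreate expects.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.shell.algebra.Constants;
import org.apache.hadoop.hbase.shell.algebra.Projection;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

public class ProjectionSketch {
  public static void main(String[] args) throws Exception {
    Map<String, String> condition = new HashMap<String, String>();
    condition.put(Constants.CONFIG_INPUT, "movieLog_table");  // placeholder input table
    condition.put(Constants.CONFIG_OUTPUT, "result_table");   // placeholder output table
    // Space-separated column families to keep; adding a RELATIONAL_GROUP entry
    // would switch the job to GroupingTableMap instead of IdentityTableMap.
    condition.put(Constants.RELATIONAL_PROJECTION, "year: length:");

    JobConf job = new Projection(new HBaseConfiguration(), condition).getConf();
    JobClient.runJob(job);
  }
}
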
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/RelationalOperation.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/RelationalOperation.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/RelationalOperation.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/RelationalOperation.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,175 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.algebra;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConnection;
+import org.apache.hadoop.hbase.HConnectionManager;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.ClusterStatus;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+
+/**
+ * Represents the interface to a relational algebra operation such as
+ * projection, selection, join, or grouping.
+ */
+public abstract class RelationalOperation implements Operation {
+  protected JobConf jobConf;
+  protected HConnection conn;
+  protected HBaseAdmin admin;
+  protected JobClient jobClient;
+  protected HTableDescriptor desc;
+  protected String input;
+  protected String output;
+  protected Map<String, String> condition;
+  protected HTableDescriptor[] tables;
+  protected Set<String> projSet = new HashSet<String>();
+
+  /**
+   * Constructor
+   * 
+   * @param conf
+   * @param statements
+   */
+  public RelationalOperation(HBaseConfiguration conf,
+      Map<String, String> statements) {
+    this.jobConf = new JobConf(conf);
+    this.conn = HConnectionManager.getConnection(conf);
+    this.condition = statements;
+    this.input = statements.get(Constants.CONFIG_INPUT);
+    this.output = statements.get(Constants.CONFIG_OUTPUT);
+    jobConf.setJobName("shell.mapred-" + System.currentTimeMillis());
+    desc = new HTableDescriptor(output);
+
+    try {
+      this.admin = new HBaseAdmin(conf);
+      this.jobClient = new JobClient(jobConf);
+      tables = conn.listTables();
+
+      ClusterStatus cluster = jobClient.getClusterStatus();
+      jobConf.setNumMapTasks(cluster.getMapTasks());
+      jobConf.setNumReduceTasks(1);
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+  }
+
+  /**
+   * Gets the input table's column descriptors
+   * 
+   * @return columns
+   */
+  public HColumnDescriptor[] getInputColumnDescriptor() {
+    HColumnDescriptor[] columns = null;
+    for (int i = 0; i < tables.length; i++) {
+      if (tables[i].getName().equals(new Text(input))) {
+        columns = tables[i].getFamilies().values().toArray(
+            new HColumnDescriptor[] {});
+        break;
+      }
+    }
+    return columns;
+  }
+
+  /**
+   * Converts an HColumnDescriptor[] to a space-separated column name string
+   * 
+   * @param columns
+   * @return columns string
+   */
+  public String getColumnStringArray(HColumnDescriptor[] columns) {
+    String result = "";
+    for (int i = 0; i < columns.length; i++) {
+      desc.addFamily(columns[i]);
+      result += columns[i].getName() + " ";
+    }
+    return result;
+  }
+
+  /**
+   * Creates the output table
+   * 
+   * @param columns
+   * @param columnString
+   * @throws IOException
+   */
+  public void outputTableCreate(HColumnDescriptor[] columns, String columnString)
+      throws IOException {
+    if (columnString == null) {
+      for (int i = 0; i < columns.length; i++) {
+        if (projSet.size() > 0) {
+          desc.addFamily(columns[i]);
+        } else {
+          if (projSet.contains(columns[i].getName().toString())) {
+            desc.addFamily(columns[i]);
+          }
+        }
+      }
+    } else {
+      String[] cols = columnString.split(" ");
+      for (int i = 0; i < cols.length; i++) {
+        desc.addFamily(new HColumnDescriptor(cols[i]));
+      }
+    }
+
+    admin.createTable(desc);
+  }
+
+  /**
+   * Return the jobConf
+   */
+  public JobConf getConf() throws IOException, RuntimeException {
+    return jobConf;
+  }
+
+  /**
+   * @return projection conditions
+   */
+  public String getProjColumns() {
+    return condition.get(Constants.RELATIONAL_PROJECTION);
+  }
+
+  /**
+   * @return selection conditions
+   */
+  public String getExpression() {
+    return condition.get(Constants.RELATIONAL_SELECTION);
+  }
+
+  /**
+   * @return group conditions
+   */
+  public String getGroupColumns() {
+    return condition.get(Constants.RELATIONAL_GROUP);
+  }
+
+  public Operation getOperation() {
+    return this;
+  }
+}

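To illustrate the extension point RelationalOperation defines, a hedged sketch of a minimal subclass: the base constructor sets up the connection, admin, job client and task counts, so a subclass only implements getConf(). The identity map/reduce wiring mirrors Projection and Selection above; the class itself is hypothetical (the commit's own DuplicateTable presumably plays a similar role, but its source is not in this part of the diff).

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.mapred.IdentityTableMap;
import org.apache.hadoop.hbase.mapred.IdentityTableReduce;
import org.apache.hadoop.hbase.shell.algebra.RelationalOperation;
import org.apache.hadoop.mapred.JobConf;

public class CopyTable extends RelationalOperation {
  public CopyTable(HBaseConfiguration conf, Map<String, String> condition) {
    super(conf, condition);
  }

  @Override
  public JobConf getConf() throws IOException, RuntimeException {
    // Recreate the input table's families on the output table, then run an
    // identity map/reduce over all of them.
    HColumnDescriptor[] columns = getInputColumnDescriptor();
    String columnString = getColumnStringArray(columns);
    outputTableCreate(columns, columnString);
    IdentityTableMap.initJob(input, columnString, IdentityTableMap.class, jobConf);
    IdentityTableReduce.initJob(output, IdentityTableReduce.class, jobConf);
    return jobConf;
  }
}
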
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Selection.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Selection.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Selection.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/Selection.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,74 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.algebra;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.mapred.IdentityTableReduce;
+import org.apache.hadoop.mapred.JobConf;
+
+/**
+ * Performs a relational selection using a linear-search algorithm in each map
+ * function.
+ * 
+ * Each file block is scanned and every record is tested to see whether it
+ * satisfies the selection condition.
+ */
+public class Selection extends RelationalOperation {
+  public Selection(HBaseConfiguration conf, Map<String, String> condition) {
+    super(conf, condition);
+  }
+
+  @Override
+  public JobConf getConf() throws IOException, RuntimeException {
+    HColumnDescriptor[] columns = getInputColumnDescriptor();
+    String groupColumns = getGroupColumns(columns);
+    outputTableCreate(columns, null);
+
+    if (condition.containsKey(Constants.RELATIONAL_GROUP)) {
+      GroupingFilterMap.initJob(input, groupColumns, getGroupColumns(),
+          getExpression(), GroupingFilterMap.class, jobConf);
+    } else {
+      IdentityFilterMap.initJob(input, groupColumns, getExpression(),
+          IdentityFilterMap.class, jobConf);
+    }
+
+    IdentityTableReduce.initJob(output, IdentityTableReduce.class, jobConf);
+    return jobConf;
+  }
+
+  private String getGroupColumns(HColumnDescriptor[] columns) {
+    String result = null;
+    if (condition.containsKey(Constants.RELATIONAL_PROJECTION)) {
+      result = getProjColumns();
+    } else {
+      result = getColumnStringArray(columns);
+    }
+
+    String[] groups = result.split(" ");
+    for (int i = 0; i < groups.length; i++) {
+      projSet.add(groups[i]);
+    }
+    return result;
+  }
+}

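Likewise, a hedged sketch of a condition map that routes execution to Selection: RELATIONAL_SELECTION carries a boolean expression in the syntax accepted by the generated ExpressionParser below (column comparator value, combined with and/or, with '|' separating alternatives for the == and !! list operators). Table, column and value names are placeholders.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.shell.algebra.Constants;
import org.apache.hadoop.hbase.shell.algebra.Selection;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

public class SelectionSketch {
  public static void main(String[] args) throws Exception {
    Map<String, String> condition = new HashMap<String, String>();
    condition.put(Constants.CONFIG_INPUT, "movieLog_table");  // placeholder input table
    condition.put(Constants.CONFIG_OUTPUT, "result_table");   // placeholder output table
    // Boolean filter applied per record in each map task; with a
    // RELATIONAL_GROUP entry the job would use GroupingFilterMap instead.
    condition.put(Constants.RELATIONAL_SELECTION,
        "year: > 1990 and genre: == drama|comedy");

    JobConf job = new Selection(new HBaseConfiguration(), condition).getConf();
    JobClient.runJob(job);
  }
}
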
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParser.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParser.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParser.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParser.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,774 @@
+/* Generated By:JavaCC: Do not edit this line. ExpressionParser.java */
+package org.apache.hadoop.hbase.shell.algebra.generated;
+
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.io.Reader;
+import java.io.StringReader;
+import java.io.UnsupportedEncodingException;
+import java.net.URLDecoder;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.SortedMap;
+
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HTable;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+
+/**
+ * Parses the boolean and join expressions used by the shell's algebra operations.
+ */
+public class ExpressionParser implements ExpressionParserConstants {
+  HBaseConfiguration conf = new HBaseConfiguration();
+  private String expression;
+  private String joinKeyColumn;
+  private String secondRelation;
+  private Map<String, List<String>> unionSet = new HashMap<String, List<String>>();
+  private Map<String, List<String>> intersectionSet = new HashMap<String, List<String>>();
+  private SortedMap<Text, byte[]> secondValue = null;
+
+  public ExpressionParser(final String expression) {
+    this((Reader)(new StringReader(expression)));
+    this.expression = expression;
+  }
+
+  public String getExpression() {
+    return this.expression;
+  }
+
+  final public void booleanExpressionParse() throws ParseException {
+  Map<String, List<String>> temp = new HashMap<String, List<String>>();
+    temp = booleanTerm();
+    label_1:
+    while (true) {
+      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+      case AND:
+      case OR:
+        ;
+        break;
+      default:
+        jj_la1[0] = jj_gen;
+        break label_1;
+      }
+      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+      case AND:
+        jj_consume_token(AND);
+          if(temp != null) {
+            for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+              List<String> newList = intersectionSet.get(e.getKey());
+              if(newList != null) {
+                newList.addAll(e.getValue());
+              } else {
+                newList = e.getValue();
+              }
+              intersectionSet.put(e.getKey(), newList);
+            }
+          }
+          temp = booleanTerm();
+          for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+            List<String> newList = intersectionSet.get(e.getKey());
+            if(newList != null) {
+              newList.addAll(e.getValue());
+            } else {
+              newList = e.getValue();
+            }
+            intersectionSet.put(e.getKey(), newList);
+          }
+          temp = null;
+        break;
+      case OR:
+        jj_consume_token(OR);
+        if(temp != null) {
+          for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+            List<String> newList = unionSet.get(e.getKey());
+            if(newList != null) {
+              newList.addAll(e.getValue());
+            } else {
+              newList = e.getValue();
+            }
+            unionSet.put(e.getKey(), newList);
+          }
+        }
+        temp = booleanTerm();
+        for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+          List<String> newList = unionSet.get(e.getKey());
+          if(newList != null) {
+            newList.addAll(e.getValue());
+          } else {
+            newList = e.getValue();
+          }
+          unionSet.put(e.getKey(), newList);
+        }
+        temp = null;
+        break;
+      default:
+        jj_la1[1] = jj_gen;
+        jj_consume_token(-1);
+        throw new ParseException();
+      }
+    }
+    if(temp != null) {
+      for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+        List<String> newList = unionSet.get(e.getKey());
+        if(newList != null) {
+          newList.addAll(e.getValue());
+        } else {
+          newList = e.getValue();
+        }
+        unionSet.put(e.getKey(), newList);
+      }
+    }
+  }
+
+  final public void joinExpressionParse() throws ParseException {
+  Map<String, List<String>> temp = new HashMap<String, List<String>>();
+    indexJoinCondition();
+    jj_consume_token(BOOL);
+    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+    case ID:
+    case INTEGER_LITERAL:
+      temp = booleanTerm();
+      label_2:
+      while (true) {
+        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+        case AND:
+        case OR:
+          ;
+          break;
+        default:
+          jj_la1[2] = jj_gen;
+          break label_2;
+        }
+        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+        case AND:
+          jj_consume_token(AND);
+         if(temp != null) {
+           for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+             List<String> newList = intersectionSet.get(e.getKey());
+             if(newList != null) {
+               newList.addAll(e.getValue());
+             } else {
+               newList = e.getValue();
+             }
+             intersectionSet.put(e.getKey(), newList);
+           }
+         }
+         temp = booleanTerm();
+         for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+           List<String> newList = intersectionSet.get(e.getKey());
+           if(newList != null) {
+             newList.addAll(e.getValue());
+           } else {
+             newList = e.getValue();
+           }
+           intersectionSet.put(e.getKey(), newList);
+         }
+         temp = null;
+          break;
+        case OR:
+          jj_consume_token(OR);
+        if(temp != null) {
+          for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+            List<String> newList = unionSet.get(e.getKey());
+            if(newList != null) {
+              newList.addAll(e.getValue());
+            } else {
+              newList = e.getValue();
+            }
+            unionSet.put(e.getKey(), newList);
+          }
+        }
+        temp = booleanTerm();
+        for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+          List<String> newList = unionSet.get(e.getKey());
+          if(newList != null) {
+            newList.addAll(e.getValue());
+          } else {
+            newList = e.getValue();
+          }
+          unionSet.put(e.getKey(), newList);
+        }
+        temp = null;
+          break;
+        default:
+          jj_la1[3] = jj_gen;
+          jj_consume_token(-1);
+          throw new ParseException();
+        }
+      }
+      if(temp != null) {
+        for(Map.Entry<String, List<String>> e : temp.entrySet()) {
+          List<String> newList = unionSet.get(e.getKey());
+          if(newList != null) {
+            newList.addAll(e.getValue());
+          } else {
+            newList = e.getValue();
+          }
+          unionSet.put(e.getKey(), newList);
+        }
+        }
+      break;
+    default:
+      jj_la1[4] = jj_gen;
+      ;
+    }
+  }
+
+  final public void indexJoinCondition() throws ParseException {
+ Token firstR = null;
+  Token joinKey = null;
+  Token secondR = null;
+    firstR = jj_consume_token(ID);
+    jj_consume_token(DOT);
+    joinKey = jj_consume_token(ID);
+    jj_consume_token(EQUALS);
+    secondR = jj_consume_token(ID);
+    jj_consume_token(DOT);
+    jj_consume_token(ROW);
+    joinKeyColumn = joinKey.image.toString();
+    secondRelation = secondR.image.toString();
+  }
+
+  final public Map<String, List<String>> booleanTerm() throws ParseException {
+  Token tSearchName = null;
+  Token tComparator = null;
+  Token tFirst = null;
+  Token tSecond = null;
+  Map<String, List<String>> result = new HashMap<String, List<String>>();
+  List<String> valueList = new ArrayList<String>();
+  String comparand = null;
+    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+    case ID:
+      tSearchName = jj_consume_token(ID);
+      break;
+    case INTEGER_LITERAL:
+      tSearchName = jj_consume_token(INTEGER_LITERAL);
+      break;
+    default:
+      jj_la1[5] = jj_gen;
+      jj_consume_token(-1);
+      throw new ParseException();
+    }
+    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+    case DOT:
+      jj_consume_token(DOT);
+      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+      case ID:
+        tFirst = jj_consume_token(ID);
+        break;
+      case INTEGER_LITERAL:
+        tFirst = jj_consume_token(INTEGER_LITERAL);
+        break;
+      default:
+        jj_la1[6] = jj_gen;
+        jj_consume_token(-1);
+        throw new ParseException();
+      }
+      break;
+    default:
+      jj_la1[7] = jj_gen;
+      ;
+    }
+    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+    case EQUALS:
+      tComparator = jj_consume_token(EQUALS);
+      break;
+    case LCOMP:
+      tComparator = jj_consume_token(LCOMP);
+      break;
+    case LCOMPE:
+      tComparator = jj_consume_token(LCOMPE);
+      break;
+    case RCOMPE:
+      tComparator = jj_consume_token(RCOMPE);
+      break;
+    case RCOMP:
+      tComparator = jj_consume_token(RCOMP);
+      break;
+    case IN:
+      tComparator = jj_consume_token(IN);
+      break;
+    case NOTIN:
+      tComparator = jj_consume_token(NOTIN);
+      break;
+    default:
+      jj_la1[8] = jj_gen;
+      jj_consume_token(-1);
+      throw new ParseException();
+    }
+    comparand = getValueList();
+    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+    case DOT:
+      jj_consume_token(DOT);
+      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+      case ID:
+        tSecond = jj_consume_token(ID);
+        break;
+      case INTEGER_LITERAL:
+        tSecond = jj_consume_token(INTEGER_LITERAL);
+        break;
+      default:
+        jj_la1[9] = jj_gen;
+        jj_consume_token(-1);
+        throw new ParseException();
+      }
+      break;
+    default:
+      jj_la1[10] = jj_gen;
+      ;
+    }
+    if(tFirst == null && tSecond == null) {
+      valueList.add(tComparator.image.toString() + " " +comparand);
+      result.put(tSearchName.image.toString(), valueList);
+    } else if (tFirst != null && tSecond != null ){
+      if(tSearchName.image.toString().equals(secondRelation)) {
+        valueList.add(tComparator.image.toString() + " "
+            + secondValue.get(new Text(tSearchName.image.toString())));
+        result.put(tFirst.image.toString(), valueList);
+      } else {
+        valueList.add(tComparator.image.toString() + " " + tSecond.image.toString());
+        result.put(tFirst.image.toString(), valueList);
+      }
+    }
+    {if (true) return result;}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public String getValueList() throws ParseException {
+  Token tComparand = null;
+  Token tList = null;
+  String result = "";
+    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+    case ID:
+      tComparand = jj_consume_token(ID);
+      break;
+    case INTEGER_LITERAL:
+      tComparand = jj_consume_token(INTEGER_LITERAL);
+      break;
+    default:
+      jj_la1[11] = jj_gen;
+      jj_consume_token(-1);
+      throw new ParseException();
+    }
+    result = tComparand.image.toString();
+    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+    case BAR:
+      jj_consume_token(BAR);
+      label_3:
+      while (true) {
+        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+        case ID:
+        case INTEGER_LITERAL:
+          ;
+          break;
+        default:
+          jj_la1[12] = jj_gen;
+          break label_3;
+        }
+        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+        case ID:
+          tList = jj_consume_token(ID);
+          break;
+        case INTEGER_LITERAL:
+          tList = jj_consume_token(INTEGER_LITERAL);
+          break;
+        default:
+          jj_la1[13] = jj_gen;
+          jj_consume_token(-1);
+          throw new ParseException();
+        }
+        result += "|" + tList.image.toString();
+        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
+        case BAR:
+          jj_consume_token(BAR);
+          break;
+        default:
+          jj_la1[14] = jj_gen;
+          ;
+        }
+      }
+      break;
+    default:
+      jj_la1[15] = jj_gen;
+      ;
+    }
+    {if (true) return result;}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public boolean checkConstraints(MapWritable data) throws ParseException {
+  Map<String, String> record = getComparisonObject(data);
+  boolean result = false;
+  if(intersectionSet.size() == 0 && unionSet.size() == 0) {
+    return true;
+  }
+  if (data == null || record.size() == 0) {
+    return result; // return false if data is null.
+  }
+      if (intersectionSet.size() == record.size()) {
+        result = booleanCompare(intersectionSet, record, true);
+      } else if (unionSet.size() == record.size()) {
+        result = booleanCompare(unionSet, record, false);
+      } else {
+        result = getCompareResult(record);
+      }
+    {if (true) return result;}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public Map<String, String> getComparisonObject(MapWritable data) throws ParseException {
+  Map<String, String> result = new HashMap<String, String>();
+    for (Map.Entry<Writable, Writable> e : data.entrySet()) {
+      String cKey = e.getKey().toString();
+      String val = new String(((ImmutableBytesWritable) e.getValue()).get());
+
+      if (intersectionSet.containsKey(cKey) || unionSet.containsKey(cKey)) {
+        result.put(cKey, val);
+      }
+    }
+
+    {if (true) return result;}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public MapWritable getJoinColumns(MapWritable value, int numCols, String secondRelation) throws ParseException {
+  MapWritable appendValue = new MapWritable();
+  String joinKey = null;
+  this.secondRelation = secondRelation;
+    if (numCols > 0) {
+      for (Map.Entry<Writable, Writable> e : value.entrySet()) {
+        Text column = (Text) e.getKey();
+        for (int i = 0; i < numCols; i++) {
+          if (column.equals(new Text(joinKeyColumn))) {
+            joinKey = new String(((ImmutableBytesWritable) e.getValue()).get());
+            break;
+          }
+        }
+      }
+    }
+
+    //If joinKey is null, just return.
+    if(joinKey == null) {
+      {if (true) return appendValue;}
+    }
+
+    try {
+      HTable table = new HTable(conf, new Text(secondRelation));
+      secondValue = table.getRow(new Text(joinKey));
+      for (Map.Entry<Text, byte[]> e : secondValue.entrySet()) {
+        appendValue.put(e.getKey(), new ImmutableBytesWritable(e.getValue()));
+      }
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+
+    {if (true) return appendValue;}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public boolean getCompareResult(Map<String, String> record) throws ParseException {
+      {if (true) return (booleanCompare(intersectionSet, record, true) &&
+          booleanCompare(unionSet, record, false)) ? true : false;}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public boolean booleanCompare(Map<String, List<String>> expression,
+    Map<String, String> record, boolean isIntersection) throws ParseException {
+  boolean negative = true;
+  boolean positive = false;
+    try{
+      for (Map.Entry<String, List<String>> e : expression.entrySet()) {
+        String key = e.getKey();
+        List<String> valueList = e.getValue();
+        String recordValueList = record.get(key);
+
+        for (int i = 0; i < valueList.size(); i++) {
+          String[] term = valueList.get(i).split(" ");
+          String comparator = term[0];
+          String comparand = term[1];
+
+          switch (comparator.charAt(0)) {
+            case '>':
+              if (isSecond(comparator, "=")) {
+                if (Integer.parseInt(comparand) > Integer
+                    .parseInt(recordValueList)) {
+                  negative = false;
+                } else {
+                  positive = true;
+                }
+              } else {
+                if (Integer.parseInt(comparand) > Integer
+                    .parseInt(recordValueList)
+                    || comparand.equals(recordValueList)) {
+                  negative = false;
+                } else {
+                  positive = true;
+                }
+              }
+              break;
+            case '<':
+              if (isSecond(comparator, "=")) {
+                if (Integer.parseInt(comparand) < Integer
+                    .parseInt(recordValueList))
+                  negative = false;
+                else
+                  positive = true;
+              } else {
+                if (Integer.parseInt(comparand) < Integer
+                    .parseInt(recordValueList)
+                    || comparand.equals(recordValueList))
+                  negative = false;
+                else
+                  positive = true;
+              }
+              break;
+
+            case '!':
+              if (isSecond(comparator, "!")) {
+                boolean checkBool = true;
+                String[] coms = comparand.split("[|]");
+                for (int j = 0; j < coms.length; j++) {
+                  if (URLDecoder.decode(coms[j], "UTF-8").equals(recordValueList)) {
+                    checkBool = false;
+                  }
+                }
+
+                if (!checkBool) {
+                  negative = false;
+                } else {
+                  positive = true;
+                }
+
+              } else {
+                if (comparand.equals(recordValueList))
+                  negative = false;
+                else
+                  positive = true;
+              }
+              break;
+            case '=':
+              if (isSecond(comparator, "=")) {
+
+                boolean checkBool = true;
+                String[] coms = comparand.split("[|]");
+                for (int j = 0; j < coms.length; j++) {
+                  if (URLDecoder.decode(coms[j], "UTF-8").equals(recordValueList)) {
+                    checkBool = false;
+                  }
+                }
+
+                if (checkBool) {
+                  negative = false;
+                } else {
+                  positive = true;
+                }
+
+              } else {
+                if (!comparand.equals(recordValueList))
+                  negative = false;
+                else
+                  positive = true;
+              }
+              break;
+          }
+        }
+      }
+    } catch (UnsupportedEncodingException e) {
+      e.printStackTrace();
+    }
+
+    boolean result = false;
+    if (isIntersection) {
+      result = negative;
+    } else {
+      result = positive;
+    }
+
+    {if (true) return result;}
+    throw new Error("Missing return statement in function");
+  }
+
+  final public boolean isSecond(String comparator, String string) throws ParseException {
+    {if (true) return (comparator.length() == 2 && string.charAt(0) == comparator.charAt(1))
+    ? true : false;}
+    throw new Error("Missing return statement in function");
+  }
+
+  public ExpressionParserTokenManager token_source;
+  SimpleCharStream jj_input_stream;
+  public Token token, jj_nt;
+  private int jj_ntk;
+  private int jj_gen;
+  final private int[] jj_la1 = new int[16];
+  static private int[] jj_la1_0;
+  static {
+      jj_la1_0();
+   }
+   private static void jj_la1_0() {
+      jj_la1_0 = new int[] {0xc0,0xc0,0xc0,0xc0,0xc0000,0xc0000,0xc0000,0x100,0xfe00,0xc0000,0x100,0xc0000,0xc0000,0xc0000,0x10000,0x10000,};
+   }
+
+  public ExpressionParser(java.io.InputStream stream) {
+     this(stream, null);
+  }
+  public ExpressionParser(java.io.InputStream stream, String encoding) {
+    try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
+    token_source = new ExpressionParserTokenManager(jj_input_stream);
+    token = new Token();
+    jj_ntk = -1;
+    jj_gen = 0;
+    for (int i = 0; i < 16; i++) jj_la1[i] = -1;
+  }
+
+  public void ReInit(java.io.InputStream stream) {
+     ReInit(stream, null);
+  }
+  public void ReInit(java.io.InputStream stream, String encoding) {
+    try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
+    token_source.ReInit(jj_input_stream);
+    token = new Token();
+    jj_ntk = -1;
+    jj_gen = 0;
+    for (int i = 0; i < 16; i++) jj_la1[i] = -1;
+  }
+
+  public ExpressionParser(java.io.Reader stream) {
+    jj_input_stream = new SimpleCharStream(stream, 1, 1);
+    token_source = new ExpressionParserTokenManager(jj_input_stream);
+    token = new Token();
+    jj_ntk = -1;
+    jj_gen = 0;
+    for (int i = 0; i < 16; i++) jj_la1[i] = -1;
+  }
+
+  public void ReInit(java.io.Reader stream) {
+    jj_input_stream.ReInit(stream, 1, 1);
+    token_source.ReInit(jj_input_stream);
+    token = new Token();
+    jj_ntk = -1;
+    jj_gen = 0;
+    for (int i = 0; i < 16; i++) jj_la1[i] = -1;
+  }
+
+  public ExpressionParser(ExpressionParserTokenManager tm) {
+    token_source = tm;
+    token = new Token();
+    jj_ntk = -1;
+    jj_gen = 0;
+    for (int i = 0; i < 16; i++) jj_la1[i] = -1;
+  }
+
+  public void ReInit(ExpressionParserTokenManager tm) {
+    token_source = tm;
+    token = new Token();
+    jj_ntk = -1;
+    jj_gen = 0;
+    for (int i = 0; i < 16; i++) jj_la1[i] = -1;
+  }
+
+  final private Token jj_consume_token(int kind) throws ParseException {
+    Token oldToken;
+    if ((oldToken = token).next != null) token = token.next;
+    else token = token.next = token_source.getNextToken();
+    jj_ntk = -1;
+    if (token.kind == kind) {
+      jj_gen++;
+      return token;
+    }
+    token = oldToken;
+    jj_kind = kind;
+    throw generateParseException();
+  }
+
+  final public Token getNextToken() {
+    if (token.next != null) token = token.next;
+    else token = token.next = token_source.getNextToken();
+    jj_ntk = -1;
+    jj_gen++;
+    return token;
+  }
+
+  final public Token getToken(int index) {
+    Token t = token;
+    for (int i = 0; i < index; i++) {
+      if (t.next != null) t = t.next;
+      else t = t.next = token_source.getNextToken();
+    }
+    return t;
+  }
+
+  final private int jj_ntk() {
+    if ((jj_nt=token.next) == null)
+      return (jj_ntk = (token.next=token_source.getNextToken()).kind);
+    else
+      return (jj_ntk = jj_nt.kind);
+  }
+
+  private java.util.Vector jj_expentries = new java.util.Vector();
+  private int[] jj_expentry;
+  private int jj_kind = -1;
+
+  public ParseException generateParseException() {
+    jj_expentries.removeAllElements();
+    boolean[] la1tokens = new boolean[24];
+    for (int i = 0; i < 24; i++) {
+      la1tokens[i] = false;
+    }
+    if (jj_kind >= 0) {
+      la1tokens[jj_kind] = true;
+      jj_kind = -1;
+    }
+    for (int i = 0; i < 16; i++) {
+      if (jj_la1[i] == jj_gen) {
+        for (int j = 0; j < 32; j++) {
+          if ((jj_la1_0[i] & (1<<j)) != 0) {
+            la1tokens[j] = true;
+          }
+        }
+      }
+    }
+    for (int i = 0; i < 24; i++) {
+      if (la1tokens[i]) {
+        jj_expentry = new int[1];
+        jj_expentry[0] = i;
+        jj_expentries.addElement(jj_expentry);
+      }
+    }
+    int[][] exptokseq = new int[jj_expentries.size()][];
+    for (int i = 0; i < jj_expentries.size(); i++) {
+      exptokseq[i] = (int[])jj_expentries.elementAt(i);
+    }
+    return new ParseException(token, exptokseq, tokenImage);
+  }
+
+  final public void enable_tracing() {
+  }
+
+  final public void disable_tracing() {
+  }
+
+}

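To make the generated parser's behaviour concrete, a small hedged sketch of evaluating one row against a parsed expression; the filter map classes elsewhere in this commit are expected to do essentially this per record. Column names and values are placeholders. And-terms accumulate in intersectionSet, or-terms in unionSet, and checkConstraints compares both against the row's column values.

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.shell.algebra.generated.ExpressionParser;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;

public class ExpressionSketch {
  public static void main(String[] args) throws Exception {
    ExpressionParser parser =
        new ExpressionParser("year: > 1990 and genre: == drama|comedy");
    parser.booleanExpressionParse();

    // One scanned row, keyed by column name, as the map tasks would see it.
    MapWritable row = new MapWritable();
    row.put(new Text("year:"), new ImmutableBytesWritable("1994".getBytes()));
    row.put(new Text("genre:"), new ImmutableBytesWritable("drama".getBytes()));

    // Prints true: 1994 > 1990 and "drama" is in the ==-list drama|comedy.
    System.out.println(parser.checkConstraints(row));
  }
}
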
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParserConstants.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParserConstants.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParserConstants.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParserConstants.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,56 @@
+/* Generated By:JavaCC: Do not edit this line. ExpressionParserConstants.java */
+package org.apache.hadoop.hbase.shell.algebra.generated;
+
+public interface ExpressionParserConstants {
+
+  int EOF = 0;
+  int ROW = 5;
+  int AND = 6;
+  int OR = 7;
+  int DOT = 8;
+  int EQUALS = 9;
+  int LCOMP = 10;
+  int RCOMP = 11;
+  int LCOMPE = 12;
+  int RCOMPE = 13;
+  int IN = 14;
+  int NOTIN = 15;
+  int BAR = 16;
+  int BOOL = 17;
+  int ID = 18;
+  int INTEGER_LITERAL = 19;
+  int FLOATING_POINT_LITERAL = 20;
+  int EXPONENT = 21;
+  int QUOTED_IDENTIFIER = 22;
+  int STRING_LITERAL = 23;
+
+  int DEFAULT = 0;
+
+  String[] tokenImage = {
+    "<EOF>",
+    "\" \"",
+    "\"\\t\"",
+    "\"\\r\"",
+    "\"\\n\"",
+    "\"row\"",
+    "\"and\"",
+    "\"or\"",
+    "\".\"",
+    "\"=\"",
+    "\">\"",
+    "\"<\"",
+    "\">=\"",
+    "\"<=\"",
+    "\"==\"",
+    "\"!!\"",
+    "\"|\"",
+    "\"bool\"",
+    "<ID>",
+    "<INTEGER_LITERAL>",
+    "<FLOATING_POINT_LITERAL>",
+    "<EXPONENT>",
+    "<QUOTED_IDENTIFIER>",
+    "<STRING_LITERAL>",
+  };
+
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParserTokenManager.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParserTokenManager.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParserTokenManager.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ExpressionParserTokenManager.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,659 @@
+/* Generated By:JavaCC: Do not edit this line. ExpressionParserTokenManager.java */
+package org.apache.hadoop.hbase.shell.algebra.generated;
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.io.Reader;
+import java.io.StringReader;
+import java.io.UnsupportedEncodingException;
+import java.net.URLDecoder;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.SortedMap;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HTable;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+
+public class ExpressionParserTokenManager implements ExpressionParserConstants
+{
+  public  java.io.PrintStream debugStream = System.out;
+  public  void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }
+private final int jjStopStringLiteralDfa_0(int pos, long active0)
+{
+   switch (pos)
+   {
+      case 0:
+         if ((active0 & 0x200e0L) != 0L)
+         {
+            jjmatchedKind = 18;
+            return 1;
+         }
+         if ((active0 & 0x100L) != 0L)
+            return 3;
+         return -1;
+      case 1:
+         if ((active0 & 0x20060L) != 0L)
+         {
+            jjmatchedKind = 18;
+            jjmatchedPos = 1;
+            return 1;
+         }
+         if ((active0 & 0x80L) != 0L)
+            return 1;
+         return -1;
+      case 2:
+         if ((active0 & 0x20000L) != 0L)
+         {
+            jjmatchedKind = 18;
+            jjmatchedPos = 2;
+            return 1;
+         }
+         if ((active0 & 0x60L) != 0L)
+            return 1;
+         return -1;
+      default :
+         return -1;
+   }
+}
+private final int jjStartNfa_0(int pos, long active0)
+{
+   return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0), pos + 1);
+}
+private final int jjStopAtPos(int pos, int kind)
+{
+   jjmatchedKind = kind;
+   jjmatchedPos = pos;
+   return pos + 1;
+}
+private final int jjStartNfaWithStates_0(int pos, int kind, int state)
+{
+   jjmatchedKind = kind;
+   jjmatchedPos = pos;
+   try { curChar = input_stream.readChar(); }
+   catch(java.io.IOException e) { return pos + 1; }
+   return jjMoveNfa_0(state, pos + 1);
+}
+private final int jjMoveStringLiteralDfa0_0()
+{
+   switch(curChar)
+   {
+      case 33:
+         return jjMoveStringLiteralDfa1_0(0x8000L);
+      case 46:
+         return jjStartNfaWithStates_0(0, 8, 3);
+      case 60:
+         jjmatchedKind = 11;
+         return jjMoveStringLiteralDfa1_0(0x2000L);
+      case 61:
+         jjmatchedKind = 9;
+         return jjMoveStringLiteralDfa1_0(0x4000L);
+      case 62:
+         jjmatchedKind = 10;
+         return jjMoveStringLiteralDfa1_0(0x1000L);
+      case 65:
+      case 97:
+         return jjMoveStringLiteralDfa1_0(0x40L);
+      case 66:
+      case 98:
+         return jjMoveStringLiteralDfa1_0(0x20000L);
+      case 79:
+      case 111:
+         return jjMoveStringLiteralDfa1_0(0x80L);
+      case 82:
+      case 114:
+         return jjMoveStringLiteralDfa1_0(0x20L);
+      case 124:
+         return jjStopAtPos(0, 16);
+      default :
+         return jjMoveNfa_0(0, 0);
+   }
+}
+private final int jjMoveStringLiteralDfa1_0(long active0)
+{
+   try { curChar = input_stream.readChar(); }
+   catch(java.io.IOException e) {
+      jjStopStringLiteralDfa_0(0, active0);
+      return 1;
+   }
+   switch(curChar)
+   {
+      case 33:
+         if ((active0 & 0x8000L) != 0L)
+            return jjStopAtPos(1, 15);
+         break;
+      case 61:
+         if ((active0 & 0x1000L) != 0L)
+            return jjStopAtPos(1, 12);
+         else if ((active0 & 0x2000L) != 0L)
+            return jjStopAtPos(1, 13);
+         else if ((active0 & 0x4000L) != 0L)
+            return jjStopAtPos(1, 14);
+         break;
+      case 78:
+      case 110:
+         return jjMoveStringLiteralDfa2_0(active0, 0x40L);
+      case 79:
+      case 111:
+         return jjMoveStringLiteralDfa2_0(active0, 0x20020L);
+      case 82:
+      case 114:
+         if ((active0 & 0x80L) != 0L)
+            return jjStartNfaWithStates_0(1, 7, 1);
+         break;
+      default :
+         break;
+   }
+   return jjStartNfa_0(0, active0);
+}
+private final int jjMoveStringLiteralDfa2_0(long old0, long active0)
+{
+   if (((active0 &= old0)) == 0L)
+      return jjStartNfa_0(0, old0); 
+   try { curChar = input_stream.readChar(); }
+   catch(java.io.IOException e) {
+      jjStopStringLiteralDfa_0(1, active0);
+      return 2;
+   }
+   switch(curChar)
+   {
+      case 68:
+      case 100:
+         if ((active0 & 0x40L) != 0L)
+            return jjStartNfaWithStates_0(2, 6, 1);
+         break;
+      case 79:
+      case 111:
+         return jjMoveStringLiteralDfa3_0(active0, 0x20000L);
+      case 87:
+      case 119:
+         if ((active0 & 0x20L) != 0L)
+            return jjStartNfaWithStates_0(2, 5, 1);
+         break;
+      default :
+         break;
+   }
+   return jjStartNfa_0(1, active0);
+}
+private final int jjMoveStringLiteralDfa3_0(long old0, long active0)
+{
+   if (((active0 &= old0)) == 0L)
+      return jjStartNfa_0(1, old0); 
+   try { curChar = input_stream.readChar(); }
+   catch(java.io.IOException e) {
+      jjStopStringLiteralDfa_0(2, active0);
+      return 3;
+   }
+   switch(curChar)
+   {
+      case 76:
+      case 108:
+         if ((active0 & 0x20000L) != 0L)
+            return jjStartNfaWithStates_0(3, 17, 1);
+         break;
+      default :
+         break;
+   }
+   return jjStartNfa_0(2, active0);
+}
+private final void jjCheckNAdd(int state)
+{
+   if (jjrounds[state] != jjround)
+   {
+      jjstateSet[jjnewStateCnt++] = state;
+      jjrounds[state] = jjround;
+   }
+}
+private final void jjAddStates(int start, int end)
+{
+   do {
+      jjstateSet[jjnewStateCnt++] = jjnextStates[start];
+   } while (start++ != end);
+}
+private final void jjCheckNAddTwoStates(int state1, int state2)
+{
+   jjCheckNAdd(state1);
+   jjCheckNAdd(state2);
+}
+private final void jjCheckNAddStates(int start, int end)
+{
+   do {
+      jjCheckNAdd(jjnextStates[start]);
+   } while (start++ != end);
+}
+private final void jjCheckNAddStates(int start)
+{
+   jjCheckNAdd(jjnextStates[start]);
+   jjCheckNAdd(jjnextStates[start + 1]);
+}
+static final long[] jjbitVec0 = {
+   0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
+};
+private final int jjMoveNfa_0(int startState, int curPos)
+{
+   int[] nextStates;
+   int startsAt = 0;
+   jjnewStateCnt = 32;
+   int i = 1;
+   jjstateSet[0] = startState;
+   int j, kind = 0x7fffffff;
+   for (;;)
+   {
+      if (++jjround == 0x7fffffff)
+         ReInitRounds();
+      if (curChar < 64)
+      {
+         long l = 1L << curChar;
+         MatchLoop: do
+         {
+            switch(jjstateSet[--i])
+            {
+               case 0:
+                  if ((0x3ff000000000000L & l) != 0L)
+                  {
+                     if (kind > 19)
+                        kind = 19;
+                     jjCheckNAddStates(0, 6);
+                  }
+                  else if ((0x400a00000000000L & l) != 0L)
+                  {
+                     if (kind > 18)
+                        kind = 18;
+                     jjCheckNAdd(1);
+                  }
+                  else if (curChar == 39)
+                     jjCheckNAddStates(7, 9);
+                  else if (curChar == 34)
+                     jjCheckNAdd(8);
+                  else if (curChar == 46)
+                     jjCheckNAdd(3);
+                  break;
+               case 1:
+                  if ((0x7ffa00000000000L & l) == 0L)
+                     break;
+                  if (kind > 18)
+                     kind = 18;
+                  jjCheckNAdd(1);
+                  break;
+               case 2:
+                  if (curChar == 46)
+                     jjCheckNAdd(3);
+                  break;
+               case 3:
+                  if ((0x3ff000000000000L & l) == 0L)
+                     break;
+                  if (kind > 20)
+                     kind = 20;
+                  jjCheckNAddTwoStates(3, 4);
+                  break;
+               case 5:
+                  if ((0x280000000000L & l) != 0L)
+                     jjCheckNAdd(6);
+                  break;
+               case 6:
+                  if ((0x3ff000000000000L & l) == 0L)
+                     break;
+                  if (kind > 20)
+                     kind = 20;
+                  jjCheckNAdd(6);
+                  break;
+               case 7:
+                  if (curChar == 34)
+                     jjCheckNAdd(8);
+                  break;
+               case 8:
+                  if ((0xfffffffbffffffffL & l) != 0L)
+                     jjCheckNAddTwoStates(8, 9);
+                  break;
+               case 9:
+                  if (curChar == 34 && kind > 22)
+                     kind = 22;
+                  break;
+               case 10:
+                  if (curChar == 39)
+                     jjCheckNAddStates(7, 9);
+                  break;
+               case 11:
+                  if ((0xffffff7fffffffffL & l) != 0L)
+                     jjCheckNAddStates(7, 9);
+                  break;
+               case 12:
+                  if (curChar == 39)
+                     jjCheckNAddStates(10, 12);
+                  break;
+               case 13:
+                  if (curChar == 39)
+                     jjstateSet[jjnewStateCnt++] = 12;
+                  break;
+               case 14:
+                  if ((0xffffff7fffffffffL & l) != 0L)
+                     jjCheckNAddStates(10, 12);
+                  break;
+               case 15:
+                  if (curChar == 39 && kind > 23)
+                     kind = 23;
+                  break;
+               case 16:
+                  if ((0x3ff000000000000L & l) == 0L)
+                     break;
+                  if (kind > 19)
+                     kind = 19;
+                  jjCheckNAddStates(0, 6);
+                  break;
+               case 17:
+                  if ((0x3ff000000000000L & l) == 0L)
+                     break;
+                  if (kind > 19)
+                     kind = 19;
+                  jjCheckNAdd(17);
+                  break;
+               case 18:
+                  if ((0x3ff000000000000L & l) != 0L)
+                     jjCheckNAddTwoStates(18, 19);
+                  break;
+               case 19:
+                  if (curChar == 46)
+                     jjCheckNAdd(20);
+                  break;
+               case 20:
+                  if ((0x3ff000000000000L & l) == 0L)
+                     break;
+                  if (kind > 20)
+                     kind = 20;
+                  jjCheckNAddTwoStates(20, 21);
+                  break;
+               case 22:
+                  if ((0x280000000000L & l) != 0L)
+                     jjCheckNAdd(23);
+                  break;
+               case 23:
+                  if ((0x3ff000000000000L & l) == 0L)
+                     break;
+                  if (kind > 20)
+                     kind = 20;
+                  jjCheckNAdd(23);
+                  break;
+               case 24:
+                  if ((0x3ff000000000000L & l) != 0L)
+                     jjCheckNAddTwoStates(24, 25);
+                  break;
+               case 26:
+                  if ((0x280000000000L & l) != 0L)
+                     jjCheckNAdd(27);
+                  break;
+               case 27:
+                  if ((0x3ff000000000000L & l) == 0L)
+                     break;
+                  if (kind > 20)
+                     kind = 20;
+                  jjCheckNAdd(27);
+                  break;
+               case 28:
+                  if ((0x3ff000000000000L & l) == 0L)
+                     break;
+                  if (kind > 20)
+                     kind = 20;
+                  jjCheckNAddTwoStates(28, 29);
+                  break;
+               case 30:
+                  if ((0x280000000000L & l) != 0L)
+                     jjCheckNAdd(31);
+                  break;
+               case 31:
+                  if ((0x3ff000000000000L & l) == 0L)
+                     break;
+                  if (kind > 20)
+                     kind = 20;
+                  jjCheckNAdd(31);
+                  break;
+               default : break;
+            }
+         } while(i != startsAt);
+      }
+      else if (curChar < 128)
+      {
+         long l = 1L << (curChar & 077);
+         MatchLoop: do
+         {
+            switch(jjstateSet[--i])
+            {
+               case 0:
+               case 1:
+                  if ((0x7fffffe87fffffeL & l) == 0L)
+                     break;
+                  if (kind > 18)
+                     kind = 18;
+                  jjCheckNAdd(1);
+                  break;
+               case 4:
+                  if ((0x2000000020L & l) != 0L)
+                     jjAddStates(13, 14);
+                  break;
+               case 8:
+                  jjAddStates(15, 16);
+                  break;
+               case 11:
+                  jjCheckNAddStates(7, 9);
+                  break;
+               case 14:
+                  jjCheckNAddStates(10, 12);
+                  break;
+               case 21:
+                  if ((0x2000000020L & l) != 0L)
+                     jjAddStates(17, 18);
+                  break;
+               case 25:
+                  if ((0x2000000020L & l) != 0L)
+                     jjAddStates(19, 20);
+                  break;
+               case 29:
+                  if ((0x2000000020L & l) != 0L)
+                     jjAddStates(21, 22);
+                  break;
+               default : break;
+            }
+         } while(i != startsAt);
+      }
+      else
+      {
+         int i2 = (curChar & 0xff) >> 6;
+         long l2 = 1L << (curChar & 077);
+         MatchLoop: do
+         {
+            switch(jjstateSet[--i])
+            {
+               case 8:
+                  if ((jjbitVec0[i2] & l2) != 0L)
+                     jjAddStates(15, 16);
+                  break;
+               case 11:
+                  if ((jjbitVec0[i2] & l2) != 0L)
+                     jjCheckNAddStates(7, 9);
+                  break;
+               case 14:
+                  if ((jjbitVec0[i2] & l2) != 0L)
+                     jjCheckNAddStates(10, 12);
+                  break;
+               default : break;
+            }
+         } while(i != startsAt);
+      }
+      if (kind != 0x7fffffff)
+      {
+         jjmatchedKind = kind;
+         jjmatchedPos = curPos;
+         kind = 0x7fffffff;
+      }
+      ++curPos;
+      if ((i = jjnewStateCnt) == (startsAt = 32 - (jjnewStateCnt = startsAt)))
+         return curPos;
+      try { curChar = input_stream.readChar(); }
+      catch(java.io.IOException e) { return curPos; }
+   }
+}
+static final int[] jjnextStates = {
+   17, 18, 19, 24, 25, 28, 29, 11, 13, 15, 13, 14, 15, 5, 6, 8, 
+   9, 22, 23, 26, 27, 30, 31, 
+};
+public static final String[] jjstrLiteralImages = {
+"", null, null, null, null, null, null, null, "\56", "\75", "\76", "\74", 
+"\76\75", "\74\75", "\75\75", "\41\41", "\174", null, null, null, null, null, null, null, };
+public static final String[] lexStateNames = {
+   "DEFAULT", 
+};
+static final long[] jjtoToken = {
+   0xdfffe1L, 
+};
+static final long[] jjtoSkip = {
+   0x1eL, 
+};
+protected SimpleCharStream input_stream;
+private final int[] jjrounds = new int[32];
+private final int[] jjstateSet = new int[64];
+protected char curChar;
+public ExpressionParserTokenManager(SimpleCharStream stream){
+   if (SimpleCharStream.staticFlag)
+      throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer.");
+   input_stream = stream;
+}
+public ExpressionParserTokenManager(SimpleCharStream stream, int lexState){
+   this(stream);
+   SwitchTo(lexState);
+}
+public void ReInit(SimpleCharStream stream)
+{
+   jjmatchedPos = jjnewStateCnt = 0;
+   curLexState = defaultLexState;
+   input_stream = stream;
+   ReInitRounds();
+}
+private final void ReInitRounds()
+{
+   int i;
+   jjround = 0x80000001;
+   for (i = 32; i-- > 0;)
+      jjrounds[i] = 0x80000000;
+}
+public void ReInit(SimpleCharStream stream, int lexState)
+{
+   ReInit(stream);
+   SwitchTo(lexState);
+}
+public void SwitchTo(int lexState)
+{
+   if (lexState >= 1 || lexState < 0)
+      throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
+   else
+      curLexState = lexState;
+}
+
+protected Token jjFillToken()
+{
+   Token t = Token.newToken(jjmatchedKind);
+   t.kind = jjmatchedKind;
+   String im = jjstrLiteralImages[jjmatchedKind];
+   t.image = (im == null) ? input_stream.GetImage() : im;
+   t.beginLine = input_stream.getBeginLine();
+   t.beginColumn = input_stream.getBeginColumn();
+   t.endLine = input_stream.getEndLine();
+   t.endColumn = input_stream.getEndColumn();
+   return t;
+}
+
+int curLexState = 0;
+int defaultLexState = 0;
+int jjnewStateCnt;
+int jjround;
+int jjmatchedPos;
+int jjmatchedKind;
+
+public Token getNextToken() 
+{
+  int kind;
+  Token specialToken = null;
+  Token matchedToken;
+  int curPos = 0;
+
+  EOFLoop :
+  for (;;)
+  {   
+   try   
+   {     
+      curChar = input_stream.BeginToken();
+   }     
+   catch(java.io.IOException e)
+   {        
+      jjmatchedKind = 0;
+      matchedToken = jjFillToken();
+      return matchedToken;
+   }
+
+   try { input_stream.backup(0);
+      while (curChar <= 32 && (0x100002600L & (1L << curChar)) != 0L)
+         curChar = input_stream.BeginToken();
+   }
+   catch (java.io.IOException e1) { continue EOFLoop; }
+   jjmatchedKind = 0x7fffffff;
+   jjmatchedPos = 0;
+   curPos = jjMoveStringLiteralDfa0_0();
+   if (jjmatchedKind != 0x7fffffff)
+   {
+      if (jjmatchedPos + 1 < curPos)
+         input_stream.backup(curPos - jjmatchedPos - 1);
+      if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
+      {
+         matchedToken = jjFillToken();
+         return matchedToken;
+      }
+      else
+      {
+         continue EOFLoop;
+      }
+   }
+   int error_line = input_stream.getEndLine();
+   int error_column = input_stream.getEndColumn();
+   String error_after = null;
+   boolean EOFSeen = false;
+   try { input_stream.readChar(); input_stream.backup(1); }
+   catch (java.io.IOException e1) {
+      EOFSeen = true;
+      error_after = curPos <= 1 ? "" : input_stream.GetImage();
+      if (curChar == '\n' || curChar == '\r') {
+         error_line++;
+         error_column = 0;
+      }
+      else
+         error_column++;
+   }
+   if (!EOFSeen) {
+      input_stream.backup(1);
+      error_after = curPos <= 1 ? "" : input_stream.GetImage();
+   }
+   throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
+  }
+}
+
+}
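
The token manager above is normally driven by the generated parser, but it can also be exercised directly: construct it over a SimpleCharStream and call getNextToken() until the EOF token comes back. A minimal sketch, assuming the generated classes sit in the org.apache.hadoop.hbase.shell.algebra.generated package used by the sibling files in this commit, that kind 0 is the usual JavaCC EOF kind, and that the input expression is merely illustrative:

    import java.io.StringReader;
    import org.apache.hadoop.hbase.shell.algebra.generated.ExpressionParserTokenManager;
    import org.apache.hadoop.hbase.shell.algebra.generated.SimpleCharStream;
    import org.apache.hadoop.hbase.shell.algebra.generated.Token;

    public class TokenDump {
      public static void main(String[] args) {
        // Wrap the expression text in the generated character stream.
        SimpleCharStream stream = new SimpleCharStream(new StringReader("score >= 10"));
        ExpressionParserTokenManager tm = new ExpressionParserTokenManager(stream);
        // Kind 0 is the EOF token in JavaCC-generated token managers.
        for (Token t = tm.getNextToken(); t.kind != 0; t = tm.getNextToken()) {
          System.out.println(t.kind + "\t" + t.image);
        }
      }
    }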

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ParseException.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ParseException.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ParseException.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/ParseException.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,192 @@
+/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 3.0 */
+package org.apache.hadoop.hbase.shell.algebra.generated;
+
+/**
+ * This exception is thrown when parse errors are encountered.
+ * You can explicitly create objects of this exception type by
+ * calling the method generateParseException in the generated
+ * parser.
+ *
+ * You can modify this class to customize your error reporting
+ * mechanisms so long as you retain the public fields.
+ */
+public class ParseException extends Exception {
+
+  /**
+   * This constructor is used by the method "generateParseException"
+   * in the generated parser.  Calling this constructor generates
+   * a new object of this type with the fields "currentToken",
+   * "expectedTokenSequences", and "tokenImage" set.  The boolean
+   * flag "specialConstructor" is also set to true to indicate that
+   * this constructor was used to create this object.
+   * This constructor calls its super class with the empty string
+   * to force the "toString" method of parent class "Throwable" to
+   * print the error message in the form:
+   *     ParseException: <result of getMessage>
+   */
+  public ParseException(Token currentTokenVal,
+                        int[][] expectedTokenSequencesVal,
+                        String[] tokenImageVal
+                       )
+  {
+    super("");
+    specialConstructor = true;
+    currentToken = currentTokenVal;
+    expectedTokenSequences = expectedTokenSequencesVal;
+    tokenImage = tokenImageVal;
+  }
+
+  /**
+   * The following constructors are for use by you for whatever
+   * purpose you can think of.  Constructing the exception in this
+   * manner makes the exception behave in the normal way - i.e., as
+   * documented in the class "Throwable".  The fields "errorToken",
+   * "expectedTokenSequences", and "tokenImage" do not contain
+   * relevant information.  The JavaCC generated code does not use
+   * these constructors.
+   */
+
+  public ParseException() {
+    super();
+    specialConstructor = false;
+  }
+
+  public ParseException(String message) {
+    super(message);
+    specialConstructor = false;
+  }
+
+  /**
+   * This variable determines which constructor was used to create
+   * this object and thereby affects the semantics of the
+   * "getMessage" method (see below).
+   */
+  protected boolean specialConstructor;
+
+  /**
+   * This is the last token that has been consumed successfully.  If
+   * this object has been created due to a parse error, the token
+   * following this token will (therefore) be the first error token.
+   */
+  public Token currentToken;
+
+  /**
+   * Each entry in this array is an array of integers.  Each array
+   * of integers represents a sequence of tokens (by their ordinal
+   * values) that is expected at this point of the parse.
+   */
+  public int[][] expectedTokenSequences;
+
+  /**
+   * This is a reference to the "tokenImage" array of the generated
+   * parser within which the parse error occurred.  This array is
+   * defined in the generated ...Constants interface.
+   */
+  public String[] tokenImage;
+
+  /**
+   * This method has the standard behavior when this object has been
+   * created using the standard constructors.  Otherwise, it uses
+   * "currentToken" and "expectedTokenSequences" to generate a parse
+   * error message and returns it.  If this object has been created
+   * due to a parse error, and you do not catch it (it gets thrown
+   * from the parser), then this method is called during the printing
+   * of the final stack trace, and hence the correct error message
+   * gets displayed.
+   */
+  public String getMessage() {
+    if (!specialConstructor) {
+      return super.getMessage();
+    }
+    StringBuffer expected = new StringBuffer();
+    int maxSize = 0;
+    for (int i = 0; i < expectedTokenSequences.length; i++) {
+      if (maxSize < expectedTokenSequences[i].length) {
+        maxSize = expectedTokenSequences[i].length;
+      }
+      for (int j = 0; j < expectedTokenSequences[i].length; j++) {
+        expected.append(tokenImage[expectedTokenSequences[i][j]]).append(" ");
+      }
+      if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) {
+        expected.append("...");
+      }
+      expected.append(eol).append("    ");
+    }
+    String retval = "Encountered \"";
+    Token tok = currentToken.next;
+    for (int i = 0; i < maxSize; i++) {
+      if (i != 0) retval += " ";
+      if (tok.kind == 0) {
+        retval += tokenImage[0];
+        break;
+      }
+      retval += add_escapes(tok.image);
+      tok = tok.next; 
+    }
+    retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn;
+    retval += "." + eol;
+    if (expectedTokenSequences.length == 1) {
+      retval += "Was expecting:" + eol + "    ";
+    } else {
+      retval += "Was expecting one of:" + eol + "    ";
+    }
+    retval += expected.toString();
+    return retval;
+  }
+
+  /**
+   * The end of line string for this machine.
+   */
+  protected String eol = System.getProperty("line.separator", "\n");
+ 
+  /**
+   * Used to convert raw characters to their escaped versions
+   * when those raw versions cannot be used as part of an ASCII
+   * string literal.
+   */
+  protected String add_escapes(String str) {
+      StringBuffer retval = new StringBuffer();
+      char ch;
+      for (int i = 0; i < str.length(); i++) {
+        switch (str.charAt(i))
+        {
+           case 0 :
+              continue;
+           case '\b':
+              retval.append("\\b");
+              continue;
+           case '\t':
+              retval.append("\\t");
+              continue;
+           case '\n':
+              retval.append("\\n");
+              continue;
+           case '\f':
+              retval.append("\\f");
+              continue;
+           case '\r':
+              retval.append("\\r");
+              continue;
+           case '\"':
+              retval.append("\\\"");
+              continue;
+           case '\'':
+              retval.append("\\\'");
+              continue;
+           case '\\':
+              retval.append("\\\\");
+              continue;
+           default:
+              if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
+                 String s = "0000" + Integer.toString(ch, 16);
+                 retval.append("\\u" + s.substring(s.length() - 4, s.length()));
+              } else {
+                 retval.append(ch);
+              }
+              continue;
+        }
+      }
+      return retval.toString();
+   }
+
+}
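
ParseException.getMessage() assembles its report from the last consumed token, the offending token that follows it, and the expected-token table handed in by the parser. A small self-contained sketch of that path, using only the classes added in this commit; the demo class name, token images, and positions below are made up for illustration:

    import org.apache.hadoop.hbase.shell.algebra.generated.ParseException;
    import org.apache.hadoop.hbase.shell.algebra.generated.Token;

    public class ParseExceptionDemo {
      public static void main(String[] args) {
        // The last token the parser consumed successfully...
        Token last = Token.newToken(0);
        last.image = "";
        // ...and the unexpected token that follows it.
        Token bad = Token.newToken(1);
        bad.kind = 1;
        bad.image = "??";
        bad.beginLine = 1;
        bad.beginColumn = 5;
        last.next = bad;

        // Illustrative token images; the real ones live in the generated ...Constants interface.
        String[] tokenImage = { "<EOF>", "<UNKNOWN>" };
        int[][] expected = { { 0 } };   // the parser expected <EOF> at this point

        ParseException e = new ParseException(last, expected, tokenImage);
        // Prints something like: Encountered "??" at line 1, column 5. Was expecting: <EOF>
        System.out.println(e.getMessage());
      }
    }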

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/SimpleCharStream.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/SimpleCharStream.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/SimpleCharStream.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/SimpleCharStream.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,439 @@
+/* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 4.0 */
+package org.apache.hadoop.hbase.shell.algebra.generated;
+
+/**
+ * An implementation of interface CharStream, where the stream is assumed to
+ * contain only ASCII characters (without unicode processing).
+ */
+
+public class SimpleCharStream
+{
+  public static final boolean staticFlag = false;
+  int bufsize;
+  int available;
+  int tokenBegin;
+  public int bufpos = -1;
+  protected int bufline[];
+  protected int bufcolumn[];
+
+  protected int column = 0;
+  protected int line = 1;
+
+  protected boolean prevCharIsCR = false;
+  protected boolean prevCharIsLF = false;
+
+  protected java.io.Reader inputStream;
+
+  protected char[] buffer;
+  protected int maxNextCharInd = 0;
+  protected int inBuf = 0;
+  protected int tabSize = 8;
+
+  protected void setTabSize(int i) { tabSize = i; }
+  protected int getTabSize(int i) { return tabSize; }
+
+
+  protected void ExpandBuff(boolean wrapAround)
+  {
+     char[] newbuffer = new char[bufsize + 2048];
+     int newbufline[] = new int[bufsize + 2048];
+     int newbufcolumn[] = new int[bufsize + 2048];
+
+     try
+     {
+        if (wrapAround)
+        {
+           System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
+           System.arraycopy(buffer, 0, newbuffer,
+                                             bufsize - tokenBegin, bufpos);
+           buffer = newbuffer;
+
+           System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
+           System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos);
+           bufline = newbufline;
+
+           System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
+           System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos);
+           bufcolumn = newbufcolumn;
+
+           maxNextCharInd = (bufpos += (bufsize - tokenBegin));
+        }
+        else
+        {
+           System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
+           buffer = newbuffer;
+
+           System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
+           bufline = newbufline;
+
+           System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
+           bufcolumn = newbufcolumn;
+
+           maxNextCharInd = (bufpos -= tokenBegin);
+        }
+     }
+     catch (Throwable t)
+     {
+        throw new Error(t.getMessage());
+     }
+
+
+     bufsize += 2048;
+     available = bufsize;
+     tokenBegin = 0;
+  }
+
+  protected void FillBuff() throws java.io.IOException
+  {
+     if (maxNextCharInd == available)
+     {
+        if (available == bufsize)
+        {
+           if (tokenBegin > 2048)
+           {
+              bufpos = maxNextCharInd = 0;
+              available = tokenBegin;
+           }
+           else if (tokenBegin < 0)
+              bufpos = maxNextCharInd = 0;
+           else
+              ExpandBuff(false);
+        }
+        else if (available > tokenBegin)
+           available = bufsize;
+        else if ((tokenBegin - available) < 2048)
+           ExpandBuff(true);
+        else
+           available = tokenBegin;
+     }
+
+     int i;
+     try {
+        if ((i = inputStream.read(buffer, maxNextCharInd,
+                                    available - maxNextCharInd)) == -1)
+        {
+           inputStream.close();
+           throw new java.io.IOException();
+        }
+        else
+           maxNextCharInd += i;
+        return;
+     }
+     catch(java.io.IOException e) {
+        --bufpos;
+        backup(0);
+        if (tokenBegin == -1)
+           tokenBegin = bufpos;
+        throw e;
+     }
+  }
+
+  public char BeginToken() throws java.io.IOException
+  {
+     tokenBegin = -1;
+     char c = readChar();
+     tokenBegin = bufpos;
+
+     return c;
+  }
+
+  protected void UpdateLineColumn(char c)
+  {
+     column++;
+
+     if (prevCharIsLF)
+     {
+        prevCharIsLF = false;
+        line += (column = 1);
+     }
+     else if (prevCharIsCR)
+     {
+        prevCharIsCR = false;
+        if (c == '\n')
+        {
+           prevCharIsLF = true;
+        }
+        else
+           line += (column = 1);
+     }
+
+     switch (c)
+     {
+        case '\r' :
+           prevCharIsCR = true;
+           break;
+        case '\n' :
+           prevCharIsLF = true;
+           break;
+        case '\t' :
+           column--;
+           column += (tabSize - (column % tabSize));
+           break;
+        default :
+           break;
+     }
+
+     bufline[bufpos] = line;
+     bufcolumn[bufpos] = column;
+  }
+
+  public char readChar() throws java.io.IOException
+  {
+     if (inBuf > 0)
+     {
+        --inBuf;
+
+        if (++bufpos == bufsize)
+           bufpos = 0;
+
+        return buffer[bufpos];
+     }
+
+     if (++bufpos >= maxNextCharInd)
+        FillBuff();
+
+     char c = buffer[bufpos];
+
+     UpdateLineColumn(c);
+     return (c);
+  }
+
+  /**
+   * @deprecated 
+   * @see #getEndColumn
+   */
+
+  public int getColumn() {
+     return bufcolumn[bufpos];
+  }
+
+  /**
+   * @deprecated 
+   * @see #getEndLine
+   */
+
+  public int getLine() {
+     return bufline[bufpos];
+  }
+
+  public int getEndColumn() {
+     return bufcolumn[bufpos];
+  }
+
+  public int getEndLine() {
+     return bufline[bufpos];
+  }
+
+  public int getBeginColumn() {
+     return bufcolumn[tokenBegin];
+  }
+
+  public int getBeginLine() {
+     return bufline[tokenBegin];
+  }
+
+  public void backup(int amount) {
+
+    inBuf += amount;
+    if ((bufpos -= amount) < 0)
+       bufpos += bufsize;
+  }
+
+  public SimpleCharStream(java.io.Reader dstream, int startline,
+  int startcolumn, int buffersize)
+  {
+    inputStream = dstream;
+    line = startline;
+    column = startcolumn - 1;
+
+    available = bufsize = buffersize;
+    buffer = new char[buffersize];
+    bufline = new int[buffersize];
+    bufcolumn = new int[buffersize];
+  }
+
+  public SimpleCharStream(java.io.Reader dstream, int startline,
+                          int startcolumn)
+  {
+     this(dstream, startline, startcolumn, 4096);
+  }
+
+  public SimpleCharStream(java.io.Reader dstream)
+  {
+     this(dstream, 1, 1, 4096);
+  }
+  public void ReInit(java.io.Reader dstream, int startline,
+  int startcolumn, int buffersize)
+  {
+    inputStream = dstream;
+    line = startline;
+    column = startcolumn - 1;
+
+    if (buffer == null || buffersize != buffer.length)
+    {
+      available = bufsize = buffersize;
+      buffer = new char[buffersize];
+      bufline = new int[buffersize];
+      bufcolumn = new int[buffersize];
+    }
+    prevCharIsLF = prevCharIsCR = false;
+    tokenBegin = inBuf = maxNextCharInd = 0;
+    bufpos = -1;
+  }
+
+  public void ReInit(java.io.Reader dstream, int startline,
+                     int startcolumn)
+  {
+     ReInit(dstream, startline, startcolumn, 4096);
+  }
+
+  public void ReInit(java.io.Reader dstream)
+  {
+     ReInit(dstream, 1, 1, 4096);
+  }
+  public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline,
+  int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
+  {
+     this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
+  }
+
+  public SimpleCharStream(java.io.InputStream dstream, int startline,
+  int startcolumn, int buffersize)
+  {
+     this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
+  }
+
+  public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline,
+                          int startcolumn) throws java.io.UnsupportedEncodingException
+  {
+     this(dstream, encoding, startline, startcolumn, 4096);
+  }
+
+  public SimpleCharStream(java.io.InputStream dstream, int startline,
+                          int startcolumn)
+  {
+     this(dstream, startline, startcolumn, 4096);
+  }
+
+  public SimpleCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
+  {
+     this(dstream, encoding, 1, 1, 4096);
+  }
+
+  public SimpleCharStream(java.io.InputStream dstream)
+  {
+     this(dstream, 1, 1, 4096);
+  }
+
+  public void ReInit(java.io.InputStream dstream, String encoding, int startline,
+                          int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
+  {
+     ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
+  }
+
+  public void ReInit(java.io.InputStream dstream, int startline,
+                          int startcolumn, int buffersize)
+  {
+     ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
+  }
+
+  public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
+  {
+     ReInit(dstream, encoding, 1, 1, 4096);
+  }
+
+  public void ReInit(java.io.InputStream dstream)
+  {
+     ReInit(dstream, 1, 1, 4096);
+  }
+  public void ReInit(java.io.InputStream dstream, String encoding, int startline,
+                     int startcolumn) throws java.io.UnsupportedEncodingException
+  {
+     ReInit(dstream, encoding, startline, startcolumn, 4096);
+  }
+  public void ReInit(java.io.InputStream dstream, int startline,
+                     int startcolumn)
+  {
+     ReInit(dstream, startline, startcolumn, 4096);
+  }
+  public String GetImage()
+  {
+     if (bufpos >= tokenBegin)
+        return new String(buffer, tokenBegin, bufpos - tokenBegin + 1);
+     else
+        return new String(buffer, tokenBegin, bufsize - tokenBegin) +
+                              new String(buffer, 0, bufpos + 1);
+  }
+
+  public char[] GetSuffix(int len)
+  {
+     char[] ret = new char[len];
+
+     if ((bufpos + 1) >= len)
+        System.arraycopy(buffer, bufpos - len + 1, ret, 0, len);
+     else
+     {
+        System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0,
+                                                          len - bufpos - 1);
+        System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1);
+     }
+
+     return ret;
+  }
+
+  public void Done()
+  {
+     buffer = null;
+     bufline = null;
+     bufcolumn = null;
+  }
+
+  /**
+   * Method to adjust line and column numbers for the start of a token.
+   */
+  public void adjustBeginLineColumn(int newLine, int newCol)
+  {
+     int start = tokenBegin;
+     int len;
+
+     if (bufpos >= tokenBegin)
+     {
+        len = bufpos - tokenBegin + inBuf + 1;
+     }
+     else
+     {
+        len = bufsize - tokenBegin + bufpos + 1 + inBuf;
+     }
+
+     int i = 0, j = 0, k = 0;
+     int nextColDiff = 0, columnDiff = 0;
+
+     while (i < len &&
+            bufline[j = start % bufsize] == bufline[k = ++start % bufsize])
+     {
+        bufline[j] = newLine;
+        nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j];
+        bufcolumn[j] = newCol + columnDiff;
+        columnDiff = nextColDiff;
+        i++;
+     } 
+
+     if (i < len)
+     {
+        bufline[j] = newLine++;
+        bufcolumn[j] = newCol + columnDiff;
+
+        while (i++ < len)
+        {
+           if (bufline[j = start % bufsize] != bufline[++start % bufsize])
+              bufline[j] = newLine++;
+           else
+              bufline[j] = newLine;
+        }
+     }
+
+     line = bufline[j];
+     column = bufcolumn[j];
+  }
+
+}
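
SimpleCharStream is a mark-and-replay buffer: BeginToken() marks the start of a token, readChar() advances through it, backup() pushes characters back, and GetImage() returns everything read since the mark. A minimal sketch, again assuming the package used by the files in this commit; the input text is arbitrary:

    import java.io.StringReader;
    import org.apache.hadoop.hbase.shell.algebra.generated.SimpleCharStream;

    public class CharStreamDemo {
      public static void main(String[] args) throws java.io.IOException {
        SimpleCharStream in = new SimpleCharStream(new StringReader("id >= 10"));
        // Mark the start of a token, then read until the first space.
        char c = in.BeginToken();
        while (c != ' ') {
          c = in.readChar();
        }
        in.backup(1);                       // push the space back
        System.out.println(in.GetImage());  // prints "id"
        System.out.println(in.getBeginLine() + ":" + in.getBeginColumn());  // prints "1:1"
      }
    }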

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/Token.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/Token.java?rev=599162&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/Token.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/algebra/generated/Token.java Wed Nov 28 14:10:21 2007
@@ -0,0 +1,81 @@
+/* Generated By:JavaCC: Do not edit this line. Token.java Version 3.0 */
+package org.apache.hadoop.hbase.shell.algebra.generated;
+
+/**
+ * Describes the input token stream.
+ */
+
+public class Token {
+
+  /**
+   * An integer that describes the kind of this token.  This numbering
+   * system is determined by JavaCCParser, and a table of these numbers is
+   * stored in the file ...Constants.java.
+   */
+  public int kind;
+
+  /**
+   * beginLine and beginColumn describe the position of the first character
+   * of this token; endLine and endColumn describe the position of the
+   * last character of this token.
+   */
+  public int beginLine, beginColumn, endLine, endColumn;
+
+  /**
+   * The string image of the token.
+   */
+  public String image;
+
+  /**
+   * A reference to the next regular (non-special) token from the input
+   * stream.  If this is the last token from the input stream, or if the
+   * token manager has not read tokens beyond this one, this field is
+   * set to null.  This is true only if this token is also a regular
+   * token.  Otherwise, see below for a description of the contents of
+   * this field.
+   */
+  public Token next;
+
+  /**
+   * This field is used to access special tokens that occur prior to this
+   * token, but after the immediately preceding regular (non-special) token.
+   * If there are no such special tokens, this field is set to null.
+   * When there is more than one such special token, this field refers
+   * to the last of these special tokens, which in turn refers to the next
+   * previous special token through its specialToken field, and so on
+   * until the first special token (whose specialToken field is null).
+   * The next fields of special tokens refer to other special tokens that
+   * immediately follow it (without an intervening regular token).  If there
+   * is no such token, this field is null.
+   */
+  public Token specialToken;
+
+  /**
+   * Returns the image.
+   */
+  public String toString()
+  {
+     return image;
+  }
+
+  /**
+   * Returns a new Token object, by default. However, if you want, you
+   * can create and return subclass objects based on the value of ofKind.
+   * Simply add the cases to the switch for all those special cases.
+   * For example, if you have a subclass of Token called IDToken that
+   * you want to create if ofKind is ID, simply add something like:
+   *
+   *    case MyParserConstants.ID : return new IDToken();
+   *
+   * to the following switch statement. Then you can cast matchedToken
+   * variable to the appropriate type and use it in your lexical actions.
+   */
+  public static final Token newToken(int ofKind)
+  {
+     switch(ofKind)
+     {
+       default : return new Token();
+     }
+  }
+
+}
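
The newToken() factory above is the hook JavaCC leaves for attaching extra state to particular token kinds. A sketch of the customization its comment describes, with a hypothetical IDToken subclass and a hypothetical ID constant standing in for whatever the generated ...Constants interface actually defines:

    import org.apache.hadoop.hbase.shell.algebra.generated.Token;

    // Hypothetical subclass carrying extra per-token state.
    class IDToken extends Token {
      Object resolvedIdentifier;
    }

    // newToken() would then be edited to return it for the ID kind, e.g.:
    //
    //   public static final Token newToken(int ofKind) {
    //     switch (ofKind) {
    //       case ExpressionParserConstants.ID : return new IDToken();   // hypothetical constant
    //       default : return new Token();
    //     }
    //   }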


