hadoop-common-commits mailing list archives

From: st...@apache.org
Subject: svn commit: r555415 [1/2] - in /lucene/hadoop/trunk/src/contrib/hbase: ./ bin/ conf/ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/shell/ src/java/org/apache/hadoop/hbase/shell/generated/ src/test/org/apache/hadoop/hbase/shell/
Date: Wed, 11 Jul 2007 21:54:17 GMT
Author: stack
Date: Wed Jul 11 14:54:15 2007
New Revision: 555415

URL: http://svn.apache.org/viewvc?view=rev&rev=555415
Log:
HADOOP-1375 a simple parser for hbase
M src/contrib/hbase/NOTICE.txt
    Add notice of udanax contributions.
M src/contrib/hbase/conf/hbase-default.xml
    (hbaseshell.jline.bell.enabled): Added.
M src/contrib/hbase/CHANGES.txt
    (hadoop-1375) Added.
M src/contrib/hbase/src/java/org/apache/hadoop/hbase/package.html
    Add note on how to start up hbase shell
M src/contrib/hbase/bin/hbase
    Add 'shell'.  Remove 'client' (shell does what it used to do and more).
    Removed 'reader' and 'logreader' until better developed (starting
    up a reader or logreader on a running hbase system could do damage).
M src/contrib/hbase/build.xml
    Add a javacc target to generate content of shell/generated subpackage.
A src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/TestHBaseShell.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CommandFactory.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ConsoleTable.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj
    Added.
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/Token.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/TokenMgrError.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/SimpleCharStream.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParserTokenManager.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParseException.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParserConstants.java
A src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/Parser.java
    Added javacc generated files.

Added:
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CommandFactory.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ConsoleTable.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParseException.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/Parser.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParserConstants.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParserTokenManager.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/SimpleCharStream.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/Token.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/TokenMgrError.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/TestHBaseShell.java
Modified:
    lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
    lucene/hadoop/trunk/src/contrib/hbase/NOTICE.txt
    lucene/hadoop/trunk/src/contrib/hbase/bin/hbase
    lucene/hadoop/trunk/src/contrib/hbase/build.xml
    lucene/hadoop/trunk/src/contrib/hbase/conf/hbase-default.xml
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/package.html

Modified: lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt?view=diff&rev=555415&r1=555414&r2=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt Wed Jul 11 14:54:15 2007
@@ -56,5 +56,4 @@
  33. HADOOP-1538 Provide capability for client specified time stamps in HBase
      HADOOP-1466 Clean up visibility and javadoc issues in HBase.
  34. HADOOP-1589 Exception handling in HBase is broken over client server connections
-
-
+ 35. HADOOP-1375 a simple parser for hbase (Edward Yoon via Stack)

Modified: lucene/hadoop/trunk/src/contrib/hbase/NOTICE.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/NOTICE.txt?view=diff&rev=555415&r1=555414&r2=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/NOTICE.txt (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/NOTICE.txt Wed Jul 11 14:54:15 2007
@@ -1,5 +1,8 @@
 This product includes software developed by The Apache Software
 Foundation (http://www.apache.org/).
 
-In addition, this product includes software developed by European Commission
-project OneLab (http://www.one-lab.org)
+In addition, this product includes software developed by:
+
+European Commission project OneLab (http://www.one-lab.org)
+
+Udanax (http://www.udanax.org)

Modified: lucene/hadoop/trunk/src/contrib/hbase/bin/hbase
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/bin/hbase?view=diff&rev=555415&r1=555414&r2=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/bin/hbase (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/bin/hbase Wed Jul 11 14:54:15 2007
@@ -38,9 +38,7 @@
 if [ $# = 0 ]; then
   echo "Usage: hbase [--hadoop=hadoopdir] <command>"
   echo "where <command> is one of:"
-  echo "  client           run a hbase client"
-  echo "  reader           run a hbase region directory reader"
-  echo "  logreader        output content of a logfile"
+  echo "  shell            run the hbase shell"
   echo "  master           run a hbase HMaster node" 
   echo "  regionserver     run a hbase HRegionServer node" 
   echo " or"
@@ -181,12 +179,8 @@
 unset IFS
 
 # figure out which class to run
-if [ "$COMMAND" = "client" ] ; then
-  CLASS='org.apache.hadoop.hbase.HClient'
-elif [ "$COMMAND" = "reader" ] ; then
-  CLASS='org.apache.hadoop.hbase.HRegiondirReader'
-elif [ "$COMMAND" = "logreader" ] ; then
-  CLASS='org.apache.hadoop.hbase.HLog'
+if [ "$COMMAND" = "shell" ] ; then
+  CLASS='org.apache.hadoop.hbase.Shell'
 elif [ "$COMMAND" = "master" ] ; then
   CLASS='org.apache.hadoop.hbase.HMaster'
 elif [ "$COMMAND" = "regionserver" ] ; then

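With this change the shell is launched through the same driver script as the
daemons; 'client', 'reader' and 'logreader' disappear from both the usage text
and the class dispatch. A typical invocation, mirroring the note added to
package.html below, is:

    ${HBASE_HOME}/bin/hbase shell
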
Modified: lucene/hadoop/trunk/src/contrib/hbase/build.xml
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/build.xml?view=diff&rev=555415&r1=555414&r2=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/build.xml (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/build.xml Wed Jul 11 14:54:15 2007
@@ -5,9 +5,34 @@
 to call at top-level: ant deploy-contrib compile-core-test
 -->
 <project name="hbase" default="jar">
-
   <import file="../build-contrib.xml"/>
 
+<target name="javacc" if="javacc.home">
+<echo message="javacc.home: ${javacc.home}"/>
+<property name="hbaseshell.src.dir" 
+   value="${src.dir}/org/apache/hadoop/hbase/shell" /> 
+ <mkdir dir="${hbaseshell.src.dir}/generated" />
+  <javacc
+         target="${hbaseshell.src.dir}/HBaseShell.jj"
+       outputdirectory="${hbaseshell.src.dir}/generated"
+        javacchome="${javacc.home}"
+   />
+</target>
+
+<target name="compile" depends="init,javacc">
+   <echo message="contrib: ${name}"/>
+   <javac
+    encoding="${build.encoding}"
+    srcdir="${src.dir}"
+    includes="**/*.java"
+    destdir="${build.classes}"
+    debug="${javac.debug}"
+    deprecation="${javac.deprecation}">
+     <classpath refid="classpath"/>
+     <classpath path="path"/>
+   </javac>
+</target>
+	
   <!-- Override jar target to specify main class -->
   <target name="jar" depends="compile">
     <jar

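The new javacc target only fires when the javacc.home property is set (note
the if="javacc.home" guard), so builds without JavaCC installed still compile
against the generated sources checked in under shell/generated. A sketch of
how the grammar might be regenerated from the hbase contrib directory (the
JavaCC install path shown is illustrative):

    ant -Djavacc.home=/usr/local/javacc javacc
    ant jar
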
Modified: lucene/hadoop/trunk/src/contrib/hbase/conf/hbase-default.xml
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/conf/hbase-default.xml?view=diff&rev=555415&r1=555414&r2=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/conf/hbase-default.xml (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/conf/hbase-default.xml Wed Jul 11 14:54:15 2007
@@ -110,4 +110,13 @@
     value + (value / 2), the HRegion is split in two.  Default: 128M.
     </description>
   </property>
+
+  <!-- HbaseShell Configurations -->
+  <property>
+ 	<name>hbaseshell.jline.bell.enabled</name>
+  	<value>true</value>
+  	<description>
+  		if true, enable audible keyboard bells if an alert is required.
+  	</description>
+  </property>  
 </configuration>

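The bell can be silenced per deployment rather than by editing
hbase-default.xml; following the usual default/site override convention in
Hadoop, something like the following in the site configuration file (file name
assumed from that convention, not part of this patch) should do it:

    <property>
      <name>hbaseshell.jline.bell.enabled</name>
      <value>false</value>
    </property>
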
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,95 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.io.IOException;
+
+import jline.ConsoleReader;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.shell.Command;
+import org.apache.hadoop.hbase.shell.HelpManager;
+import org.apache.hadoop.hbase.shell.ReturnMsg;
+import org.apache.hadoop.hbase.shell.generated.ParseException;
+import org.apache.hadoop.hbase.shell.generated.Parser;
+import org.apache.hadoop.hbase.shell.generated.TokenMgrError;
+
+/**
+ * An hbase shell.
+ * 
+ * @see <a href="http://wiki.apache.org/lucene-hadoop/Hbase/HbaseShell">HBaseShell</a>
+ */
+public class Shell {
+  /** audible keyboard bells */
+  public static final boolean DEFAULT_BELL_ENABLED = true;
+
+  /** Main method */
+  public static void main(String args[]) throws IOException {
+    Configuration conf = new HBaseConfiguration();
+    HClient client = new HClient(conf);
+    ConsoleReader reader = new ConsoleReader();
+    reader.setBellEnabled(conf.getBoolean("hbaseshell.jline.bell.enabled",
+        DEFAULT_BELL_ENABLED));
+    HelpManager help = new HelpManager();
+    help.printVersion();
+    StringBuilder queryStr = new StringBuilder();
+    String extendedLine;
+    while ((extendedLine = reader.readLine(getPrompt(queryStr))) != null) {
+      if (isEndOfCommand(extendedLine)) {
+        queryStr.append(" " + extendedLine);
+        long start = System.currentTimeMillis();
+        Parser parser = new Parser(queryStr.toString());
+        ReturnMsg rs = null;
+        try {
+          Command cmd = parser.terminatedCommand();
+          if (cmd != null) {
+            rs = cmd.execute(client);
+          }
+        } catch (ParseException pe) {
+          String[] msg = pe.getMessage().split("[\n]");
+          System.out.println("Syntax error : Type 'help' for usage: " + msg[0]);
+        } catch (TokenMgrError te) {
+          System.out.println("Lexical error : Type 'help' for usage.");
+        }
+
+        long end = System.currentTimeMillis();
+
+        if (rs != null && rs.getType() > -1)
+          System.out.println(rs.getMsg()
+              + executeTime((rs.getType() == 1), start, end));
+        queryStr = new StringBuilder();
+      } else {
+        queryStr.append(" " + extendedLine);
+      }
+    }
+    System.out.println();
+  }
+
+  /** Return the boolean value indicating whether end of command or not */
+  static boolean isEndOfCommand(String line) {
+    return (line.lastIndexOf(';') > -1) ? true : false;
+  }
+
+  /** Return the string of prompt start string */
+  private static String getPrompt(final StringBuilder queryStr) {
+    return (queryStr.toString().equals("")) ? "HBase > " : "    --> ";
+  }
+
+  /** return a string of code execution time. */
+  public static String executeTime(boolean watch, long start, long end) {
+    return (watch) ? "(" + String.format("%.2f", (end - start) * 0.001) + " sec)" : "";
+  }
+}

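The interactive loop above is the only caller of the new parser, but the
pieces compose outside it as well. A minimal sketch of driving a single
statement programmatically (illustrative only, not part of the commit; error
handling trimmed to the checked ParseException):

    HClient client = new HClient(new HBaseConfiguration());
    try {
      // Parser takes the raw statement; terminatedCommand() returns null for bare ";".
      Command cmd = new Parser("show tables;").terminatedCommand();
      if (cmd != null) {
        ReturnMsg msg = cmd.execute(client);
        System.out.println(msg.getMsg());
      }
    } catch (ParseException pe) {
      System.out.println("Syntax error: " + pe.getMessage());
    }
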
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/package.html
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/package.html?view=diff&rev=555415&r1=555414&r2=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/package.html (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/package.html Wed Jul 11 14:54:15 2007
@@ -37,6 +37,10 @@
 </pre>
 Logs can be found in ${HADOOP_LOG_DIR}.
 </p>
+<p>To obtain a shell against a running hbase instance, run:
+<pre>${HBASE_HOME}/bin/hbase shell</pre>
+Once the shell is up, type <code>help;</code> to see list of supported commands.
+</p>
 
 <h2>Related Documentation</h2>
 

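A first session against a running instance then looks roughly like the
following; the prompt, continuation prompt and timing suffix come from
Shell.getPrompt and Shell.executeTime, while the table and family names are
illustrative:

    HBase > help;
    ...
    HBase > create webtable
        --> columnfamilies('anchor', 'contents');
    Table created successfully.(0.52 sec)
    HBase > exit;
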
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,32 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+/**
+ * @see <a href="http://wiki.apache.org/lucene-hadoop/Hbase/HbaseShell">HBaseShell</a>
+ */
+public abstract class BasicCommand implements Command, CommandFactory {
+  
+  public BasicCommand getBasicCommand() {
+    return this;
+  }
+  
+  /** basic commands are their own factories. */
+  public Command getCommand() {
+    return this;
+  }
+  
+}

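Every concrete command in this patch follows the same shape: extend
BasicCommand, let the parser populate a few fields, and implement
execute(HClient) returning a ReturnMsg. A hypothetical extra command, shown
only to illustrate the pattern (the class and its message are not part of the
commit):

    package org.apache.hadoop.hbase.shell;

    import org.apache.hadoop.hbase.HClient;

    public class PingCommand extends BasicCommand {
      /** Trivial example command: ignores the client and returns a fixed message. */
      public ReturnMsg execute(HClient client) {
        return new ReturnMsg(1, "pong");
      }
    }
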
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,26 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import org.apache.hadoop.hbase.HClient;
+
+public interface Command {
+  /** family indicator */
+  public static final String FAMILY_INDICATOR = ":";
+
+  /** Execute a command */
+  public ReturnMsg execute(HClient client);
+}
\ No newline at end of file

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CommandFactory.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CommandFactory.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CommandFactory.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CommandFactory.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,23 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+/**
+ * Parser uses command factories to create command.
+ */
+public interface CommandFactory {
+  Command getCommand();
+}
\ No newline at end of file

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ConsoleTable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ConsoleTable.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ConsoleTable.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ConsoleTable.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,186 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+/**
+ * Manufactures console table, but stupid.
+ */
+public class ConsoleTable {
+  public static void printHead(String name) {
+    System.out.println("+------+----------------------+");
+    System.out.print("| No.  | ");
+    System.out.printf("%-20s", name);
+    System.out.println(" |");
+  }
+
+  public static void printFoot() {
+    System.out.println("+------+----------------------+");
+    System.out.println();
+  }
+
+  public static void printTable(int count, String name) {
+    System.out.println("+------+----------------------+");
+
+    if (name.length() > 20) {
+      int interval = 20;
+
+      System.out.print("| ");
+      System.out.printf("%-4s", count + 1);
+      System.out.print(" | ");
+      System.out.printf("%-20s", name.substring(0, interval));
+      System.out.println(" |");
+
+      for (int i = 0; i < name.length() / interval; i++) {
+        System.out.print("| ");
+        System.out.printf("%-4s", "");
+        System.out.print(" | ");
+
+        int end = ((interval * i) + interval + interval);
+        if (end > name.length()) {
+          System.out.printf("%-20s", name.substring(end - interval,
+            name.length()));
+        } else {
+          System.out.printf("%-20s", name.substring(end - interval, end));
+        }
+        System.out.println(" |");
+      }
+
+    } else {
+      System.out.print("| ");
+      System.out.printf("%-4s", count + 1);
+      System.out.print(" | ");
+      System.out.printf("%-20s", name);
+      System.out.println(" |");
+    }
+  }
+
+  public static void selectHead() {
+    System.out.println("+------+----------------------+" +
+      "----------------------+----------------------+");
+    System.out.print("| No.  | ");
+    System.out.printf("%-20s", "Row");
+    System.out.printf(" | ");
+    System.out.printf("%-20s", "Column");
+    System.out.printf(" | ");
+    System.out.printf("%-20s", "Cell");
+    System.out.println(" | ");
+  }
+
+  public static void printLine(int count, String key, String column,
+      String cellData) {
+    System.out.println("+------+----------------------+" +
+      "----------------------+----------------------+");
+
+    if (key.length() > 20 || column.length() > 20 || cellData.length() > 20) {
+      int interval = 20;
+      System.out.print("| ");
+      System.out.printf("%-4s", count + 1);
+      System.out.print(" | ");
+      if (key.length() > 20)
+        System.out.printf("%-20s", key.substring(0, interval));
+      else
+        System.out.printf("%-20s", key);
+      System.out.print(" | ");
+      if (column.length() > 20)
+        System.out.printf("%-20s", column.substring(0, interval));
+      else
+        System.out.printf("%-20s", column);
+      System.out.print(" | ");
+      if (cellData.length() > 20)
+        System.out.printf("%-20s", cellData.substring(0, interval));
+      else
+        System.out.printf("%-20s", cellData);
+      System.out.println(" |");
+
+      // System.out.println(getBiggerInt(new int[]{ 3, 1, 9}));
+      int biggerStrLength = getBiggerInt(new int[] { key.length(),
+        column.length(), cellData.length() });
+
+      for (int i = 0; i < (biggerStrLength / interval); i++) {
+        System.out.print("| ");
+        System.out.printf("%-4s", "");
+        System.out.print(" | ");
+
+        int end = ((interval * i) + interval + interval);
+
+        if (end > key.length()) {
+          if (key.length() > interval && end - interval < key.length()) {
+            System.out.printf("%-20s", key.substring(end - interval,
+              key.length()));
+          } else {
+            System.out.printf("%-20s", "");
+          }
+        } else {
+          System.out.printf("%-20s", key.substring(end - interval, end));
+        }
+
+        System.out.print(" | ");
+
+        if (end > column.length()) {
+          if (column.length() > interval && end - interval < column.length()) {
+            System.out.printf("%-20s", column.substring(end - interval,
+              column.length()));
+          } else {
+            System.out.printf("%-20s", "");
+          }
+        } else {
+          System.out.printf("%-20s", column.substring(end - interval, end));
+        }
+
+        System.out.print(" | ");
+        if (end > cellData.length()) {
+          if (cellData.length() > interval &&
+              end - interval < cellData.length()) {
+            System.out.printf("%-20s",
+              cellData.substring(end - interval, cellData.length()));
+          } else {
+            System.out.printf("%-20s", "");
+          }
+        } else {
+          System.out.printf("%-20s", cellData.substring(end - interval, end));
+        }
+        System.out.println(" |");
+      }
+
+    } else {
+      System.out.print("| ");
+      System.out.printf("%-4s", count + 1);
+      System.out.print(" | ");
+      System.out.printf("%-20s", key);
+      System.out.print(" | ");
+      System.out.printf("%-20s", column);
+      System.out.print(" | ");
+      System.out.printf("%-20s", cellData);
+      System.out.println(" |");
+    }
+  }
+
+  public static int getBiggerInt(int[] integers) {
+    int result = -1;
+    for (int i = 0; i < integers.length; i++) {
+      if (integers[i] > result) {
+        result = integers[i];
+      }
+    }
+    return result;
+  }
+
+  public static void selectFoot() {
+    System.out.println("+------+----------------------+" +
+      "----------------------+----------------------+");
+    System.out.println();
+  }
+}

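The fixed 20-character columns above are shared by DescCommand below (and
presumably by the SHOW and SELECT paths); for a single short entry the
head/table/foot sequence renders roughly as:

    +------+----------------------+
    | No.  | ColumnFamily Name    |
    +------+----------------------+
    | 1    | anchor               |
    +------+----------------------+
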
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,68 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hbase.HClient;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+
+public class CreateCommand extends BasicCommand {
+  String table;
+
+  List<String> columnfamilies;
+
+  int limit;
+
+  public ReturnMsg execute(HClient client) {
+    if (this.table == null || this.columnfamilies == null)
+      return new ReturnMsg(0, "Syntax error : Please check 'Create' syntax.");
+
+    try {
+      HTableDescriptor desc = new HTableDescriptor(this.table);
+
+      for (int i = 0; i < this.columnfamilies.size(); i++) {
+
+        String columnFamily = columnfamilies.get(i);
+        if (columnFamily.lastIndexOf(':') == (columnFamily.length() - 1)) {
+          columnFamily = columnFamily.substring(0, columnFamily.length() - 1);
+        }
+        desc.addFamily(new HColumnDescriptor(columnFamily + FAMILY_INDICATOR));
+
+      }
+
+      client.createTable(desc);
+
+      return new ReturnMsg(1, "Table created successfully.");
+    } catch (IOException e) {
+      return new ReturnMsg(0, "error msg : " + e.toString());
+    }
+  }
+
+  public void setTable(String table) {
+    this.table = table;
+  }
+
+  public void setColumnfamilies(List<String> columnfamilies) {
+    this.columnfamilies = columnfamilies;
+  }
+
+  public void setLimit(int limit) {
+    this.limit = limit;
+  }
+}

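A statement handled by this class, following the CREATE entry in
HelpContents.java (table and family names illustrative):

    CREATE webtable COLUMNFAMILIES('anchor', 'contents');

A trailing ':' on a family name is stripped before the HColumnDescriptor is
built, so 'anchor' and 'anchor:' are interchangeable here.
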
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,79 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.hbase.HClient;
+import org.apache.hadoop.io.Text;
+
+public class DeleteCommand extends BasicCommand {
+  String table;
+
+  Map<String, List<String>> condition;
+
+  public ReturnMsg execute(HClient client) {
+    if (this.table == null || condition == null)
+      return new ReturnMsg(0, "Syntax error : Please check 'Delete' syntax.");
+
+    try {
+      client.openTable(new Text(this.table));
+      long lockId = client.startUpdate(getRow());
+
+      if (getColumn() != null) {
+
+        client.delete(lockId, getColumn());
+
+      } else {
+        Set<Text> keySet = client.getRow(getRow()).keySet();
+        Text[] columnKey = keySet.toArray(new Text[keySet.size()]);
+
+        for (int i = 0; i < columnKey.length; i++) {
+          client.delete(lockId, columnKey[i]);
+        }
+      }
+
+      client.commit(lockId);
+
+      return new ReturnMsg(1, "1 deleted successfully. ");
+    } catch (IOException e) {
+      return new ReturnMsg(0, "error msg : " + e.toString());
+    }
+  }
+
+  public void setTable(String table) {
+    this.table = table;
+  }
+
+  public void setCondition(Map<String, List<String>> cond) {
+    this.condition = cond;
+  }
+
+  public Text getRow() {
+    return new Text(this.condition.get("row").get(1));
+  }
+
+  public Text getColumn() {
+    if (this.condition.containsKey("column")) {
+      return new Text(this.condition.get("column").get(1));
+    } else {
+      return null;
+    }
+  }
+}

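A matching statement, per the DELETE entry in HelpContents.java (row key and
column illustrative):

    DELETE webtable WHERE row='row1' AND column='anchor:';

If the column clause is omitted, the loop above deletes every column stored
under that row before committing.
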
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,61 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.HClient;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.io.Text;
+
+public class DescCommand extends BasicCommand {
+  String argument;
+
+  public ReturnMsg execute(HClient client) {
+    if (this.argument == null) 
+      return new ReturnMsg(0, "Syntax error : Please check 'Describe' syntax.");
+
+    try {
+      HTableDescriptor[] tables = client.listTables();
+      Text[] columns = null;
+
+      for (int i = 0; i < tables.length; i++) {
+        if (tables[i].getName().toString().equals(this.argument)) {
+          columns = tables[i].families().keySet().toArray(new Text[] {});
+        }
+      }
+
+      if (columns == null) {
+        return new ReturnMsg(0, "Table not found.");
+      }
+
+      ConsoleTable.printHead("ColumnFamily Name");
+      for (int ii = 0; ii < columns.length; ii++) {
+        String familyName = columns[ii].toString().replace(FAMILY_INDICATOR, "");
+        ConsoleTable.printTable(ii, familyName);
+      }
+      ConsoleTable.printFoot();
+
+      return new ReturnMsg(1, columns.length + " columnfamilie(s) found.");
+    } catch (IOException e) {
+      return new ReturnMsg(0, "error msg : " + e.toString());
+    }
+  }
+
+  public void setArgument(String argument) {
+    this.argument = argument;
+  }
+}

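Example usage (table name illustrative); the output is rendered through
ConsoleTable with the ':' family indicator stripped from each name:

    DESCRIBE webtable;
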
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,42 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.HClient;
+import org.apache.hadoop.io.Text;
+
+public class DropCommand extends BasicCommand {
+  String argument;
+
+  public ReturnMsg execute(HClient client) {
+    if (this.argument == null) 
+      return new ReturnMsg(0, "Syntax error : Please check 'Drop' syntax.");
+
+    try {
+      client.deleteTable(new Text(this.argument));
+      
+      return new ReturnMsg(1, "Table droped successfully.");
+    } catch (IOException e) {
+      return new ReturnMsg(0, "error msg : " + e.toString());
+    }
+  }
+
+  public void setArgument(String argument) {
+    this.argument = argument;
+  }
+}

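Example (table name illustrative):

    DROP webtable;

Note that while the help text advertises dropping columnfamilies as well, this
class currently only passes its single argument to HClient.deleteTable.
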
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,27 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import org.apache.hadoop.hbase.HClient;
+
+public class ExitCommand extends BasicCommand {
+
+  public ReturnMsg execute(HClient client) {
+    System.exit(1);
+    return null;
+  }
+
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj Wed Jul 11 14:54:15 2007
@@ -0,0 +1,474 @@
+options {
+  STATIC = false;
+  IGNORE_CASE = true;
+}
+
+PARSER_BEGIN(Parser)
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.generated;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.HashMap;
+import java.io.StringReader;
+import java.io.Reader;
+
+import org.apache.hadoop.hbase.shell.*;
+
+/**
+ * Parsing command line.
+ */
+public class Parser {
+  private static String QueryString;
+  
+  public Parser(String query) {
+    this((Reader)(new StringReader(query)));
+    this.QueryString = query;
+  }
+  
+  public String getQueryStr() {
+    return this.QueryString;
+  }
+}
+
+PARSER_END(Parser)
+
+SKIP :                             
+{
+    " "
+ | "\t"
+ | "\r"
+ | "\n"
+}
+
+TOKEN:
+{
+   <HELP: "help">
+ | <SHOW: "show">
+ | <DESCRIBE: "describe">
+ | <CREATE: "create">	
+ | <DROP: "drop">
+ | <EXIT: "exit">
+ | <INSERT: "insert">
+ | <DELETE: "delete">
+ | <SELECT: "select">
+ | <ROW: "row">
+ | <COLUMN: "column">
+ | <TIME: "time">
+ | <VALUES: "values">
+ | <COLUMNFAMILIES: "columnfamilies">
+ | <WHERE: "where">
+ | <LIMIT: "limit">
+ | <AND: "and">
+ | <OR:  "or">
+ | <COMMA: ",">
+ | <DOT: ".">
+ | <LPAREN: "(">
+ | <RPAREN: ")">
+ | <EQUALS: "=">
+ | <NOTEQUAL: "<>">
+ | <OPTIONS: "-">
+}
+
+TOKEN :
+{
+   <ID: ["a"-"z","A"-"Z","_","-"] ( ["a"-"z","A"-"Z","_","0"-"9","-"] )* >
+ | <NUM: ( ["0"-"9"] )+ >
+ | <STRING: (["A"-"Z","a"-"z","0"-"9"])+ >
+ | <QUOTED_STRING: "\"" (~["\""])+ "\"" >
+ | <STRING_LITERAL: "'" (~["'"])* ( "''" (~["'"])* )* "'" >
+}
+
+/**
+ * Parses the given array of command line arguments.
+ */
+Command terminatedCommand() :
+{
+    Command statement = null;
+}
+{
+    ([statement = cmdStatement()] ";" | <EOF>)
+    {
+        return statement;
+    }
+}
+
+Command cmdStatement() :
+{
+    Command cmd = null;
+}
+{
+  (
+       cmd = exitCommand()
+     | cmd = helpCommand()
+     | cmd = showCommand()
+     | cmd = descCommand()
+     | cmd = createCommand()
+     | cmd = dropCommand()
+     | cmd = insertCommand()
+     | cmd = deleteCommand()
+     | cmd = selectCommand()
+  )
+   {
+       return cmd;
+   }
+}
+
+ExitCommand exitCommand() :
+{
+  ExitCommand exit = new ExitCommand();
+}
+{
+   <EXIT>  { return exit; }
+}
+
+HelpCommand helpCommand() :
+{
+  Token t = null;
+  HelpCommand help = new HelpCommand();
+  String argument = "";
+}
+{
+   <HELP>
+   [
+     (
+         t=<SHOW>
+       | t=<DESCRIBE>
+       | t=<CREATE>
+       | t=<DROP>
+       | t=<EXIT>
+       | t=<INSERT>
+       | t=<DELETE>
+       | t=<SELECT>
+       | t=<ID>
+     ) { argument = t.image.toString(); }
+   ]
+   
+  { 
+      help.setArgument(argument);
+      return help;  
+  }
+}
+
+ShowCommand showCommand() :
+{
+  ShowCommand show = new ShowCommand();
+  String argument = null;
+}
+{
+   <SHOW>
+   [
+       argument = getString()
+   ]
+   
+  { 
+      show.setArgument(argument);
+      return show;
+  }
+}
+
+DescCommand descCommand() :
+{
+  DescCommand desc = new DescCommand();
+  String argument = null;
+}
+{
+   <DESCRIBE>
+   [
+       argument = getString()
+   ]
+   
+  { 
+      desc.setArgument(argument);
+      return desc;
+  }
+}
+
+CreateCommand createCommand() :
+{
+  CreateCommand create = new CreateCommand();
+  String argument = null;
+  List<String> columnfamilies = null;
+  int limit = 1;
+}
+{
+    <CREATE>
+    argument = getString() 
+    { 
+         create.setTable(argument); 
+    }
+    
+    <COLUMNFAMILIES>
+    columnfamilies = getLiteralValues()
+    { 
+         create.setColumnfamilies(columnfamilies); 
+    }
+
+    [ <LIMIT><EQUALS> limit = getInt() {
+            try{
+               create.setLimit(limit);
+            }catch(ClassCastException ce) {
+               throw generateParseException();
+        }
+   } ]
+  { return create; }
+}
+
+DropCommand dropCommand() :
+{
+  DropCommand drop = new DropCommand();
+  String argument = null;
+}
+{
+   <DROP>
+   [
+       argument = getString()
+   ]
+   
+  { 
+      drop.setArgument(argument);
+      return drop;
+  }
+}
+
+InsertCommand insertCommand() :
+{
+    InsertCommand in = new InsertCommand();
+    Map<String, List<String>> cond = null;
+    List<String> columnfamilies = null;
+    List<String> values = null;
+    String table = null;
+}
+{
+    <INSERT>
+    table = getString()
+    { 
+         in.setTable(table); 
+    }
+    
+    columnfamilies = getLiteralValues()
+    {
+        in.setColumnfamilies(columnfamilies);
+    }
+    
+    <VALUES> values = getLiteralValues()
+    {
+        in.setValues(values);
+    }
+      
+    <WHERE> cond = WhereClause() 
+    {
+        try{
+           in.setCondition(cond);
+        }catch(ClassCastException ce) {
+           throw generateParseException();
+        }
+    } 
+    {
+       return in;
+    }
+}
+
+DeleteCommand deleteCommand() :
+{
+    DeleteCommand del = new DeleteCommand();
+    Map<String, List<String>> cond = null;
+    String argument = null;
+}
+{
+    <DELETE>
+    argument = getString()
+    { 
+         del.setTable(argument); 
+    }
+    
+     <WHERE> cond = WhereClause() {
+        try{
+           del.setCondition(cond);
+        }catch(ClassCastException ce) {
+            throw generateParseException();
+       }
+    } 
+    {
+       return del;
+    }
+}
+
+SelectCommand selectCommand() :
+{
+    SelectCommand select = new SelectCommand();
+    Map<String, List<String>> cond = null;
+    String argument = null;
+    int limit;
+}
+{
+    <SELECT>
+    argument = getString()
+    { 
+         select.setTable(argument); 
+    }
+
+     [ <WHERE> cond = WhereClause() {
+        try{
+           select.setCondition(cond);
+        }catch(ClassCastException ce) {
+            throw generateParseException();
+        }
+    } ] 
+    
+    [ <LIMIT><EQUALS> limit = getInt() {
+            try{
+               select.setLimit(limit);
+            }catch(ClassCastException ce) {
+               throw generateParseException();
+        }
+   } ]
+  { return select; }
+}
+
+/**
+* TODO : expressions codes need more love.
+*/
+
+String getString():
+{ Token t = null; }
+{
+    (  t=<ID>
+     | t=<QUOTED_STRING>
+    ) 
+    { return t.image.toString(); }
+}
+
+int getInt():
+{ Token t = null; }
+{
+   t = <NUM>
+   { return Integer.parseInt(t.image.toString()); }
+}
+
+Map<String, List<String>> WhereClause() :
+{
+    Map<String, List<String>> result = 
+               new HashMap<String, List<String>>();
+    List<String> exception = 
+               new ArrayList<String>();
+}
+{
+  { 
+      try{
+        result.putAll(ConditionExpression()); 
+      }catch(ParseException pe) {
+        exception.add(pe.toString());
+        result.put("error", exception);
+      }
+  }
+	(
+     <AND> { 
+      try{
+        result.putAll(ConditionExpression()); 
+      }catch(ParseException pe) {
+        exception.add(pe.toString());
+        result.put("error", exception);
+      }
+     }
+	)*
+	
+ { return result; }
+}
+
+Map<String, List<String>> ConditionExpression() :
+{
+    Token tSearchName, tComparator, tComparand;
+    Map<String, List<String>> tmp = 
+               new HashMap<String, List<String>>();
+    List<String> values = 
+               new ArrayList<String>();
+}
+{
+	( 
+	   tSearchName=<ROW>
+	 | tSearchName=<COLUMN>
+	 | tSearchName=<TIME> 
+	 | tSearchName=<ID>
+	 | tSearchName=<VALUES>
+	 | tSearchName=<COLUMNFAMILIES>
+	 )
+	 
+	( tComparator=<EQUALS> | tComparator=<NOTEQUAL> )
+
+ ( tComparand=<QUOTED_STRING>
+    { 
+          values.add("quoted string"); 
+          tmp.put("error", values); 
+          return tmp; 
+    } 
+  |  tComparand=<STRING_LITERAL> {
+    values.add(tComparator.image);
+    values.add(tComparand.image.substring(1,tComparand.image.length() - 1));
+  
+  if(tSearchName.image.toString().equals("row") || 
+  tSearchName.image.toString().equals("column") || 
+  tSearchName.image.toString().equals("time"))
+    { tmp.put(tSearchName.image, values); }
+  else   
+    {
+    values.add(tSearchName.image.toString());
+    tmp.put("error", values); 
+    }
+    
+    return tmp; 
+} )
+}
+
+List<String> getLiteralValues() :
+{
+    List<String> values = new ArrayList<String>();
+    String literal = null;
+}
+{    
+<LPAREN>
+ { literal = getStringLiteral();
+ if(literal != null) values.add(literal); 
+ }
+    (
+<COMMA> { 
+   literal = getStringLiteral(); 
+   if(literal != null) values.add(literal); 
+}
+| ( 
+       <ID>
+     | <STRING_LITERAL>
+     | <QUOTED_STRING>
+     | <STRING> 
+  )  { values.removeAll(values); }
+	)*
+<RPAREN>
+   { 
+     return values;
+    }
+}
+
+String getStringLiteral() :
+{
+    Token stringLiteral;
+}
+{
+ stringLiteral=<STRING_LITERAL> 
+ { return stringLiteral.image.substring(1,stringLiteral.image.length() - 1); }
+ | <QUOTED_STRING> { return null; }
+}

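Tying the grammar together: keywords are case-insensitive (IGNORE_CASE=true),
string literals are single-quoted, and a statement only reaches the command
objects once Shell.isEndOfCommand sees the terminating ';'. A statement that
exercises the SELECT production, the WHERE clause and the LIMIT option (names
and values illustrative):

    SELECT webtable WHERE row='row1' AND column='anchor:' LIMIT=10;
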
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,31 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import org.apache.hadoop.hbase.HClient;
+
+public class HelpCommand extends BasicCommand {
+  String argument;
+
+  public ReturnMsg execute(HClient client) {
+    HelpManager.printHelp(this.argument);
+    return null;
+  }
+
+  public void setArgument(String argument) {
+    this.argument = argument;
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,61 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class HelpContents {
+
+  /**
+   * add help contents 
+   */
+  public static Map<? extends String, ? extends String[]> Load() {
+    Map<String, String[]> load = new HashMap<String, String[]>();
+
+    load.put("SHOW", new String[] { "List all tables.", "SHOW TABLES;" });
+    load.put("DESCRIBE", new String[] { "Describe a table's columnfamilies.",
+        "DESCRIBE <table_name>;" });
+    load.put("CREATE", new String[] {
+        "Create a table",
+        "CREATE <table_name>"
+            + "\n\t  COLUMNFAMILIES('cf_name1'[, 'cf_name2', ...]);"
+            + "\n    [LIMIT=versions_limit];" });
+    load.put("DROP", new String[] {
+        "Drop columnfamilie(s) from a table or drop table(s)",
+        "DROP table_name1[, table_name2, ...] | cf_name1[, cf_name2, ...];" });
+    load.put("INSERT", new String[] {
+        "Insert row into table",
+        "INSERT <table_name>" + "\n\t('column_name1'[, 'column_name2', ...])"
+            + "\n\t    VALUES('entry1'[, 'entry2', ...])"
+            + "\n    WHERE row='row_key';" });
+    load.put("DELETE", new String[] {
+        "Delete cell or row in table.",
+        "DELETE <table_name>" + "\n\t    WHERE row='row_key;"
+            + "\n    [AND column='column_name'];" });
+    load.put("SELECT",
+        new String[] {
+            "Select values from a table",
+            "SELECT <table_name>" + "\n\t    [WHERE row='row_key']"
+                + "\n    [AND column='column_name'];"
+                + "\n    [AND time='timestamp'];"
+                + "\n    [LIMIT=versions_limit];" });
+    load.put("EXIT", new String[] { "Exit shell", "EXIT;" });
+
+    return load;
+  }
+
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,78 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Prints a usage message for the program to the given stream.
+ */
+public class HelpManager {
+  /** application name */
+  public static final String APP_NAME = "HBase Shell";
+
+  /** version of the code */
+  public static final String APP_VERSION = "0.0.1";
+
+  /** help contents map */
+  public static final Map<String, String[]> help = new HashMap<String, String[]>();
+
+  public HelpManager() {
+    help.putAll(HelpContents.Load());
+  }
+
+  /** Print out the program version. */
+  public void printVersion() {
+    System.out.println("\n" + APP_NAME + ", " + APP_VERSION + " version.\n"
+        + "Copyright (c) 2007 by udanax, "
+        + "licensed to Apache Software Foundation.\n"
+        + "Type 'help;' for usage.\n");
+  }
+
+  public static void printHelp(String cmd) {
+    if (cmd.equals("")) {
+      System.out.println("\nType 'help <command>;' to see command-specific "
+          + "usage.\n");
+      for (Map.Entry<String, String[]> helpMap : help.entrySet()) {
+        wrapping(helpMap.getKey(), helpMap.getValue(), false);
+      }
+      System.out.println();
+    } else {
+      if (help.containsKey(cmd.toUpperCase())) {
+        System.out.println();
+        String[] msg = help.get(cmd.toUpperCase());
+        wrapping(cmd.toUpperCase(), msg, true);
+      } else {
+        System.out.println("Unknown Command : Type 'help' for usage.");
+      }
+    }
+  }
+
+  public static void wrapping(String cmd, String[] cmdType, boolean example) {
+    System.out.printf("%-10s", cmd);
+    if (cmdType[0].length() > 55) {
+      System.out.println(cmdType[0].substring(0, 55));
+      System.out.printf("%13s", "");
+      System.out.println(cmdType[0].substring(55));
+    } else {
+      System.out.println(cmdType[0]);
+    }
+
+    if (example)
+      System.out.println("\n>>> " + cmdType[1] + "\n");
+  }
+}
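
A minimal sketch of how a caller might drive HelpManager; the wrapper class and main() below are illustrative only and simply assume the shell package is on the classpath:

    import org.apache.hadoop.hbase.shell.HelpManager;

    public class HelpManagerUsage {
      public static void main(String[] args) {
        // Constructing HelpManager fills the static help map from HelpContents.Load().
        HelpManager help = new HelpManager();
        help.printVersion();              // application banner and version
        HelpManager.printHelp("");        // one-line summary of every command
        HelpManager.printHelp("select");  // detailed usage, with example, for SELECT
      }
    }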

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,86 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.HClient;
+import org.apache.hadoop.io.Text;
+
+public class InsertCommand extends BasicCommand {
+  String table;
+
+  List<String> columnfamilies;
+
+  List<String> values;
+
+  Map<String, List<String>> condition;
+
+  public ReturnMsg execute(HClient client) {
+    if (this.table == null || this.values == null || this.condition == null)
+      return new ReturnMsg(0, "Syntax error : Please check 'Insert' syntax.");
+
+    if (this.columnfamilies.size() != this.values.size())
+      return new ReturnMsg(0,
+          "Mismatch between values list and columnfamilies list");
+
+    try {
+      client.openTable(new Text(this.table));
+      long lockId = client.startUpdate(new Text(getRow()));
+
+      for (int i = 0; i < this.values.size(); i++) {
+        client.put(lockId, getColumn(i), getValue(i));
+      }
+
+      client.commit(lockId);
+
+      return new ReturnMsg(1, "1 row inserted successfully.");
+    } catch (IOException e) {
+      String[] msg = e.getMessage().split("[\n]");
+      return new ReturnMsg(0, msg[0]);
+    }
+  }
+
+  public void setTable(String table) {
+    this.table = table;
+  }
+
+  public void setColumnfamilies(List<String> columnfamilies) {
+    this.columnfamilies = columnfamilies;
+  }
+
+  public void setValues(List<String> values) {
+    this.values = values;
+  }
+
+  public void setCondition(Map<String, List<String>> cond) {
+    this.condition = cond;
+  }
+
+  public Text getRow() {
+    return new Text(this.condition.get("row").get(1));
+  }
+
+  public Text getColumn(int i) {
+    return new Text(this.columnfamilies.get(i));
+  }
+
+  public byte[] getValue(int i) {
+    return this.values.get(i).getBytes();
+  }
+}
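
A minimal sketch of how a caller (in the shell itself, the generated parser) might populate and run InsertCommand. The table, column, and value literals are invented, and the index-0 entry of the condition list is an assumption about how the parser fills it; only the index-1 slot is read by getRow():

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.hbase.HClient;
    import org.apache.hadoop.hbase.shell.InsertCommand;
    import org.apache.hadoop.hbase.shell.ReturnMsg;

    public class InsertCommandUsage {
      // Runs the equivalent of:
      //   INSERT webtable ('contents:') VALUES ('hello') WHERE row='row1';
      public static ReturnMsg insertExample(HClient client) {
        InsertCommand insert = new InsertCommand();
        insert.setTable("webtable");
        insert.setColumnfamilies(Arrays.asList("contents:"));
        insert.setValues(Arrays.asList("hello"));

        // getRow() reads index 1 of the "row" list, so the row key goes in
        // slot 1; slot 0 is assumed here to hold the literal key name.
        Map<String, List<String>> condition = new HashMap<String, List<String>>();
        condition.put("row", Arrays.asList("row", "row1"));
        insert.setCondition(condition);

        return insert.execute(client);  // opens the table, puts one cell, commits
      }
    }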

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,40 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+public class ReturnMsg {
+  private String msg;
+
+  private int type;
+
+  public ReturnMsg(int i, String string) {
+    this.type = i;
+    this.msg = string;
+  }
+
+  public ReturnMsg(int i) {
+    this.type = i;
+    this.msg = "";
+  }
+
+  public String getMsg() {
+    return this.msg;
+  }
+
+  public int getType() {
+    return this.type;
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,243 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.hadoop.hbase.HClient;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HScannerInterface;
+import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.io.DataInputBuffer;
+import org.apache.hadoop.io.Text;
+
+public class SelectCommand extends BasicCommand {
+  String table;
+
+  int limit;
+
+  Map<String, List<String>> condition;
+
+  public ReturnMsg execute(HClient client) {
+    if (this.condition != null && this.condition.containsKey("error"))
+      return new ReturnMsg(0, "Syntax error : Please check 'Select' syntax.");
+
+    try {
+      client.openTable(new Text(this.table));
+
+      switch (getCondition()) {
+      case 0:
+
+        HTableDescriptor[] tables = client.listTables();
+        Text[] columns = null;
+
+        if (this.table.equals(HConstants.ROOT_TABLE_NAME.toString())
+            || this.table.equals(HConstants.META_TABLE_NAME.toString())) {
+          columns = HConstants.COLUMN_FAMILY_ARRAY;
+        } else {
+          for (int i = 0; i < tables.length; i++) {
+            if (tables[i].getName().toString().equals(this.table)) {
+              columns = tables[i].families().keySet().toArray(new Text[] {});
+            }
+          }
+        }
+
+        HScannerInterface scan = client.obtainScanner(columns, new Text(""));
+        HStoreKey key = new HStoreKey();
+        TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
+
+        ConsoleTable.selectHead();
+        int count = 0;
+        while (scan.next(key, results)) {
+          Text rowKey = key.getRow();
+
+          for (Text columnKey : results.keySet()) {
+            byte[] value = results.get(columnKey);
+            String cellData = new String(value);
+
+            if (columnKey.equals(HConstants.COL_REGIONINFO)) {
+              DataInputBuffer inbuf = new DataInputBuffer();
+              HRegionInfo info = new HRegionInfo();
+              inbuf.reset(value, value.length);
+              info.readFields(inbuf);
+
+              cellData = "ID : " + String.valueOf(info.getRegionId());
+            }
+            ConsoleTable.printLine(count, rowKey.toString(), columnKey.toString(),
+                cellData);
+            count++;
+          }
+          results = new TreeMap<Text, byte[]>();
+        }
+        ConsoleTable.selectFoot();
+        scan.close();
+
+        break;
+
+      case 1:
+
+        count = 0;
+        ConsoleTable.selectHead();
+        for (Map.Entry<Text, byte[]> entry : client.getRow(new Text(getRow())).entrySet()) {
+
+          byte[] value = entry.getValue();
+          String cellData = new String(value);
+
+          if (entry.getKey().equals(HConstants.COL_REGIONINFO)) {
+            DataInputBuffer inbuf = new DataInputBuffer();
+            HRegionInfo info = new HRegionInfo();
+            inbuf.reset(value, value.length);
+            info.readFields(inbuf);
+
+            cellData = "ID : " + String.valueOf(info.getRegionId());
+          }
+          ConsoleTable.printLine(count, getRow().toString(), entry.getKey().toString(),
+              cellData);
+          count++;
+        }
+        ConsoleTable.selectFoot();
+
+        break;
+
+      case 2:
+
+        Text[] column = new Text[] { new Text(getColumn()) };
+
+        HScannerInterface scanner = client.obtainScanner(column, new Text(""));
+        HStoreKey k = new HStoreKey();
+        TreeMap<Text, byte[]> r = new TreeMap<Text, byte[]>();
+
+        ConsoleTable.selectHead();
+        count = 0;
+        while (scanner.next(k, r)) {
+          Text rowKey = k.getRow();
+
+          for (Text columnKey : r.keySet()) {
+            byte[] value = r.get(columnKey);
+            String cellData = new String(value);
+            ConsoleTable.printLine(count, rowKey.toString(), columnKey.toString(),
+                cellData);
+            count++;
+          }
+          r = new TreeMap<Text, byte[]>();
+        }
+        ConsoleTable.selectFoot();
+        scanner.close();
+
+        break;
+
+      case 3:
+
+        byte[] rs1 = client.get(new Text(getRow()), new Text(getColumn()));
+
+        ConsoleTable.selectHead();
+        ConsoleTable.printLine(0, getRow(), getColumn(),
+          new String(rs1, HConstants.UTF8_ENCODING));
+        ConsoleTable.selectFoot();
+
+        break;
+
+      case 4:
+
+        byte[][] rs2 = client.get(new Text(getRow()), new Text(getColumn()), this.limit);
+
+        ConsoleTable.selectHead();
+        for (int i = 0; i < rs2.length; i++) {
+          ConsoleTable.printLine(i, getRow(), getColumn(),
+            new String(rs2[i], HConstants.UTF8_ENCODING));
+        }
+        ConsoleTable.selectFoot();
+
+        break;
+
+      case 5:
+
+        byte[][] rs3 = client.get(new Text(getRow()), new Text(getColumn()), getTime(), this.limit);
+
+        ConsoleTable.selectHead();
+        for (int i = 0; i < rs3.length; i++) {
+          ConsoleTable.printLine(i, getRow(), getColumn(), new String(rs3[i]));
+        }
+        ConsoleTable.selectFoot();
+
+        break;
+
+      }
+
+      return new ReturnMsg(1, "Successfully print out the selected data.");
+    } catch (IOException e) {
+      String[] msg = e.getMessage().split("[,]");
+      return new ReturnMsg(0, msg[0]);
+    }
+  }
+
+  public void setTable(String table) {
+    this.table = table;
+  }
+
+  public void setLimit(int limit) {
+    this.limit = limit;
+  }
+
+  public void setCondition(Map<String, List<String>> cond) {
+    this.condition = cond;
+  }
+
+  public String getRow() {
+    return this.condition.get("row").get(1);
+  }
+
+  public String getColumn() {
+    return this.condition.get("column").get(1);
+  }
+
+  public long getTime() {
+    return Long.parseLong(this.condition.get("time").get(1));
+  }
+
+  public int getConditionSize() {
+    return this.condition.size();
+  }
+
+  public int getCondition() {
+    int type = 0;
+    if (this.condition == null) {
+      type = 0;
+    } else if (this.condition.containsKey("row")) {
+      if (getConditionSize() == 1) {
+        type = 1;
+      } else if (this.condition.containsKey("column")) {
+        if (getConditionSize() == 2) {
+          if (this.limit == 0) {
+            type = 3;
+          } else {
+            type = 4;
+          }
+        } else {
+          type = 5;
+        }
+      }
+    } else if (this.condition.containsKey("column")) {
+      type = 2;
+    }
+    return type;
+  }
+}
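
SelectCommand.getCondition() maps the parsed WHERE/LIMIT clauses onto the switch above: no condition scans the whole table (case 0), a row alone fetches that row (case 1), a column alone scans that column (case 2), row plus column does a single get (case 3) or a versioned get when LIMIT is set (case 4), and adding a timestamp gives a time-bounded get (case 5). A minimal sketch of driving case 3 directly (names are invented; in the shell the condition map is built by the generated parser):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.hbase.HClient;
    import org.apache.hadoop.hbase.shell.ReturnMsg;
    import org.apache.hadoop.hbase.shell.SelectCommand;

    public class SelectCommandUsage {
      // Runs the equivalent of:
      //   SELECT webtable WHERE row='row1' AND column='contents:';
      public static ReturnMsg selectExample(HClient client) {
        SelectCommand select = new SelectCommand();
        select.setTable("webtable");

        // As in InsertCommand, each clause's value is read from index 1 of its list.
        Map<String, List<String>> condition = new HashMap<String, List<String>>();
        condition.put("row", Arrays.asList("row", "row1"));
        condition.put("column", Arrays.asList("column", "contents:"));
        select.setCondition(condition);

        // limit defaults to 0, so getCondition() resolves to case 3 (single get).
        return select.execute(client);
      }
    }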

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,58 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.HClient;
+import org.apache.hadoop.hbase.HTableDescriptor;
+
+public class ShowCommand extends BasicCommand {
+  String argument;
+
+  public ReturnMsg execute(HClient client) {
+    if (this.argument == null)
+      return new ReturnMsg(0, "Syntax error : Please check 'Show' syntax.");
+
+    try {
+      int tableLength = 0;
+
+      if ("tables".equals(this.argument)) {
+        HTableDescriptor[] tables = client.listTables();
+        tableLength = tables.length;
+        if (tableLength == 0) {
+          return new ReturnMsg(0, "Table not found.");
+        }
+
+        ConsoleTable.printHead("Table Name");
+        for (int i = 0; i < tableLength; i++) {
+          String tableName = tables[i].getName().toString();
+          ConsoleTable.printTable(i, tableName);
+        }
+        ConsoleTable.printFoot();
+
+        return new ReturnMsg(1, tableLength + " table(s) found.");
+      }
+      return new ReturnMsg(0, "Missing parameters. Please check 'Show' syntax.");
+    } catch (IOException e) {
+      return new ReturnMsg(0, "error msg : " + e.toString());
+    }
+  }
+
+  public void setArgument(String argument) {
+    this.argument = argument;
+  }
+}

Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParseException.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParseException.java?view=auto&rev=555415
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParseException.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParseException.java Wed Jul 11 14:54:15 2007
@@ -0,0 +1,207 @@
+/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 3.0 */
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell.generated;
+
+/**
+ * This exception is thrown when parse errors are encountered.
+ * You can explicitly create objects of this exception type by
+ * calling the method generateParseException in the generated
+ * parser.
+ *
+ * You can modify this class to customize your error reporting
+ * mechanisms so long as you retain the public fields.
+ */
+public class ParseException extends Exception {
+
+  /**
+   * This constructor is used by the method "generateParseException"
+   * in the generated parser.  Calling this constructor generates
+   * a new object of this type with the fields "currentToken",
+   * "expectedTokenSequences", and "tokenImage" set.  The boolean
+   * flag "specialConstructor" is also set to true to indicate that
+   * this constructor was used to create this object.
+   * This constructor calls its super class with the empty string
+   * to force the "toString" method of parent class "Throwable" to
+   * print the error message in the form:
+   *     ParseException: <result of getMessage>
+   */
+  public ParseException(Token currentTokenVal,
+                        int[][] expectedTokenSequencesVal,
+                        String[] tokenImageVal
+                       )
+  {
+    super("");
+    specialConstructor = true;
+    currentToken = currentTokenVal;
+    expectedTokenSequences = expectedTokenSequencesVal;
+    tokenImage = tokenImageVal;
+  }
+
+  /**
+   * The following constructors are for use by you for whatever
+   * purpose you can think of.  Constructing the exception in this
+   * manner makes the exception behave in the normal way - i.e., as
+   * documented in the class "Throwable".  The fields "errorToken",
+   * "expectedTokenSequences", and "tokenImage" do not contain
+   * relevant information.  The JavaCC generated code does not use
+   * these constructors.
+   */
+
+  public ParseException() {
+    super();
+    specialConstructor = false;
+  }
+
+  public ParseException(String message) {
+    super(message);
+    specialConstructor = false;
+  }
+
+  /**
+   * This variable determines which constructor was used to create
+   * this object and thereby affects the semantics of the
+   * "getMessage" method (see below).
+   */
+  protected boolean specialConstructor;
+
+  /**
+   * This is the last token that has been consumed successfully.  If
+   * this object has been created due to a parse error, the token
+ * following this token will (therefore) be the first error token.
+   */
+  public Token currentToken;
+
+  /**
+   * Each entry in this array is an array of integers.  Each array
+   * of integers represents a sequence of tokens (by their ordinal
+   * values) that is expected at this point of the parse.
+   */
+  public int[][] expectedTokenSequences;
+
+  /**
+   * This is a reference to the "tokenImage" array of the generated
+   * parser within which the parse error occurred.  This array is
+   * defined in the generated ...Constants interface.
+   */
+  public String[] tokenImage;
+
+  /**
+   * This method has the standard behavior when this object has been
+   * created using the standard constructors.  Otherwise, it uses
+   * "currentToken" and "expectedTokenSequences" to generate a parse
+   * error message and returns it.  If this object has been created
+   * due to a parse error, and you do not catch it (it gets thrown
+   * from the parser), then this method is called during the printing
+   * of the final stack trace, and hence the correct error message
+   * gets displayed.
+   */
+  public String getMessage() {
+    if (!specialConstructor) {
+      return super.getMessage();
+    }
+    StringBuffer expected = new StringBuffer();
+    int maxSize = 0;
+    for (int i = 0; i < expectedTokenSequences.length; i++) {
+      if (maxSize < expectedTokenSequences[i].length) {
+        maxSize = expectedTokenSequences[i].length;
+      }
+      for (int j = 0; j < expectedTokenSequences[i].length; j++) {
+        expected.append(tokenImage[expectedTokenSequences[i][j]]).append(" ");
+      }
+      if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) {
+        expected.append("...");
+      }
+      expected.append(eol).append("    ");
+    }
+    String retval = "Encountered \"";
+    Token tok = currentToken.next;
+    for (int i = 0; i < maxSize; i++) {
+      if (i != 0) retval += " ";
+      if (tok.kind == 0) {
+        retval += tokenImage[0];
+        break;
+      }
+      retval += add_escapes(tok.image);
+      tok = tok.next; 
+    }
+    retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn;
+    retval += "." + eol;
+    if (expectedTokenSequences.length == 1) {
+      retval += "Was expecting:" + eol + "    ";
+    } else {
+      retval += "Was expecting one of:" + eol + "    ";
+    }
+    retval += expected.toString();
+    return retval;
+  }
+
+  /**
+   * The end of line string for this machine.
+   */
+  protected String eol = System.getProperty("line.separator", "\n");
+ 
+  /**
+   * Used to convert raw characters to their escaped version
+   * when these raw versions cannot be used as part of an ASCII
+   * string literal.
+   */
+  protected String add_escapes(String str) {
+      StringBuffer retval = new StringBuffer();
+      char ch;
+      for (int i = 0; i < str.length(); i++) {
+        switch (str.charAt(i))
+        {
+           case 0 :
+              continue;
+           case '\b':
+              retval.append("\\b");
+              continue;
+           case '\t':
+              retval.append("\\t");
+              continue;
+           case '\n':
+              retval.append("\\n");
+              continue;
+           case '\f':
+              retval.append("\\f");
+              continue;
+           case '\r':
+              retval.append("\\r");
+              continue;
+           case '\"':
+              retval.append("\\\"");
+              continue;
+           case '\'':
+              retval.append("\\\'");
+              continue;
+           case '\\':
+              retval.append("\\\\");
+              continue;
+           default:
+              if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
+                 String s = "0000" + Integer.toString(ch, 16);
+                 retval.append("\\u" + s.substring(s.length() - 4, s.length()));
+              } else {
+                 retval.append(ch);
+              }
+              continue;
+        }
+      }
+      return retval.toString();
+   }
+
+}


