hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From st...@apache.org
Subject svn commit: r582442 [1/2] - in /lucene/hadoop/trunk/src/contrib/hbase: ./ bin/ conf/ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/shell/ src/java/org/apache/hadoop/hbase/shell/generated/ src/test/ src/test/org/apache/hadoop/hbase/...
Date Sat, 06 Oct 2007 03:09:52 GMT
Author: stack
Date: Fri Oct  5 20:09:50 2007
New Revision: 582442

URL: http://svn.apache.org/viewvc?rev=582442&view=rev
Log:
HADOOP-1957 Web UI with report on cluster state and basic browsing of tables

Modified:
    lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
    lucene/hadoop/trunk/src/contrib/hbase/bin/hbase
    lucene/hadoop/trunk/src/contrib/hbase/build.xml
    lucene/hadoop/trunk/src/contrib/hbase/conf/hbase-default.xml
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConstants.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HServerInfo.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ClearCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DisableCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/EnableCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/FsCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/JarCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/TableFormatter.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/TableFormatterFactory.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/Parser.java
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParserTokenManager.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/hbase-site.xml
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestToString.java
    lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/TestHBaseShell.java

Modified: lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt Fri Oct  5 20:09:50 2007
@@ -22,6 +22,7 @@
                 (Inchul Song and Edward Yoon via Stack)
     HADOOP-1913 Build a Lucene index on an HBase table
                 (Ning Li via Stack)
+    HADOOP-1957 Web UI with report on cluster state and basic browsing of tables
 
   OPTIMIZATIONS
 

Modified: lucene/hadoop/trunk/src/contrib/hbase/bin/hbase
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/bin/hbase?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/bin/hbase (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/bin/hbase Fri Oct  5 20:09:50 2007
@@ -116,6 +116,9 @@
 if [ -d "$HADOOP_HOME/build/classes" ]; then
   CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
 fi
+if [ -d "$HADOOP_HOME/build/contrib/hbase/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/contrib/hbase/
+fi
 if [ -d "$HADOOP_HOME/build/webapps" ]; then
   CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
 fi

Modified: lucene/hadoop/trunk/src/contrib/hbase/build.xml
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/build.xml?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/build.xml (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/build.xml Fri Oct  5 20:09:50 2007
@@ -26,20 +26,44 @@
 <project name="hbase" default="jar">
   <import file="../build-contrib.xml"/>
 
-<target name="javacc" if="javacc.home">
-<echo message="javacc.home: ${javacc.home}"/>
-<property name="hbaseshell.src.dir" 
-   value="${src.dir}/org/apache/hadoop/hbase/shell" /> 
- <mkdir dir="${hbaseshell.src.dir}/generated" />
-  <javacc
-         target="${hbaseshell.src.dir}/HBaseShell.jj"
+  <property name="build.webapps" value="${build.dir}/webapps"/>
+  <property name="src.webapps" value="${basedir}/src/webapps" />
+
+  <target name="init">
+    <antcall target="hadoopbuildcontrib.init"/>
+    <!--Version is set only if called from hadoop build.xml. Set a default-->
+    <condition property="version" value="0.15.0-dev">
+      <not>
+        <isset property="version" />
+      </not>
+    </condition>
+    <!--Copy webapps over to build dir. Exclude jsp and generated-src java
+      classes -->
+    <mkdir dir="${build.webapps}"/>
+    <copy todir="${build.webapps}">
+      <fileset dir="${src.webapps}">
+        <exclude name="**/*.jsp" />
+        <exclude name="**/.*" />
+        <exclude name="**/*~" />
+      </fileset>
+    </copy>
+  </target>
+
+  <target name="javacc" if="javacc.home">
+  <echo message="javacc.home: ${javacc.home}"/>
+  <property name="hbaseshell.src.dir" 
+       value="${src.dir}/org/apache/hadoop/hbase/shell" /> 
+     <mkdir dir="${hbaseshell.src.dir}/generated" />
+     <javacc
+       target="${hbaseshell.src.dir}/HBaseShell.jj"
        outputdirectory="${hbaseshell.src.dir}/generated"
-        javacchome="${javacc.home}"
-   />
-</target>
+       javacchome="${javacc.home}"
+     />
+  </target>
 
-<target name="compile" depends="init,javacc">
+  <target name="compile" depends="init,javacc">
    <echo message="contrib: ${name}"/>
+   <!--Compile whats under src and generated java classes made from jsp-->
    <javac
     encoding="${build.encoding}"
     srcdir="${src.dir}"
@@ -50,13 +74,14 @@
      <classpath refid="classpath"/>
      <classpath path="path"/>
    </javac>
-</target>
+  </target>
 	
   <!-- Override jar target to specify main class -->
   <target name="jar" depends="compile">
     <jar jarfile="${build.dir}/hadoop-${version}-${name}.jar"
         basedir="${build.classes}" >
       <fileset file="${root}/conf/hbase-default.xml"/>
+      <zipfileset dir="${build.webapps}" prefix="webapps"/>
     </jar>
   </target>
 
@@ -85,21 +110,17 @@
   </target>
 
   <!-- the unit test classpath
-    Copied from ../build-contrib.xml so can add 
-  -->
+    Copied from ../build-contrib.xml so can add to it.
+   -->
   <path id="test.classpath">
     <pathelement location="${build.test}" />
+    <pathelement location="${src.test}"/>
     <pathelement location="${hadoop.root}/build/test/classes"/>
     <pathelement location="${hadoop.root}/src/contrib/test"/>
     <pathelement location="${conf.dir}"/>
     <pathelement location="${hadoop.root}/build"/>
-    <pathelement location="${src.test}"/>
     <pathelement location="${root}/conf"/>
+    <pathelement location="${build.dir}"/>
     <path refid="classpath"/>
   </path>
-  
-  <target name="test">
-    <antcall target="hadoopbuildcontrib.test"/>
-  </target>  
-
 </project>

Modified: lucene/hadoop/trunk/src/contrib/hbase/conf/hbase-default.xml
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/conf/hbase-default.xml?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/conf/hbase-default.xml (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/conf/hbase-default.xml Fri Oct  5 20:09:50 2007
@@ -30,12 +30,38 @@
     </description>
   </property>
   <property>
+    <name>hbase.master.info.port</name>
+    <value>60010</value>
+    <description>The port for the hbase master web UI
+    Set to -1 if you do not want the info server to run.
+    </description>
+  </property>
+  <property>
+    <name>hbase.master.info.bindAddress</name>
+    <value>0.0.0.0</value>
+    <description>The address for the hbase master web UI
+    </description>
+  </property>
+  <property>
     <name>hbase.regionserver</name>
-    <value>0.0.0.0:60010</value>
+    <value>0.0.0.0:60020</value>
     <description>The host and port a HBase region server runs at.
     </description>
   </property>
   <property>
+    <name>hbase.regionserver.info.port</name>
+    <value>60030</value>
+    <description>The port for the hbase regionserver web UI
+    Set to -1 if you do not want the info server to run.
+    </description>
+  </property>
+  <property>
+    <name>hbase.regionserver.info.bindAddress</name>
+    <value>0.0.0.0</value>
+    <description>The address for the hbase regionserver web UI
+    </description>
+  </property>
+  <property>
     <name>hbase.regionserver.class</name>
     <value>org.apache.hadoop.hbase.HRegionInterface</value>
     <description>An interface that is assignable to HRegionInterface.  Used in HClient for
@@ -174,6 +200,12 @@
   	<value>true</value>
   	<description>
   		if true, enable audible keyboard bells if an alert is required.
+  	</description>
+  </property>  
+  <property>
+ 	<name>hbaseshell.formatter</name>
+  	<value>org.apache.hadoop.hbase.shell.formatter.AsciiTableFormatter</value>
+  	<description>TableFormatter to use outputting HQL result sets.
   	</description>
   </property>  
 </configuration>

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConstants.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConstants.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConstants.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HConstants.java Fri Oct  5 20:09:50 2007
@@ -46,12 +46,16 @@
   
   /** Default master address */
   static final String DEFAULT_MASTER_ADDRESS = DEFAULT_HOST + ":60000";
+  
+  static final int DEFAULT_MASTER_INFOPORT = 60010;
 
   /** Parameter name for hbase.regionserver address. */
   static final String REGIONSERVER_ADDRESS = "hbase.regionserver";
   
   /** Default region server address */
-  static final String DEFAULT_REGIONSERVER_ADDRESS = DEFAULT_HOST + ":60010";  
+  static final String DEFAULT_REGIONSERVER_ADDRESS = DEFAULT_HOST + ":60020";
+  
+  static final int DEFAULT_REGIONSERVER_INFOPORT = 60030;
 
   /** Parameter name for what region server interface to use. */
   static final String REGION_SERVER_CLASS = "hbase.regionserver.class";

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMaster.java Fri Oct  5 20:09:50 2007
@@ -51,19 +51,19 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.ipc.Server;
-
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.util.InfoServer;
 import org.apache.hadoop.hbase.util.Sleeper;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.util.Writables;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.ipc.Server;
 
 
 /**
@@ -120,6 +120,11 @@
   
   // A Sleeper that sleeps for threadWakeFrequency
   protected Sleeper sleeper;
+  
+  // Default access so accesible from unit tests. MASTER is name of the webapp
+  // and the attribute name used stuffing this instance into web context.
+  InfoServer infoServer;
+  public static final String MASTER = "master";
 
   /**
    * Base HRegion scanner class. Holds utilty common to <code>ROOT</code> and
@@ -538,7 +543,7 @@
   Integer rootScannerLock = new Integer(0);
 
   @SuppressWarnings("unchecked")
-  static class MetaRegion implements Comparable {
+  public static class MetaRegion implements Comparable {
     private HServerAddress server;
     private Text regionName;
     private Text startKey;
@@ -959,13 +964,48 @@
   public HServerAddress getMasterAddress() {
     return address;
   }
+  
+  /**
+   * @return Hbase root dir.
+   */
+  public Path getRootDir() {
+    return this.dir;
+  }
+
+  /**
+   * @return Read-only map of servers to serverinfo.
+   */
+  public Map<String, HServerInfo> getServersToServerInfo() {
+    return Collections.unmodifiableMap(this.serversToServerInfo);
+  }
+
+  /**
+   * @return Read-only map of servers to load.
+   */
+  public Map<String, HServerLoad> getServersToLoad() {
+    return Collections.unmodifiableMap(this.serversToLoad);
+  }
+
+  /**
+   * @return Location of the <code>-ROOT-</code> region.
+   */
+  public HServerAddress getRootRegionLocation() {
+    return this.rootRegionLocation.get();
+  }
+  
+  /**
+   * @return Read-only map of online regions.
+   */
+  public Map<Text, MetaRegion> getOnlineMetaRegions() {
+    return Collections.unmodifiableSortedMap(this.onlineMetaRegions);
+  }
 
   /** Main processing loop */
   @Override
   public void run() {
     final String threadName = "HMaster";
     Thread.currentThread().setName(threadName);
-    startAllServices();
+    startServiceThreads();
     /*
      * Main processing loop
      */
@@ -1042,11 +1082,18 @@
     synchronized(metaScannerLock) {
       metaScannerThread.interrupt();    // Wake meta scanner
     }
+    if (this.infoServer != null) {
+      LOG.info("Stopping infoServer");
+      try {
+        this.infoServer.stop();
+      } catch (InterruptedException ex) {
+        ex.printStackTrace();
+      }
+    }
     server.stop();                      // Stop server
     serverLeases.close();               // Turn off the lease monitor
 
     // Join up with all threads
-
     try {
       rootScannerThread.join();         // Wait for the root scanner to finish.
     } catch (Exception iex) {
@@ -1067,7 +1114,7 @@
    *  as OOMEs; it should be lightly loaded. See what HRegionServer does if
    *  need to install an unexpected exception handler.
    */
-  private void startAllServices() {
+  private void startServiceThreads() {
     String threadName = Thread.currentThread().getName();
     try {
       Threads.setDaemonThreadRunning(this.rootScannerThread,
@@ -1077,7 +1124,15 @@
       // Leases are not the same as Chore threads. Set name differently.
       this.serverLeases.setName(threadName + ".leaseChecker");
       this.serverLeases.start();
-      // Start the server last so everything else is running before we start
+      // Put up info server.
+      int port = this.conf.getInt("hbase.master.info.port", 60010);
+      if (port >= 0) {
+        String a = this.conf.get("hbase.master.info.bindAddress", "0.0.0.0");
+        this.infoServer = new InfoServer(MASTER, a, port, false);
+        this.infoServer.setAttribute(MASTER, this);
+        this.infoServer.start();
+      }
+      // Start the server so everything else is running before we start
       // receiving requests.
       this.server.start();
     } catch (IOException e) {

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HMemcache.java Fri Oct  5 20:09:50 2007
@@ -481,14 +481,13 @@
 
     /** Shut down map iterators, and release the lock */
     public void close() {
-      if(! scannerClosed) {
+      if (!scannerClosed) {
         try {
-          for(int i = 0; i < keys.length; i++) {
+          for (int i = 0; i < keys.length; i++) {
             if(keyIterators[i] != null) {
               closeSubScanner(i);
             }
           }
-          
         } finally {
           lock.releaseReadLock();
           scannerClosed = true;

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegion.java Fri Oct  5 20:09:50 2007
@@ -314,7 +314,7 @@
   }
 
   /** Returns a HRegionInfo object for this region */
-  HRegionInfo getRegionInfo() {
+  public HRegionInfo getRegionInfo() {
     return this.regionInfo;
   }
 

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegionServer.java Fri Oct  5 20:09:50 2007
@@ -51,6 +51,7 @@
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.util.InfoServer;
 import org.apache.hadoop.hbase.util.Sleeper;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.util.Writables;
@@ -117,6 +118,12 @@
   // A sleeper that sleeps for msgInterval.
   private final Sleeper sleeper;
 
+  // Info server.  Default access so can be used by unit tests.  REGIONSERVER
+  // is name of the webapp and the attribute name used stuffing this instance
+  // into web context.
+  InfoServer infoServer;
+  public static final String REGIONSERVER = "regionserver";
+
   // Check to see if regions should be split
   private final Thread splitOrCompactCheckerThread;
   // Needed at shutdown. On way out, if can get this lock then we are not in
@@ -408,7 +415,8 @@
       false, conf);
     this.serverInfo = new HServerInfo(new HServerAddress(
       new InetSocketAddress(getThisIP(),
-      this.server.getListenerAddress().getPort())), this.rand.nextLong());
+      this.server.getListenerAddress().getPort())), this.rand.nextLong(),
+      this.conf.getInt("hbase.regionserver.info.port", 60030));
      this.leases = new Leases(
        conf.getInt("hbase.regionserver.lease.period", 3 * 60 * 1000),
        this.threadWakeFrequency);
@@ -546,7 +554,15 @@
     this.leases.closeAfterLeasesExpire();
     this.worker.stop();
     this.server.stop();
-    
+    if (this.infoServer != null) {
+      LOG.info("Stopping infoServer");
+      try {
+        this.infoServer.stop();
+      } catch (InterruptedException ex) {
+        ex.printStackTrace();
+      }
+    }
+
     // Send interrupts to wake up threads if sleeping so they notice shutdown.
     // TODO: Should we check they are alive?  If OOME could have exited already
     synchronized(logRollerLock) {
@@ -689,6 +705,14 @@
     // an unhandled exception, it will just exit.
     this.leases.setName(n + ".leaseChecker");
     this.leases.start();
+    // Put up info server.
+    int port = this.conf.getInt("hbase.regionserver.info.port", 60030);
+    if (port >= 0) {
+      String a = this.conf.get("hbase.master.info.bindAddress", "0.0.0.0");
+      this.infoServer = new InfoServer("regionserver", a, port, false);
+      this.infoServer.setAttribute("regionserver", this);
+      this.infoServer.start();
+    }
     // Start Server.  This service is like leases in that it internally runs
     // a thread.
     this.server.start();
@@ -1050,7 +1074,7 @@
       if (s == null) {
         throw new UnknownScannerException("Name: " + scannerName);
       }
-      leases.renewLease(scannerId, scannerId);
+      this.leases.renewLease(scannerId, scannerId);
 
       // Collect values to be returned here
       MapWritable values = new MapWritable();
@@ -1131,11 +1155,9 @@
   // remote scanner interface
   //
 
-  /** {@inheritDoc} */
   public long openScanner(Text regionName, Text[] cols, Text firstRow,
       final long timestamp, final RowFilterInterface filter)
     throws IOException {
-
     checkOpen();
     requestCount.incrementAndGet();
     try {
@@ -1148,7 +1170,8 @@
       synchronized(scanners) {
         scanners.put(scannerName, s);
       }
-      leases.createLease(scannerId, scannerId, new ScannerListener(scannerName));
+      this.leases.
+        createLease(scannerId, scannerId, new ScannerListener(scannerName));
       return scannerId;
     } catch (IOException e) {
       LOG.error("Error opening scanner (fsOk: " + this.fsOk + ")",
@@ -1172,8 +1195,7 @@
         throw new UnknownScannerException(scannerName);
       }
       s.close();
-      leases.cancelLease(scannerId, scannerId);
-      
+      this.leases.cancelLease(scannerId, scannerId);
     } catch (IOException e) {
       checkFileSystem();
       throw e;
@@ -1250,6 +1272,24 @@
     region.commit(lockid, timestamp);
   }
 
+  /**
+   * @return Info on this server.
+   */
+  public HServerInfo getServerInfo() {
+    return this.serverInfo;
+  }
+
+  /**
+   * @return Immutable list of this servers regions.
+   */
+  public SortedMap<Text, HRegion> getOnlineRegions() {
+    return Collections.unmodifiableSortedMap(this.onlineRegions);
+  }
+
+  public AtomicInteger getRequestCount() {
+    return this.requestCount;
+  }
+  
   /** 
    * Protected utility method for safely obtaining an HRegion handle.
    * @param regionName Name of online {@link HRegion} to return

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HServerInfo.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HServerInfo.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HServerInfo.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HServerInfo.java Fri Oct  5 20:09:50 2007
@@ -34,23 +34,25 @@
   private HServerAddress serverAddress;
   private long startCode;
   private HServerLoad load;
+  private int infoPort;
 
   /** default constructor - used by Writable */
   public HServerInfo() {
-    this.serverAddress = new HServerAddress();
-    this.startCode = 0;
-    this.load = new HServerLoad();
+    this(new HServerAddress(), 0, HConstants.DEFAULT_REGIONSERVER_INFOPORT);
   }
   
   /**
    * Constructor
    * @param serverAddress
    * @param startCode
+   * @param infoPort Port the info server is listening on.
    */
-  public HServerInfo(HServerAddress serverAddress, long startCode) {
-    this.serverAddress = new HServerAddress(serverAddress);
+  public HServerInfo(HServerAddress serverAddress, long startCode,
+      final int infoPort) {
+    this.serverAddress = serverAddress;
     this.startCode = startCode;
     this.load = new HServerLoad();
+    this.infoPort = infoPort;
   }
   
   /**
@@ -61,6 +63,7 @@
     this.serverAddress = new HServerAddress(other.getServerAddress());
     this.startCode = other.getStartCode();
     this.load = other.getLoad();
+    this.infoPort = other.getInfoPort();
   }
   
   /**
@@ -88,6 +91,13 @@
   }
   
   /**
+   * @return Port the info server is listening on.
+   */
+  public int getInfoPort() {
+    return this.infoPort;
+  }
+  
+  /**
    * @param startCode the startCode to set
    */
   public void setStartCode(long startCode) {
@@ -101,13 +111,40 @@
     + ", load: (" + this.load.toString() + ")";
   }
 
-  // Writable
+  @Override
+  public boolean equals(Object obj) {
+    if (!(obj instanceof HServerInfo)) {
+      return false;
+    }
+    HServerInfo that = (HServerInfo)obj;
+    if (!this.serverAddress.equals(that.serverAddress)) {
+      return false;
+    }
+    if (this.infoPort != that.infoPort) {
+      return false;
+    }
+    if (this.startCode != that.startCode) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    int result = this.serverAddress.hashCode();
+    result ^= this.infoPort;
+    result ^= this.startCode;
+    return result;
+  }
+
 
+  // Writable
   /** {@inheritDoc} */
   public void readFields(DataInput in) throws IOException {
     this.serverAddress.readFields(in);
     this.startCode = in.readLong();
     this.load.readFields(in);
+    this.infoPort = in.readInt();
   }
 
   /** {@inheritDoc} */
@@ -115,5 +152,6 @@
     this.serverAddress.write(out);
     out.writeLong(this.startCode);
     this.load.write(out);
+    out.writeInt(this.infoPort);
   }
-}
\ No newline at end of file
+}

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Leases.java Fri Oct  5 20:09:50 2007
@@ -87,7 +87,8 @@
   public void closeAfterLeasesExpire() {
     synchronized(this.leases) {
       while (this.leases.size() > 0) {
-        LOG.info(Integer.toString(leases.size()) + " lease(s) " +
+        LOG.info(Thread.currentThread().getName() + " " +
+          Integer.toString(leases.size()) + " lease(s) " +
           "outstanding. Waiting for them to expire.");
         try {
           this.leases.wait(this.leaseCheckFrequency);
@@ -105,7 +106,7 @@
    * without any cancellation calls.
    */
   public void close() {
-    LOG.info("closing leases");
+    LOG.info(Thread.currentThread().getName() + " closing leases");
     this.stop.set(true);
     try {
       this.leaseMonitorThread.interrupt();
@@ -119,7 +120,7 @@
         sortedLeases.clear();
       }
     }
-    LOG.info("leases closed");
+    LOG.info(Thread.currentThread().getName() + " closed leases");
   }
 
   /* A client obtains a lease... */
@@ -330,8 +331,8 @@
     }
     
     void expired() {
-      LOG.info("Lease expired " + getLeaseName());
-
+      LOG.info(Thread.currentThread().getName() + " lease expired " +
+        getLeaseName());
       listener.leaseExpired();
     }
     

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java Fri Oct  5 20:09:50 2007
@@ -20,13 +20,16 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
 
 import jline.ConsoleReader;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.shell.Command;
-import org.apache.hadoop.hbase.shell.HelpManager;
+import org.apache.hadoop.hbase.shell.HelpCommand;
 import org.apache.hadoop.hbase.shell.ReturnMsg;
+import org.apache.hadoop.hbase.shell.TableFormatterFactory;
 import org.apache.hadoop.hbase.shell.generated.ParseException;
 import org.apache.hadoop.hbase.shell.generated.Parser;
 import org.apache.hadoop.hbase.shell.generated.TokenMgrError;
@@ -39,7 +42,31 @@
 public class Shell {
   /** audible keyboard bells */
   public static final boolean DEFAULT_BELL_ENABLED = true;
+  
 
+  /** Return the boolean value indicating whether end of command or not */
+  static boolean isEndOfCommand(String line) {
+    return (line.lastIndexOf(';') > -1) ? true : false;
+  }
+
+  /** Return the string of prompt start string */
+  private static String getPrompt(final StringBuilder queryStr) {
+    return (queryStr.toString().equals("")) ? "Hbase> " : "   --> ";
+  }
+
+  /**
+   * @param watch true if execution time should be computed and returned
+   * @param start start of time interval
+   * @param end end of time interval
+   * @return a string of code execution time. */
+  public static String executeTime(boolean watch, long start, long end) {
+    return watch?
+      " (" + String.format("%.2f", Double.valueOf((end - start) * 0.001)) +
+        " sec)":
+      "";
+  }
+
+ 
   /**
    * Main method
    * @param args not used
@@ -50,8 +77,10 @@
     Configuration conf = new HBaseConfiguration();
     ConsoleReader reader = new ConsoleReader();
     reader.setBellEnabled(conf.getBoolean("hbaseshell.jline.bell.enabled",
-        DEFAULT_BELL_ENABLED));
-    HelpManager help = new HelpManager();
+      DEFAULT_BELL_ENABLED));
+    Writer out = new OutputStreamWriter(System.out, "UTF-8");
+    TableFormatterFactory tff = new TableFormatterFactory(out, conf);
+    HelpCommand help = new HelpCommand(out, tff.get());
     help.printVersion();
     StringBuilder queryStr = new StringBuilder();
     String extendedLine;
@@ -59,7 +88,7 @@
       if (isEndOfCommand(extendedLine)) {
         queryStr.append(" " + extendedLine);
         long start = System.currentTimeMillis();
-        Parser parser = new Parser(queryStr.toString());
+        Parser parser = new Parser(queryStr.toString(), out, tff.get());
         ReturnMsg rs = null;
         try {
           Command cmd = parser.terminatedCommand();
@@ -84,27 +113,5 @@
       }
     }
     System.out.println();
-  }
-
-  /** Return the boolean value indicating whether end of command or not */
-  static boolean isEndOfCommand(String line) {
-    return (line.lastIndexOf(';') > -1) ? true : false;
-  }
-
-  /** Return the string of prompt start string */
-  private static String getPrompt(final StringBuilder queryStr) {
-    return (queryStr.toString().equals("")) ? "Hbase> " : "   --> ";
-  }
-
-  /**
-   * @param watch true if execution time should be computed and returned
-   * @param start start of time interval
-   * @param end end of time interval
-   * @return a string of code execution time. */
-  public static String executeTime(boolean watch, long start, long end) {
-    return watch?
-      " (" + String.format("%.2f", Double.valueOf((end - start) * 0.001)) +
-        " sec)":
-      "";
   }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java Fri Oct  5 20:09:50 2007
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase.shell;
 
 import java.io.IOException;
+import java.io.Writer;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
@@ -33,7 +34,6 @@
  * Alters tables.
  */
 public class AlterCommand extends SchemaModificationCommand {
-  
   public enum OperationType {ADD, DROP, CHANGE, NOOP}
   private OperationType operationType = OperationType.NOOP;
   private Map<String, Map<String, Object>> columnSpecMap =
@@ -41,6 +41,10 @@
   private String table;
   private String column; // column to be dropped
 
+  public AlterCommand(Writer o) {
+    super(o);
+  }
+
   public ReturnMsg execute(Configuration conf) {
     try {
       HBaseAdmin admin = new HBaseAdmin(conf);
@@ -52,7 +56,7 @@
         columns = columnSpecMap.keySet();
         for (String c : columns) {
           columnDesc = getColumnDescriptor(c, columnSpecMap.get(c));
-          System.out.println("Adding " + c + " to " + table +
+          println("Adding " + c + " to " + table +
             "... Please wait.");
           admin.addColumn(new Text(table), columnDesc);
         }
@@ -60,7 +64,7 @@
         break;
       case DROP:
         disableTable(admin, table);
-        System.out.println("Dropping " + column + " from " + table +
+        println("Dropping " + column + " from " + table +
           "... Please wait.");
         column = appendDelimiter(column);
         admin.deleteColumn(new Text(table), new Text(column));
@@ -79,12 +83,12 @@
   }
 
   private void disableTable(HBaseAdmin admin, String t) throws IOException {
-    System.out.println("Disabling " + t + "... Please wait.");
+    println("Disabling " + t + "... Please wait.");
     admin.disableTable(new Text(t));
   }
 
   private void enableTable(HBaseAdmin admin, String t) throws IOException {
-    System.out.println("Enabling " + t + "... Please wait.");
+    println("Enabling " + t + "... Please wait.");
     admin.enableTable(new Text(t));
   }
 
@@ -123,5 +127,10 @@
    */
   public void setOperationType(OperationType operationType) {
     this.operationType = operationType;
+  }
+  
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.DDL;
   }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java Fri Oct  5 20:09:50 2007
@@ -19,11 +19,32 @@
  */
 package org.apache.hadoop.hbase.shell;
 
+import java.io.IOException;
+import java.io.Writer;
+
 /**
+ * Takes the lowest-common-denominator {@link Writer} doing its own printlns,
+ * etc.
  * @see <a href="http://wiki.apache.org/lucene-hadoop/Hbase/HbaseShell">HBaseShell</a>
  */
 public abstract class BasicCommand implements Command, CommandFactory {
+  private final Writer out;
+  public final String LINE_SEPARATOR = System.getProperty("line.separator");
+
+  // Shutdown constructor.
+  @SuppressWarnings("unused")
+  private BasicCommand() {
+    this(null);
+  }
   
+  /**
+   * Constructor
+   * @param o A Writer.
+   */
+  public BasicCommand(final Writer o) {
+    this.out = o;
+  }
+
   public BasicCommand getBasicCommand() {
     return this;
   }
@@ -50,5 +71,26 @@
   protected String appendDelimiter(String column) {
     return (!column.endsWith(FAMILY_INDICATOR))?
       column + FAMILY_INDICATOR: column;
-  } 
+  }
+
+  /**
+   * @return Writer to use outputting.
+   */
+  public Writer getOut() {
+    return this.out;
+  }
+  
+  public void print(final String msg) throws IOException {
+    this.out.write(msg);
+  }
+  
+  public void println(final String msg) throws IOException {
+    print(msg);
+    print(LINE_SEPARATOR);
+    this.out.flush();
+  }
+  
+  public CommandType getCommandType() {
+    return CommandType.SELECT;
+  }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ClearCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ClearCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ClearCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ClearCommand.java Fri Oct  5 20:09:50 2007
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase.shell;
 
 import java.io.IOException;
+import java.io.Writer;
 
 import org.apache.hadoop.conf.Configuration;
 
@@ -27,21 +28,38 @@
  * Clears the console screen. 
  */
 public class ClearCommand extends BasicCommand {
+  public ClearCommand(Writer o) {
+    super(o);
+  }
+
   public ReturnMsg execute(@SuppressWarnings("unused") Configuration conf) {
     clear();
     return null;
   }
 
-  static void clear() {
+  private void clear() {
     String osName = System.getProperty("os.name");
     if (osName.length() > 7 && osName.subSequence(0, 7).equals("Windows")) {
       try {
         Runtime.getRuntime().exec("cmd /C cls");
       } catch (IOException e) {
-        System.out.println("Can't clear." + e.toString());
+        try {
+          println("Can't clear." + e.toString());
+        } catch (IOException e1) {
+          e1.printStackTrace();
+        }
       }
     } else {
-      System.out.print("\033c");
+      try {
+        print("\033c");
+      } catch (IOException e) {
+        e.printStackTrace();
+      }
     }
+  }
+  
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.SHELL;
   }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java Fri Oct  5 20:09:50 2007
@@ -24,10 +24,18 @@
 public interface Command {
   /** family indicator */
   public static final String FAMILY_INDICATOR = ":";
+  
+  public enum CommandType {DDL, UPDATE, SELECT, INSERT, DELETE, SHELL}
 
   /** Execute a command
    * @param conf Configuration
    * @return Result of command execution
    */
-  public ReturnMsg execute(Configuration conf);
+  public ReturnMsg execute(final Configuration conf);
+
+  /**
+   * @return Type of this command whether DDL, SELECT, INSERT, UPDATE, DELETE,
+   * or SHELL.
+   */
+  public CommandType getCommandType();
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java Fri Oct  5 20:09:50 2007
@@ -19,6 +19,7 @@
  */
 package org.apache.hadoop.hbase.shell;
 
+import java.io.Writer;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
@@ -32,11 +33,14 @@
  * Creates tables.
  */
 public class CreateCommand extends SchemaModificationCommand {
-  
   private String tableName;
   private Map<String, Map<String, Object>> columnSpecMap =
     new HashMap<String, Map<String, Object>>();
-
+  
+  public CreateCommand(Writer o) {
+    super(o);
+  }
+  
   public ReturnMsg execute(Configuration conf) {
     try {
       HBaseAdmin admin = new HBaseAdmin(conf);
@@ -48,7 +52,7 @@
         tableDesc.addFamily(columnDesc);
       }
       
-      System.out.println("Creating table... Please wait.");
+      println("Creating table... Please wait.");
       
       admin.createTable(tableDesc);
       return new ReturnMsg(0, "Table created successfully.");
@@ -72,5 +76,10 @@
    */
   public void addColumnSpec(String column, Map<String, Object> columnSpec) {
     columnSpecMap.put(column, columnSpec);
-  } 
+  }
+  
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.DDL;
+  }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java Fri Oct  5 20:09:50 2007
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase.shell;
 
 import java.io.IOException;
+import java.io.Writer;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -32,7 +33,10 @@
  * Deletes values from tables.
  */
 public class DeleteCommand extends BasicCommand {
-  
+  public DeleteCommand(Writer o) {
+    super(o);
+  }
+
   private String tableName;
   private String rowKey;
   private List<String> columnList;
@@ -99,5 +103,10 @@
       e.printStackTrace();
     }
     return columns;
+  }
+  
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.DELETE;
   }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java Fri Oct  5 20:09:50 2007
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase.shell;
 
 import java.io.IOException;
+import java.io.Writer;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -35,8 +36,20 @@
   private static final String [] HEADER =
     new String [] {"Column Family Descriptor"};
   private Text tableName;
-
-  public ReturnMsg execute(Configuration conf) {
+  private final TableFormatter formatter;
+  
+  // Not instantiable
+  @SuppressWarnings("unused")
+  private DescCommand() {
+    this(null, null);
+  }
+  
+  public DescCommand(final Writer o, final TableFormatter f) {
+    super(o);
+    this.formatter = f;
+  }
+  
+  public ReturnMsg execute(final Configuration conf) {
     if (this.tableName == null) 
       return new ReturnMsg(0, "Syntax error : Please check 'Describe' syntax");
     try {
@@ -53,7 +66,6 @@
           break;
         }
       }
-      TableFormatter formatter = TableFormatterFactory.get();
       formatter.header(HEADER);
       // Do a toString on the HColumnDescriptors
       String [] columnStrs = new String[columns.length];
@@ -75,5 +87,5 @@
 
   public void setArgument(String table) {
     this.tableName = new Text(table);
-  } 
+  }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DisableCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DisableCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DisableCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DisableCommand.java Fri Oct  5 20:09:50 2007
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase.shell;
 
 import java.io.IOException;
+import java.io.Writer;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseAdmin;
@@ -30,6 +31,10 @@
  */
 public class DisableCommand extends BasicCommand {
   private String tableName;
+  
+  public DisableCommand(Writer o) {
+    super(o);
+  }
  
   public ReturnMsg execute(Configuration conf) {
     assert tableName != null;
@@ -47,5 +52,10 @@
 
   public void setTable(String table) {
     this.tableName = table;
-  } 
+  }
+  
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.DDL;
+  }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java Fri Oct  5 20:09:50 2007
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase.shell;
 
 import java.io.IOException;
+import java.io.Writer;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
@@ -31,6 +32,10 @@
  */
 public class DropCommand extends BasicCommand {
   private List<String> tableList;
+  
+  public DropCommand(Writer o) {
+    super(o);
+  }
 
   public ReturnMsg execute(Configuration conf) {
     if (tableList == null) {
@@ -41,7 +46,7 @@
       HBaseAdmin admin = new HBaseAdmin(conf);
       
       for (String table : tableList) {
-        System.out.println("Dropping " + table + "... Please wait.");
+        println("Dropping " + table + "... Please wait.");
         admin.deleteTable(new Text(table));
       }
       
@@ -53,5 +58,10 @@
 
   public void setTableList(List<String> tableList) {
     this.tableList = tableList;
+  }
+  
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.DDL;
   }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/EnableCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/EnableCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/EnableCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/EnableCommand.java Fri Oct  5 20:09:50 2007
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase.shell;
 
 import java.io.IOException;
+import java.io.Writer;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseAdmin;
@@ -30,13 +31,16 @@
  */
 public class EnableCommand extends BasicCommand {
   private String tableName;
+  
+  public EnableCommand(Writer o) {
+    super(o);
+  }
  
   public ReturnMsg execute(Configuration conf) {
     assert tableName != null;
     try {
       HBaseAdmin admin = new HBaseAdmin(conf);
       admin.enableTable(new Text(tableName));
-      
       return new ReturnMsg(1, "Table enabled successfully.");
     } catch (IOException e) {
       String[] msg = e.getMessage().split("[\n]");
@@ -46,5 +50,10 @@
 
   public void setTable(String table) {
     this.tableName = table;
+  }
+  
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.DDL;
   }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java Fri Oct  5 20:09:50 2007
@@ -19,14 +19,24 @@
  */
 package org.apache.hadoop.hbase.shell;
 
+import java.io.Writer;
+
 import org.apache.hadoop.conf.Configuration;
 
 public class ExitCommand extends BasicCommand {
+  public ExitCommand(Writer o) {
+    super(o);
+  }
 
   public ReturnMsg execute(@SuppressWarnings("unused") Configuration conf) {
    // TODO: Is this the best way to exit?  Would be a problem if shell is run
     // inside another program -- St.Ack 09/11/2007
     System.exit(1);
     return null;
+  }
+  
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.SHELL;
   }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/FsCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/FsCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/FsCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/FsCommand.java Fri Oct  5 20:09:50 2007
@@ -19,6 +19,7 @@
  */
 package org.apache.hadoop.hbase.shell;
 
+import java.io.Writer;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
@@ -30,8 +31,13 @@
  */
 public class FsCommand extends BasicCommand {
   private List<String> query;
+  
+  public FsCommand(Writer o) {
+    super(o);
+  }
 
   public ReturnMsg execute(@SuppressWarnings("unused") Configuration conf) {
+    // This command will write the FsShell output directly to System.out,
+    // not to the shell's configured Writer.
     FsShell shell = new FsShell();
     try {
       ToolRunner.run(shell, getQuery());
@@ -48,5 +54,10 @@
 
   private String[] getQuery() {
     return query.toArray(new String[] {});
+  }
+  
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.SHELL;
   }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj Fri Oct  5 20:09:50 2007
@@ -32,6 +32,7 @@
 import java.util.HashMap;
 import java.io.StringReader;
 import java.io.Reader;
+import java.io.Writer;
 
 import org.apache.hadoop.hbase.shell.*;
 
@@ -40,10 +41,14 @@
  */
 public class Parser {
   private String QueryString;
+  private TableFormatter formatter;
+  private Writer out;
   
-  public Parser(String query) {
+  public Parser(final String query, final Writer o, final TableFormatter f) {
     this((Reader)(new StringReader(query)));
     this.QueryString = query;
+    this.formatter = f;
+    this.out = o;
   }
   
   public String getQueryStr() {
@@ -174,7 +179,7 @@
 
 ExitCommand exitCommand() :
 {
-  ExitCommand exit = new ExitCommand();
+  ExitCommand exit = new ExitCommand(this.out);
 }
 {
    <EXIT>  { return exit; }
@@ -183,7 +188,7 @@
 FsCommand fsCommand() :
 {
   Token t = null;
-  FsCommand fs = new FsCommand();
+  FsCommand fs = new FsCommand(this.out);
   List<String> query = new ArrayList<String>();
 }
 {
@@ -202,7 +207,7 @@
 JarCommand jarCommand() :
 {
   Token t = null;
-  JarCommand jar = new JarCommand();
+  JarCommand jar = new JarCommand(this.out);
   List<String> query = new ArrayList<String>();
 }
 {
@@ -221,7 +226,7 @@
 HelpCommand helpCommand() :
 {
   Token t = null;
-  HelpCommand help = new HelpCommand();
+  HelpCommand help = new HelpCommand(this.out, this.formatter);
   String argument = "";
 }
 {
@@ -252,7 +257,7 @@
 
 ShowCommand showCommand() :
 {
-  ShowCommand show = new ShowCommand();
+  ShowCommand show = new ShowCommand(this.out, this.formatter);
   String argument = null;
 }
 {
@@ -268,7 +273,7 @@
 
 DescCommand descCommand() :
 {
-  DescCommand desc = new DescCommand();
+  DescCommand desc = new DescCommand(this.out, this.formatter);
   String argument = null;
 }
 {
@@ -347,7 +352,7 @@
 
 CreateCommand createCommand() :
 {
-  CreateCommand createCommand = new CreateCommand();
+  CreateCommand createCommand = new CreateCommand(this.out);
   String table = null;
   Map<String, Object> columnSpec = null;
   String column = null;
@@ -383,7 +388,7 @@
 
 AlterCommand alterCommand() :
 {
-  AlterCommand alterCommand = new AlterCommand();
+  AlterCommand alterCommand = new AlterCommand(this.out);
   String table = null;
   String column = null;
   Map<String, Object> columnSpec = null;
@@ -439,7 +444,7 @@
 
 DropCommand dropCommand() :
 {
-  DropCommand drop = new DropCommand();
+  DropCommand drop = new DropCommand(this.out);
   List<String> tableList = null;
 }
 {
@@ -454,7 +459,7 @@
 
 InsertCommand insertCommand() :
 {
-  InsertCommand in = new InsertCommand();
+  InsertCommand in = new InsertCommand(this.out);
   List<String> columnfamilies = null;
   List<String> values = null;
   String table = null;
@@ -490,7 +495,7 @@
 
 DeleteCommand deleteCommand() :
 {
-  DeleteCommand deleteCommand = new DeleteCommand();
+  DeleteCommand deleteCommand = new DeleteCommand(this.out);
   List<String> columnList = null;
   Token t = null;
   String table = null;
@@ -519,7 +524,7 @@
 
 SelectCommand selectCommand() :
 {
-  SelectCommand select = new SelectCommand();
+  SelectCommand select = new SelectCommand(this.out, this.formatter);
   List<String> columns = null;
   String rowKey = "";
   String timestamp = null;
@@ -573,7 +578,7 @@
 
 EnableCommand enableCommand() :
 {
-  EnableCommand enableCommand = new EnableCommand();
+  EnableCommand enableCommand = new EnableCommand(this.out);
   String table = null;
 }
 {
@@ -587,7 +592,7 @@
 
 DisableCommand disableCommand() :
 {
-  DisableCommand disableCommand = new DisableCommand();
+  DisableCommand disableCommand = new DisableCommand(this.out);
   String table = null;
 }
 {
@@ -601,7 +606,7 @@
 
 ClearCommand clearCommand() :
 {
-  ClearCommand clear = new ClearCommand();
+  ClearCommand clear = new ClearCommand(this.out);
 }
 {
    <CLEAR>

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java Fri Oct  5 20:09:50 2007
@@ -19,17 +19,164 @@
  */
 package org.apache.hadoop.hbase.shell;
 
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.UnsupportedEncodingException;
+import java.io.Writer;
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 
 public class HelpCommand extends BasicCommand {
   private String argument;
+  private static final String [] HEADER =
+    new String [] {"Command", "Description", "Example"};
+  
+  /** application name */
+  public static final String APP_NAME = "Hbase Shell";
+
+  /** version of the code */
+  public static final String APP_VERSION = "0.0.2";
+
+  /** help contents map */
+  public final Map<String, String[]> help =
+    new HashMap<String, String[]>();
+    
+  private final TableFormatter formatter;
+  
+  public HelpCommand(final Writer o, final TableFormatter f) {
+    super(o);
+    this.help.putAll(load());
+    this.formatter = f;
+  }
 
   public ReturnMsg execute(@SuppressWarnings("unused") Configuration conf) {
-    HelpManager.printHelp(this.argument);
+    try {
+      printHelp(this.argument);
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
     return null;
   }
 
   public void setArgument(String argument) {
     this.argument = argument;
+  }
+
+  /**
+   * add help contents 
+   */
+  private Map<? extends String, ? extends String[]> load() {
+    Map<String, String[]> load = new HashMap<String, String[]>();
+    load.put("SHOW", new String[] {"List all user tables", "SHOW TABLES;"});
+    
+    load.put("FS", new String[] { "Hadoop FsShell; entering a lone 'FS;' " +
+      "will emit usage",
+      "FS -copyFromLocal /home/user/backup.dat fs/user/backup;"});
+    
+    load.put("JAR", new String[] { "Hadoop RunJar util", 
+      "JAR ./build/hadoop-examples.jar pi 10 10;"});
+    load.put("CLEAR", new String[] {"Clear the screen", "CLEAR;"} );
+    
+    load.put("DESCRIBE", new String[] { "Print table information",
+      "[DESCRIBE|DESC] table_name;"});
+    
+    load.put("CREATE", new String[] {
+        "Create tables",
+        "CREATE TABLE table_name (column_family_name [MAX_VERSIONS=n] " +
+        "[MAX_LENGTH=n] [COMPRESSION=NONE|RECORD|BLOCK] [IN_MEMORY] " +
+        "[BLOOMFILTER=NONE|BLOOM|COUNTING|RETOUCHED VECTOR_SIZE=n NUM_HASH=n], " +
+        "...)"
+    });
+    load.put("DROP", new String[] {
+        "Drop tables",
+        "DROP TABLE table_name [, table_name] ...;"});
+    
+    load.put("INSERT", new String[] {
+        "Insert values into table",
+        "INSERT INTO table_name (column_name, ...) " +
+        "VALUES ('value', ...) WHERE row='row_key';"
+    });
+    
+    load.put("DELETE", new String[] {
+        "Delete table data",
+        "DELETE {column_name, [, column_name] ... | *} FROM table_name " +
+        "WHERE row='row-key';"
+    });
+    
+    load.put("SELECT",
+        new String[] {
+            "Select values from table",
+            "SELECT {column_name, [, column_name] ... | *} FROM table_name " +
+            "[WHERE row='row_key' | STARTING FROM 'row-key'] " +
+            "[NUM_VERSIONS = version_count] " +
+            "[TIMESTAMP 'timestamp'] " +
+            "[LIMIT = row_count] " +
+            "[INTO FILE 'file_name'];"
+    });
+                
+    load.put("ALTER",
+        new String[] {
+            "Alter structure of table",
+            "ALTER TABLE table_name ADD column_spec | " +
+            "ADD (column_spec, column_spec, ...) | " +
+            "DROP column_family_name | " +
+            "CHANGE column_spec;"
+    });
+
+    load.put("EXIT", new String[] { "Exit shell", "EXIT;"});
+
+    return load;
+  }
+
+  /** Print out the program version. 
+   * @throws IOException */
+  public void printVersion() throws IOException {
+    println(APP_NAME + ", " + APP_VERSION + " version.\n"
+        + "Copyright (c) 2007 by udanax, "
+        + "licensed to Apache Software Foundation.\n"
+        + "Type 'help;' for usage.\n");
+  }
+
+  public void printHelp(final String cmd) throws IOException {
+    if (cmd.equals("")) {
+      println("Type 'help COMMAND;' to see command-specific usage.");
+      printHelp(this.help);
+    } else {
+      if (this.help.containsKey(cmd.toUpperCase())) {
+        final Map<String, String []> m = new HashMap<String, String []>();
+        m.put(cmd.toUpperCase(), this.help.get(cmd.toUpperCase()));
+        printHelp(m);
+      } else {
+        println("Unknown Command : Type 'help;' for usage.");
+      }
+    }
+  }
+  
+  private void printHelp(final Map<String, String []> m) throws IOException {
+    this.formatter.header(HEADER);
+    for (Map.Entry<String, String []> e: m.entrySet()) {
+      String [] value = e.getValue();
+      if (value.length == 2) {
+        this.formatter.row(new String [] {e.getKey().toUpperCase(), value[0],
+          value[1]});
+      } else {
+        throw new IOException("Value has too many elements:" + value);
+      }
+    }
+    this.formatter.footer();
+  }
+
+  public static void main(String[] args) throws UnsupportedEncodingException {
+    HBaseConfiguration conf = new HBaseConfiguration();
+    Writer out = new OutputStreamWriter(System.out, "UTF-8");
+    TableFormatterFactory tff = new TableFormatterFactory(out, conf);
+    HelpCommand cmd = new HelpCommand(out, tff.get());
+    cmd.setArgument("");
+    cmd.execute(conf);
+    cmd.setArgument("select");
+    cmd.execute(conf);
   }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java Fri Oct  5 20:09:50 2007
@@ -1,112 +0,0 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.shell;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class HelpContents {
-
-  /**
-   * add help contents 
-   */
-  public static Map<? extends String, ? extends String[]> Load() {
-    Map<String, String[]> load = new HashMap<String, String[]>();
-
-    String columnName = "column_name: " 
-      + "\n\t  column_family_name"
-      + "\n\t| column_family_name:column_label_name";    
-    String columnList = "{column_name, [, column_name] ... | *}";
-
-    load.put("SHOW", new String[] {"List all available tables", "SHOW TABLES;"});
-
-    load.put("FS", new String[] { "Hadoop FsShell operations",
-      "FS -copyFromLocal /home/user/backup.dat fs/user/backup;" });
-    
-    load.put("JAR", new String[] { "Hadoop RunJar util", 
-      "JAR ./build/hadoop-examples.jar pi 10 10;" });
-    
-    load.put("CLEAR", new String[] {"Clear the screen", "CLEAR;"} );
-    
-    load.put("DESCRIBE", new String[] { "Print information about tables",
-    "[DESCRIBE|DESC] table_name;" });
-    
-    load.put("CREATE", new String[] {
-        "Create tables",
-        "CREATE TABLE table_name"
-            + "\n\t(column_family_spec [, column_family_spec] ...);"
-            + "\n\n"
-      + "column_family_spec:"
-      + "\n\tcolumn_family_name"
-      + "\n\t[MAX_VERSIONS=n]"
-      + "\n\t[MAX_LENGTH=n]"
-      + "\n\t[COMPRESSION=NONE|RECORD|BLOCK]"
-      + "\n\t[IN_MEMORY]"
-      + "\n\t[BLOOMFILTER=NONE|BLOOM|COUNTING|RETOUCHED VECTOR_SIZE=n NUM_HASH=n]"
-    });
-    
-    load.put("DROP", new String[] {
-        "Drop tables",
-        "DROP TABLE table_name [, table_name] ...;" });
-    
-    load.put("INSERT", new String[] {
-        "Insert values into tables",
-        "INSERT INTO table_name"
-            + "\n\t(column_name, ...) VALUES ('value', ...)"
-            + "\n\tWHERE row='row_key';"
-            + "\n\n" + columnName            
-    });
-    
-    load.put("DELETE", new String[] {
-        "Delete a subset of the data in a table",
-        "DELETE " + columnList 
-            + "\n\tFROM table_name"
-            + "\n\tWHERE row='row-key';" 
-            + "\n\n"
-            + columnName
-    });
-    
-    load.put("SELECT",
-        new String[] {
-            "Select values from tables",
-            "SELECT " + columnList + " FROM table_name" 
-                + "\n\t[WHERE row='row_key' | STARTING FROM 'row-key']"
-                + "\n\t[NUM_VERSIONS = version_count]"
-                + "\n\t[TIMESTAMP 'timestamp']"
-                + "\n\t[LIMIT = row_count]"
-                + "\n\t[INTO FILE 'file_name'];"
-    });
-                
-    load.put("ALTER",
-        new String[] {
-            "Alter the structure of a table",
-            "ALTER TABLE table_name" 
-                + "\n\t  ADD column_spec"
-                + "\n\t| ADD (column_spec, column_spec, ...)"
-                + "\n\t| DROP column_family_name"
-                + "\n\t| CHANGE column_spec;" 
-    });
-
-    load.put("EXIT", new String[] { "Exit shell", "EXIT;" });
-
-    return load;
-  }
-
-}

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java Fri Oct  5 20:09:50 2007
@@ -1,82 +0,0 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.shell;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Prints a usage message for the program to the given stream.
- */
-public class HelpManager {
-  /** application name */
-  public static final String APP_NAME = "Hbase Shell";
-
-  /** version of the code */
-  public static final String APP_VERSION = "0.0.2";
-
-  /** help contents map */
-  public static final Map<String, String[]> help =
-    new HashMap<String, String[]>();
-
-  public HelpManager() {
-    help.putAll(HelpContents.Load());
-  }
-
-  /** Print out the program version. */
-  public void printVersion() {
-    System.out.println(APP_NAME + ", " + APP_VERSION + " version.\n"
-        + "Copyright (c) 2007 by udanax, "
-        + "licensed to Apache Software Foundation.\n"
-        + "Type 'help;' for usage.\n");
-  }
-
-  public static void printHelp(String cmd) {
-    if (cmd.equals("")) {
-      System.out.println("Type 'help <command>;' to see command-specific "
-          + "usage.\n");
-      for (Map.Entry<String, String[]> helpMap : help.entrySet()) {
-        wrapping(helpMap.getKey(), helpMap.getValue(), false);
-      }
-      System.out.println();
-    } else {
-      if (help.containsKey(cmd.toUpperCase())) {
-        String[] msg = help.get(cmd.toUpperCase());
-        wrapping(cmd.toUpperCase(), msg, true);
-      } else {
-        System.out.println("Unknown Command : Type 'help;' for usage.");
-      }
-    }
-  }
-
-  public static void wrapping(String cmd, String[] cmdType, boolean example) {
-    System.out.printf("%-10s", cmd);
-    if (cmdType[0].length() > 55) {
-      System.out.println(cmdType[0].substring(0, 55));
-      System.out.printf("%13s", "");
-      System.out.println(cmdType[0].substring(55, cmdType[1].length()));
-    } else {
-      System.out.println(cmdType[0]);
-    }
-
-    if (example)
-      System.out.println("\nSyntax:\n" + cmdType[1] + "\n");
-  }
-}
\ No newline at end of file

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java Fri Oct  5 20:09:50 2007
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase.shell;
 
 import java.io.IOException;
+import java.io.Writer;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
@@ -30,12 +31,15 @@
  * Inserts values into tables.
  */
 public class InsertCommand extends BasicCommand {
-  
   private Text tableName;
   private List<String> columnfamilies;
   private List<String> values;
   private String rowKey;
 
+  public InsertCommand(Writer o) {
+    super(o);
+  }
+
   public ReturnMsg execute(Configuration conf) {
     if (this.tableName == null || this.values == null || this.rowKey == null)
       return new ReturnMsg(0, "Syntax error : Please check 'Insert' syntax.");
@@ -91,5 +95,10 @@
 
   public byte[] getValue(int i) {
     return this.values.get(i).getBytes();
-  } 
+  }
+  
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.INSERT;
+  }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/JarCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/JarCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/JarCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/JarCommand.java Fri Oct  5 20:09:50 2007
@@ -21,6 +21,7 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.io.Writer;
 import java.lang.reflect.Array;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
@@ -40,8 +41,11 @@
  * Run hadoop jar commands.
  */
 public class JarCommand extends BasicCommand {
-  
   private List<String> query;
+  
+  public JarCommand(Writer o) {
+    super(o);
+  }
 
   @SuppressWarnings("deprecation")
   public ReturnMsg execute(@SuppressWarnings("unused") Configuration conf) {
@@ -98,6 +102,7 @@
             try {
               FileUtil.fullyDelete(workDir);
             } catch (IOException e) {
+              e.printStackTrace();
             }
           }
         });
@@ -122,7 +127,7 @@
       Method main = mainClass.getMethod("main", new Class[] {
         Array.newInstance(String.class, 0).getClass()
       });
-      String[] newArgs = (String[])Arrays.asList(args)
+      String[] newArgs = Arrays.asList(args)
         .subList(firstArg, args.length).toArray(new String[0]);
       try {
         main.invoke(null, new Object[] { newArgs });
@@ -143,5 +148,10 @@
 
   private String[] getQuery() {
     return query.toArray(new String[] {});
+  }
+  
+  @Override
+  public CommandType getCommandType() {
+    return CommandType.SHELL;
   }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java Fri Oct  5 20:09:50 2007
@@ -44,4 +44,9 @@
   public int getType() {
     return this.type;
   }
+  
+  @Override
+  public String toString() {
+    return this.msg;
+  }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java Fri Oct  5 20:09:50 2007
@@ -19,6 +19,7 @@
  */
 package org.apache.hadoop.hbase.shell;
 
+import java.io.Writer;
 import java.util.Map;
 import java.util.Set;
 
@@ -32,7 +33,6 @@
  * Command. Provides utility methods for alteration operations.
  */
 public abstract class SchemaModificationCommand extends BasicCommand {
-
   private int maxVersions;
   private int maxLength;
   private HColumnDescriptor.CompressionType compression;
@@ -42,6 +42,10 @@
   private int vectorSize;
   private int numHash;
   private int numEntries;
+  
+  public SchemaModificationCommand(Writer o) {
+    super(o);
+  }
 
   private void initOptions() {
     maxVersions = HColumnDescriptor.DEFAULT_N_VERSIONS;

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java Fri Oct  5 20:09:50 2007
@@ -20,6 +20,8 @@
 package org.apache.hadoop.hbase.shell;
 
 import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -34,7 +36,6 @@
 import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.HTable;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.shell.generated.ParseException;
 import org.apache.hadoop.hbase.shell.generated.Parser;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.Text;
@@ -60,8 +61,22 @@
     new String [] {"Column", "Cell"};
   private static final String [] HEADER =
     new String [] {"Row", "Column", "Cell"};
+  private static final String STAR = "*";
+  
+  private final TableFormatter formatter;
+  
+  // Not instantiable
+  @SuppressWarnings("unused")
+  private SelectCommand() {
+    this(null, null);
+  }
+  
+  public SelectCommand(final Writer o, final TableFormatter f) {
+    super(o);
+    this.formatter = f;
+  }
 
-  public ReturnMsg execute(Configuration conf) {
+  public ReturnMsg execute(final Configuration conf) {
     if (this.tableName.equals("") || this.rowKey == null ||
         this.columns.size() == 0) {
       return new ReturnMsg(0, "Syntax error : Please check 'Select' syntax.");
@@ -84,7 +99,6 @@
 
   private int compoundWherePrint(HTable table, HBaseAdmin admin) {
     int count = 0;
-    TableFormatter formatter = TableFormatterFactory.get();
     try {
       if (this.version != 0) {
         // A number of versions has been specified.
@@ -113,7 +127,8 @@
         formatter.header(isMultiple()? HEADER_COLUMN_CELL: null);
         for (Map.Entry<Text, byte[]> e: table.getRow(this.rowKey).entrySet()) {
           Text key = e.getKey();
-          if (!this.columns.contains(key.toString())) {
+          String keyStr = key.toString();
+          if (!this.columns.contains(STAR) && !this.columns.contains(keyStr)) {
             continue;
           }
           String cellData = toString(key, e.getValue());
@@ -173,7 +188,8 @@
     }
   }
   
-  private int scanPrint(HTable table, HBaseAdmin admin) {
+  private int scanPrint(HTable table,
+      HBaseAdmin admin) {
     int count = 0;
     HScannerInterface scan = null;
     try {
@@ -186,7 +202,6 @@
       }
       HStoreKey key = new HStoreKey();
       TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
-      TableFormatter formatter = TableFormatterFactory.get();
       // If only one column in query, then don't print out the column.
       formatter.header((parsedColumns.isMultiple())? HEADER: HEADER_ROW_CELL);
       while (scan.next(key, results) && checkLimit(count)) {
@@ -265,7 +280,7 @@
    * @return True if query contains multiple columns.
    */
   private boolean isMultiple() {
-    return this.columns.size() > 1 || this.columns.contains("*");
+    return this.columns.size() > 1 || this.columns.contains(STAR);
   }
 
   private boolean checkLimit(int count) {
@@ -307,10 +322,17 @@
     this.version = version;
   }
   
-  public static void main(String[] args) throws ParseException {
+  public static void main(String[] args) throws Exception {
+    Writer out = new OutputStreamWriter(System.out, "UTF-8");
+    HBaseConfiguration c = new HBaseConfiguration();
     // For debugging
-    Parser parser = new Parser("select * from -ROOT-;");
+    TableFormatterFactory tff =
+      new TableFormatterFactory(out, c);
+    Parser parser = new Parser("select * from 'x' where row='x';", out,  tff.get());
     Command cmd = parser.terminatedCommand();
-    ReturnMsg rm = cmd.execute(new HBaseConfiguration());
+    
+    ReturnMsg rm = cmd.execute(c);
+    out.write(rm == null? "": rm.toString());
+    out.flush();
   }
 }

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java Fri Oct  5 20:09:50 2007
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase.shell;
 
 import java.io.IOException;
+import java.io.Writer;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseAdmin;
@@ -29,10 +30,28 @@
  * Shows all available tables.
  */
 public class ShowCommand extends BasicCommand {
-  private static final String [] HEADER = new String [] {"Table Name"};
+  private static final String [] HEADER = new String [] {"Name", "Descriptor"};
   private String command;
+  private final TableFormatter formatter;
+  
+  // Not instantiable
+  @SuppressWarnings("unused")
+  private ShowCommand() {
+    this(null, null);
+  }
+  
+  public ShowCommand(final Writer o, final TableFormatter f) {
+    this(o, f, null);
+  }
 
-  public ReturnMsg execute(Configuration conf) {
+  public ShowCommand(final Writer o, final TableFormatter f,
+      final String argument) {
+    super(o);
+    this.formatter = f;
+    this.command = argument;
+  }
+
+  public ReturnMsg execute(final Configuration conf) {
     if (this.command == null) {
       return new ReturnMsg(0, "Syntax error : Please check 'Show' syntax");
     }
@@ -43,13 +62,12 @@
         HTableDescriptor[] tables = admin.listTables();
         tableLength = tables.length;
         if (tableLength == 0) {
-          return new ReturnMsg(0, "Table not found");
+          return new ReturnMsg(0, "No tables found");
         }
-        TableFormatter formatter = TableFormatterFactory.get();
         formatter.header(HEADER);
         for (int i = 0; i < tableLength; i++) {
           String tableName = tables[i].getName().toString();
-          formatter.row(new String [] {tableName});
+          formatter.row(new String [] {tableName, tables[i].toString()});
         }
         formatter.footer();
         return new ReturnMsg(1, tableLength + " table(s) in set");

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/TableFormatter.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/TableFormatter.java?rev=582442&r1=582441&r2=582442&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/TableFormatter.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/TableFormatter.java Fri Oct  5 20:09:50 2007
@@ -19,24 +19,41 @@
  */
 package org.apache.hadoop.hbase.shell;
 
+import java.io.IOException;
+import java.io.Writer;
+
+import org.apache.hadoop.hbase.shell.formatter.AsciiTableFormatter;
+
 /**
  * Interface implemented by table formatters outputting select results.
+ * Implementations must have a constructor that takes a Writer.
+ * @see AsciiTableFormatter
  */
 public interface TableFormatter {
   /**
    * Output header.
    * @param titles Titles to emit.
+   * @throws IOException
    */
-  public void header(final String [] titles);
+  public void header(final String [] titles) throws IOException;
   
   /**
    * Output footer.
+   * @throws IOException
    */
-  public void footer();
+  public void footer() throws IOException;
   
   /**
    * Output a row.
    * @param cells
+   * @throws IOException
    */
-  public void row(final String [] cells);
+  public void row(final String [] cells) throws IOException;
+
+  /**
+   * @return Output stream being used (This is in interface to enforce fact
+   * that formatters use Writers -- that they operate on character streams
+   * rather than on byte streams).
+   */
+  public Writer getOut();
 }



Mime
View raw message