hbase-commits mailing list archives

From apurt...@apache.org
Subject svn commit: r789592 [1/2] - in /hadoop/hbase/trunk_on_hadoop-0.18.3: ./ conf/ src/contrib/ src/contrib/stargate/ src/contrib/stargate/lib/ src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/ src/contrib/stargate/src/java/org/apache/h...
Date Tue, 30 Jun 2009 07:13:00 GMT
Author: apurtell
Date: Tue Jun 30 07:12:58 2009
New Revision: 789592

URL: http://svn.apache.org/viewvc?rev=789592&view=rev
Log:
HBASE-1582, HBASE-1589, contrib improvements

Added:
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/lib/jetty-6.1.14.jar   (with props)
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/lib/jetty-util-6.1.14.jar   (with props)
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/build.xml
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/FilterSet.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/ValueFilter.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilterSet.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestPrefixFilter.java
Removed:
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RowInclusiveStopFilter.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RowWhileMatchFilter.java
Modified:
    hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt
    hadoop/hbase/trunk_on_hadoop-0.18.3/build.xml
    hadoop/hbase/trunk_on_hadoop-0.18.3/conf/zoo.cfg
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/build-contrib.xml
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/build.xml
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Cluster.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ColumnSchemaModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableInfoModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexSpecification.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/ipc/TransactionalRegionInterface.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/tableindexed/IndexMaintenanceUtils.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegionServer.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Result.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/package-info.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/PageFilter.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RowPrefixFilter.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/package-info.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/CachedBlock.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/ipc/HBaseRPCProtocolVersion.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/MemStore.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/QueryMatcher.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/ClassSize.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestRowPrefixFilter.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/TestHeapSize.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt Tue Jun 30 07:12:58 2009
@@ -233,6 +233,8 @@
    HBASE-1580  Store scanner does not consult filter.filterRow at end of scan
                (Clint Morgan via Stack)
    HBASE-1437  broken links in hbase.org
+   HBASE-1582  Translate ColumnValueFilter and RowFilterSet to the new Filter
+               interface
 
   IMPROVEMENTS
    HBASE-1089  Add count of regions on filesystem to master UI; add percentage
@@ -418,6 +420,7 @@
    HBASE-1581  Run major compaction on .META. when table is dropped or truncated
    HBASE-1587  Update ganglia config and doc to account for ganglia 3.1 and
                hadoop-4675
+   HBASE-1589  Up zk maxClientCnxns from default of 10 to 20 or 30 or so
 
   OPTIMIZATIONS
    HBASE-1412  Change values for delete column and column family in KeyValue

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/build.xml?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/build.xml (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/build.xml Tue Jun 30 07:12:58 2009
@@ -98,7 +98,7 @@
   <fileset id="lib.jars" dir="${basedir}" includes="lib/*.jar"/>
   <path id="classpath">
     <fileset refid="lib.jars"/>
-    <fileset dir="${lib.dir}/jsp-2.1/">
+    <fileset dir="${lib.dir}/jetty-ext/">
       <include name="*jar" />
     </fileset>
     <pathelement location="${build.classes}"/>
@@ -195,7 +195,7 @@
      -->
   <target name="jspc" depends="init" unless="jspc.not.required">
     <path id="jspc.classpath">
-      <fileset dir="${basedir}/lib/jsp-2.1/">
+      <fileset dir="${basedir}/lib/jetty-ext/">
         <include name="*jar" />
       </fileset>
       <fileset dir="${basedir}/lib/">
@@ -423,8 +423,13 @@
         <link href="${javadoc.link.java}"/>
         <classpath >
           <path refid="classpath" />
+          <fileset dir="src/contrib/">
+            <include name="*/lib/*.jar" />
+          </fileset>
           <pathelement path="${java.class.path}"/>
         </classpath>
+    	<packageset dir="src/contrib/transactional/src/java"/>
+    	<packageset dir="src/contrib/stargate/src/java"/>
     </javadoc>
   </target>	
 

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/conf/zoo.cfg
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/conf/zoo.cfg?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/conf/zoo.cfg (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/conf/zoo.cfg Tue Jun 30 07:12:58 2009
@@ -10,5 +10,10 @@
 dataDir=${hbase.tmp.dir}/zookeeper
 # the port at which the clients will connect
 clientPort=2181
+# Limit on number of concurrent connections (at the socket level) that a
+# single client, identified by IP address, may make to a single member of
+# the ZooKeeper ensemble. Default is 10.  Set high to avoid zk connection
+# issues running standalone and pseudo-distributed
+maxClientCnxns=30
 
 server.0=localhost:2888:3888

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/build-contrib.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/build-contrib.xml?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/build-contrib.xml (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/build-contrib.xml Tue Jun 30 07:12:58 2009
@@ -58,6 +58,20 @@
 
   <property name="build.encoding" value="ISO-8859-1"/>
 
+  <property name="clover.db.dir" location="${build.dir}/test/clover/db"/>
+  <property name="clover.report.dir" location="${build.dir}/test/clover/reports"/>
+
+  <property name="clover.jar" location="${clover.home}/lib/clover.jar"/>
+  <available property="clover.present" file="${clover.jar}"/>
+
+  <!-- check if clover reports should be generated -->
+  <condition property="clover.enabled">
+    <and>
+        <isset property="run.clover"/>
+        <isset property="clover.present"/>
+    </and>
+  </condition>
+
   <fileset id="lib.jars" dir="${root}" includes="lib/*.jar"/>
 
   <!-- the normal classpath -->
@@ -78,6 +92,7 @@
     <pathelement location="${hbase.root}/build"/>
     <pathelement location="${hbase.root}/src/test"/>
     <pathelement location="${conf.dir}"/>
+    <pathelement path="${clover.jar}"/>
   </path>
 
 
@@ -102,7 +117,7 @@
   <!-- ====================================================== -->
   <!-- Compile a hbase contrib's files                       -->
   <!-- ====================================================== -->
-  <target name="compile" depends="init" unless="skip.contrib">
+  <target name="compile" depends="clover,init" unless="skip.contrib">
     <echo message="contrib: ${name}"/>
     <javac
      encoding="${build.encoding}"
@@ -159,6 +174,49 @@
     />
   </target>
 
+  <!-- ====================================================== -->
+  <!-- Clover stuff                                           -->
+  <!-- ====================================================== -->
+  <target name="clover" depends="clover.setup, clover.info"
+    description="Instrument the Unit tests using Clover.  To use, specify -Dclover.home=&lt;base of clover installation&gt; -Drun.clover=true on the command line."/>
+
+  <target name="clover.setup" if="clover.enabled">
+    <taskdef resource="cloverlib.xml" classpath="${clover.jar}"/>
+    <mkdir dir="${clover.db.dir}"/>
+    <clover-setup initString="${clover.db.dir}/hbase_coverage.db">
+      <fileset dir="src" includes="java/**/*"/>
+    </clover-setup>
+  </target>
+
+  <target name="clover.info" unless="clover.present">
+    <echo>
+      Clover not found. Code coverage reports disabled.
+    </echo>
+  </target>
+
+  <target name="clover.check">
+    <fail unless="clover.present">
+      ##################################################################
+      Clover not found.
+      Please specify -Dclover.home=&lt;base of clover installation&gt;
+      on the command line.
+      ##################################################################
+    </fail>
+  </target>
+
+  <target name="generate-clover-reports" depends="clover.check, clover">
+    <mkdir dir="${clover.report.dir}"/>
+    <clover-report>
+      <current outfile="${clover.report.dir}" title="${final.name}">
+        <format type="html"/>
+      </current>
+    </clover-report>
+    <clover-report>
+      <current outfile="${clover.report.dir}/clover.xml" title="${final.name}">
+        <format type="xml"/>
+      </current>
+    </clover-report>
+  </target>
   
   <!-- ====================================================== -->
   <!-- Make a hbase contrib's examples jar                   -->

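The Clover targets added above are opt-in: instrumentation happens only when -Drun.clover=true is passed and a clover.jar is found under -Dclover.home. As a rough example (the clover.home path is illustrative, not part of this commit), a contrib coverage run might look like: ant -Dclover.home=/opt/clover -Drun.clover=true generate-clover-reports, with the HTML and XML reports landing under ${build.dir}/test/clover/reports per the properties defined above.
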
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/build.xml?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/build.xml (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/build.xml Tue Jun 30 07:12:58 2009
@@ -85,19 +85,23 @@
     </war>
   </target>
   
-  <target name="jar" depends="compile-jar">
-    <jar jarfile="${build.dir}/${jar.file}" basedir="${build.classes}"/>
-  </target>
-  
-  <target name="package" depends="jar, war">
-    <mkdir dir="${dist.dir}"/>
-    <copy todir="${dist.dir}/lib" overwrite="true">
+  <!--Override ../build-contrib.xml package-->
+  <target name="package" depends="jar, war" unless="skip.contrib">
+    <mkdir dir="${dist.dir}/contrib/${name}"/>
+    <copy todir="${dist.dir}/contrib/${name}" includeEmptyDirs="false" flatten="true">
+      <fileset dir="${build.dir}">
+        <include name="hbase-${version}-${name}.jar" />
+      </fileset>
+    </copy>
+    <copy todir="${dist.dir}/contrib/${name}" includeEmptyDirs="false" flatten="true">
+      <fileset dir="${build.dir}">
+        <include name="hbase-${version}-${name}.war" />
+      </fileset>
+    </copy>
+    <mkdir dir="${dist.dir}/contrib/${name}/lib"/>
+    <copy todir="${dist.dir}/contrib/${name}/lib" overwrite="true">
       <fileset dir="${lib.dir}"/>
     </copy>
-    <copy todir="${dist.dir}" overwrite="true"
-       file="${build.dir}/${jar.file}"/>
-    <copy todir="${dist.dir}" overwrite="true"
-       file="${build.dir}/${war.file}"/>
   </target>
   
   <target name="clean-contrib">

Added: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/lib/jetty-6.1.14.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/lib/jetty-6.1.14.jar?rev=789592&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/lib/jetty-6.1.14.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/lib/jetty-util-6.1.14.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/lib/jetty-util-6.1.14.jar?rev=789592&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/lib/jetty-util-6.1.14.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Cluster.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Cluster.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Cluster.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Cluster.java Tue Jun 30 07:12:58 2009
@@ -44,7 +44,7 @@
 
   /**
    * Add a node to the cluster
-   * @param name the service location in 'host:port' format
+   * @param node the service location in 'host:port' format
    */
   public Cluster add(String node) {
     nodes.add(node);
@@ -66,7 +66,7 @@
 
   /**
    * Remove a node from the cluster
-   * @param name the service location in 'host:port' format
+   * @param node the service location in 'host:port' format
    */
   public Cluster remove(String node) {
     nodes.remove(node);

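For reference, Cluster is the Stargate client's list of REST service endpoints, and the two methods whose javadoc is corrected above take the endpoint as a 'host:port' string. A minimal sketch, assuming the no-argument constructor (not shown in this diff) and with the port chosen purely for illustration:

    Cluster cluster = new Cluster();    // no-arg constructor assumed
    cluster.add("localhost:8080");      // node is a service location in 'host:port' format
    cluster.remove("localhost:8080");
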
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ColumnSchemaModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ColumnSchemaModel.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ColumnSchemaModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ColumnSchemaModel.java Tue Jun 30 07:12:58 2009
@@ -75,7 +75,7 @@
   }
 
   /**
-   * @param the table name
+   * @param name the table name
    */
   public void setName(String name) {
     this.name = name;

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableInfoModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableInfoModel.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableInfoModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableInfoModel.java Tue Jun 30 07:12:58 2009
@@ -73,7 +73,7 @@
   }
 
   /**
-   * @param the table name
+   * @param name the table name
    */
   public void setName(String name) {
     this.name = name;

Added: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/build.xml?rev=789592&view=auto
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/build.xml (added)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/build.xml Tue Jun 30 07:12:58 2009
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<!-- 
+Before you can run these subtargets directly, you need 
+to call at top-level: ant deploy-contrib compile-core-test
+-->
+<project name="transactional" default="jar">
+  <import file="../build-contrib.xml"/>
+</project>

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexSpecification.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexSpecification.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexSpecification.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/client/tableindexed/IndexSpecification.java Tue Jun 30 07:12:58 2009
@@ -63,7 +63,6 @@
    * @param indexedColumns
    * @param additionalColumns
    * @param keyGenerator
-   * @param keyComparator
    */
   public IndexSpecification(String indexId, byte[][] indexedColumns,
       byte[][] additionalColumns, IndexKeyGenerator keyGenerator) {

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/ipc/TransactionalRegionInterface.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/ipc/TransactionalRegionInterface.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/ipc/TransactionalRegionInterface.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/ipc/TransactionalRegionInterface.java Tue Jun 30 07:12:58 2009
@@ -77,7 +77,7 @@
    * Put an array of puts into the specified region
    * @param regionName
    * @param puts
-   * @return
+   * @return result
    * @throws IOException
    */
   public int put(long transactionId, final byte[] regionName, final Put [] puts)

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/tableindexed/IndexMaintenanceUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/tableindexed/IndexMaintenanceUtils.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/tableindexed/IndexMaintenanceUtils.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/tableindexed/IndexMaintenanceUtils.java Tue Jun 30 07:12:58 2009
@@ -73,7 +73,7 @@
      * Ask if this update does apply to the index.
      * 
      * @param indexSpec
-     * @param b
+     * @param columnValues
      * @return true if possibly apply.
      */
     public static boolean doesApplyToIndex(final IndexSpecification indexSpec,

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegion.java Tue Jun 30 07:12:58 2009
@@ -278,7 +278,7 @@
    * Add a write to the transaction. Does not get applied until commit process.
    * 
    * @param transactionId
-   * @param b
+   * @param put
    * @throws IOException
    */
   public void put(final long transactionId, final Put put) throws IOException {
@@ -313,8 +313,7 @@
    * FIXME, not sure about this approach
    * 
    * @param transactionId
-   * @param row
-   * @param timestamp
+   * @param delete
    * @throws IOException
    */
   public void delete(final long transactionId, Delete delete)

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegionServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegionServer.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegionServer.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/transactional/src/java/org/apache/hadoop/hbase/regionserver/transactional/TransactionalRegionServer.java Tue Jun 30 07:12:58 2009
@@ -62,7 +62,6 @@
   private final CleanOldTransactionsChore cleanOldTransactionsThread;
 
   /**
-   * @param address
    * @param conf
    * @throws IOException
    */

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java Tue Jun 30 07:12:58 2009
@@ -603,7 +603,7 @@
         Text t = new Text();
         t.readFields(in);
         this.name = t.getBytes();
-//        if(KeyValue.getFamilyDelimiterIndex(this.name, 0, this.name.length) 
+//        if(KeyValue.getFamilyDelimiterIndex(this.name, 0, this.name.length)
 //            > 0) {
 //          this.name = stripColon(this.name);
 //        }

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java Tue Jun 30 07:12:58 2009
@@ -134,6 +134,7 @@
     private final long pause;
     private final int numRetries;
     private final int maxRPCAttempts;
+    private final long rpcTimeout;
 
     private final Object masterLock = new Object();
     private volatile boolean closed;
@@ -184,6 +185,7 @@
       this.pause = conf.getLong("hbase.client.pause", 2 * 1000);
       this.numRetries = conf.getInt("hbase.client.retries.number", 10);
       this.maxRPCAttempts = conf.getInt("hbase.client.rpc.maxattempts", 1);
+      this.rpcTimeout = conf.getLong("hbase.regionserver.lease.period", 60000);
       
       this.master = null;
       this.masterChecked = false;

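The new rpcTimeout field is read from hbase.regionserver.lease.period (defaulting to 60000 ms), so the client-side RPC timeout tracks the region server's lease period. A hedged sketch of raising it before opening a connection (the 120000 value is illustrative; the same property also governs the server-side scanner lease, so it is normally kept consistent on both sides):

    HBaseConfiguration conf = new HBaseConfiguration();
    conf.setLong("hbase.regionserver.lease.period", 120000); // illustrative value; default is 60000
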
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Result.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Result.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Result.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Result.java Tue Jun 30 07:12:58 2009
@@ -39,6 +39,7 @@
 
 /**
  * Single row result of a {@link Get} or {@link Scan} query.
+ * Backed by array of KeyValues.
  */
 public class Result implements Writable {
   private KeyValue [] kvs = null;
@@ -84,7 +85,7 @@
   }
 
   /**
-   * Directly return the unsorted array of KeyValues in this Result.
+   * Return the unsorted array of KeyValues backing this Result instance.
    * @return unsorted array of KeyValues
    */
   public KeyValue[] raw() {
@@ -92,7 +93,7 @@
   }
 
   /**
-   * Return a sorted list of the KeyValue's in this result.
+   * Create a sorted list of the KeyValue's in this result.
    * 
    * @return The sorted list of KeyValue's.
    */
@@ -108,7 +109,7 @@
    * @return sorted array of KeyValues
    */
   public KeyValue[] sorted() {
-    if(isEmpty()) {
+    if (isEmpty()) {
       return null;
     }
     Arrays.sort(kvs, (Comparator<KeyValue>)KeyValue.COMPARATOR);
@@ -230,7 +231,7 @@
    */
   public byte [] getValue(byte [] family, byte [] qualifier) {
     Map.Entry<Long,byte[]> entry = getKeyValue(family, qualifier);
-    return entry == null ?null :entry.getValue();
+    return entry == null? null: entry.getValue();
   }
 
   public Cell getCellValue(byte[] family, byte[] qualifier) {
@@ -281,11 +282,8 @@
   
   private NavigableMap<Long, byte[]> getVersionMap(
       NavigableMap<byte [], NavigableMap<Long, byte[]>> qualifierMap, byte [] qualifier) {
-    if(qualifier != null) {
-      return qualifierMap.get(qualifier);
-    } else {
-      return qualifierMap.get(new byte[0]);
-    }
+    return qualifier != null?
+      qualifierMap.get(qualifier): qualifierMap.get(new byte[0]);
   }
   
   /**
@@ -341,7 +339,7 @@
    * @return value of the first column
    */
   public byte [] value() {
-    if(isEmpty()) {
+    if (isEmpty()) {
       return null;
     }
     return kvs[0].getValue();
@@ -352,15 +350,14 @@
    * @return true if empty
    */
   public boolean isEmpty() {
-    return (this.kvs == null || this.kvs.length == 0);
+    return this.kvs == null || this.kvs.length == 0;
   }
   
   /**
-   * 
    * @return the size of the underlying KeyValue []
    */
   public int size() {
-    return (this.kvs == null ? 0 : this.kvs.length);
+    return this.kvs == null? 0: this.kvs.length;
   }
   
   /**

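The javadoc edits above make explicit that a Result is backed by a KeyValue[] and that raw() returns that array unsorted, while list()/sorted() produce ordered views. A short sketch using only methods touched in this diff (obtaining 'result' from a Get or Scan, and the family/qualifier byte arrays, are assumed and not shown):

    if (!result.isEmpty()) {
      byte [] first = result.value();                     // value of the first KeyValue
      byte [] cell = result.getValue(family, qualifier);  // value for family:qualifier
      for (KeyValue kv : result.raw()) {                  // unsorted backing array
        // inspect each KeyValue
      }
    }
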
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/package-info.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/package-info.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/package-info.java Tue Jun 30 07:12:58 2009
@@ -22,9 +22,14 @@
 
 <h2>Table of Contents</h2>
 <ul>
+ <li><a href="#overview">Overview</a></li>
 <li><a href="#client_example">Example API Usage</a></li>
 </ul>
 
+ <h2><a name="overview">Overview</a></h2>
+ <p>
+ </p>
+ 
 <h2><a name="client_example">Example API Usage</a></h2>
 
 <p>Once you have a running HBase, you probably want a way to hook your application up to it. 

Added: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/FilterSet.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/FilterSet.java?rev=789592&view=auto
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/FilterSet.java (added)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/FilterSet.java Tue Jun 30 07:12:58 2009
@@ -0,0 +1,217 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.filter;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.io.HbaseObjectWritable;
+
+/**
+ * Implementation of {@link Filter} that represents a Set of Filters
+ * which will be evaluated with a specified boolean operator MUST_PASS_ALL 
+ * (!AND) or MUST_PASS_ONE (!OR).  Since you can use Filter Sets as children
+ * of Filter Sets, you can create a hierarchy of filters to be evaluated.
+ * <p>TODO: Fix creation of Configuration on serialization and deserialization. 
+ */
+public class FilterSet implements Filter {
+
+  /** set operator */
+  public static enum Operator {
+    /** !AND */
+    MUST_PASS_ALL,
+    /** !OR */
+    MUST_PASS_ONE
+  }
+
+  private Operator operator = Operator.MUST_PASS_ALL;
+  private Set<Filter> filters = new HashSet<Filter>();
+
+  /**
+   * Default constructor, filters nothing. Required though for RPC
+   * deserialization.
+   */
+  public FilterSet() {
+    super();
+  }
+
+  /**
+   * Constructor that takes a set of {@link Filter}s. The default operator
+   * MUST_PASS_ALL is assumed.
+   * 
+   * @param rowFilters
+   */
+  public FilterSet(final Set<Filter> rowFilters) {
+    this.filters = rowFilters;
+  }
+
+  /**
+   * Constructor that takes a set of {@link Filter}s and an operator.
+   * 
+   * @param operator Operator to process filter set with.
+   * @param rowFilters Set of row filters.
+   */
+  public FilterSet(final Operator operator, final Set<Filter> rowFilters) {
+    this.filters = rowFilters;
+    this.operator = operator;
+  }
+
+  /**
+   * Get the operator.
+   * 
+   * @return operator
+   */
+  public Operator getOperator() {
+    return operator;
+  }
+
+  /**
+   * Get the filters.
+   * 
+   * @return filters
+   */
+  public Set<Filter> getFilters() {
+    return filters;
+  }
+
+  /**
+   * Add a filter.
+   * 
+   * @param filter
+   */
+  public void addFilter(Filter filter) {
+    this.filters.add(filter);
+  }
+
+  public void reset() {
+    for (Filter filter : filters) {
+      filter.reset();
+    }
+  }
+
+  public boolean filterRowKey(byte[] rowKey, int offset, int length) {
+    for (Filter filter : filters) {
+      if (operator == Operator.MUST_PASS_ALL) {
+        if (filter.filterAllRemaining()
+            || filter.filterRowKey(rowKey, offset, length)) {
+          return true;
+        }
+      } else if (operator == Operator.MUST_PASS_ONE) {
+        if (!filter.filterAllRemaining()
+            && !filter.filterRowKey(rowKey, offset, length)) {
+          return false;
+        }
+      }
+    }
+    return  operator == Operator.MUST_PASS_ONE;
+  }
+
+  public boolean filterAllRemaining() {
+    for (Filter filter : filters) {
+      if (filter.filterAllRemaining()) {
+        if (operator == Operator.MUST_PASS_ALL) {
+          return true;
+        }
+      } else {
+        if (operator == Operator.MUST_PASS_ONE) {
+          return false;
+        }
+      }
+    }
+    return operator == Operator.MUST_PASS_ONE;
+  }
+
+  public ReturnCode filterKeyValue(KeyValue v) {
+    for (Filter filter : filters) {
+      if (operator == Operator.MUST_PASS_ALL) {
+        if (filter.filterAllRemaining()) {
+          return ReturnCode.NEXT_ROW;
+        }
+        switch (filter.filterKeyValue(v)) {
+        case INCLUDE:
+          continue;
+        case NEXT_ROW:
+        case SKIP:
+          return ReturnCode.SKIP;
+        }
+      } else if (operator == Operator.MUST_PASS_ONE) {
+        if (filter.filterAllRemaining()) {
+          continue;
+        }
+
+        switch (filter.filterKeyValue(v)) {
+        case INCLUDE:
+          return ReturnCode.INCLUDE;
+        case NEXT_ROW:
+        case SKIP:
+          continue;
+        }
+      }
+    }
+    return operator == Operator.MUST_PASS_ONE?
+      ReturnCode.SKIP: ReturnCode.INCLUDE;
+  }
+
+  public boolean filterRow() {
+    for (Filter filter : filters) {
+      if (operator == Operator.MUST_PASS_ALL) {
+        if (filter.filterAllRemaining()
+            || filter.filterRow()) {
+          return true;
+        }
+      } else if (operator == Operator.MUST_PASS_ONE) {
+        if (!filter.filterAllRemaining()
+            && !filter.filterRow()) {
+          return false;
+        }
+      }
+    }
+    return  operator == Operator.MUST_PASS_ONE;
+  }
+
+  public void readFields(final DataInput in) throws IOException {
+    Configuration conf = new HBaseConfiguration();
+    byte opByte = in.readByte();
+    operator = Operator.values()[opByte];
+    int size = in.readInt();
+    if (size > 0) {
+      filters = new HashSet<Filter>();
+      for (int i = 0; i < size; i++) {
+        Filter filter = (Filter)HbaseObjectWritable.readObject(in, conf);
+        filters.add(filter);
+      }
+    }
+  }
+
+  public void write(final DataOutput out) throws IOException {
+    Configuration conf = new HBaseConfiguration();
+    out.writeByte(operator.ordinal());
+    out.writeInt(filters.size());
+    for (Filter filter : filters) {
+      HbaseObjectWritable.writeObject(out, filter, filter.getClass(), conf);
+    }
+  }
+}
\ No newline at end of file

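Since FilterSets may themselves contain FilterSets, filters compose into a tree evaluated with MUST_PASS_ALL (AND-like) or MUST_PASS_ONE (OR-like) semantics. A sketch combining two filters added elsewhere in this commit (imports and the row keys are illustrative; attaching the result to a Scan is assumed and not shown here):

    Set<Filter> filters = new HashSet<Filter>();
    filters.add(new PrefixFilter(Bytes.toBytes("row-")));           // pass rows with this prefix
    filters.add(new InclusiveStopFilter(Bytes.toBytes("row-100"))); // stop after this row, inclusive
    Filter filter = new FilterSet(FilterSet.Operator.MUST_PASS_ALL, filters);
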
Added: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java?rev=789592&view=auto
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java (added)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java Tue Jun 30 07:12:58 2009
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.filter;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.DataInput;
+
+/**
+ * A Filter that stops after the given row.  There is no "RowStopFilter" because
+ * the Scan spec allows you to specify a stop row.
+ *
+ * Use this filter to include the stop row, eg: [A,Z].
+ */
+public class InclusiveStopFilter implements Filter {
+  private byte [] stopRowKey;
+
+  public InclusiveStopFilter() {
+    super();
+  }
+
+  public InclusiveStopFilter(final byte [] stopRowKey) {
+    this.stopRowKey = stopRowKey;
+  }
+
+  public void reset() {
+    // noop, no state
+  }
+
+  public boolean filterRowKey(byte[] buffer, int offset, int length) {
+    if (buffer == null) {
+      if (this.stopRowKey == null) {
+        return true; //filter...
+      }
+      return false;
+    }
+    // if stopRowKey is <= buffer, then true, filter row.
+    return Bytes.compareTo(stopRowKey, 0, stopRowKey.length, buffer, offset, length) < 0;
+  }
+
+  public boolean filterAllRemaining() {
+    return false;
+  }
+
+  public ReturnCode filterKeyValue(KeyValue v) {
+    // include everything.
+    return ReturnCode.INCLUDE;
+  }
+
+  public boolean filterRow() {
+    return false;
+  }
+
+  public void write(DataOutput out) throws IOException {
+    Bytes.writeByteArray(out, this.stopRowKey);
+  }
+
+  public void readFields(DataInput in) throws IOException {
+    this.stopRowKey = Bytes.readByteArray(in);
+  }
+}
\ No newline at end of file

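As the class comment notes, a Scan's stop row is exclusive, so this filter is the way to get an inclusive upper bound. A sketch, assuming the Scan API (start-row constructor and setFilter), which is not part of this diff, and with illustrative row keys:

    Scan scan = new Scan(Bytes.toBytes("A"));                     // start row
    scan.setFilter(new InclusiveStopFilter(Bytes.toBytes("Z")));  // rows up to and including "Z"
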
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/PageFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/PageFilter.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/PageFilter.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/PageFilter.java Tue Jun 30 07:12:58 2009
@@ -79,13 +79,11 @@
     out.writeLong(pageSize);
   }
 
-  @Override
   public ReturnCode filterKeyValue(KeyValue v) {
     this.rowsAccepted++;
     return filterAllRemaining()? ReturnCode.NEXT_ROW: ReturnCode.INCLUDE;
   }
 
-  @Override
   public boolean filterRow() {
     return filterAllRemaining();
   }

Added: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/PrefixFilter.java?rev=789592&view=auto
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/PrefixFilter.java (added)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/PrefixFilter.java Tue Jun 30 07:12:58 2009
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.filter;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.DataInput;
+
+/**
+ * Pass results that have same row prefix.
+ */
+public class PrefixFilter implements Filter {
+  protected byte [] prefix = null;
+
+  public PrefixFilter(final byte [] prefix) {
+    this.prefix = prefix;
+  }
+
+  public PrefixFilter() {
+    super();
+  }
+
+  public void reset() {
+    // Noop
+  }
+
+  public boolean filterRowKey(byte[] buffer, int offset, int length) {
+    if (buffer == null || this.prefix == null)
+      return true;
+    if (length < prefix.length)
+      return true;
+    // if they are equal, return false => pass row
+    // else return true, filter row
+    return Bytes.compareTo(buffer, offset, this.prefix.length, this.prefix, 0,
+      this.prefix.length) != 0;
+  }
+
+  public boolean filterAllRemaining() {
+    return false;
+  }
+
+  public ReturnCode filterKeyValue(KeyValue v) {
+    return ReturnCode.INCLUDE;
+  }
+
+  public boolean filterRow() {
+    return false;
+  }
+
+  public void write(DataOutput out) throws IOException {
+    Bytes.writeByteArray(out, this.prefix);
+  }
+
+  public void readFields(DataInput in) throws IOException {
+    this.prefix = Bytes.readByteArray(in);
+  }
+}
\ No newline at end of file

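PrefixFilter is the Filter-interface counterpart of RowPrefixFilter (modified below): filterRowKey passes a row only when its first prefix.length bytes equal the configured prefix. A minimal sketch with an illustrative prefix; wiring it into a Scan is assumed:

    Filter prefix = new PrefixFilter(Bytes.toBytes("user123"));
    // rows whose key does not start with "user123" are filtered out
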
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java Tue Jun 30 07:12:58 2009
@@ -50,11 +50,11 @@
  * </pre>
  */
 public class RegexStringComparator implements WritableByteArrayComparable {
-
   private Pattern pattern;
 
   /** Nullary constructor for Writable */
   public RegexStringComparator() {
+    super();
   }
 
   /**
@@ -78,5 +78,4 @@
   public void write(DataOutput out) throws IOException {
     out.writeUTF(pattern.toString());
   }
-
 }

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RowPrefixFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RowPrefixFilter.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RowPrefixFilter.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/RowPrefixFilter.java Tue Jun 30 07:12:58 2009
@@ -41,12 +41,10 @@
     super();
   }
 
-  @Override
   public void reset() {
     // Noop
   }
 
-  @Override
   public boolean filterRowKey(byte[] buffer, int offset, int length) {
     if (buffer == null || this.prefix == null)
       return true;
@@ -58,27 +56,22 @@
       this.prefix.length) != 0;
   }
 
-  @Override
   public boolean filterAllRemaining() {
     return false;
   }
 
-  @Override
   public ReturnCode filterKeyValue(KeyValue v) {
     return ReturnCode.INCLUDE;
   }
 
-  @Override
   public boolean filterRow() {
     return false;
   }
 
-  @Override
   public void write(DataOutput out) throws IOException {
     Bytes.writeByteArray(out, this.prefix);
   }
 
-  @Override
   public void readFields(DataInput in) throws IOException {
     this.prefix = Bytes.readByteArray(in);
   }

Added: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/ValueFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/ValueFilter.java?rev=789592&view=auto
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/ValueFilter.java (added)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/ValueFilter.java Tue Jun 30 07:12:58 2009
@@ -0,0 +1,226 @@
+/**
+ * Copyright 2008 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.filter;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.ObjectWritable;
+
+/**
+ * This filter is used to filter based on the value of a given column. It takes
+ * an operator (equal, greater, not equal, etc) and either a byte [] value or a
+ * byte [] comparator. If we have a byte [] value then we just do a
+ * lexicographic compare. If this is not sufficient (eg you want to deserialize
+ * a long and then compare it to a fixed long value), then you can pass in your
+ * own comparator instead.
+ * */
+public class ValueFilter implements Filter {
+  static final Log LOG = LogFactory.getLog(ValueFilter.class);
+
+    /** Comparison operators. */
+  public enum CompareOp {
+    /** less than */
+    LESS,
+    /** less than or equal to */
+    LESS_OR_EQUAL,
+    /** equals */
+    EQUAL,
+    /** not equal */
+    NOT_EQUAL,
+    /** greater than or equal to */
+    GREATER_OR_EQUAL,
+    /** greater than */
+    GREATER;
+  }
+
+  private byte[] columnName;
+  private CompareOp compareOp;
+  private byte[] value;
+  private WritableByteArrayComparable comparator;
+  private boolean filterIfColumnMissing;
+
+  ValueFilter() {
+    // for Writable
+  }
+
+  /**
+   * Constructor.
+   * 
+   * @param columnName name of column
+   * @param compareOp operator
+   * @param value value to compare column values against
+   */
+  public ValueFilter(final byte[] columnName, final CompareOp compareOp,
+      final byte[] value) {
+    this(columnName, compareOp, value, true);
+  }
+
+  /**
+   * Constructor.
+   * 
+   * @param columnName name of column
+   * @param compareOp operator
+   * @param value value to compare column values against
+   * @param filterIfColumnMissing if true then we will filter rows that don't
+   * have the column.
+   */
+  public ValueFilter(final byte[] columnName, final CompareOp compareOp,
+      final byte[] value, boolean filterIfColumnMissing) {
+    this.columnName = columnName;
+    this.compareOp = compareOp;
+    this.value = value;
+    this.filterIfColumnMissing = filterIfColumnMissing;
+  }
+
+  /**
+   * Constructor.
+   * 
+   * @param columnName name of column
+   * @param compareOp operator
+   * @param comparator Comparator to use.
+   */
+  public ValueFilter(final byte[] columnName, final CompareOp compareOp,
+      final WritableByteArrayComparable comparator) {
+    this(columnName, compareOp, comparator, true);
+  }
+
+  /**
+   * Constructor.
+   * 
+   * @param columnName name of column
+   * @param compareOp operator
+   * @param comparator Comparator to use.
+   * @param filterIfColumnMissing if true then we will filter rows that don't
+   * have the column.
+   */
+  public ValueFilter(final byte[] columnName, final CompareOp compareOp,
+      final WritableByteArrayComparable comparator,
+      boolean filterIfColumnMissing) {
+    this.columnName = columnName;
+    this.compareOp = compareOp;
+    this.comparator = comparator;
+    this.filterIfColumnMissing = filterIfColumnMissing;
+  }
+
+  public boolean filterRowKey(byte[] rowKey, int offset, int length) {
+    return false;
+  }
+
+  private boolean filterThisRow = false;
+  private boolean foundColValue = false;
+
+  public ReturnCode filterKeyValue(KeyValue keyValue) {
+    if (Bytes.compareTo(keyValue.getColumn(), this.columnName) != 0) {
+      return ReturnCode.INCLUDE;
+    }
+    LOG.info("Found column [" + Bytes.toString(columnName) + "] in row ["
+        + Bytes.toString(keyValue.getRow()) + "]");
+    foundColValue = true;
+
+    boolean filtered = filterColumnValue(keyValue.getBuffer(), keyValue
+        .getValueOffset(), keyValue.getValueLength());
+    if (filtered) {
+      LOG.info("filtered it");
+      filterThisRow = true;
+      return ReturnCode.NEXT_ROW;
+    }
+    return ReturnCode.INCLUDE;
+  }
+
+  private boolean filterColumnValue(final byte[] data, final int offset,
+      final int length) {
+    int compareResult;
+    if (comparator != null) {
+      compareResult = comparator.compareTo(Arrays.copyOfRange(data, offset,
+          offset + length));
+    } else {
+      compareResult = Bytes.compareTo(value, 0, value.length, data, offset,
+          length);
+    }
+
+    switch (compareOp) {
+    case LESS:
+      return compareResult <= 0;
+    case LESS_OR_EQUAL:
+      return compareResult < 0;
+    case EQUAL:
+      return compareResult != 0;
+    case NOT_EQUAL:
+      return compareResult == 0;
+    case GREATER_OR_EQUAL:
+      return compareResult > 0;
+    case GREATER:
+      return compareResult >= 0;
+    default:
+      throw new RuntimeException("Unknown Compare op " + compareOp.name());
+    }
+  }
+
+  public boolean filterAllRemaining() {
+    return false;
+  }
+
+  public boolean filterRow() {
+    boolean result = filterThisRow || (filterIfColumnMissing && !foundColValue);
+    filterThisRow = false;
+    foundColValue = false;
+    LOG.info("Deciding " + (result ? "" : " not ") + "to filter");
+    return result;
+  }
+
+  public void reset() {
+    // Nothing.
+  }
+
+  public void readFields(final DataInput in) throws IOException {
+    int valueLen = in.readInt();
+    if (valueLen > 0) {
+      value = new byte[valueLen];
+      in.readFully(value);
+    }
+    columnName = Bytes.readByteArray(in);
+    compareOp = CompareOp.valueOf(in.readUTF());
+    comparator = (WritableByteArrayComparable) ObjectWritable.readObject(in,
+        new HBaseConfiguration());
+    filterIfColumnMissing = in.readBoolean();
+  }
+
+  public void write(final DataOutput out) throws IOException {
+    if (value == null) {
+      out.writeInt(0);
+    } else {
+      out.writeInt(value.length);
+      out.write(value);
+    }
+    Bytes.writeByteArray(out, columnName);
+    out.writeUTF(compareOp.name());
+    ObjectWritable.writeObject(out, comparator,
+        WritableByteArrayComparable.class, new HBaseConfiguration());
+    out.writeBoolean(filterIfColumnMissing);
+  }
+}
\ No newline at end of file

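ValueFilter is the Filter-interface port of the old ColumnValueFilter called out in HBASE-1582: it compares one column's value against a byte [] or a WritableByteArrayComparable, and by default filters rows that lack the column. A sketch with an illustrative column and value (note the column name here is the combined 'family:qualifier' byte [] form this class matches against):

    Filter f = new ValueFilter(Bytes.toBytes("info:status"),
        ValueFilter.CompareOp.EQUAL, Bytes.toBytes("active"));
    // filterIfColumnMissing defaults to true, so rows without info:status are filtered
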
Added: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java?rev=789592&view=auto
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java (added)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java Tue Jun 30 07:12:58 2009
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.filter;
+
+import org.apache.hadoop.hbase.KeyValue;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.DataInput;
+
+/**
+ * A wrapper filter that filters everything after the first filtered row.
+ */
+public class WhileMatchFilter implements Filter {
+  private boolean filterAllRemaining = false;
+  private Filter filter;
+
+  public WhileMatchFilter() {
+    super();
+  }
+
+  public WhileMatchFilter(Filter filter) {
+    this.filter = filter;
+  }
+
+  public void reset() {
+    // no state.
+  }
+
+  private void changeFAR(boolean value) {
+    filterAllRemaining = filterAllRemaining || value;
+  }
+
+  public boolean filterRowKey(byte[] buffer, int offset, int length) {
+    changeFAR(filter.filterRowKey(buffer, offset, length));
+    return filterAllRemaining();
+  }
+
+  public boolean filterAllRemaining() {
+    return this.filterAllRemaining || this.filter.filterAllRemaining();
+  }
+
+  public ReturnCode filterKeyValue(KeyValue v) {
+    ReturnCode c = filter.filterKeyValue(v);
+    changeFAR(c != ReturnCode.INCLUDE);
+    return c;
+  }
+
+  public boolean filterRow() {
+    return false;
+  }
+
+  public void write(DataOutput out) throws IOException {
+    out.writeUTF(this.filter.getClass().getName());
+    this.filter.write(out);
+  }
+
+  public void readFields(DataInput in) throws IOException {
+    String className = in.readUTF();
+    try {
+      this.filter = (Filter)(Class.forName(className).newInstance());
+      this.filter.readFields(in);
+    } catch (InstantiationException e) {
+      throw new RuntimeException("Failed to deserialize.", e);
+    } catch (IllegalAccessException e) {
+      throw new RuntimeException("Failed to deserialize.", e);
+    } catch (ClassNotFoundException e) {
+      throw new RuntimeException("Failed to deserialize.", e);
+    }
+  }
+}
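
To make the changeFAR() latch concrete: once the wrapped filter rejects a row
key, the wrapper keeps reporting filterAllRemaining() == true for the rest of
the scan, which is what the new TestPrefixFilter later in this patch asserts.
A small sketch, assuming the PrefixFilter added in this commit behaves as its
test expects (rejecting rows that do not carry the prefix):

    import org.apache.hadoop.hbase.filter.PrefixFilter;
    import org.apache.hadoop.hbase.filter.WhileMatchFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    // Sketch: the latch set in changeFAR() is never cleared again.
    public class WhileMatchLatchSketch {
      public static void main(String[] args) {
        WhileMatchFilter w =
            new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("org.apache.")));
        byte[] match = Bytes.toBytes("org.apache.site-a");
        byte[] miss = Bytes.toBytes("com.yahoo.www");
        w.filterRowKey(match, 0, match.length);      // prefix matches: keep going
        System.out.println(w.filterAllRemaining());  // false
        w.filterRowKey(miss, 0, miss.length);        // prefix no longer matches
        System.out.println(w.filterAllRemaining());  // true: the scan can stop
      }
    }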

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/package-info.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/package-info.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/package-info.java Tue Jun 30 07:12:58 2009
@@ -23,7 +23,7 @@
 <p>Since HBase 0.20.0, {@link org.apache.hadoop.hbase.filter.Filter} is the new interface used for filtering.
 It replaces the deprecated {@link org.apache.hadoop.hbase.filter.RowFilterInterface}.
 Filters run the extent of a table unless you wrap your filter in a
-{@link org.apache.hadoop.hbase.filter.RowWhileMatchFilter}.
+{@link org.apache.hadoop.hbase.filter.WhileMatchFilter}.
 The latter returns as soon as the filter stops matching.
 </p>
 */
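
In practice the early-out behaviour described above is obtained by wrapping the
filter before handing it to a Scan, exactly as the PerformanceEvaluation change
further down in this patch does. A minimal sketch (the page size of 120 is
illustrative):

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.PageFilter;
    import org.apache.hadoop.hbase.filter.WhileMatchFilter;

    public class WhileMatchScanSketch {
      public static Scan pagedScan() {
        Scan scan = new Scan();
        // Unwrapped, a PageFilter would still be evaluated across the whole
        // table; wrapped, the scan ends once the filter stops matching.
        scan.setFilter(new WhileMatchFilter(new PageFilter(120)));
        return scan;
      }
    }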

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java Tue Jun 30 07:12:58 2009
@@ -44,21 +44,14 @@
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.filter.RowFilterInterface;
-import org.apache.hadoop.hbase.filter.RowFilterSet;
-import org.apache.hadoop.hbase.filter.RowWhileMatchFilter;
-import org.apache.hadoop.hbase.filter.RowPrefixFilter;
-import org.apache.hadoop.hbase.filter.PageFilter;
-import org.apache.hadoop.hbase.filter.RowInclusiveStopFilter;
-import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.filter.*;
 import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.ObjectWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
-import org.apache.hadoop.hbase.io.Cell;
-import org.apache.hadoop.hbase.io.RowResult;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -145,10 +138,10 @@
     addToMap(Result[].class, code++);
     addToMap(Scan.class, code++);
 
-    addToMap(RowWhileMatchFilter.class, code++);
-    addToMap(RowPrefixFilter.class, code++);
+    addToMap(WhileMatchFilter.class, code++);
+    addToMap(PrefixFilter.class, code++);
     addToMap(PageFilter.class, code++);
-    addToMap(RowInclusiveStopFilter.class, code++);
+    addToMap(InclusiveStopFilter.class, code++);
     addToMap(ColumnCountGetFilter.class, code++);
   }
   

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/CachedBlock.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/CachedBlock.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/CachedBlock.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/CachedBlock.java Tue Jun 30 07:12:58 2009
@@ -33,13 +33,12 @@
  * either instantiating as in-memory or handling the transition from single
  * to multiple access.
  */
-class CachedBlock implements HeapSize, Comparable<CachedBlock> {
+public class CachedBlock implements HeapSize, Comparable<CachedBlock> {
   
   public final static long PER_BLOCK_OVERHEAD = ClassSize.align(
-    ClassSize.OBJECT + (2 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) +
+    ClassSize.OBJECT + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) +
     ClassSize.STRING + ClassSize.BYTE_BUFFER);
   
-  
   static enum BlockPriority { 
     /**
      * Accessed a single time (used for scan-resistance)

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java Tue Jun 30 07:12:58 2009
@@ -212,8 +212,8 @@
     this.stats = new CacheStats();
     this.count = new AtomicLong(0);
     this.elements = new AtomicLong(0);
-    this.overhead = getOverhead(maxSize, blockSize, mapConcurrencyLevel);
-    this.size = new AtomicLong(0);
+    this.overhead = calculateOverhead(maxSize, blockSize, mapConcurrencyLevel);
+    this.size = new AtomicLong(this.overhead);
     if(evictionThread) {
       this.evictionThread = new EvictionThread(this);
       this.evictionThread.start();
@@ -626,31 +626,20 @@
   }
   
   public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
-      (7 * Bytes.SIZEOF_LONG) + (5 * ClassSize.OBJECT) + Bytes.SIZEOF_BOOLEAN);
-  
-  public final static long CACHE_FUDGE_FACTOR = 1024 * 10; // 10k 
-  
-  public final static long MAP_SEGMENT_OVERHEAD = ClassSize.align(
-      ClassSize.REFERENCE + ClassSize.OBJECT + (3 * Bytes.SIZEOF_INT) +
-      Bytes.SIZEOF_FLOAT + ClassSize.ARRAY);
-  
-  public final static long MAP_ENTRY_OVERHEAD = ClassSize.align(
-      ClassSize.REFERENCE + ClassSize.OBJECT + (3 * ClassSize.REFERENCE) +
-      (2 * Bytes.SIZEOF_INT));
+      (3 * Bytes.SIZEOF_LONG) + (8 * ClassSize.REFERENCE) + 
+      (5 * Bytes.SIZEOF_FLOAT) + Bytes.SIZEOF_BOOLEAN
+      + ClassSize.OBJECT);
   
   // HeapSize implementation
   public long heapSize() {
-    return getCurrentSize() + overhead;
-  }
-  
-  public long cacheSize() {
     return getCurrentSize();
   }
   
-  public static long getOverhead(long maxSize, long blockSize, int concurrency){
-    return CACHE_FIXED_OVERHEAD + CACHE_FUDGE_FACTOR +
-    ((int)Math.ceil(maxSize*1.2/blockSize) * MAP_ENTRY_OVERHEAD) +
-    (concurrency * MAP_SEGMENT_OVERHEAD);
+  public static long calculateOverhead(long maxSize, long blockSize, int concurrency){
+    return CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP +
+        ((int)Math.ceil(maxSize*1.2/blockSize) 
+            * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
+        (concurrency * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
   }
   
   // Simple calculators of sizes given factors and maxSize
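
The replacement formula reads as: fixed cache overhead, plus the map itself,
plus one ConcurrentHashMap entry per expected block (maxSize * 1.2 / blockSize),
plus one segment per concurrency level; the constructor change above also seeds
the cache's size counter with this overhead. A rough way to exercise it
(parameter values are illustrative and the result depends on ClassSize for the
running JVM):

    import org.apache.hadoop.hbase.io.hfile.LruBlockCache;

    public class OverheadSketch {
      public static void main(String[] args) {
        long maxSize = 256L * 1024 * 1024;  // 256 MB cache
        long blockSize = 64L * 1024;        // 64 KB blocks
        int concurrency = 16;               // ConcurrentHashMap concurrency level
        // calculateOverhead() is the public static helper added by this patch.
        System.out.println(
            LruBlockCache.calculateOverhead(maxSize, blockSize, concurrency));
      }
    }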

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java Tue Jun 30 07:12:58 2009
@@ -101,7 +101,6 @@
       code = addToMap(VersionedProtocol.class, code);
       code = addToMap(HMasterInterface.class, code);
       code = addToMap(HMasterRegionInterface.class, code);
-      code = addToMap(TransactionalRegionInterface.class, code);
     }
     // End of hbase modifications.
 

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/ipc/HBaseRPCProtocolVersion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/ipc/HBaseRPCProtocolVersion.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/ipc/HBaseRPCProtocolVersion.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/ipc/HBaseRPCProtocolVersion.java Tue Jun 30 07:12:58 2009
@@ -71,7 +71,8 @@
    * <li>Version 17: Added incrementColumnValue.</li>
    * <li>Version 18: HBASE-1302.</li>
    * <li>Version 19: Added getClusterStatus().</li>
+   * <li>Version 20: Backed Transaction HBase out of HBase core.</li>
    * </ul>
    */
-  public static final long versionID = 19L;
+  public static final long versionID = 20L;
 }

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/MemStore.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/MemStore.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/MemStore.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/MemStore.java Tue Jun 30 07:12:58 2009
@@ -27,13 +27,9 @@
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
-import java.util.NavigableMap;
 import java.util.NavigableSet;
-import java.util.SortedMap;
 import java.util.SortedSet;
 import java.util.TreeSet;
-import java.util.concurrent.ConcurrentSkipListMap;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import org.apache.commons.logging.Log;

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/QueryMatcher.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/QueryMatcher.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/QueryMatcher.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/QueryMatcher.java Tue Jun 30 07:12:58 2009
@@ -258,7 +258,7 @@
     }
 
     /* Check Deletes
-     * If deleted, move to next KeyValue 
+     * If deleted, move to next KeyValue
      */
     if (!deletes.isEmpty() && deletes.isDeleted(bytes, columnOffset,
         columnLength, timestamp)) {

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/ClassSize.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/ClassSize.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/ClassSize.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/ClassSize.java Tue Jun 30 07:12:58 2009
@@ -64,6 +64,15 @@
 
   /** Overhead for TreeMap */
   public static int TREEMAP = 0;
+  
+  /** Overhead for ConcurrentHashMap */
+  public static int CONCURRENT_HASHMAP = 0;
+  
+  /** Overhead for ConcurrentHashMap.Entry */
+  public static int CONCURRENT_HASHMAP_ENTRY = 0;
+  
+  /** Overhead for ConcurrentHashMap.Segment */
+  public static int CONCURRENT_HASHMAP_SEGMENT = 0;
 
   private static final String THIRTY_TWO = "32";
 
@@ -81,25 +90,34 @@
     if (arcModel.equals(THIRTY_TWO)) {
       REFERENCE = 4;
     }
+
+    OBJECT = 2 * REFERENCE;
     
     ARRAY = 3 * REFERENCE;
 
-    ARRAYLIST = align(OBJECT + REFERENCE + Bytes.SIZEOF_INT + 
-        align(Bytes.SIZEOF_INT));
+    ARRAYLIST = align(OBJECT + align(REFERENCE) + align(ARRAY) +
+        (2 * Bytes.SIZEOF_INT));
     
-    BYTE_BUFFER = align(OBJECT + REFERENCE + Bytes.SIZEOF_INT + 
-        3 * Bytes.SIZEOF_BOOLEAN + 4 * Bytes.SIZEOF_INT + Bytes.SIZEOF_LONG); 
+    BYTE_BUFFER = align(OBJECT + align(REFERENCE) + align(ARRAY) + 
+        (5 * Bytes.SIZEOF_INT) + 
+        (3 * Bytes.SIZEOF_BOOLEAN) + Bytes.SIZEOF_LONG); 
     
     INTEGER = align(OBJECT + Bytes.SIZEOF_INT);
     
     MAP_ENTRY = align(OBJECT + 5 * REFERENCE + Bytes.SIZEOF_BOOLEAN);
     
-    OBJECT = 2 * REFERENCE;
+    TREEMAP = align(OBJECT + (2 * Bytes.SIZEOF_INT) + align(7 * REFERENCE));
+    
+    STRING = align(OBJECT + ARRAY + REFERENCE + 3 * Bytes.SIZEOF_INT);
     
-    TREEMAP = align(OBJECT + 2 * Bytes.SIZEOF_INT + (5+2) * REFERENCE + 
-        ClassSize.align(OBJECT + Bytes.SIZEOF_INT));
+    CONCURRENT_HASHMAP = align((2 * Bytes.SIZEOF_INT) + ARRAY + 
+        (6 * REFERENCE) + OBJECT);
     
-    STRING = align(OBJECT + REFERENCE + 3 * Bytes.SIZEOF_INT);
+    CONCURRENT_HASHMAP_ENTRY = align(REFERENCE + OBJECT + (3 * REFERENCE) +
+        (2 * Bytes.SIZEOF_INT));
+      
+    CONCURRENT_HASHMAP_SEGMENT = align(REFERENCE + OBJECT + 
+        (3 * Bytes.SIZEOF_INT) + Bytes.SIZEOF_FLOAT + ARRAY);
   }
   
   /**
@@ -186,8 +204,8 @@
       if (LOG.isDebugEnabled()) {
         // Write out region name as string and its encoded name.
         LOG.debug("Primitives " + coeff[0] + ", arrays " + coeff[1] +
-            ", references(inlcuding " + nrOfRefsPerObj + 
-            ", for object overhead) " + coeff[2] + ", refSize " + REFERENCE + 
+            ", references(includes " + nrOfRefsPerObj + 
+            " for object overhead) " + coeff[2] + ", refSize " + REFERENCE + 
             ", size " + size);
       }
     }
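
A back-of-envelope check of the new ConcurrentHashMap constants, assuming a
64-bit VM where a reference is 8 bytes and align() rounds up to a multiple of 8
(ClassSize itself stays authoritative; the numbers are only a sanity check):

    public class ClassSizeSketch {
      // Assumption: 8-byte alignment, as on a typical 64-bit VM.
      static int align(int n) {
        return (n + 7) & ~7;
      }

      public static void main(String[] args) {
        int reference = 8;             // assumed 64-bit reference size
        int object = 2 * reference;    // 16, per the patched initialization
        int array = 3 * reference;     // 24
        int sizeofInt = 4;
        int sizeofFloat = 4;
        int entry = align(reference + object + (3 * reference) + (2 * sizeofInt));
        int segment = align(reference + object + (3 * sizeofInt) + sizeofFloat + array);
        System.out.println("CONCURRENT_HASHMAP_ENTRY   ~ " + entry);    // 56
        System.out.println("CONCURRENT_HASHMAP_SEGMENT ~ " + segment);  // 64
      }
    }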

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseClusterTestCase.java Tue Jun 30 07:12:58 2009
@@ -25,12 +25,12 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseTestCase.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseTestCase.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseTestCase.java Tue Jun 30 07:12:58 2009
@@ -33,7 +33,6 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.client.Delete;
@@ -49,6 +48,7 @@
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 
 /**
  * Abstract base class for test cases. Performs all static initialization

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java Tue Jun 30 07:12:58 2009
@@ -436,7 +436,7 @@
     void testRow(final int i) throws IOException {
       Scan scan = new Scan(getRandomRow(this.rand, this.totalRows));
       scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
-      scan.setFilter(new RowWhileMatchFilter(new PageFilter(120)));
+      scan.setFilter(new WhileMatchFilter(new PageFilter(120)));
       ResultScanner s = this.table.getScanner(scan);
       //int count = 0;
       for (Result rr = null; (rr = s.next()) != null;) {

Added: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilterSet.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilterSet.java?rev=789592&view=auto
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilterSet.java (added)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilterSet.java Tue Jun 30 07:12:58 2009
@@ -0,0 +1,111 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.filter;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.hadoop.hbase.util.Bytes;
+
+
+import junit.framework.TestCase;
+
+/**
+ * Tests filter sets
+ *
+ */
+public class TestFilterSet extends TestCase {
+  static final int MAX_PAGES = 5;
+  static final char FIRST_CHAR = 'a';
+  static final char LAST_CHAR = 'e';
+  static byte[] GOOD_BYTES = Bytes.toBytes("abc");
+  static byte[] BAD_BYTES = Bytes.toBytes("def");
+
+  /**
+   * Test "must pass one"
+   * @throws Exception
+   */
+  public void testMPONE() throws Exception {
+    Set<Filter> filters = new HashSet<Filter>();
+    filters.add(new PageFilter(MAX_PAGES));
+    filters.add(new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("yyy"))));
+    Filter filterMPONE =
+        new FilterSet(FilterSet.Operator.MUST_PASS_ONE, filters);
+    /* A filter must implement each of the steps below:
+     * <ul>
+     * <li>{@link #reset()}</li>
+     * <li>{@link #filterAllRemaining()} -> true indicates the scan is over; false, keep going.</li>
+     * <li>{@link #filterRowKey(byte[],int,int)} -> true to drop this row,
+     * if false, we will also call</li>
+     * <li>{@link #filterKeyValue(org.apache.hadoop.hbase.KeyValue)} -> true to drop this key/value</li>
+     * <li>{@link #filterRow()} -> last chance to drop the entire row based on the sequence of
+     * filterKeyValue() calls. E.g. filter a row if it doesn't contain a specified column.
+     * </li>
+     * </ul>
+    */
+    filterMPONE.reset();
+    assertFalse(filterMPONE.filterAllRemaining());
+    byte [] rowkey = Bytes.toBytes("yyyyyyyyy");
+    assertFalse(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
+    
+  }
+
+  /**
+   * Test "must pass all"
+   * @throws Exception
+   */
+  public void testMPALL() throws Exception {
+    Set<Filter> filters = new HashSet<Filter>();
+    filters.add(new PageFilter(MAX_PAGES));
+    filters.add(new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("yyy"))));
+    Filter filterMPALL =
+      new FilterSet(FilterSet.Operator.MUST_PASS_ALL, filters);
+  }
+
+  /**
+   * Test serialization
+   * @throws Exception
+   */
+  public void testSerialization() throws Exception {
+    Set<Filter> filters = new HashSet<Filter>();
+    filters.add(new PageFilter(MAX_PAGES));
+    filters.add(new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("yyy"))));
+    Filter filterMPALL =
+      new FilterSet(FilterSet.Operator.MUST_PASS_ALL, filters);
+
+    // Decompose filterMPALL to bytes.
+    ByteArrayOutputStream stream = new ByteArrayOutputStream();
+    DataOutputStream out = new DataOutputStream(stream);
+    filterMPALL.write(out);
+    out.close();
+    byte[] buffer = stream.toByteArray();
+
+    // Recompose filterMPALL.
+    DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer));
+    FilterSet newFilter = new FilterSet();
+    newFilter.readFields(in);
+
+    // TODO: Run TESTS!!!
+  }
+}
\ No newline at end of file
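
testMPALL above builds its FilterSet but does not yet assert anything against
it. One way it might be exercised, assuming MUST_PASS_ALL only lets a row
through when every wrapped filter lets it through (a sketch, not part of the
patch):

    // Inside testMPALL(), after constructing filterMPALL:
    filterMPALL.reset();
    assertFalse(filterMPALL.filterAllRemaining());
    // "yyyyyyyyy" matches the "yyy" prefix and is within the page limit, so
    // neither wrapped filter should reject it under MUST_PASS_ALL.
    byte[] rowkey = Bytes.toBytes("yyyyyyyyy");
    assertFalse(filterMPALL.filterRowKey(rowkey, 0, rowkey.length));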

Added: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestPrefixFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestPrefixFilter.java?rev=789592&view=auto
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestPrefixFilter.java (added)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestPrefixFilter.java Tue Jun 30 07:12:58 2009
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.filter;
+
+import junit.framework.TestCase;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.UnsupportedEncodingException;
+
+public class TestPrefixFilter extends TestCase {
+  Filter mainFilter;
+  static final char FIRST_CHAR = 'a';
+  static final char LAST_CHAR = 'e';
+  static final String HOST_PREFIX = "org.apache.site-";
+  static byte [] GOOD_BYTES = null;
+
+  static {
+    try {
+      GOOD_BYTES = "abc".getBytes(HConstants.UTF8_ENCODING);
+    } catch (UnsupportedEncodingException e) {
+      fail();
+    }
+  }
+
+  protected void setUp() throws Exception {
+    super.setUp();
+    this.mainFilter = new PrefixFilter(Bytes.toBytes(HOST_PREFIX));
+  }
+
+  public void testPrefixOnRow() throws Exception {
+    prefixRowTests(mainFilter);
+  }
+
+  public void testPrefixOnRowInsideWhileMatchRow() throws Exception {
+    prefixRowTests(new WhileMatchFilter(this.mainFilter), true);
+  }
+
+  public void testSerialization() throws Exception {
+    // Decompose mainFilter to bytes.
+    ByteArrayOutputStream stream = new ByteArrayOutputStream();
+    DataOutputStream out = new DataOutputStream(stream);
+    mainFilter.write(out);
+    out.close();
+    byte[] buffer = stream.toByteArray();
+
+    // Recompose filter.
+    DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer));
+    Filter newFilter = new PrefixFilter();
+    newFilter.readFields(in);
+
+    // Ensure the serialization preserved the filter by running all tests.
+    prefixRowTests(newFilter);
+  }
+
+  private void prefixRowTests(Filter filter) throws Exception {
+    prefixRowTests(filter, false);
+  }
+
+  private void prefixRowTests(Filter filter, boolean lastFilterAllRemaining)
+  throws Exception {
+    for (char c = FIRST_CHAR; c <= LAST_CHAR; c++) {
+      byte [] t = createRow(c);
+      assertFalse("Failed with character " + c,
+        filter.filterRowKey(t, 0, t.length));
+      assertFalse(filter.filterAllRemaining());
+    }
+    String yahooSite = "com.yahoo.www";
+    byte [] yahooSiteBytes = Bytes.toBytes(yahooSite);
+    assertTrue("Failed with character " +
+      yahooSite, filter.filterRowKey(yahooSiteBytes, 0, yahooSiteBytes.length));
+    assertEquals(filter.filterAllRemaining(), lastFilterAllRemaining);
+  }
+
+  private byte [] createRow(final char c) {
+    return Bytes.toBytes(HOST_PREFIX + Character.toString(c));
+  }
+}
\ No newline at end of file

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestRowPrefixFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestRowPrefixFilter.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestRowPrefixFilter.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestRowPrefixFilter.java Tue Jun 30 07:12:58 2009
@@ -55,7 +55,7 @@
   }
 
   public void testPrefixOnRowInsideWhileMatchRow() throws Exception {
-    prefixRowTests(new RowWhileMatchFilter(this.mainFilter), true);
+    prefixRowTests(new WhileMatchFilter(this.mainFilter), true);
   }
 
   public void testSerialization() throws Exception {

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java Tue Jun 30 07:12:58 2009
@@ -32,7 +32,7 @@
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.filter.StopRowFilter;
-import org.apache.hadoop.hbase.filter.RowPrefixFilter;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparator;
 
@@ -81,9 +81,9 @@
         RowFilterInterface.class);
     assertTrue(obj instanceof StopRowFilter);
     // Check that filters can be serialized
-    obj = doType(conf, new RowPrefixFilter(HConstants.EMPTY_BYTE_ARRAY),
-      RowPrefixFilter.class);
-    assertTrue(obj instanceof RowPrefixFilter);
+    obj = doType(conf, new PrefixFilter(HConstants.EMPTY_BYTE_ARRAY),
+      PrefixFilter.class);
+    assertTrue(obj instanceof PrefixFilter);
   }
   
   private Object doType(final HBaseConfiguration conf, final Object value,


