hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From apurt...@apache.org
Subject svn commit: r813052 [3/4] - in /hadoop/hbase/branches/0.20_on_hadoop-0.18.3: ./ bin/ conf/ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/filter/ src/java/org/apache/hadoop/hbase/io/ src/java...
Date Wed, 09 Sep 2009 17:14:24 GMT
Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java?rev=813052&r1=813051&r2=813052&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java Wed Sep  9 17:14:22 2009
@@ -278,9 +278,11 @@
         get.setMaxVersions(numVersions);
         Result result = table.get(get);
         List<Cell> cells = new ArrayList<Cell>();
-        for(KeyValue kv : result.sorted()) {
-          cells.add(new Cell(kv.getValue(), kv.getTimestamp()));
-        }
+	if ( ! result.isEmpty() ) {
+	    for(KeyValue kv : result.sorted()) {
+		cells.add(new Cell(kv.getValue(), kv.getTimestamp()));
+	    }
+	}
         return ThriftUtilities.cellFromHBase(cells.toArray(new Cell[0]));
       } catch (IOException e) {
         throw new IOError(e.getMessage());
@@ -304,12 +306,14 @@
         get.setTimeRange(Long.MIN_VALUE, timestamp);
         get.setMaxVersions(numVersions);
         Result result = table.get(get);
-        List<Cell> cells = new ArrayList<Cell>();
-        KeyValue [] kvs = result.sorted();
-        if (kvs != null) {
-          for(KeyValue kv : kvs) {
-            cells.add(new Cell(kv.getValue(), kv.getTimestamp()));
-          }
+	List<Cell> cells = new ArrayList<Cell>();
+	if ( ! result.isEmpty() ) {
+	    KeyValue [] kvs = result.sorted();
+	    if (kvs != null) {
+		for(KeyValue kv : kvs) {
+		    cells.add(new Cell(kv.getValue(), kv.getTimestamp()));
+		}
+	    }
         }
         return ThriftUtilities.cellFromHBase(cells.toArray(new Cell[0]));
       } catch (IOException e) {

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/Bytes.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/Bytes.java?rev=813052&r1=813051&r2=813052&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/Bytes.java (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/Bytes.java Wed Sep  9 17:14:22 2009
@@ -242,7 +242,7 @@
    * @return String made from <code>b</code>
    */
   public static String toString(final byte [] b) {
-    if(b == null) {
+    if (b == null) {
       return null;
     }
     return toString(b, 0, b.length);

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/ClassSize.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/ClassSize.java?rev=813052&r1=813051&r2=813052&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/ClassSize.java (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/util/ClassSize.java Wed Sep  9 17:14:22 2009
@@ -92,6 +92,12 @@
   /** Overhead for AtomicBoolean */
   public static int ATOMIC_BOOLEAN = 0;
   
+  /** Overhead for CopyOnWriteArraySet */
+  public static int COPYONWRITE_ARRAYSET = 0;
+  
+  /** Overhead for CopyOnWriteArrayList */
+  public static int COPYONWRITE_ARRAYLIST = 0;
+  
   private static final String THIRTY_TWO = "32";
 
   /**
@@ -151,6 +157,9 @@
     
     ATOMIC_BOOLEAN = align(OBJECT + Bytes.SIZEOF_BOOLEAN);
     
+    COPYONWRITE_ARRAYSET = align(OBJECT + REFERENCE);
+    
+    COPYONWRITE_ARRAYLIST = align(OBJECT + (2 * REFERENCE) + ARRAY);
   }
   
   /**

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseTestCase.java?rev=813052&r1=813051&r2=813052&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseTestCase.java (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/HBaseTestCase.java Wed Sep  9 17:14:22 2009
@@ -62,8 +62,7 @@
   protected final static byte [] fam1 = Bytes.toBytes("colfamily1");
   protected final static byte [] fam2 = Bytes.toBytes("colfamily2");
   protected final static byte [] fam3 = Bytes.toBytes("colfamily3");
-  protected static final byte [][] COLUMNS = {fam1,
-    fam2, fam3};
+  protected static final byte [][] COLUMNS = {fam1, fam2, fam3};
 
   private boolean localfs = false;
   protected Path testDir = null;

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestZooKeeper.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestZooKeeper.java?rev=813052&r1=813051&r2=813052&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestZooKeeper.java (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestZooKeeper.java Wed Sep  9 17:14:22 2009
@@ -21,6 +21,7 @@
 
 import java.io.IOException;
 
+import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HConnectionManager;
@@ -114,11 +115,9 @@
     connection.relocateRegion(HConstants.ROOT_TABLE_NAME, HConstants.EMPTY_BYTE_ARRAY);
   }
 
-  /**
-   *
-   */
   public void testRegionServerSessionExpired() {
     try {
+      this.conf.setBoolean("hbase.regionserver.restart.on.zk.expire", true);
       new HTable(conf, HConstants.META_TABLE_NAME);
   
       ZooKeeperWrapper zkw = new ZooKeeperWrapper(conf, EmptyWatcher.instance);
@@ -152,4 +151,28 @@
       fail();
     }
   }
+  
+  public void testMultipleZK() {
+    try {
+      HTable localMeta = new HTable(conf, HConstants.META_TABLE_NAME);
+      HBaseConfiguration otherConf = new HBaseConfiguration(conf);
+      otherConf.set(HConstants.ZOOKEEPER_QUORUM, "127.0.0.1");
+      HTable ipMeta = new HTable(conf, HConstants.META_TABLE_NAME);
+      
+      // dummy, just to open the connection
+      localMeta.exists(new Get(HConstants.LAST_ROW));
+      ipMeta.exists(new Get(HConstants.LAST_ROW));
+
+      // make sure they aren't the same
+      assertFalse(HConnectionManager.getClientZooKeeperWatcher(conf)
+          .getZooKeeperWrapper() == HConnectionManager.getClientZooKeeperWatcher(
+          otherConf).getZooKeeperWrapper());
+      assertFalse(HConnectionManager.getConnection(conf)
+          .getZooKeeperWrapper().getQuorumServers().equals(HConnectionManager
+          .getConnection(otherConf).getZooKeeperWrapper().getQuorumServers()));
+    } catch (Exception e) {
+      e.printStackTrace();
+      fail();
+    }
+  }
 }

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java?rev=813052&r1=813051&r2=813052&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java Wed Sep  9 17:14:22 2009
@@ -104,7 +104,7 @@
     }
   }
   
-  public void testRowsBatchUpdateBufferedManyManyFlushes() {
+  public void testRowsBatchUpdateBufferedManyManyFlushes() throws IOException {
     table.setAutoFlush(false);
     table.setWriteBufferSize(10);
     ArrayList<BatchUpdate> rowsUpdate = new ArrayList<BatchUpdate>();

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/client/TestPut.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/client/TestPut.java?rev=813052&r1=813051&r2=813052&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/client/TestPut.java (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/client/TestPut.java Wed Sep  9 17:14:22 2009
@@ -164,7 +164,7 @@
     }
   }
   
-  public void testRowsPutBufferedManyManyFlushes() {
+  public void testRowsPutBufferedManyManyFlushes() throws IOException {
     table.setAutoFlush(false);
     table.setWriteBufferSize(10);
     ArrayList<Put> rowsUpdate = new ArrayList<Put>();

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilter.java?rev=813052&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilter.java (added)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilter.java Wed Sep  9 17:14:22 2009
@@ -0,0 +1,868 @@
+package org.apache.hadoop.hbase.filter;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.FilterList.Operator;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.InternalScanner;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * Test filters at the HRegion doorstep.
+ */
+public class TestFilter extends HBaseTestCase {
+  private final Log LOG = LogFactory.getLog(this.getClass());
+  private HRegion region;
+  
+  //
+  // Rows, Qualifiers, and Values are in two groups, One and Two.
+  //
+
+  private static final byte [][] ROWS_ONE = {
+      Bytes.toBytes("testRowOne-0"), Bytes.toBytes("testRowOne-1"),
+      Bytes.toBytes("testRowOne-2"), Bytes.toBytes("testRowOne-3")
+  };
+
+  private static final byte [][] ROWS_TWO = {
+      Bytes.toBytes("testRowTwo-0"), Bytes.toBytes("testRowTwo-1"),
+      Bytes.toBytes("testRowTwo-2"), Bytes.toBytes("testRowTwo-3")
+  };
+  
+  private static final byte [][] FAMILIES = {
+    Bytes.toBytes("testFamilyOne"), Bytes.toBytes("testFamilyTwo")
+  };
+
+  private static final byte [][] QUALIFIERS_ONE = {
+    Bytes.toBytes("testQualifierOne-0"), Bytes.toBytes("testQualifierOne-1"),
+    Bytes.toBytes("testQualifierOne-2"), Bytes.toBytes("testQualifierOne-3")
+  };
+  
+  private static final byte [][] QUALIFIERS_TWO = {
+    Bytes.toBytes("testQualifierTwo-0"), Bytes.toBytes("testQualifierTwo-1"),
+    Bytes.toBytes("testQualifierTwo-2"), Bytes.toBytes("testQualifierTwo-3")
+  };
+  
+  private static final byte [][] VALUES = {
+    Bytes.toBytes("testValueOne"), Bytes.toBytes("testValueTwo")
+  };
+  
+  private long numRows = ROWS_ONE.length + ROWS_TWO.length;
+  private long colsPerRow = FAMILIES.length * QUALIFIERS_ONE.length;
+    
+  
+  protected void setUp() throws Exception {
+    super.setUp();
+    HTableDescriptor htd = new HTableDescriptor(getName());
+    htd.addFamily(new HColumnDescriptor(FAMILIES[0]));
+    htd.addFamily(new HColumnDescriptor(FAMILIES[1]));
+    HRegionInfo info = new HRegionInfo(htd, null, null, false);
+    this.region = HRegion.createHRegion(info, this.testDir, this.conf);
+    
+    // Insert first half
+    for(byte [] ROW : ROWS_ONE) {
+      Put p = new Put(ROW);
+      for(byte [] QUALIFIER : QUALIFIERS_ONE) {
+        p.add(FAMILIES[0], QUALIFIER, VALUES[0]);
+      }
+      this.region.put(p);
+    }
+    for(byte [] ROW : ROWS_TWO) {
+      Put p = new Put(ROW);
+      for(byte [] QUALIFIER : QUALIFIERS_TWO) {
+        p.add(FAMILIES[1], QUALIFIER, VALUES[1]);
+      }
+      this.region.put(p);
+    }
+    
+    // Flush
+    this.region.flushcache();
+    
+    // Insert second half (reverse families)
+    for(byte [] ROW : ROWS_ONE) {
+      Put p = new Put(ROW);
+      for(byte [] QUALIFIER : QUALIFIERS_ONE) {
+        p.add(FAMILIES[1], QUALIFIER, VALUES[0]);
+      }
+      this.region.put(p);
+    }
+    for(byte [] ROW : ROWS_TWO) {
+      Put p = new Put(ROW);
+      for(byte [] QUALIFIER : QUALIFIERS_TWO) {
+        p.add(FAMILIES[0], QUALIFIER, VALUES[1]);
+      }
+      this.region.put(p);
+    }
+    
+    // Delete the second qualifier from all rows and families
+    for(byte [] ROW : ROWS_ONE) {
+      Delete d = new Delete(ROW);
+      d.deleteColumns(FAMILIES[0], QUALIFIERS_ONE[1]);
+      d.deleteColumns(FAMILIES[1], QUALIFIERS_ONE[1]);
+      this.region.delete(d, null, false);
+    }    
+    for(byte [] ROW : ROWS_TWO) {
+      Delete d = new Delete(ROW);
+      d.deleteColumns(FAMILIES[0], QUALIFIERS_TWO[1]);
+      d.deleteColumns(FAMILIES[1], QUALIFIERS_TWO[1]);
+      this.region.delete(d, null, false);
+    }
+    colsPerRow -= 2;
+    
+    // Delete the second rows from both groups, one column at a time
+    for(byte [] QUALIFIER : QUALIFIERS_ONE) {
+      Delete d = new Delete(ROWS_ONE[1]);
+      d.deleteColumns(FAMILIES[0], QUALIFIER);
+      d.deleteColumns(FAMILIES[1], QUALIFIER);
+      this.region.delete(d, null, false);
+    }
+    for(byte [] QUALIFIER : QUALIFIERS_TWO) {
+      Delete d = new Delete(ROWS_TWO[1]);
+      d.deleteColumns(FAMILIES[0], QUALIFIER);
+      d.deleteColumns(FAMILIES[1], QUALIFIER);
+      this.region.delete(d, null, false);
+    }
+    numRows -= 2;
+  }
+
+  protected void tearDown() throws Exception {
+    super.tearDown();
+    this.region.close();
+  }
+
+  public void testNoFilter() throws Exception {
+    
+    // No filter
+    long expectedRows = this.numRows;
+    long expectedKeys = this.colsPerRow;
+    
+    // Both families
+    Scan s = new Scan();
+    verifyScan(s, expectedRows, expectedKeys);
+
+    // One family
+    s = new Scan();
+    s.addFamily(FAMILIES[0]);
+    verifyScan(s, expectedRows, expectedKeys/2);
+  }
+  
+  public void testPrefixFilter() throws Exception {
+    
+    // Grab rows from group one (half of total)
+    
+    long expectedRows = this.numRows / 2;
+    long expectedKeys = this.colsPerRow;
+    
+    Scan s = new Scan();
+    s.setFilter(new PrefixFilter(Bytes.toBytes("testRowOne")));
+
+    verifyScan(s, expectedRows, expectedKeys);
+    
+  }
+  
+  public void testPageFilter() throws Exception {
+    
+    // KVs in first 6 rows
+    KeyValue [] expectedKVs = {
+      // testRowOne-0
+      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
+      new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
+      new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+      // testRowOne-2
+      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
+      new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
+      new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+      // testRowOne-3
+      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
+      new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
+      new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+      // testRowTwo-0
+      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+      new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+      new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+      // testRowTwo-2
+      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+      new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+      new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+      // testRowTwo-3
+      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+      new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+      new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1])
+    };
+    
+    // Grab all 6 rows
+    long expectedRows = 6;
+    long expectedKeys = this.colsPerRow;
+    Scan s = new Scan();
+    s.setFilter(new PageFilter(expectedRows));
+    verifyScan(s, expectedRows, expectedKeys);
+    s.setFilter(new PageFilter(expectedRows));
+    verifyScanFull(s, expectedKVs);
+    
+    // Grab first 4 rows (6 cols per row)
+    expectedRows = 4;
+    expectedKeys = this.colsPerRow;
+    s = new Scan();
+    s.setFilter(new PageFilter(expectedRows));
+    verifyScan(s, expectedRows, expectedKeys);
+    s.setFilter(new PageFilter(expectedRows));
+    verifyScanFull(s, Arrays.copyOf(expectedKVs, 24));
+    
+    // Grab first 2 rows
+    expectedRows = 2;
+    expectedKeys = this.colsPerRow;
+    s = new Scan();
+    s.setFilter(new PageFilter(expectedRows));
+    verifyScan(s, expectedRows, expectedKeys);
+    s.setFilter(new PageFilter(expectedRows));
+    verifyScanFull(s, Arrays.copyOf(expectedKVs, 12));
+
+    // Grab first row
+    expectedRows = 1;
+    expectedKeys = this.colsPerRow;
+    s = new Scan();
+    s.setFilter(new PageFilter(expectedRows));
+    verifyScan(s, expectedRows, expectedKeys);
+    s.setFilter(new PageFilter(expectedRows));
+    verifyScanFull(s, Arrays.copyOf(expectedKVs, 6));
+    
+  }
+  
+  public void testInclusiveStopFilter() throws IOException {
+
+    // Grab rows from group one
+    
+    // If we just use start/stop row, we get total/2 - 1 rows
+    long expectedRows = (this.numRows / 2) - 1;
+    long expectedKeys = this.colsPerRow;
+    Scan s = new Scan(Bytes.toBytes("testRowOne-0"), 
+        Bytes.toBytes("testRowOne-3"));
+    verifyScan(s, expectedRows, expectedKeys);
+    
+    // Now use start row with inclusive stop filter
+    expectedRows = this.numRows / 2;
+    s = new Scan(Bytes.toBytes("testRowOne-0"));
+    s.setFilter(new InclusiveStopFilter(Bytes.toBytes("testRowOne-3")));
+    verifyScan(s, expectedRows, expectedKeys);
+
+    // Grab rows from group two
+    
+    // If we just use start/stop row, we get total/2 - 1 rows
+    expectedRows = (this.numRows / 2) - 1;
+    expectedKeys = this.colsPerRow;
+    s = new Scan(Bytes.toBytes("testRowTwo-0"), 
+        Bytes.toBytes("testRowTwo-3"));
+    verifyScan(s, expectedRows, expectedKeys);
+    
+    // Now use start row with inclusive stop filter
+    expectedRows = this.numRows / 2;
+    s = new Scan(Bytes.toBytes("testRowTwo-0"));
+    s.setFilter(new InclusiveStopFilter(Bytes.toBytes("testRowTwo-3")));
+    verifyScan(s, expectedRows, expectedKeys);
+
+  }
+  
+  public void testQualifierFilter() throws IOException {
+    
+    // Match two keys (one from each family) in half the rows
+    long expectedRows = this.numRows / 2;
+    long expectedKeys = 2;
+    Filter f = new QualifierFilter(CompareOp.EQUAL,
+        new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
+    Scan s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match keys less than same qualifier
+    // Expect only two keys (one from each family) in half the rows
+    expectedRows = this.numRows / 2;
+    expectedKeys = 2;
+    f = new QualifierFilter(CompareOp.LESS,
+        new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match keys less than or equal
+    // Expect four keys (two from each family) in half the rows
+    expectedRows = this.numRows / 2;
+    expectedKeys = 4;
+    f = new QualifierFilter(CompareOp.LESS_OR_EQUAL,
+        new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match keys not equal
+    // Expect four keys (two from each family)
+    // Only look in first group of rows
+    expectedRows = this.numRows / 2;
+    expectedKeys = 4;
+    f = new QualifierFilter(CompareOp.NOT_EQUAL,
+        new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
+    s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match keys greater or equal
+    // Expect four keys (two from each family)
+    // Only look in first group of rows
+    expectedRows = this.numRows / 2;
+    expectedKeys = 4;
+    f = new QualifierFilter(CompareOp.GREATER_OR_EQUAL,
+        new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
+    s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match keys greater
+    // Expect two keys (one from each family)
+    // Only look in first group of rows
+    expectedRows = this.numRows / 2;
+    expectedKeys = 2;
+    f = new QualifierFilter(CompareOp.GREATER,
+        new BinaryComparator(Bytes.toBytes("testQualifierOne-2")));
+    s = new Scan(HConstants.EMPTY_START_ROW, Bytes.toBytes("testRowTwo"));
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match keys not equal to
+    // Look across rows and fully validate the keys and ordering
+    // Expect varied numbers of keys, 4 per row in group one, 6 per row in group two
+    f = new QualifierFilter(CompareOp.NOT_EQUAL,
+        new BinaryComparator(QUALIFIERS_ONE[2]));
+    s = new Scan();
+    s.setFilter(f);
+    
+    KeyValue [] kvs = {
+        // testRowOne-0
+        new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+        new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+        // testRowOne-2
+        new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+        new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+        // testRowOne-3
+        new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+        new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+        // testRowTwo-0
+        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+        // testRowTwo-2
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+        // testRowTwo-3
+        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+    };
+    verifyScanFull(s, kvs);
+     
+    
+    // Test across rows and groups with a regex
+    // Filter out "test*-2"
+    // Expect 4 keys per row across both groups
+    f = new QualifierFilter(CompareOp.NOT_EQUAL,
+        new RegexStringComparator("test.+-2"));
+    s = new Scan();
+    s.setFilter(f);
+    
+    kvs = new KeyValue [] {
+        // testRowOne-0
+        new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+        new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+        // testRowOne-2
+        new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+        new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+        // testRowOne-3
+        new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+        new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+        // testRowTwo-0
+        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+        // testRowTwo-2
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+        // testRowTwo-3
+        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+    };
+    verifyScanFull(s, kvs);
+     
+  }
+  
+  public void testRowFilter() throws IOException {
+
+    // Match a single row, all keys
+    long expectedRows = 1;
+    long expectedKeys = this.colsPerRow;
+    Filter f = new RowFilter(CompareOp.EQUAL,
+        new BinaryComparator(Bytes.toBytes("testRowOne-2")));
+    Scan s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match a two rows, one from each group, using regex
+    expectedRows = 2;
+    expectedKeys = this.colsPerRow;
+    f = new RowFilter(CompareOp.EQUAL,
+        new RegexStringComparator("testRow.+-2"));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match rows less than
+    // Expect all keys in one row
+    expectedRows = 1;
+    expectedKeys = this.colsPerRow;
+    f = new RowFilter(CompareOp.LESS,
+        new BinaryComparator(Bytes.toBytes("testRowOne-2")));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match rows less than or equal
+    // Expect all keys in two rows
+    expectedRows = 2;
+    expectedKeys = this.colsPerRow;
+    f = new RowFilter(CompareOp.LESS_OR_EQUAL,
+        new BinaryComparator(Bytes.toBytes("testRowOne-2")));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match rows not equal
+    // Expect all keys in all but one row
+    expectedRows = this.numRows - 1;
+    expectedKeys = this.colsPerRow;
+    f = new RowFilter(CompareOp.NOT_EQUAL,
+        new BinaryComparator(Bytes.toBytes("testRowOne-2")));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match keys greater or equal
+    // Expect all keys in all but one row
+    expectedRows = this.numRows - 1;
+    expectedKeys = this.colsPerRow;
+    f = new RowFilter(CompareOp.GREATER_OR_EQUAL,
+        new BinaryComparator(Bytes.toBytes("testRowOne-2")));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match keys greater
+    // Expect all keys in all but two rows
+    expectedRows = this.numRows - 2;
+    expectedKeys = this.colsPerRow;
+    f = new RowFilter(CompareOp.GREATER,
+        new BinaryComparator(Bytes.toBytes("testRowOne-2")));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match rows not equal to testRowOne-2
+    // Look across rows and fully validate the keys and ordering
+    // Should see all keys in all rows but testRowOne-2
+    f = new RowFilter(CompareOp.NOT_EQUAL,
+        new BinaryComparator(Bytes.toBytes("testRowOne-2")));
+    s = new Scan();
+    s.setFilter(f);
+    
+    KeyValue [] kvs = {
+        // testRowOne-0
+        new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
+        new KeyValue(ROWS_ONE[0], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+        new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
+        new KeyValue(ROWS_ONE[0], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+        // testRowOne-3
+        new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
+        new KeyValue(ROWS_ONE[3], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+        new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
+        new KeyValue(ROWS_ONE[3], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+        // testRowTwo-0
+        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+        // testRowTwo-2
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+        // testRowTwo-3
+        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+    };
+    verifyScanFull(s, kvs);
+     
+    
+    // Test across rows and groups with a regex
+    // Filter out everything that doesn't match "*-2"
+    // Expect all keys in two rows
+    f = new RowFilter(CompareOp.EQUAL,
+        new RegexStringComparator(".+-2"));
+    s = new Scan();
+    s.setFilter(f);
+    
+    kvs = new KeyValue [] {
+        // testRowOne-2
+        new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0]),
+        new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[3], VALUES[0]),
+        new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[0], VALUES[0]),
+        new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[2], VALUES[0]),
+        new KeyValue(ROWS_ONE[2], FAMILIES[1], QUALIFIERS_ONE[3], VALUES[0]),
+        // testRowTwo-2
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1])
+    };
+    verifyScanFull(s, kvs);
+     
+  }
+  
+  /**
+   * Exercises ValueFilter with every CompareOp (EQUAL, NOT_EQUAL, LESS,
+   * LESS_OR_EQUAL, GREATER, GREATER_OR_EQUAL) plus regex matching,
+   * verifying row/key counts and, finally, the exact scan contents for
+   * the NOT_EQUAL case.
+   * @throws IOException if a region scan fails
+   */
+  public void testValueFilter() throws IOException {
+    
+    // Match group one rows
+    long expectedRows = this.numRows / 2;
+    long expectedKeys = this.colsPerRow;
+    Filter f = new ValueFilter(CompareOp.EQUAL,
+        new BinaryComparator(Bytes.toBytes("testValueOne")));
+    Scan s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+
+    // Match group two rows
+    expectedRows = this.numRows / 2;
+    expectedKeys = this.colsPerRow;
+    f = new ValueFilter(CompareOp.EQUAL,
+        new BinaryComparator(Bytes.toBytes("testValueTwo")));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match all values using regex
+    expectedRows = this.numRows;
+    expectedKeys = this.colsPerRow;
+    f = new ValueFilter(CompareOp.EQUAL,
+        new RegexStringComparator("testValue((One)|(Two))"));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match values less than
+    // Expect group one rows
+    expectedRows = this.numRows / 2;
+    expectedKeys = this.colsPerRow;
+    f = new ValueFilter(CompareOp.LESS,
+        new BinaryComparator(Bytes.toBytes("testValueTwo")));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match values less than or equal
+    // Expect all rows
+    expectedRows = this.numRows;
+    expectedKeys = this.colsPerRow;
+    f = new ValueFilter(CompareOp.LESS_OR_EQUAL,
+        new BinaryComparator(Bytes.toBytes("testValueTwo")));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+
+    // Match values less than or equal
+    // Expect group one rows
+    expectedRows = this.numRows / 2;
+    expectedKeys = this.colsPerRow;
+    f = new ValueFilter(CompareOp.LESS_OR_EQUAL,
+        new BinaryComparator(Bytes.toBytes("testValueOne")));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match values not equal
+    // Expect half the rows
+    expectedRows = this.numRows / 2;
+    expectedKeys = this.colsPerRow;
+    f = new ValueFilter(CompareOp.NOT_EQUAL,
+        new BinaryComparator(Bytes.toBytes("testValueOne")));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match values greater or equal
+    // Expect all rows
+    expectedRows = this.numRows;
+    expectedKeys = this.colsPerRow;
+    f = new ValueFilter(CompareOp.GREATER_OR_EQUAL,
+        new BinaryComparator(Bytes.toBytes("testValueOne")));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match values greater
+    // Expect half rows
+    expectedRows = this.numRows / 2;
+    expectedKeys = this.colsPerRow;
+    f = new ValueFilter(CompareOp.GREATER,
+        new BinaryComparator(Bytes.toBytes("testValueOne")));
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, expectedRows, expectedKeys);
+    
+    // Match values not equal to testValueOne
+    // Look across rows and fully validate the keys and ordering
+    // Should see all keys in all group two rows
+    f = new ValueFilter(CompareOp.NOT_EQUAL,
+        new BinaryComparator(Bytes.toBytes("testValueOne")));
+    s = new Scan();
+    s.setFilter(f);
+    
+    KeyValue [] kvs = {
+        // testRowTwo-0
+        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+        // testRowTwo-2
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+        // testRowTwo-3
+        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+    };
+    verifyScanFull(s, kvs);
+  }
+  
+  /**
+   * Exercises SkipFilter wrapped around a QualifierFilter: any row that
+   * contains a single KeyValue failing the inner filter is skipped whole.
+   * @throws IOException if a region scan fails
+   */
+  public void testSkipFilter() throws IOException {
+    
+    // Skip every row containing the qualifier "testQualifierOne-2"
+    // (exact match via BinaryComparator, not a regex).
+    // Should only get rows from second group, and all keys
+    Filter f = new SkipFilter(new QualifierFilter(CompareOp.NOT_EQUAL,
+        new BinaryComparator(Bytes.toBytes("testQualifierOne-2"))));
+    Scan s = new Scan();
+    s.setFilter(f);
+    
+    KeyValue [] kvs = {
+        // testRowTwo-0
+        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[0], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+        // testRowTwo-2
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[2], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+        // testRowTwo-3
+        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[0], QUALIFIERS_TWO[3], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[0], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]),
+        new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]),
+    };
+    verifyScanFull(s, kvs);
+  }
+    
+  // TODO: This is important... need many more tests for ordering, etc
+  // There are limited tests elsewhere but we need HRegion level ones here
+  /**
+   * Exercises FilterList with both MUST_PASS_ALL (conjunction) and
+   * MUST_PASS_ONE (disjunction) over row, qualifier and value filters.
+   * @throws IOException if a region scan fails
+   */
+  public void testFilterList() throws IOException {
+    
+    // Test getting a single row, single key using Row, Qualifier, and Value 
+    // regular expression and substring filters
+    // Use must pass all
+    List<Filter> filters = new ArrayList<Filter>();
+    filters.add(new RowFilter(CompareOp.EQUAL, new RegexStringComparator(".+-2")));
+    filters.add(new QualifierFilter(CompareOp.EQUAL, new RegexStringComparator(".+-2")));
+    filters.add(new ValueFilter(CompareOp.EQUAL, new SubstringComparator("One")));
+    Filter f = new FilterList(Operator.MUST_PASS_ALL, filters);
+    Scan s = new Scan();
+    s.addFamily(FAMILIES[0]);
+    s.setFilter(f);
+    // Only testRowOne-2 / family 0 / qualifier *-2 satisfies all three
+    KeyValue [] kvs = {
+        new KeyValue(ROWS_ONE[2], FAMILIES[0], QUALIFIERS_ONE[2], VALUES[0])
+    };
+    verifyScanFull(s, kvs);
+
+    // Test getting everything with a MUST_PASS_ONE filter including row, qf, val
+    // regular expression and substring filters
+    filters.clear();
+    filters.add(new RowFilter(CompareOp.EQUAL, new RegexStringComparator(".+Two.+")));
+    filters.add(new QualifierFilter(CompareOp.EQUAL, new RegexStringComparator(".+-2")));
+    filters.add(new ValueFilter(CompareOp.EQUAL, new SubstringComparator("One")));
+    f = new FilterList(Operator.MUST_PASS_ONE, filters);
+    s = new Scan();
+    s.setFilter(f);
+    verifyScanNoEarlyOut(s, this.numRows, this.colsPerRow);
+    
+    
+  }
+  
+  /**
+   * Runs the scan and asserts it returns exactly expectedRows rows with
+   * exactly expectedKeys KeyValues in every row, failing as soon as more
+   * than expectedRows rows have been seen.
+   * @param s scan to run against this.region
+   * @param expectedRows number of rows the scan must produce
+   * @param expectedKeys number of keys expected in every row
+   * @throws IOException if the scanner fails
+   */
+  private void verifyScan(Scan s, long expectedRows, long expectedKeys) 
+  throws IOException {
+    InternalScanner scanner = this.region.getScanner(s);
+    List<KeyValue> results = new ArrayList<KeyValue>();
+    int i = 0;
+    for (boolean done = true; done; i++) {
+      done = scanner.next(results);
+      // Sort a retained copy for readable logging; the previous code
+      // discarded the array returned by toArray(), so the sort was a no-op.
+      KeyValue [] sorted = results.toArray(new KeyValue[results.size()]);
+      Arrays.sort(sorted, KeyValue.COMPARATOR);
+      LOG.info("counter=" + i + ", " + Arrays.asList(sorted));
+      assertTrue("Scanned too many rows! Only expected " + expectedRows + 
+          " total but already scanned " + (i+1), expectedRows > i);
+      assertEquals("Expected " + expectedKeys + " keys per row but " +
+          "returned " + results.size(), expectedKeys, results.size());
+      results.clear();
+    }
+    assertEquals("Expected " + expectedRows + " rows but scanned " + i +
+        " rows", expectedRows, i);
+  }
+
+
+  
+  /**
+   * Like verifyScan but tolerates trailing empty rows: stops counting at
+   * the first empty result instead of failing, then asserts the totals.
+   * @param s scan to run against this.region
+   * @param expectedRows number of non-empty rows the scan must produce
+   * @param expectedKeys number of keys expected in every non-empty row
+   * @throws IOException if the scanner fails
+   */
+  private void verifyScanNoEarlyOut(Scan s, long expectedRows, 
+      long expectedKeys) 
+  throws IOException {
+    InternalScanner scanner = this.region.getScanner(s);
+    List<KeyValue> results = new ArrayList<KeyValue>();
+    int i = 0;
+    for (boolean done = true; done; i++) {
+      done = scanner.next(results);
+      // Sort a retained copy for readable logging; the previous code
+      // discarded the array returned by toArray(), so the sort was a no-op.
+      KeyValue [] sorted = results.toArray(new KeyValue[results.size()]);
+      Arrays.sort(sorted, KeyValue.COMPARATOR);
+      LOG.info("counter=" + i + ", " + Arrays.asList(sorted));
+      if(results.isEmpty()) break;
+      assertTrue("Scanned too many rows! Only expected " + expectedRows + 
+          " total but already scanned " + (i+1), expectedRows > i);
+      assertEquals("Expected " + expectedKeys + " keys per row but " +
+          "returned " + results.size(), expectedKeys, results.size());
+      results.clear();
+    }
+    assertEquals("Expected " + expectedRows + " rows but scanned " + i +
+        " rows", expectedRows, i);
+  }
+
+  /**
+   * Runs the scan and verifies the returned KeyValues exactly match kvs
+   * in order, comparing row, family, qualifier and value of each.
+   * @param s scan to run against this.region
+   * @param kvs the expected KeyValues, in expected scan order
+   * @throws IOException if the scanner fails
+   */
+  private void verifyScanFull(Scan s, KeyValue [] kvs)
+  throws IOException {
+    InternalScanner scanner = this.region.getScanner(s);
+    List<KeyValue> results = new ArrayList<KeyValue>();
+    int row = 0;
+    int idx = 0;
+    for (boolean done = true; done; row++) {
+      done = scanner.next(results);
+      // Compare against a sorted copy of this row's results. The previous
+      // code sorted a throwaway array from toArray() and then iterated the
+      // unsorted list, so the sort never took effect on the comparison.
+      KeyValue [] sorted = results.toArray(new KeyValue[results.size()]);
+      Arrays.sort(sorted, KeyValue.COMPARATOR);
+      assertTrue("Scanned too many keys! Only expected " + kvs.length + 
+          " total but already scanned " + (sorted.length + idx), 
+          kvs.length >= idx + sorted.length);
+      for(KeyValue kv : sorted) {
+        LOG.info("row=" + row + ", result=" + kv.toString() + 
+            ", match=" + kvs[idx].toString());
+        assertTrue("Row mismatch", 
+            Bytes.equals(kv.getRow(), kvs[idx].getRow()));
+        assertTrue("Family mismatch", 
+            Bytes.equals(kv.getFamily(), kvs[idx].getFamily()));
+        assertTrue("Qualifier mismatch", 
+            Bytes.equals(kv.getQualifier(), kvs[idx].getQualifier()));
+        assertTrue("Value mismatch", 
+            Bytes.equals(kv.getValue(), kvs[idx].getValue()));
+        idx++;
+      }
+      results.clear();
+    }
+    LOG.info("Looked at " + row + " rows with " + idx + " keys");
+    assertEquals("Expected " + kvs.length + " total keys but scanned " + idx,
+        kvs.length, idx);
+  }
+}

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilterList.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilterList.java?rev=813052&r1=813051&r2=813052&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilterList.java (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilterList.java Wed Sep  9 17:14:22 2009
@@ -72,6 +72,7 @@
     byte [] rowkey = Bytes.toBytes("yyyyyyyyy");
     for (int i = 0; i < MAX_PAGES - 1; i++) {
       assertFalse(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
+      assertFalse(filterMPONE.filterRow());
       KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i),
         Bytes.toBytes(i));
       assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
@@ -80,6 +81,7 @@
     /* Only pass PageFilter */
     rowkey = Bytes.toBytes("z");
     assertFalse(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
+    assertFalse(filterMPONE.filterRow());
     KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(0),
         Bytes.toBytes(0));
     assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
@@ -87,19 +89,16 @@
     /* PageFilter will fail now, but should pass because we match yyy */
     rowkey = Bytes.toBytes("yyy");
     assertFalse(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
+    assertFalse(filterMPONE.filterRow());
     kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(0),
         Bytes.toBytes(0));
     assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
     
-    /* We should filter the row key now if we match neither */
-    rowkey = Bytes.toBytes("x");
+    /* We should filter any row */
+    rowkey = Bytes.toBytes("z");
     assertTrue(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
-    kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(0),
-        Bytes.toBytes(0));
-    assertTrue(Filter.ReturnCode.SKIP == filterMPONE.filterKeyValue(kv));
-    
-    // Both filters in Set should be satisfied by now
     assertTrue(filterMPONE.filterRow());
+    assertTrue(filterMPONE.filterAllRemaining());
 
   }
 
@@ -153,6 +152,7 @@
     List<Filter> filters = new ArrayList<Filter>();
     filters.add(new PrefixFilter(Bytes.toBytes("yyy")));
     filters.add(new PageFilter(MAX_PAGES));
+    RegexStringComparator rsc;
     Filter filterMPONE =
         new FilterList(FilterList.Operator.MUST_PASS_ONE, filters);
     /* Filter must do all below steps:
@@ -171,21 +171,23 @@
     assertFalse(filterMPONE.filterAllRemaining());
     
     /* We should be able to fill MAX_PAGES without incrementing page counter */
-    byte [] rowkey = Bytes.toBytes("yyyyyyyyy");
+    byte [] rowkey = Bytes.toBytes("yyyyyyyy");
     for (int i = 0; i < MAX_PAGES; i++) {
       assertFalse(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
       KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i),
-        Bytes.toBytes(i));
-      assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
+          Bytes.toBytes(i));
+        assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
+      assertFalse(filterMPONE.filterRow());
     }
     
     /* Now let's fill the page filter */
-    rowkey = Bytes.toBytes("zzzzzzzz");
+    rowkey = Bytes.toBytes("xxxxxxx");
     for (int i = 0; i < MAX_PAGES; i++) {
       assertFalse(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
       KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i),
-        Bytes.toBytes(i));
-      assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
+          Bytes.toBytes(i));
+        assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
+      assertFalse(filterMPONE.filterRow());
     }
     
     /* We should still be able to include even though page filter is at max */
@@ -193,14 +195,10 @@
     for (int i = 0; i < MAX_PAGES; i++) {
       assertFalse(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
       KeyValue kv = new KeyValue(rowkey, rowkey, Bytes.toBytes(i),
-        Bytes.toBytes(i));
-      assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
+          Bytes.toBytes(i));
+        assertTrue(Filter.ReturnCode.INCLUDE == filterMPONE.filterKeyValue(kv));
+      assertFalse(filterMPONE.filterRow());
     }
-    
-    /* We should filter the row key now if we don't match neither */
-    rowkey = Bytes.toBytes("x");
-    assertTrue(filterMPONE.filterRowKey(rowkey, 0, rowkey.length));
-    
   }
 
   /**

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java?rev=813052&r1=813051&r2=813052&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java Wed Sep  9 17:14:22 2009
@@ -83,7 +83,7 @@
     assertTrue("Filtering on " + Bytes.toString(PAST_STOP_ROW),
       filter.filterRowKey(PAST_STOP_ROW, 0, PAST_STOP_ROW.length));
 
-    assertFalse("FilterAllRemaining", filter.filterAllRemaining());
+    assertTrue("FilterAllRemaining", filter.filterAllRemaining());
     assertFalse("FilterNotNull", filter.filterRow());
 
     assertFalse("Filter a null", filter.filterRowKey(null, 0, 0));

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestPageFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestPageFilter.java?rev=813052&r1=813051&r2=813052&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestPageFilter.java (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestPageFilter.java Wed Sep  9 17:14:22 2009
@@ -68,30 +68,26 @@
   
   private void pageSizeTests(Filter f) throws Exception {
     testFiltersBeyondPageSize(f, ROW_LIMIT);
-    // Test reset works by going in again.
-    f.reset();
-    testFiltersBeyondPageSize(f, ROW_LIMIT);
   }
   
   private void testFiltersBeyondPageSize(final Filter f, final int pageSize) {
     int count = 0;
     for (int i = 0; i < (pageSize * 2); i++) {
-      byte [] bytes = Bytes.toBytes(Integer.toString(i) + ":tail");
-      KeyValue kv = new KeyValue(bytes, bytes);
-      boolean filterOut =
-        f.filterRowKey(kv.getBuffer(), kv.getRowOffset(), kv.getRowLength());
-      if (!filterOut) {
-        assertFalse("Disagrees with 'filter'", f.filterAllRemaining());
+      boolean filterOut = f.filterRow();
+      
+      if(filterOut) {
+        break;
       } else {
-        // Once we have all for a page, calls to filterAllRemaining should
-        // stay true.
-        assertTrue("Disagrees with 'filter'", f.filterAllRemaining());
-        assertTrue(i >= pageSize);
+        count++;
       }
-      if (Filter.ReturnCode.NEXT_ROW == f.filterKeyValue(kv)) {
-        break;
+      
+      // If at last row, should tell us to skip all remaining
+      if(count == pageSize) {
+        assertTrue(f.filterAllRemaining());
+      } else {
+        assertFalse(f.filterAllRemaining());
       }
-      count++;
+      
     }
     assertEquals(pageSize, count);
   }

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java?rev=813052&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java (added)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java Wed Sep  9 17:14:22 2009
@@ -0,0 +1,161 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.filter;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.util.List;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import junit.framework.TestCase;
+
+/**
+ * Tests SingleColumnValueFilter against a value compared with
+ * GREATER_OR_EQUAL (binary), a SubstringComparator and a
+ * RegexStringComparator, including round-trip Writable serialization.
+ */
+public class TestSingleColumnValueFilter extends TestCase {
+  private static final byte[] ROW = Bytes.toBytes("test");
+  private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test");
+  private static final byte [] COLUMN_QUALIFIER = Bytes.toBytes("foo");
+  private static final byte[] VAL_1 = Bytes.toBytes("a");
+  private static final byte[] VAL_2 = Bytes.toBytes("ab");
+  private static final byte[] VAL_3 = Bytes.toBytes("abc");
+  private static final byte[] VAL_4 = Bytes.toBytes("abcd");
+  private static final byte[] FULLSTRING_1 = 
+    Bytes.toBytes("The quick brown fox jumps over the lazy dog.");
+  private static final byte[] FULLSTRING_2 = 
+    Bytes.toBytes("The slow grey fox trips over the lazy dog.");
+  private static final String QUICK_SUBSTR = "quick";
+  private static final String QUICK_REGEX = ".+quick.+";
+
+  // Filters under test; rebuilt fresh for every test in setUp()
+  Filter basicFilter;
+  Filter substrFilter;
+  Filter regexFilter;
+
+  @Override
+  protected void setUp() throws Exception {
+    super.setUp();
+    basicFilter = basicFilterNew();
+    substrFilter = substrFilterNew();
+    regexFilter = regexFilterNew();
+  }
+
+  /** Filter matching values >= VAL_2 ("ab") by binary comparison. */
+  private Filter basicFilterNew() {
+    return new SingleColumnValueFilter(COLUMN_FAMILY, COLUMN_QUALIFIER,
+      CompareOp.GREATER_OR_EQUAL, VAL_2);
+  }
+
+  /** Filter matching values containing the substring "quick". */
+  private Filter substrFilterNew() {
+    return new SingleColumnValueFilter(COLUMN_FAMILY, COLUMN_QUALIFIER,
+      CompareOp.EQUAL,
+      new SubstringComparator(QUICK_SUBSTR));
+  }
+
+  /** Filter matching values against the regex ".+quick.+". */
+  private Filter regexFilterNew() {
+    return new SingleColumnValueFilter(COLUMN_FAMILY, COLUMN_QUALIFIER,
+      CompareOp.EQUAL,
+      new RegexStringComparator(QUICK_REGEX));
+  }
+
+  // VAL_1 ("a") < VAL_2 is excluded; VAL_2..VAL_4 are all >= VAL_2
+  private void basicFilterTests(Filter filter)
+      throws Exception {
+    KeyValue kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_1);
+    assertFalse("basicFilter1", filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE);
+    kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_2);
+    assertTrue("basicFilter2", filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE);
+    kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_3);
+    assertTrue("basicFilter3", filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE);
+    kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_4);
+    assertTrue("basicFilter4", filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE);
+    assertFalse("basicFilterAllRemaining", filter.filterAllRemaining());
+    assertFalse("basicFilterNotNull", filter.filterRow());
+  }
+
+  // FULLSTRING_1 contains "quick"; FULLSTRING_2 does not
+  private void substrFilterTests(Filter filter) 
+      throws Exception {
+    KeyValue kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER,
+      FULLSTRING_1);
+    assertTrue("substrTrue",
+      filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE);
+    kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER,
+      FULLSTRING_2);
+    assertFalse("substrFalse", filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE);
+    assertFalse("substrFilterAllRemaining", filter.filterAllRemaining());
+    assertFalse("substrFilterNotNull", filter.filterRow());
+  }
+
+  // FULLSTRING_1 matches ".+quick.+"; FULLSTRING_2 does not
+  private void regexFilterTests(Filter filter) 
+      throws Exception {
+    KeyValue kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER,
+      FULLSTRING_1);
+    assertTrue("regexTrue",
+      filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE);
+    kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER,
+      FULLSTRING_2);
+    assertFalse("regexFalse", filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE);
+    assertFalse("regexFilterAllRemaining", filter.filterAllRemaining());
+    assertFalse("regexFilterNotNull", filter.filterRow());
+  }    
+                 
+  /** Writes the filter out and reads it back, returning the reconstructed copy. */
+  private Filter serializationTest(Filter filter)
+      throws Exception {
+    // Decompose filter to bytes.
+    ByteArrayOutputStream stream = new ByteArrayOutputStream();
+    DataOutputStream out = new DataOutputStream(stream);
+    filter.write(out);
+    out.close();
+    byte[] buffer = stream.toByteArray();
+  
+    // Recompose filter.
+    DataInputStream in =
+      new DataInputStream(new ByteArrayInputStream(buffer));
+    Filter newFilter = new SingleColumnValueFilter();
+    newFilter.readFields(in);
+  
+    return newFilter;
+  }
+
+  /**
+   * Tests that each filter includes matching values and excludes
+   * non-matching ones.
+   * @throws Exception
+   */
+  public void testStop() throws Exception {
+    basicFilterTests(basicFilter);
+    substrFilterTests(substrFilter);
+    regexFilterTests(regexFilter);
+  }                               
+
+  /**
+   * Tests that each filter behaves identically after a serialization
+   * round trip.
+   * @throws Exception
+   */                       
+  public void testSerialization() throws Exception {
+    Filter newFilter = serializationTest(basicFilter);
+    basicFilterTests(newFilter);
+    newFilter = serializationTest(substrFilter);
+    substrFilterTests(newFilter);
+    newFilter = serializationTest(regexFilter);
+    regexFilterTests(newFilter);
+  }                   
+}
\ No newline at end of file

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/TestHeapSize.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/TestHeapSize.java?rev=813052&r1=813051&r2=813052&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/TestHeapSize.java (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/TestHeapSize.java Wed Sep  9 17:14:22 2009
@@ -6,6 +6,8 @@
 import java.util.TreeMap;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentSkipListMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.CopyOnWriteArraySet;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
@@ -164,6 +166,25 @@
       assertEquals(expected, actual);
     }
     
+    // CopyOnWriteArraySet
+    cl = CopyOnWriteArraySet.class;
+    expected = ClassSize.estimateBase(cl, false);
+    actual = ClassSize.COPYONWRITE_ARRAYSET;
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    
+    // CopyOnWriteArrayList
+    cl = CopyOnWriteArrayList.class;
+    expected = ClassSize.estimateBase(cl, false);
+    actual = ClassSize.COPYONWRITE_ARRAYLIST;
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    
+    
   }
   
   /**
@@ -240,11 +261,15 @@
     expected += ClassSize.estimateBase(AtomicLong.class, false);
     expected += ClassSize.estimateBase(ConcurrentSkipListMap.class, false);
     expected += ClassSize.estimateBase(ConcurrentSkipListMap.class, false);
+    expected += ClassSize.estimateBase(CopyOnWriteArraySet.class, false);
+    expected += ClassSize.estimateBase(CopyOnWriteArrayList.class, false);
     if(expected != actual) {
       ClassSize.estimateBase(cl, true);
       ClassSize.estimateBase(ReentrantReadWriteLock.class, true);
       ClassSize.estimateBase(AtomicLong.class, true);
       ClassSize.estimateBase(ConcurrentSkipListMap.class, true);
+      ClassSize.estimateBase(CopyOnWriteArraySet.class, true);
+      ClassSize.estimateBase(CopyOnWriteArrayList.class, true);
       assertEquals(expected, actual);
     }
     

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java?rev=813052&r1=813051&r2=813052&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java Wed Sep  9 17:14:22 2009
@@ -258,8 +258,8 @@
 
   private void createSmallerStoreFile(final HRegion region) throws IOException {
     HRegionIncommon loader = new HRegionIncommon(region); 
-    addContent(loader, Bytes.toString(COLUMN_FAMILY),
-        ("bbb").getBytes(), null);
+    addContent(loader, Bytes.toString(COLUMN_FAMILY), ("" +
+    		"bbb").getBytes(), null);
     loader.flushcache();
   }
 }

Added: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java?rev=813052&view=auto
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java (added)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java Wed Sep  9 17:14:22 2009
@@ -0,0 +1,350 @@
+/**
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.TestGet;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Writables;
+
+/**
+ * {@link TestGet} is a medley of tests of get all done up as a single test.
+ * This class tests the getClosestRowBefore (get-closest-at-or-before) behavior of HRegion.
+ */
+public class TestGetClosestAtOrBefore extends HBaseTestCase implements HConstants {
+  static final Log LOG = LogFactory.getLog(TestGetClosestAtOrBefore.class);
+  private MiniDFSCluster miniHdfs;
+  
+  private static final byte [] T00 = Bytes.toBytes("000");
+  private static final byte [] T10 = Bytes.toBytes("010");
+  private static final byte [] T11 = Bytes.toBytes("011");
+  private static final byte [] T12 = Bytes.toBytes("012");
+  private static final byte [] T20 = Bytes.toBytes("020");
+  private static final byte [] T30 = Bytes.toBytes("030");
+  private static final byte [] T31 = Bytes.toBytes("031");
+  private static final byte [] T35 = Bytes.toBytes("035");
+  private static final byte [] T40 = Bytes.toBytes("040");
+
+  @Override
+  protected void setUp() throws Exception {
+    super.setUp();
+    this.miniHdfs = new MiniDFSCluster(this.conf, 1, true, null);
+    // Set the hbase.rootdir to be the home directory in mini dfs.
+    this.conf.set(HConstants.HBASE_DIR,
+      this.miniHdfs.getFileSystem().getHomeDirectory().toString());
+  }
+
+  public void testUsingMetaAndBinary() throws IOException {
+    FileSystem filesystem = FileSystem.get(conf);
+    Path rootdir = filesystem.makeQualified(new Path(conf.get(HConstants.HBASE_DIR)));
+    filesystem.mkdirs(rootdir);
+    // Up flush size else we bind up when we use default catalog flush of 16k.
+    HRegionInfo.FIRST_META_REGIONINFO.getTableDesc().
+      setMemStoreFlushSize(64 * 1024 * 1024);
+    HRegion mr = HRegion.createHRegion(HRegionInfo.FIRST_META_REGIONINFO,
+      rootdir, this.conf);
+    // Write rows for three tables 'A', 'B', and 'C'.
+    for (char c = 'A'; c < 'D'; c++) {
+      HTableDescriptor htd = new HTableDescriptor("" + c);
+      final int last = 128;
+      final int interval = 2;
+      for (int i = 0; i <= last; i += interval) {
+        HRegionInfo hri = new HRegionInfo(htd,
+          i == 0? HConstants.EMPTY_BYTE_ARRAY: Bytes.toBytes((byte)i),
+          i == last? HConstants.EMPTY_BYTE_ARRAY: Bytes.toBytes((byte)i + interval));
+        Put put = new Put(hri.getRegionName());
+        put.add(CATALOG_FAMILY, REGIONINFO_QUALIFIER, Writables.getBytes(hri));
+        mr.put(put, false);
+      }
+    }
+    InternalScanner s = mr.getScanner(new Scan());
+    try {
+      List<KeyValue> keys = new ArrayList<KeyValue>();
+      while(s.next(keys)) {
+        LOG.info(keys);
+        keys.clear();
+      }
+    } finally {
+      s.close();
+    }
+    findRow(mr, 'C', 44, 44);
+    findRow(mr, 'C', 45, 44);
+    findRow(mr, 'C', 46, 46);
+    findRow(mr, 'C', 43, 42);
+    mr.flushcache();
+    findRow(mr, 'C', 44, 44);
+    findRow(mr, 'C', 45, 44);
+    findRow(mr, 'C', 46, 46);
+    findRow(mr, 'C', 43, 42);
+    // Now delete 'C' and make sure I don't get entries from 'B'.
+    byte [] firstRowInC = HRegionInfo.createRegionName(Bytes.toBytes("" + 'C'),
+      HConstants.EMPTY_BYTE_ARRAY, HConstants.ZEROES);
+    Scan scan = new Scan(firstRowInC);
+    s = mr.getScanner(scan);
+    try {
+      List<KeyValue> keys = new ArrayList<KeyValue>();
+      while (s.next(keys)) {
+        mr.delete(new Delete(keys.get(0).getRow()), null, false);
+        keys.clear();
+      }
+    } finally {
+      s.close();
+    }
+    // Assert we get null back (pass -1).
+    findRow(mr, 'C', 44, -1);
+    findRow(mr, 'C', 45, -1);
+    findRow(mr, 'C', 46, -1);
+    findRow(mr, 'C', 43, -1);
+    mr.flushcache();
+    findRow(mr, 'C', 44, -1);
+    findRow(mr, 'C', 45, -1);
+    findRow(mr, 'C', 46, -1);
+    findRow(mr, 'C', 43, -1);
+  }
+
+  /*
+   * @param mr
+   * @param table
+   * @param rowToFind
+   * @param answer Pass -1 if we're not to find anything.
+   * @return Row found.
+   * @throws IOException
+   */
+  private byte [] findRow(final HRegion mr, final char table,
+    final int rowToFind, final int answer)
+  throws IOException {
+    byte [] tableb = Bytes.toBytes("" + table);
+    // Find the row.
+    byte [] tofindBytes = Bytes.toBytes((short)rowToFind);
+    byte [] metaKey = HRegionInfo.createRegionName(tableb, tofindBytes,
+      HConstants.NINES);
+    LOG.info("find=" + new String(metaKey));
+    Result r = mr.getClosestRowBefore(metaKey);
+    if (answer == -1) {
+      assertNull(r);
+      return null;
+    }
+    assertTrue(Bytes.compareTo(Bytes.toBytes((short)answer),
+      extractRowFromMetaRow(r.getRow())) == 0);
+    return r.getRow();
+  }
+
+  private byte [] extractRowFromMetaRow(final byte [] b) {
+    int firstDelimiter = KeyValue.getDelimiter(b, 0, b.length,
+      HRegionInfo.DELIMITER);
+    int lastDelimiter = KeyValue.getDelimiterInReverse(b, 0, b.length,
+      HRegionInfo.DELIMITER);
+    int length = lastDelimiter - firstDelimiter - 1;
+    byte [] row = new byte[length];
+    System.arraycopy(b, firstDelimiter + 1, row, 0, length);
+    return row;
+  }
+
+  /**
+   * Tests a store file containing multiple deletes, with a delete as the final key.
+   * @see <a href="https://issues.apache.org/jira/browse/HBASE-751">HBASE-751</a>
+   */
+  public void testGetClosestRowBefore3() throws IOException{
+    HRegion region = null;
+    byte [] c0 = COLUMNS[0];
+    byte [] c1 = COLUMNS[1];
+    try {
+      HTableDescriptor htd = createTableDescriptor(getName());
+      region = createNewHRegion(htd, null, null);
+      
+      Put p = new Put(T00);
+      p.add(c0, c0, T00);
+      region.put(p);
+      
+      p = new Put(T10);
+      p.add(c0, c0, T10);
+      region.put(p);
+      
+      p = new Put(T20);
+      p.add(c0, c0, T20);
+      region.put(p);
+      
+      Result r = region.getClosestRowBefore(T20, c0);
+      assertTrue(Bytes.equals(T20, r.getRow()));
+      
+      Delete d = new Delete(T20);
+      d.deleteColumn(c0, c0);
+      region.delete(d, null, false);
+      
+      r = region.getClosestRowBefore(T20, c0);
+      assertTrue(Bytes.equals(T10, r.getRow()));
+      
+      p = new Put(T30);
+      p.add(c0, c0, T30);
+      region.put(p);
+      
+      r = region.getClosestRowBefore(T30, c0);
+      assertTrue(Bytes.equals(T30, r.getRow()));
+      
+      d = new Delete(T30);
+      d.deleteColumn(c0, c0);
+      region.delete(d, null, false);
+
+      r = region.getClosestRowBefore(T30, c0);
+      assertTrue(Bytes.equals(T10, r.getRow()));
+      r = region.getClosestRowBefore(T31, c0);
+      assertTrue(Bytes.equals(T10, r.getRow()));
+
+      region.flushcache();
+
+      // try finding "010" after flush
+      r = region.getClosestRowBefore(T30, c0);
+      assertTrue(Bytes.equals(T10, r.getRow()));
+      r = region.getClosestRowBefore(T31, c0);
+      assertTrue(Bytes.equals(T10, r.getRow()));
+      
+      // Put into a different column family.  Should make it so I still get t10
+      p = new Put(T20);
+      p.add(c1, c1, T20);
+      region.put(p);
+
+      r = region.getClosestRowBefore(T30, c0);
+      assertTrue(Bytes.equals(T10, r.getRow()));
+      r = region.getClosestRowBefore(T31, c0);
+      assertTrue(Bytes.equals(T10, r.getRow()));
+      
+      region.flushcache();
+      
+      r = region.getClosestRowBefore(T30, c0);
+      assertTrue(Bytes.equals(T10, r.getRow()));
+      r = region.getClosestRowBefore(T31, c0);
+      assertTrue(Bytes.equals(T10, r.getRow()));
+      
+      // Now try combo of memcache and mapfiles.  Delete the t20 COLUMNS[1]
+      // in memory; make sure we get back t10 again.
+      d = new Delete(T20);
+      d.deleteColumn(c1, c1);
+      region.delete(d, null, false);
+      r = region.getClosestRowBefore(T30, c0);
+      assertTrue(Bytes.equals(T10, r.getRow()));
+      
+      // Ask for a value off the end of the file.  Should return t10.
+      r = region.getClosestRowBefore(T31, c0);
+      assertTrue(Bytes.equals(T10, r.getRow()));
+      region.flushcache();
+      r = region.getClosestRowBefore(T31, c0);
+      assertTrue(Bytes.equals(T10, r.getRow()));
+      
+      // Ok.  Let the candidate come out of hfile but have delete of
+      // the candidate be in memory.
+      p = new Put(T11);
+      p.add(c0, c0, T11);
+      region.put(p);
+      d = new Delete(T10);
+      d.deleteColumn(c1, c1);
+      r = region.getClosestRowBefore(T12, c0);
+      assertTrue(Bytes.equals(T11, r.getRow()));
+    } finally {
+      if (region != null) {
+        try {
+          region.close();
+        } catch (Exception e) {
+          e.printStackTrace();
+        }
+        region.getLog().closeAndDelete();
+      }
+    }
+  }
+
+  /** For HBASE-694 */
+  public void testGetClosestRowBefore2() throws IOException{
+    HRegion region = null;
+    byte [] c0 = COLUMNS[0];
+    try {
+      HTableDescriptor htd = createTableDescriptor(getName());
+      region = createNewHRegion(htd, null, null);
+      
+      Put p = new Put(T10);
+      p.add(c0, c0, T10);
+      region.put(p);
+      
+      p = new Put(T30);
+      p.add(c0, c0, T30);
+      region.put(p);
+      
+      p = new Put(T40);
+      p.add(c0, c0, T40);
+      region.put(p);
+
+      // try finding "035"
+      Result r = region.getClosestRowBefore(T35, c0);
+      assertTrue(Bytes.equals(T30, r.getRow()));
+
+      region.flushcache();
+
+      // try finding "035"
+      r = region.getClosestRowBefore(T35, c0);
+      assertTrue(Bytes.equals(T30, r.getRow()));
+
+      p = new Put(T20);
+      p.add(c0, c0, T20);
+      region.put(p);
+      
+      // try finding "035"
+      r = region.getClosestRowBefore(T35, c0);
+      assertTrue(Bytes.equals(T30, r.getRow()));
+      
+      region.flushcache();
+
+      // try finding "035"
+      r = region.getClosestRowBefore(T35, c0);
+      assertTrue(Bytes.equals(T30, r.getRow()));
+    } finally {
+      if (region != null) {
+        try {
+          region.close();
+        } catch (Exception e) {
+          e.printStackTrace();
+        }
+        region.getLog().closeAndDelete();
+      }
+    }
+  }
+
+  @Override
+  protected void tearDown() throws Exception {
+    if (this.miniHdfs != null) {
+      this.miniHdfs.shutdown();
+    }
+    super.tearDown();
+  }
+}
\ No newline at end of file

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java?rev=813052&r1=813051&r2=813052&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java Wed Sep  9 17:14:22 2009
@@ -23,8 +23,8 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;

Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java?rev=813052&r1=813051&r2=813052&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java Wed Sep  9 17:14:22 2009
@@ -22,7 +22,6 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 import java.util.TreeMap;
 
 import org.apache.commons.logging.Log;
@@ -35,7 +34,6 @@
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
@@ -74,13 +72,32 @@
     super.setUp();
   }
 
-  
   //////////////////////////////////////////////////////////////////////////////
   // New tests that doesn't spin up a mini cluster but rather just test the 
   // individual code pieces in the HRegion. Putting files locally in
   // /tmp/testtable
   //////////////////////////////////////////////////////////////////////////////
 
+  public void testFamilyWithAndWithoutColon() throws Exception {
+    byte [] b = Bytes.toBytes(getName());
+    byte [] cf = Bytes.toBytes("cf");
+    initHRegion(b, getName(), cf);
+    Put p = new Put(b);
+    byte [] cfwithcolon = Bytes.toBytes("cf:");
+    p.add(cfwithcolon, cfwithcolon, cfwithcolon);
+    boolean exception = false;
+    try {
+      this.region.put(p);
+    } catch (NoSuchColumnFamilyException e) {
+      exception = true;
+    }
+    assertTrue(exception);
+    // Can I add it using old style call?
+    p = new Put(b);
+    p.add(cfwithcolon, System.currentTimeMillis(), cfwithcolon);
+    this.region.put(p);
+  }
+
   //////////////////////////////////////////////////////////////////////////////
   // checkAndPut tests
   //////////////////////////////////////////////////////////////////////////////
@@ -294,6 +311,41 @@
     result = region.get(get, null);
     assertEquals(1, result.size());
   }
+  
+  public void testDeleteRowWithFutureTs() throws IOException {
+    byte [] tableName = Bytes.toBytes("testtable");
+    byte [] fam = Bytes.toBytes("info");
+    byte [][] families = {fam};
+    String method = this.getName();
+    initHRegion(tableName, method, families);
+
+    byte [] row = Bytes.toBytes("table_name");
+    // column names
+    byte [] serverinfo = Bytes.toBytes("serverinfo");
+
+    // add data in the far future
+    Put put = new Put(row);
+    put.add(fam, serverinfo, HConstants.LATEST_TIMESTAMP-5,Bytes.toBytes("value"));
+    region.put(put);
+
+    // now delete something in the present
+    Delete delete = new Delete(row);
+    region.delete(delete, null, true);
+
+    // make sure we still see our data
+    Get get = new Get(row).addColumn(fam, serverinfo);
+    Result result = region.get(get, null);
+    assertEquals(1, result.size());
+    
+    // delete the future row
+    delete = new Delete(row,HConstants.LATEST_TIMESTAMP-3,null);
+    region.delete(delete, null, true);
+
+    // make sure it is gone
+    get = new Get(row).addColumn(fam, serverinfo);
+    result = region.get(get, null);
+    assertEquals(0, result.size());
+  }
 
   public void testScanner_DeleteOneFamilyNotAnother() throws IOException {
     byte [] tableName = Bytes.toBytes("test_table");



Mime
View raw message