hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From apurt...@apache.org
Subject svn commit: r788273 [3/3] - in /hadoop/hbase/trunk_on_hadoop-0.18.3: ./ bin/ conf/ src/docs/src/documentation/content/xdocs/ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/io/ src/java/org/ap...
Date Thu, 25 Jun 2009 06:55:38 GMT
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java?rev=788273&r1=788272&r2=788273&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/hfile/TestHFile.java Thu Jun 25 06:55:37 2009
@@ -29,7 +29,6 @@
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestCase;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.hfile.HFile.BlockIndex;
 import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
 import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
@@ -251,25 +250,22 @@
   /**
    * Checks if the HeapSize calculator is within reason
    */
-  public void testHeapSizeForBlockIndex() {
-    ClassSize cs = null;
+  @SuppressWarnings("unchecked")
+  public void testHeapSizeForBlockIndex() throws IOException{
     Class cl = null;
     long expected = 0L;
     long actual = 0L;
-    try {
-      cs = new ClassSize();
-    } catch(Exception e) {}
     
-    //KeyValue
     cl = BlockIndex.class;
-    expected = cs.estimateBase(cl, false);
+    expected = ClassSize.estimateBase(cl, false);
     BlockIndex bi = new BlockIndex(Bytes.BYTES_RAWCOMPARATOR);
     actual = bi.heapSize();
-    //Since we have a [[]] in BlockIndex and the checker only sees the [] we 
-    // miss a MULTI_ARRAY which is 4*Reference = 32 B
-    actual -= 32;
+    //Since the arrays in BlockIndex(byte [][] blockKeys, long [] blockOffsets,
+    //int [] blockDataSizes) are all null they are not going to show up in the
+    //HeapSize calculation, so need to remove those array costs from expected.
+    expected -= ClassSize.align(3 * ClassSize.ARRAY);
     if(expected != actual) {
-      cs.estimateBase(cl, true);
+      ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
   }

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java?rev=788273&r1=788272&r2=788273&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java Thu Jun 25 06:55:37 2009
@@ -54,8 +54,8 @@
     super();
     
     // Set cache flush size to 1MB
-    conf.setInt("hbase.hregion.memcache.flush.size", 1024*1024);
-    conf.setInt("hbase.hregion.memcache.block.multiplier", 10);
+    conf.setInt("hbase.hregion.memstore.flush.size", 1024*1024);
+    conf.setInt("hbase.hregion.memstore.block.multiplier", 10);
     this.cluster = null;
   }
   
@@ -85,7 +85,7 @@
   }
   
   /**
-   * Run compaction and flushing memcache
+   * Run compaction and flushing memstore
    * Assert deletes get cleaned up.
    * @throws Exception
    */
@@ -129,7 +129,7 @@
 //      cellValues.length);
 //    assertTrue(cellValues.length == 3);
 
-    // Now add deletes to memcache and then flush it.  That will put us over
+    // Now add deletes to memstore and then flush it.  That will put us over
     // the compaction threshold of 3 store files.  Compacting these store files
     // should result in a compacted store file that has no references to the
     // deleted row.

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteCompare.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteCompare.java?rev=788273&r1=788272&r2=788273&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteCompare.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestDeleteCompare.java Thu Jun 25 06:55:37 2009
@@ -20,18 +20,18 @@
   //3. Delete and the matching put
   //4. Big test that include starting on the wrong row and qualifier
   public void testDeleteCompare_DeleteFamily() {
-    //Creating memcache
-    Set<KeyValue> memcache = new TreeSet<KeyValue>(KeyValue.COMPARATOR);
-    memcache.add(KeyValueTestUtil.create("row11", "fam", "col1", 3, "d-c"));
-    memcache.add(KeyValueTestUtil.create("row11", "fam", "col1", 2, "d-c"));
-    memcache.add(KeyValueTestUtil.create("row11", "fam", "col1", 1, "d-c"));
-    memcache.add(KeyValueTestUtil.create("row11", "fam", "col2", 1, "d-c"));
-
-    memcache.add(KeyValueTestUtil.create("row11", "fam", "col3", 3, "d-c"));
-    memcache.add(KeyValueTestUtil.create("row11", "fam", "col3", 2, "d-c"));
-    memcache.add(KeyValueTestUtil.create("row11", "fam", "col3", 1, "d-c"));
+    //Creating memstore
+    Set<KeyValue> memstore = new TreeSet<KeyValue>(KeyValue.COMPARATOR);
+    memstore.add(KeyValueTestUtil.create("row11", "fam", "col1", 3, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row11", "fam", "col1", 2, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row11", "fam", "col1", 1, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row11", "fam", "col2", 1, "d-c"));
+
+    memstore.add(KeyValueTestUtil.create("row11", "fam", "col3", 3, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row11", "fam", "col3", 2, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row11", "fam", "col3", 1, "d-c"));
 
-    memcache.add(KeyValueTestUtil.create("row21", "fam", "col1", 1, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row21", "fam", "col1", 1, "d-c"));
 
     //Creating expected result
     List<DeleteCode> expected = new ArrayList<DeleteCode>();
@@ -55,7 +55,7 @@
     byte deleteType = deleteBuffer[deleteTimestampOffset +Bytes.SIZEOF_LONG];
     
     List<DeleteCode> actual = new ArrayList<DeleteCode>();
-    for(KeyValue mem : memcache){
+    for(KeyValue mem : memstore){
     actual.add(DeleteCompare.deleteCompare(mem, deleteBuffer, deleteRowOffset,
         deleteRowLen, deleteQualifierOffset, deleteQualifierLen,
         deleteTimestampOffset, deleteType, KeyValue.KEY_COMPARATOR));
@@ -69,12 +69,12 @@
   }
   
   public void testDeleteCompare_DeleteColumn() {
-    //Creating memcache
-    Set<KeyValue> memcache = new TreeSet<KeyValue>(KeyValue.COMPARATOR);
-    memcache.add(KeyValueTestUtil.create("row11", "fam", "col1", 3, "d-c"));
-    memcache.add(KeyValueTestUtil.create("row11", "fam", "col1", 2, "d-c"));
-    memcache.add(KeyValueTestUtil.create("row11", "fam", "col1", 1, "d-c"));
-    memcache.add(KeyValueTestUtil.create("row21", "fam", "col1", 1, "d-c"));
+    //Creating memstore
+    Set<KeyValue> memstore = new TreeSet<KeyValue>(KeyValue.COMPARATOR);
+    memstore.add(KeyValueTestUtil.create("row11", "fam", "col1", 3, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row11", "fam", "col1", 2, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row11", "fam", "col1", 1, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row21", "fam", "col1", 1, "d-c"));
 
 
     //Creating expected result
@@ -95,7 +95,7 @@
     byte deleteType = deleteBuffer[deleteTimestampOffset +Bytes.SIZEOF_LONG];
     
     List<DeleteCode> actual = new ArrayList<DeleteCode>();
-    for(KeyValue mem : memcache){
+    for(KeyValue mem : memstore){
     actual.add(DeleteCompare.deleteCompare(mem, deleteBuffer, deleteRowOffset,
         deleteRowLen, deleteQualifierOffset, deleteQualifierLen,
         deleteTimestampOffset, deleteType, KeyValue.KEY_COMPARATOR));
@@ -110,11 +110,11 @@
   
   
   public void testDeleteCompare_Delete() {
-    //Creating memcache
-    Set<KeyValue> memcache = new TreeSet<KeyValue>(KeyValue.COMPARATOR);
-    memcache.add(KeyValueTestUtil.create("row11", "fam", "col1", 3, "d-c"));
-    memcache.add(KeyValueTestUtil.create("row11", "fam", "col1", 2, "d-c"));
-    memcache.add(KeyValueTestUtil.create("row11", "fam", "col1", 1, "d-c"));
+    //Creating memstore
+    Set<KeyValue> memstore = new TreeSet<KeyValue>(KeyValue.COMPARATOR);
+    memstore.add(KeyValueTestUtil.create("row11", "fam", "col1", 3, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row11", "fam", "col1", 2, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row11", "fam", "col1", 1, "d-c"));
 
     //Creating expected result
     List<DeleteCode> expected = new ArrayList<DeleteCode>();
@@ -133,7 +133,7 @@
     byte deleteType = deleteBuffer[deleteTimestampOffset +Bytes.SIZEOF_LONG];
     
     List<DeleteCode> actual = new ArrayList<DeleteCode>();
-    for(KeyValue mem : memcache){
+    for(KeyValue mem : memstore){
     actual.add(DeleteCompare.deleteCompare(mem, deleteBuffer, deleteRowOffset,
         deleteRowLen, deleteQualifierOffset, deleteQualifierLen,
         deleteTimestampOffset, deleteType, KeyValue.KEY_COMPARATOR));
@@ -146,15 +146,15 @@
   }
   
   public void testDeleteCompare_Multiple() {
-    //Creating memcache
-    Set<KeyValue> memcache = new TreeSet<KeyValue>(KeyValue.COMPARATOR);
-    memcache.add(KeyValueTestUtil.create("row11", "fam", "col1", 1, "d-c"));
-    memcache.add(KeyValueTestUtil.create("row21", "fam", "col1", 4, "d-c"));
-    memcache.add(KeyValueTestUtil.create("row21", "fam", "col1", 3, "d-c"));
-    memcache.add(KeyValueTestUtil.create("row21", "fam", "col1", 2, "d-c"));
-    memcache.add(KeyValueTestUtil.create("row21", "fam", "col1", 1,
+    //Creating memstore
+    Set<KeyValue> memstore = new TreeSet<KeyValue>(KeyValue.COMPARATOR);
+    memstore.add(KeyValueTestUtil.create("row11", "fam", "col1", 1, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row21", "fam", "col1", 4, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row21", "fam", "col1", 3, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row21", "fam", "col1", 2, "d-c"));
+    memstore.add(KeyValueTestUtil.create("row21", "fam", "col1", 1,
         KeyValue.Type.Delete, "dont-care"));
-    memcache.add(KeyValueTestUtil.create("row31", "fam", "col1", 1, "dont-care"));
+    memstore.add(KeyValueTestUtil.create("row31", "fam", "col1", 1, "dont-care"));
 
     //Creating expected result
     List<DeleteCode> expected = new ArrayList<DeleteCode>();
@@ -176,7 +176,7 @@
     byte deleteType = deleteBuffer[deleteTimestampOffset +Bytes.SIZEOF_LONG];
     
     List<DeleteCode> actual = new ArrayList<DeleteCode>();
-    for(KeyValue mem : memcache){
+    for(KeyValue mem : memstore){
     actual.add(DeleteCompare.deleteCompare(mem, deleteBuffer, deleteRowOffset,
         deleteRowLen, deleteQualifierOffset, deleteQualifierLen,
         deleteTimestampOffset, deleteType, KeyValue.KEY_COMPARATOR));

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java?rev=788273&r1=788272&r2=788273&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java Thu Jun 25 06:55:37 2009
@@ -22,8 +22,8 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 import java.util.TreeMap;
-import java.util.Iterator;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -36,11 +36,11 @@
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.UnknownScannerException;
+import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
 import org.apache.hadoop.hbase.regionserver.HRegion.RegionScanner;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -191,11 +191,11 @@
     
     //checkAndPut with wrong value
     Store store = region.getStore(fam1);
-    int size = store.memcache.memcache.size();
+    int size = store.memstore.memstore.size();
     
     boolean res = region.checkAndPut(row1, fam1, qf1, val1, put, lockId, true);
     assertEquals(true, res);
-    size = store.memcache.memcache.size();
+    size = store.memstore.memstore.size();
     
     Get get = new Get(row1);
     get.addColumn(fam2, qf1);
@@ -411,13 +411,15 @@
 
     region.delete(fam1, kvs, true);
 
-    // extract the key values out the memcache:
+    // extract the key values out of the memstore:
     // This is kinda hacky, but better than nothing...
     long now = System.currentTimeMillis();
-    KeyValue firstKv = region.getStore(fam1).memcache.memcache.first();
+    KeyValue firstKv = region.getStore(fam1).memstore.memstore.firstKey();
     assertTrue(firstKv.getTimestamp() <= now);
     now = firstKv.getTimestamp();
-    for (KeyValue kv : region.getStore(fam1).memcache.memcache) {
+    for (Map.Entry<KeyValue, ?> entry:
+        region.getStore(fam1).memstore.memstore.entrySet()) {
+      KeyValue kv = entry.getKey();
       assertTrue(kv.getTimestamp() <= now);
       now = kv.getTimestamp();
     }
@@ -464,7 +466,7 @@
     String method = this.getName();
     initHRegion(tableName, method, fam1);
     
-    //Add to memcache
+    //Add to memstore
     Put put = new Put(row1);
     put.add(fam1, col1, null);
     put.add(fam1, col2, null);
@@ -526,7 +528,7 @@
     String method = this.getName();
     initHRegion(HConstants.ROOT_TABLE_NAME, method, HConstants.CATALOG_FAMILY);
 
-    //Add to memcache
+    //Add to memstore
     Put put = new Put(HConstants.EMPTY_START_ROW);
     put.add(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER, null);
     region.put(put);
@@ -539,7 +541,7 @@
         HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
     KeyValue [] expected = {kv1};
     
-    //Test from memcache
+    //Test from memstore
     Result res = region.get(get, null);
     
     assertEquals(expected.length, res.size());
@@ -868,7 +870,7 @@
     
   }
   
-  public void testScanner_ExplicitColumns_FromMemcache_EnforceVersions() 
+  public void testScanner_ExplicitColumns_FromMemStore_EnforceVersions() 
   throws IOException {
     byte [] tableName = Bytes.toBytes("testtable");
     byte [] row1 = Bytes.toBytes("row1");
@@ -984,7 +986,7 @@
     }
   }
   
-  public void testScanner_ExplicitColumns_FromMemcacheAndFiles_EnforceVersions()
+  public void testScanner_ExplicitColumns_FromMemStoreAndFiles_EnforceVersions()
   throws IOException {
     byte [] tableName = Bytes.toBytes("testtable");
     byte [] row1 = Bytes.toBytes("row1");
@@ -1063,7 +1065,7 @@
     }
   }
   
-  public void testScanner_Wildcard_FromMemcache_EnforceVersions() 
+  public void testScanner_Wildcard_FromMemStore_EnforceVersions() 
   throws IOException {
     byte [] tableName = Bytes.toBytes("testtable");
     byte [] row1 = Bytes.toBytes("row1");
@@ -1229,7 +1231,7 @@
     
   }
   
-  public void testScanner_Wildcard_FromMemcacheAndFiles_EnforceVersions()
+  public void testScanner_Wildcard_FromMemStoreAndFiles_EnforceVersions()
   throws IOException {
     byte [] tableName = Bytes.toBytes("testtable");
     byte [] row1 = Bytes.toBytes("row1");

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestLogRolling.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestLogRolling.java?rev=788273&r1=788272&r2=788273&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestLogRolling.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestLogRolling.java Thu Jun 25 06:55:37 2009
@@ -24,16 +24,13 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.HBaseClusterTestCase;
-
+import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.io.BatchUpdate;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -83,10 +80,10 @@
     conf.setInt("hbase.regionserver.maxlogentries", 32);
 
     // For less frequently updated regions flush after every 2 flushes
-    conf.setInt("hbase.hregion.memcache.optionalflushcount", 2);
+    conf.setInt("hbase.hregion.memstore.optionalflushcount", 2);
 
     // We flush the cache after every 8192 bytes
-    conf.setInt("hbase.hregion.memcache.flush.size", 8192);
+    conf.setInt("hbase.hregion.memstore.flush.size", 8192);
 
     // Make lease timeout longer, lease checks less frequent
     conf.setInt("hbase.master.lease.period", 10 * 1000);

Added: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestMemStore.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestMemStore.java?rev=788273&view=auto
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestMemStore.java (added)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestMemStore.java Thu Jun 25 06:55:37 2009
@@ -0,0 +1,644 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import java.io.IOException;
+import java.rmi.UnexpectedException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableSet;
+import java.util.TreeSet;
+
+import junit.framework.TestCase;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueTestUtil;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/** memstore test case */
+public class TestMemStore extends TestCase {
+  private final Log LOG = LogFactory.getLog(this.getClass());
+  private MemStore memstore;
+  private static final int ROW_COUNT = 10;
+  private static final int QUALIFIER_COUNT = 10;
+  private static final byte [] FAMILY = Bytes.toBytes("column");
+  private static final byte [] CONTENTS_BASIC = Bytes.toBytes("contents:basic");
+  private static final String CONTENTSTR = "contentstr";
+
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    this.memstore = new MemStore();
+  }
+
+  /** 
+   * Test memstore snapshot happening while scanning.
+   * @throws IOException
+   */
+  public void testScanAcrossSnapshot() throws IOException {
+    int rowCount = addRows(this.memstore);
+    KeyValueScanner [] memstorescanners = this.memstore.getScanners();
+    Scan scan = new Scan();
+    List<KeyValue> result = new ArrayList<KeyValue>();
+    StoreScanner s = new StoreScanner(scan, null, HConstants.LATEST_TIMESTAMP,
+      this.memstore.comparator, null, memstorescanners);
+    int count = 0;
+    try {
+      while (s.next(result)) {
+        LOG.info(result);
+        count++;
+        result.clear();
+      }
+    } finally {
+      s.close();
+    }
+    assertEquals(rowCount, count);
+    // Now assert can count same number even if a snapshot mid-scan.
+    s = new StoreScanner(scan, null, HConstants.LATEST_TIMESTAMP,
+      this.memstore.comparator, null, memstorescanners);
+    count = 0;
+    try {
+      while (s.next(result)) {
+        LOG.info(result);
+        // Assert the stuff is coming out in right order.
+        assertTrue(Bytes.compareTo(Bytes.toBytes(count), result.get(0).getRow()) == 0);
+        count++;
+        if (count == 2) {
+          this.memstore.snapshot();
+          LOG.info("Snapshotted");
+        }
+        result.clear();
+      }
+    } finally {
+      s.close();
+    }
+    assertEquals(rowCount, count);
+  }
+
+  /** 
+   * Test memstore snapshots
+   * @throws IOException
+   */
+  public void testSnapshotting() throws IOException {
+    final int snapshotCount = 5;
+    // Add some rows, run a snapshot. Do it a few times.
+    for (int i = 0; i < snapshotCount; i++) {
+      addRows(this.memstore);
+      runSnapshot(this.memstore);
+      Map<KeyValue, ?> ss = this.memstore.getSnapshot();
+      assertEquals("History not being cleared", 0, ss.size());
+    }
+  }
+
+  public void testMultipleVersionsSimple() throws Exception {
+    MemStore m = new MemStore(HConstants.FOREVER, KeyValue.COMPARATOR);
+    byte [] row = Bytes.toBytes("testRow");
+    byte [] family = Bytes.toBytes("testFamily");
+    byte [] qf = Bytes.toBytes("testQualifier");
+    long [] stamps = {1,2,3};
+    byte [][] values = {Bytes.toBytes("value0"), Bytes.toBytes("value1"),
+        Bytes.toBytes("value2")};
+    KeyValue key0 = new KeyValue(row, family, qf, stamps[0], values[0]);
+    KeyValue key1 = new KeyValue(row, family, qf, stamps[1], values[1]);
+    KeyValue key2 = new KeyValue(row, family, qf, stamps[2], values[2]);
+    
+    m.add(key0);
+    m.add(key1);
+    m.add(key2);
+    
+    assertTrue("Expected memstore to hold 3 values, actually has " + 
+        m.memstore.size(), m.memstore.size() == 3);
+  }
+
+  public void testBinary() throws IOException {
+    MemStore mc = new MemStore(HConstants.FOREVER, KeyValue.ROOT_COMPARATOR);
+    final int start = 43;
+    final int end = 46;
+    for (int k = start; k <= end; k++) {
+      byte [] kk = Bytes.toBytes(k);
+      byte [] row =
+        Bytes.toBytes(".META.,table," + Bytes.toString(kk) + ",1," + k);
+      KeyValue key = new KeyValue(row, CONTENTS_BASIC,
+        System.currentTimeMillis(),
+        (CONTENTSTR + k).getBytes(HConstants.UTF8_ENCODING));
+      mc.add(key);
+      System.out.println(key);
+//      key = new KeyValue(row, Bytes.toBytes(ANCHORNUM + k),
+//        System.currentTimeMillis(),
+//        (ANCHORSTR + k).getBytes(HConstants.UTF8_ENCODING));
+//      mc.add(key);
+//      System.out.println(key);
+    }
+    int index = start;
+    for (Map.Entry<KeyValue, ?> entry: mc.memstore.entrySet()) {
+      System.out.println(entry);
+      byte [] b = entry.getKey().getRow();
+      // Hardcoded offsets into String
+      String str = Bytes.toString(b, 13, 4);
+      byte [] bb = Bytes.toBytes(index);
+      String bbStr = Bytes.toString(bb);
+      assertEquals(str, bbStr);
+      index++;
+    }
+  }
+
+  //////////////////////////////////////////////////////////////////////////////
+  // Get tests
+  //////////////////////////////////////////////////////////////////////////////
+  /** For HBASE-528 */
+  public void testGetRowKeyAtOrBefore() {
+    // set up some test data
+    byte [] t10 = Bytes.toBytes("010");
+    byte [] t20 = Bytes.toBytes("020");
+    byte [] t30 = Bytes.toBytes("030");
+    byte [] t35 = Bytes.toBytes("035");
+    byte [] t40 = Bytes.toBytes("040");
+    
+    memstore.add(getKV(t10, "t10 bytes".getBytes()));
+    memstore.add(getKV(t20, "t20 bytes".getBytes()));
+    memstore.add(getKV(t30, "t30 bytes".getBytes()));
+    memstore.add(getKV(t35, "t35 bytes".getBytes()));
+    // write a delete in there to see if things still work ok
+    memstore.add(getDeleteKV(t35));
+    memstore.add(getKV(t40, "t40 bytes".getBytes()));
+    
+    NavigableSet<KeyValue> results = null;
+    
+    // try finding "015"
+    results =
+      new TreeSet<KeyValue>(this.memstore.comparator.getComparatorIgnoringType());
+    KeyValue t15 = new KeyValue(Bytes.toBytes("015"),
+      System.currentTimeMillis());
+    memstore.getRowKeyAtOrBefore(t15, results);
+    KeyValue kv = results.last();
+    assertTrue(KeyValue.COMPARATOR.compareRows(kv, t10) == 0);
+
+    // try "020", we should get that row exactly
+    results =
+      new TreeSet<KeyValue>(this.memstore.comparator.getComparatorIgnoringType());
+    memstore.getRowKeyAtOrBefore(new KeyValue(t20, System.currentTimeMillis()),
+      results);
+    assertTrue(KeyValue.COMPARATOR.compareRows(results.last(), t20) == 0);
+
+    // try "030", we should get that row exactly
+    results =
+      new TreeSet<KeyValue>(this.memstore.comparator.getComparatorIgnoringType());
+    memstore.getRowKeyAtOrBefore(new KeyValue(t30, System.currentTimeMillis()),
+      results);
+    assertTrue(KeyValue.COMPARATOR.compareRows(results.last(), t30) == 0);
+  
+    // try "038", should skip the deleted "035" and give "030"
+    results =
+      new TreeSet<KeyValue>(this.memstore.comparator.getComparatorIgnoringType());
+    byte [] t38 = Bytes.toBytes("038");
+    memstore.getRowKeyAtOrBefore(new KeyValue(t38, System.currentTimeMillis()),
+      results);
+    assertTrue(KeyValue.COMPARATOR.compareRows(results.last(), t30) == 0);
+  
+    // try "050", should get stuff from "040"
+    results =
+      new TreeSet<KeyValue>(this.memstore.comparator.getComparatorIgnoringType());
+    byte [] t50 = Bytes.toBytes("050");
+    memstore.getRowKeyAtOrBefore(new KeyValue(t50, System.currentTimeMillis()),
+      results);
+    assertTrue(KeyValue.COMPARATOR.compareRows(results.last(), t40) == 0);
+  }
+  
+  
+  /** Test getNextRow from memstore
+   * @throws InterruptedException 
+   */
+  public void testGetNextRow() throws Exception {
+    addRows(this.memstore);
+    // Add more versions to make it a little more interesting.
+    Thread.sleep(1);
+    addRows(this.memstore);
+    KeyValue closestToEmpty = this.memstore.getNextRow(KeyValue.LOWESTKEY);
+    assertTrue(KeyValue.COMPARATOR.compareRows(closestToEmpty,
+      new KeyValue(Bytes.toBytes(0), System.currentTimeMillis())) == 0);
+    for (int i = 0; i < ROW_COUNT; i++) {
+      KeyValue nr = this.memstore.getNextRow(new KeyValue(Bytes.toBytes(i),
+        System.currentTimeMillis()));
+      if (i + 1 == ROW_COUNT) {
+        assertEquals(nr, null);
+      } else {
+        assertTrue(KeyValue.COMPARATOR.compareRows(nr,
+          new KeyValue(Bytes.toBytes(i + 1), System.currentTimeMillis())) == 0);
+      }
+    }
+    //starting from each row, validate results should contain the starting row
+    for (int startRowId = 0; startRowId < ROW_COUNT; startRowId++) {
+      InternalScanner scanner =
+          new StoreScanner(new Scan(Bytes.toBytes(startRowId)), FAMILY,
+              Integer.MAX_VALUE, this.memstore.comparator, null,
+              new KeyValueScanner[]{memstore.getScanners()[0]});
+      List<KeyValue> results = new ArrayList<KeyValue>();
+      for (int i = 0; scanner.next(results); i++) {
+        int rowId = startRowId + i;
+        assertTrue("Row name",
+          KeyValue.COMPARATOR.compareRows(results.get(0),
+          Bytes.toBytes(rowId)) == 0);
+        assertEquals("Count of columns", QUALIFIER_COUNT, results.size());
+        List<KeyValue> row = new ArrayList<KeyValue>();
+        for (KeyValue kv : results) {
+          row.add(kv);
+        }
+        isExpectedRowWithoutTimestamps(rowId, row);
+        // Clear out set.  Otherwise row results accumulate.
+        results.clear();
+      }
+    }
+  }
+  
+  public void testGet_Basic_Found() throws IOException {
+    byte [] row = Bytes.toBytes("testrow");
+    byte [] fam = Bytes.toBytes("testfamily");
+    byte [] qf1 = Bytes.toBytes("testqualifier1");
+    byte [] qf2 = Bytes.toBytes("testqualifier2");
+    byte [] qf3 = Bytes.toBytes("testqualifier3");
+    byte [] val = Bytes.toBytes("testval");
+    
+    //Setting up memstore
+    KeyValue add1 = new KeyValue(row, fam ,qf1, val);
+    KeyValue add2 = new KeyValue(row, fam ,qf2, val);
+    KeyValue add3 = new KeyValue(row, fam ,qf3, val);
+    memstore.add(add1);
+    memstore.add(add2);
+    memstore.add(add3);
+    
+    //test
+    Get get = new Get(row);
+    NavigableSet<byte[]> columns = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
+    columns.add(qf2);
+    long ttl = Long.MAX_VALUE;
+
+    QueryMatcher matcher =
+      new QueryMatcher(get, fam, columns, ttl, KeyValue.KEY_COMPARATOR, 1);
+    
+    List<KeyValue> result = new ArrayList<KeyValue>();
+    boolean res = memstore.get(matcher, result);
+    assertEquals(true, res);
+  }
+  
+  public void testGet_Basic_NotFound() throws IOException {
+    byte [] row = Bytes.toBytes("testrow");
+    byte [] fam = Bytes.toBytes("testfamily");
+    byte [] qf1 = Bytes.toBytes("testqualifier1");
+    byte [] qf2 = Bytes.toBytes("testqualifier2");
+    byte [] qf3 = Bytes.toBytes("testqualifier3");
+    byte [] val = Bytes.toBytes("testval");
+    
+    //Setting up memstore
+    KeyValue add1 = new KeyValue(row, fam ,qf1, val);
+    KeyValue add3 = new KeyValue(row, fam ,qf3, val);
+    memstore.add(add1);
+    memstore.add(add3);
+    
+    //test
+    Get get = new Get(row);
+    NavigableSet<byte[]> columns = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
+    columns.add(qf2);
+    long ttl = Long.MAX_VALUE;
+
+    QueryMatcher matcher =
+      new QueryMatcher(get, fam, columns, ttl, KeyValue.KEY_COMPARATOR, 1);
+    
+    List<KeyValue> result = new ArrayList<KeyValue>();
+    boolean res = memstore.get(matcher, result);
+    assertEquals(false, res);
+  }
+
+  public void testGet_memstoreAndSnapShot() throws IOException {
+    byte [] row = Bytes.toBytes("testrow");
+    byte [] fam = Bytes.toBytes("testfamily");
+    byte [] qf1 = Bytes.toBytes("testqualifier1");
+    byte [] qf2 = Bytes.toBytes("testqualifier2");
+    byte [] qf3 = Bytes.toBytes("testqualifier3");
+    byte [] qf4 = Bytes.toBytes("testqualifier4");
+    byte [] qf5 = Bytes.toBytes("testqualifier5");
+    byte [] val = Bytes.toBytes("testval");
+    
+    //Creating get
+    Get get = new Get(row);
+    NavigableSet<byte[]> columns = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
+    columns.add(qf2);
+    columns.add(qf4);
+    long ttl = Long.MAX_VALUE;
+
+    QueryMatcher matcher =
+      new QueryMatcher(get, fam, columns, ttl, KeyValue.KEY_COMPARATOR, 1);
+    
+    //Setting up memstore
+    memstore.add(new KeyValue(row, fam ,qf1, val));
+    memstore.add(new KeyValue(row, fam ,qf2, val));
+    memstore.add(new KeyValue(row, fam ,qf3, val));
+    //Creating a snapshot
+    memstore.snapshot();
+    assertEquals(3, memstore.snapshot.size());
+    //Adding value to "new" memstore
+    assertEquals(0, memstore.memstore.size());
+    memstore.add(new KeyValue(row, fam ,qf4, val));
+    memstore.add(new KeyValue(row, fam ,qf5, val));
+    assertEquals(2, memstore.memstore.size());
+    
+    List<KeyValue> result = new ArrayList<KeyValue>();
+    boolean res = memstore.get(matcher, result);
+    assertEquals(true, res);
+  }
+  
+  public void testGet_SpecificTimeStamp() throws IOException {
+    byte [] row = Bytes.toBytes("testrow");
+    byte [] fam = Bytes.toBytes("testfamily");
+    byte [] qf1 = Bytes.toBytes("testqualifier1");
+    byte [] qf2 = Bytes.toBytes("testqualifier2");
+    byte [] qf3 = Bytes.toBytes("testqualifier3");
+    byte [] val = Bytes.toBytes("testval");
+    
+    long ts1 = System.currentTimeMillis();
+    long ts2 = ts1++;
+    long ts3 = ts2++;
+    
+    //Creating get
+    Get get = new Get(row);
+    get.setTimeStamp(ts2);
+    NavigableSet<byte[]> columns = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
+    columns.add(qf1);
+    columns.add(qf2);
+    columns.add(qf3);
+    long ttl = Long.MAX_VALUE;
+
+    QueryMatcher matcher = new QueryMatcher(get, fam, columns, ttl,
+      KeyValue.KEY_COMPARATOR, 1);
+    
+    //Setting up expected
+    List<KeyValue> expected = new ArrayList<KeyValue>();
+    KeyValue kv1 = new KeyValue(row, fam ,qf1, ts2, val);
+    KeyValue kv2 = new KeyValue(row, fam ,qf2, ts2, val);
+    KeyValue kv3 = new KeyValue(row, fam ,qf3, ts2, val);
+    expected.add(kv1);
+    expected.add(kv2);
+    expected.add(kv3);
+    
+    //Setting up memstore
+    memstore.add(new KeyValue(row, fam ,qf1, ts1, val));
+    memstore.add(new KeyValue(row, fam ,qf2, ts1, val));
+    memstore.add(new KeyValue(row, fam ,qf3, ts1, val));
+    memstore.add(kv1);
+    memstore.add(kv2);
+    memstore.add(kv3);
+    memstore.add(new KeyValue(row, fam ,qf1, ts3, val));
+    memstore.add(new KeyValue(row, fam ,qf2, ts3, val));
+    memstore.add(new KeyValue(row, fam ,qf3, ts3, val));
+    
+    //Get
+    List<KeyValue> result = new ArrayList<KeyValue>();
+    memstore.get(matcher, result);
+    
+    assertEquals(expected.size(), result.size());
+    for(int i=0; i<expected.size(); i++){
+      assertEquals(expected.get(i), result.get(i));
+    }
+  }
+  
+  
+  //////////////////////////////////////////////////////////////////////////////
+  // Delete tests
+  //////////////////////////////////////////////////////////////////////////////
+  public void testGetWithDelete() throws IOException {
+    byte [] row = Bytes.toBytes("testrow");
+    byte [] fam = Bytes.toBytes("testfamily");
+    byte [] qf1 = Bytes.toBytes("testqualifier");
+    byte [] val = Bytes.toBytes("testval");
+    
+    long ts1 = System.nanoTime();
+    KeyValue put1 = new KeyValue(row, fam, qf1, ts1, val);
+    long ts2 = ts1 + 1;
+    KeyValue put2 = new KeyValue(row, fam, qf1, ts2, val);
+    long ts3 = ts2 +1;
+    KeyValue put3 = new KeyValue(row, fam, qf1, ts3, val);
+    memstore.add(put1);
+    memstore.add(put2);
+    memstore.add(put3);
+    
+    assertEquals(3, memstore.memstore.size());
+    
+    KeyValue del2 = new KeyValue(row, fam, qf1, ts2, KeyValue.Type.Delete, val);
+    memstore.delete(del2);
+
+    List<KeyValue> expected = new ArrayList<KeyValue>();
+    expected.add(put3);
+    expected.add(del2);
+    expected.add(put1);
+    
+    assertEquals(3, memstore.memstore.size());
+    int i = 0;
+    for(Map.Entry<KeyValue, ?> entry : memstore.memstore.entrySet()) {
+      assertEquals(expected.get(i++), entry.getKey());
+    }
+  }
+  
+  public void testGetWithDeleteColumn() throws IOException {
+    byte [] row = Bytes.toBytes("testrow");
+    byte [] fam = Bytes.toBytes("testfamily");
+    byte [] qf1 = Bytes.toBytes("testqualifier");
+    byte [] val = Bytes.toBytes("testval");
+    
+    long ts1 = System.nanoTime();
+    KeyValue put1 = new KeyValue(row, fam, qf1, ts1, val);
+    long ts2 = ts1 + 1;
+    KeyValue put2 = new KeyValue(row, fam, qf1, ts2, val);
+    long ts3 = ts2 +1;
+    KeyValue put3 = new KeyValue(row, fam, qf1, ts3, val);
+    memstore.add(put1);
+    memstore.add(put2);
+    memstore.add(put3);
+    
+    assertEquals(3, memstore.memstore.size());
+    
+    KeyValue del2 = 
+      new KeyValue(row, fam, qf1, ts2, KeyValue.Type.DeleteColumn, val);
+    memstore.delete(del2);
+
+    List<KeyValue> expected = new ArrayList<KeyValue>();
+    expected.add(put3);
+    expected.add(del2);
+    
+    assertEquals(2, memstore.memstore.size());
+    int i = 0;
+    for(Map.Entry<KeyValue, ?> entry : memstore.memstore.entrySet()) {
+      assertEquals(expected.get(i++), entry.getKey());
+    }
+  }
+  
+  
+  public void testGetWithDeleteFamily() throws IOException {
+    byte [] row = Bytes.toBytes("testrow");
+    byte [] fam = Bytes.toBytes("testfamily");
+    byte [] qf1 = Bytes.toBytes("testqualifier1");
+    byte [] qf2 = Bytes.toBytes("testqualifier2");
+    byte [] qf3 = Bytes.toBytes("testqualifier3");
+    byte [] val = Bytes.toBytes("testval");
+    long ts = System.nanoTime();
+    
+    KeyValue put1 = new KeyValue(row, fam, qf1, ts, val);
+    KeyValue put2 = new KeyValue(row, fam, qf2, ts, val);
+    KeyValue put3 = new KeyValue(row, fam, qf3, ts, val);
+    KeyValue put4 = new KeyValue(row, fam, qf3, ts+1, val);
+
+    memstore.add(put1);
+    memstore.add(put2);
+    memstore.add(put3);
+    memstore.add(put4);
+    
+    KeyValue del = 
+      new KeyValue(row, fam, null, ts, KeyValue.Type.DeleteFamily, val);
+    memstore.delete(del);
+
+    List<KeyValue> expected = new ArrayList<KeyValue>();
+    expected.add(del);
+    expected.add(put4);
+    
+    assertEquals(2, memstore.memstore.size());
+    int i = 0;
+    for(Map.Entry<KeyValue, ?> entry : memstore.memstore.entrySet()) {
+      assertEquals(expected.get(i++), entry.getKey());
+    }
+  }
+  
+  public void testKeepDeleteInmemstore() {
+    byte [] row = Bytes.toBytes("testrow");
+    byte [] fam = Bytes.toBytes("testfamily");
+    byte [] qf = Bytes.toBytes("testqualifier");
+    byte [] val = Bytes.toBytes("testval");
+    long ts = System.nanoTime();
+    memstore.add(new KeyValue(row, fam, qf, ts, val));
+    KeyValue delete = new KeyValue(row, fam, qf, ts, KeyValue.Type.Delete, val);
+    memstore.delete(delete);
+    assertEquals(1, memstore.memstore.size());
+    assertEquals(delete, memstore.memstore.firstKey());
+  }
+
+  public void testRetainsDeleteVersion() throws IOException {
+    // add a put to memstore
+    memstore.add(KeyValueTestUtil.create("row1", "fam", "a", 100, "dont-care"));
+
+    // now process a specific delete:
+    KeyValue delete = KeyValueTestUtil.create(
+        "row1", "fam", "a", 100, KeyValue.Type.Delete, "dont-care");
+    memstore.delete(delete);
+
+    assertEquals(1, memstore.memstore.size());
+    assertEquals(delete, memstore.memstore.firstKey());
+  }
+  public void testRetainsDeleteColumn() throws IOException {
+    // add a put to memstore
+    memstore.add(KeyValueTestUtil.create("row1", "fam", "a", 100, "dont-care"));
+
+    // now process a specific delete:
+    KeyValue delete = KeyValueTestUtil.create("row1", "fam", "a", 100,
+        KeyValue.Type.DeleteColumn, "dont-care");
+    memstore.delete(delete);
+
+    assertEquals(1, memstore.memstore.size());
+    assertEquals(delete, memstore.memstore.firstKey());
+  }
+  public void testRetainsDeleteFamily() throws IOException {
+    // add a put to memstore
+    memstore.add(KeyValueTestUtil.create("row1", "fam", "a", 100, "dont-care"));
+
+    // now process a specific delete:
+    KeyValue delete = KeyValueTestUtil.create("row1", "fam", "a", 100,
+        KeyValue.Type.DeleteFamily, "dont-care");
+    memstore.delete(delete);
+
+    assertEquals(1, memstore.memstore.size());
+    assertEquals(delete, memstore.memstore.firstKey());
+  }
+
+  
+  //////////////////////////////////////////////////////////////////////////////
+  // Helpers
+  //////////////////////////////////////////////////////////////////////////////  
+  private byte [] makeQualifier(final int i1, final int i2){
+    return Bytes.toBytes(Integer.toString(i1) + ";" +
+        Integer.toString(i2));
+  }
+  
  /**
   * Adds {@link #ROW_COUNT} rows, each with QUALIFIER_COUNT qualifiers
   * whose value is the qualifier bytes themselves.
   * @param hmc Instance to add rows to.
   * @return How many rows we added.
   */
  private int addRows(final MemStore hmc) {
    for (int i = 0; i < ROW_COUNT; i++) {
      // All cells within one row share a single timestamp.
      long timestamp = System.currentTimeMillis();
      for (int ii = 0; ii < QUALIFIER_COUNT; ii++) {
        byte [] row = Bytes.toBytes(i);
        byte [] qf = makeQualifier(i, ii);
        hmc.add(new KeyValue(row, FAMILY, qf, timestamp, qf));
      }
    }
    return ROW_COUNT;
  }
+
+  private void runSnapshot(final MemStore hmc) throws UnexpectedException {
+    // Save off old state.
+    int oldHistorySize = hmc.getSnapshot().size();
+    hmc.snapshot();
+    Map<KeyValue, ?> ss = hmc.getSnapshot();
+    // Make some assertions about what just happened.
+    assertTrue("History size has not increased", oldHistorySize < ss.size());
+    hmc.clearSnapshot(ss);
+  }
+
+  private void isExpectedRowWithoutTimestamps(final int rowIndex,
+      List<KeyValue> kvs) {
+    int i = 0;
+    for (KeyValue kv: kvs) {
+      String expectedColname = Bytes.toString(makeQualifier(rowIndex, i++));
+      String colnameStr = Bytes.toString(kv.getQualifier());
+      assertEquals("Column name", colnameStr, expectedColname);
+      // Value is column name as bytes.  Usually result is
+      // 100 bytes in size at least. This is the default size
+      // for BytesWriteable.  For comparison, convert bytes to
+      // String and trim to remove trailing null bytes.
+      String colvalueStr = Bytes.toString(kv.getBuffer(), kv.getValueOffset(),
+        kv.getValueLength());
+      assertEquals("Content", colnameStr, colvalueStr);
+    }
+  }
+
+  private KeyValue getDeleteKV(byte [] row) {
+    return new KeyValue(row, Bytes.toBytes("test_col:"),
+      HConstants.LATEST_TIMESTAMP, KeyValue.Type.Delete, null);
+  }
+
+  private KeyValue getKV(byte [] row, byte [] value) {
+    return new KeyValue(row, Bytes.toBytes("test_col:"),
+      HConstants.LATEST_TIMESTAMP, value);
+  }
+}
\ No newline at end of file

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java?rev=788273&r1=788272&r2=788273&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java Thu Jun 25 06:55:37 2009
@@ -23,17 +23,14 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.NavigableSet;
-import java.util.TreeSet;
 
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KeyComparator;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.io.TimeRange;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.regionserver.QueryMatcher.MatchCode;
+import org.apache.hadoop.hbase.util.Bytes;
 
 
 public class TestQueryMatcher extends HBaseTestCase
@@ -100,18 +97,18 @@
     QueryMatcher qm = new QueryMatcher(get, fam2,
         get.getFamilyMap().get(fam2), ttl, rowComparator, 1);
 
-    List<KeyValue> memCache = new ArrayList<KeyValue>();
-    memCache.add(new KeyValue(row1, fam2, col1, data));
-    memCache.add(new KeyValue(row1, fam2, col2, data));
-    memCache.add(new KeyValue(row1, fam2, col3, data));
-    memCache.add(new KeyValue(row1, fam2, col4, data));
-    memCache.add(new KeyValue(row1, fam2, col5, data));
+    List<KeyValue> memstore = new ArrayList<KeyValue>();
+    memstore.add(new KeyValue(row1, fam2, col1, data));
+    memstore.add(new KeyValue(row1, fam2, col2, data));
+    memstore.add(new KeyValue(row1, fam2, col3, data));
+    memstore.add(new KeyValue(row1, fam2, col4, data));
+    memstore.add(new KeyValue(row1, fam2, col5, data));
 
-    memCache.add(new KeyValue(row2, fam1, col1, data));
+    memstore.add(new KeyValue(row2, fam1, col1, data));
 
     List<MatchCode> actual = new ArrayList<MatchCode>();
 
-    for(KeyValue kv : memCache){
+    for(KeyValue kv : memstore){
       actual.add(qm.match(kv));
     }
 
@@ -142,17 +139,17 @@
 
     QueryMatcher qm = new QueryMatcher(get, fam2, null, ttl, rowComparator, 1);
 
-    List<KeyValue> memCache = new ArrayList<KeyValue>();
-    memCache.add(new KeyValue(row1, fam2, col1, data));
-    memCache.add(new KeyValue(row1, fam2, col2, data));
-    memCache.add(new KeyValue(row1, fam2, col3, data));
-    memCache.add(new KeyValue(row1, fam2, col4, data));
-    memCache.add(new KeyValue(row1, fam2, col5, data));
-    memCache.add(new KeyValue(row2, fam1, col1, data));
+    List<KeyValue> memstore = new ArrayList<KeyValue>();
+    memstore.add(new KeyValue(row1, fam2, col1, data));
+    memstore.add(new KeyValue(row1, fam2, col2, data));
+    memstore.add(new KeyValue(row1, fam2, col3, data));
+    memstore.add(new KeyValue(row1, fam2, col4, data));
+    memstore.add(new KeyValue(row1, fam2, col5, data));
+    memstore.add(new KeyValue(row2, fam1, col1, data));
 
     List<MatchCode> actual = new ArrayList<MatchCode>();
 
-    for(KeyValue kv : memCache){
+    for(KeyValue kv : memstore){
       actual.add(qm.match(kv));
     }
 

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java?rev=788273&r1=788272&r2=788273&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java Thu Jun 25 06:55:37 2009
@@ -157,7 +157,7 @@
           byteStream.toByteArray());
       region.put(put);
 
-      // What we just committed is in the memcache. Verify that we can get
+      // What we just committed is in the memstore. Verify that we can get
       // it back both with scanning and get
       
       scan(false, null);
@@ -188,7 +188,7 @@
       region.put(put);
       
       // Validate that we can still get the HRegionInfo, even though it is in
-      // an older row on disk and there is a newer row in the memcache
+      // an older row on disk and there is a newer row in the memstore
       
       scan(true, address.toString());
       getRegionInfo();
@@ -383,7 +383,7 @@
 
   /**
    * Tests to do a concurrent flush (using a 2nd thread) while scanning.  This tests both
-   * the StoreScanner update readers and the transition from memcache -> snapshot -> store file.
+   * the StoreScanner update readers and the transition from memstore -> snapshot -> store file.
    *
    * @throws Exception
    */

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestStore.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestStore.java?rev=788273&r1=788272&r2=788273&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestStore.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestStore.java Thu Jun 25 06:55:37 2009
@@ -88,13 +88,13 @@
   // Get tests
   //////////////////////////////////////////////////////////////////////////////
   /**
-   * Getting data from memcache only
+   * Getting data from memstore only
    * @throws IOException
    */
-  public void testGet_FromMemCacheOnly() throws IOException {
+  public void testGet_FromMemStoreOnly() throws IOException {
     init(this.getName());
     
-    //Put data in memcache
+    //Put data in memstore
     this.store.add(new KeyValue(row, family, qf1, null));
     this.store.add(new KeyValue(row, family, qf2, null));
     this.store.add(new KeyValue(row, family, qf3, null));
@@ -116,7 +116,7 @@
   public void testGet_FromFilesOnly() throws IOException {
     init(this.getName());
 
-    //Put data in memcache
+    //Put data in memstore
     this.store.add(new KeyValue(row, family, qf1, null));
     this.store.add(new KeyValue(row, family, qf2, null));
     //flush
@@ -145,13 +145,13 @@
   }
 
   /**
-   * Getting data from memcache and files
+   * Getting data from memstore and files
    * @throws IOException
    */
-  public void testGet_FromMemCacheAndFiles() throws IOException {
+  public void testGet_FromMemStoreAndFiles() throws IOException {
     init(this.getName());
 
-    //Put data in memcache
+    //Put data in memstore
     this.store.add(new KeyValue(row, family, qf1, null));
     this.store.add(new KeyValue(row, family, qf2, null));
     //flush
@@ -181,7 +181,7 @@
     this.store.snapshot();
     this.store.flushCache(id++);
     assertEquals(storeFilessize, this.store.getStorefiles().size());
-    assertEquals(0, this.store.memcache.memcache.size());
+    assertEquals(0, this.store.memstore.memstore.size());
   }
 
   private void assertCheck() {
@@ -196,14 +196,14 @@
   //////////////////////////////////////////////////////////////////////////////
   /**
    * Testing if the update in place works. When you want to update a value that
-   * is already in memcache, you don't delete it and put a new one, but just 
+   * is already in memstore, you don't delete it and put a new one, but just 
    * update the value in the original KeyValue
    * @throws IOException
    */
   public void testIncrementColumnValue_UpdatingInPlace() throws IOException {
     init(this.getName());
 
-    //Put data in memcache
+    //Put data in memstore
     long value = 1L;
     long amount = 3L;
     this.store.add(new KeyValue(row, family, qf1, Bytes.toBytes(value)));
@@ -227,7 +227,7 @@
   throws IOException {
     init(this.getName());
 
-    //Put data in memcache
+    //Put data in memstore
     long value = 3L;
     long amount = -1L;
     this.store.add(new KeyValue(row, family, qf1, Bytes.toBytes(value)));
@@ -250,7 +250,7 @@
   public void testIncrementColumnValue_AddingNew() throws IOException {
     init(this.getName());
     
-    //Put data in memcache
+    //Put data in memstore
     long value = 1L;
     long amount = 3L;
     this.store.add(new KeyValue(row, family, qf1, Bytes.toBytes(value)));
@@ -268,14 +268,14 @@
   }
 
   /**
-   * When we have the key in a file add a new key + value to memcache with the 
+   * When we have the key in a file add a new key + value to memstore with the 
    * updates value. 
    * @throws IOException
    */
   public void testIncrementColumnValue_UpdatingFromSF() throws IOException {
     init(this.getName());
     
-    //Put data in memcache
+    //Put data in memstore
     long value = 1L;
     long amount = 3L;
     this.store.add(new KeyValue(row, family, qf1, Bytes.toBytes(value)));
@@ -296,14 +296,14 @@
 
   /**
    * Same as testIncrementColumnValue_AddingNew() except that the keys are
-   * checked in file not in memcache
+   * checked in file not in memstore
    * @throws IOException
    */
   public void testIncrementColumnValue_AddingNewAfterSFCheck() 
   throws IOException {
     init(this.getName());
     
-    //Put data in memcache
+    //Put data in memstore
     long value = 1L;
     long amount = 3L;
     this.store.add(new KeyValue(row, family, qf1, Bytes.toBytes(value)));



Mime
View raw message