hbase-commits mailing list archives

From: apurt...@apache.org
Subject: svn commit: r789592 [2/2] - in /hadoop/hbase/trunk_on_hadoop-0.18.3: ./ conf/ src/contrib/ src/contrib/stargate/ src/contrib/stargate/lib/ src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/ src/contrib/stargate/src/java/org/apache/h...
Date: Tue, 30 Jun 2009 07:13:00 GMT
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/TestHeapSize.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/TestHeapSize.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/TestHeapSize.java
(original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/TestHeapSize.java
Tue Jun 30 07:12:58 2009
@@ -1,11 +1,16 @@
 package org.apache.hadoop.hbase.io;
 
 import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.TreeMap;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.hfile.CachedBlock;
 import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
@@ -23,6 +28,81 @@
   // KeyValue, LruBlockCache, LruHashMap<K,V>, Put, HLogKey
   
   /**
+   * Test our hard-coded sizing of native Java objects
+   */
+  @SuppressWarnings("unchecked")
+  public void testNativeSizes() throws IOException {
+    Class cl = null;
+    long expected = 0L;
+    long actual = 0L;
+    
+    // ArrayList
+    cl = ArrayList.class;
+    expected = ClassSize.estimateBase(cl, false);
+    actual = ClassSize.ARRAYLIST;
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    
+    // ByteBuffer
+    cl = ByteBuffer.class;
+    expected = ClassSize.estimateBase(cl, false);
+    actual = ClassSize.BYTE_BUFFER;
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    
+    // Integer
+    cl = Integer.class;
+    expected = ClassSize.estimateBase(cl, false);
+    actual = ClassSize.INTEGER;
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    
+    // Map.Entry
+    // Interface is public, all others are not.  Hard to size via ClassSize
+//    cl = Map.Entry.class;
+//    expected = ClassSize.estimateBase(cl, false);
+//    actual = ClassSize.MAP_ENTRY;
+//    if(expected != actual) {
+//      ClassSize.estimateBase(cl, true);
+//      assertEquals(expected, actual);
+//    }
+    
+    // Object
+    cl = Object.class;
+    expected = ClassSize.estimateBase(cl, false);
+    actual = ClassSize.OBJECT;
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    
+    // TreeMap
+    cl = TreeMap.class;
+    expected = ClassSize.estimateBase(cl, false);
+    actual = ClassSize.TREEMAP;
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    
+    // String
+    cl = String.class;
+    expected = ClassSize.estimateBase(cl, false);
+    actual = ClassSize.STRING;
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    
+  }
+  
+  /**
   * Testing the classes that implement HeapSize and are part of 0.20.
   * Some are not tested here, for example BlockIndex, which is tested in
   * TestHFile since it is a non-public class.
@@ -37,7 +117,6 @@
     //KeyValue
     cl = KeyValue.class;
     expected = ClassSize.estimateBase(cl, false);
-    
     KeyValue kv = new KeyValue();
     actual = kv.heapSize();
     if(expected != actual) {
@@ -45,15 +124,36 @@
       assertEquals(expected, actual);
     }
     
-    //LruBlockCache
+    //LruBlockCache Overhead
     cl = LruBlockCache.class;
+    actual = LruBlockCache.CACHE_FIXED_OVERHEAD;
+    expected = ClassSize.estimateBase(cl, false);
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    
+    // LruBlockCache Map Fixed Overhead
+    cl = ConcurrentHashMap.class;
+    actual = ClassSize.CONCURRENT_HASHMAP;
+    expected = ClassSize.estimateBase(cl, false);
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    
+    // CachedBlock Fixed Overhead
+    // We really need "deep" sizing but ClassSize does not do this.
+    // Perhaps we should do all these more in this style....
+    cl = CachedBlock.class;
+    actual = CachedBlock.PER_BLOCK_OVERHEAD;
     expected = ClassSize.estimateBase(cl, false);
-    LruBlockCache c = new LruBlockCache(102400,1024);
-    //Since minimum size for the for a LruBlockCache is 1
-    //we need to remove one reference from the heapsize
-    actual = c.heapSize();// - ClassSize.REFERENCE_SIZE;
+    expected += ClassSize.estimateBase(String.class, false);
+    expected += ClassSize.estimateBase(ByteBuffer.class, false);
     if(expected != actual) {
       ClassSize.estimateBase(cl, true);
+      ClassSize.estimateBase(String.class, true);
+      ClassSize.estimateBase(ByteBuffer.class, true);
       assertEquals(expected, actual);
     }
     

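Every check in the new testNativeSizes follows the same pattern: compare a hard-coded constant from ClassSize against the reflection-based estimate from ClassSize.estimateBase(cl, false), and only on a mismatch re-run the estimate with the debug flag set, so the per-field accounting is logged before the assert fails. A minimal sketch of that pattern factored into a helper (the helper and test class names are illustrative, not part of this commit):

    import java.util.TreeMap;
    import junit.framework.TestCase;
    import org.apache.hadoop.hbase.util.ClassSize;

    public class HeapSizeSketch extends TestCase {

      // Compare a hard-coded size constant against the reflective estimate;
      // re-estimate with debug=true only on mismatch so the failure log
      // shows where the two accountings diverge.
      private void assertSizeMatches(Class<?> cl, long actual) {
        long expected = ClassSize.estimateBase(cl, false);
        if (expected != actual) {
          ClassSize.estimateBase(cl, true); // logs the per-field breakdown
          assertEquals(expected, actual);
        }
      }

      public void testTreeMap() {
        assertSizeMatches(TreeMap.class, ClassSize.TREEMAP);
      }
    }
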
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
(original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
Tue Jun 30 07:12:58 2009
@@ -39,17 +39,15 @@
   public void testBackgroundEvictionThread() throws Exception {
 
     long maxSize = 100000;
+    long blockSize = calculateBlockSizeDefault(maxSize, 9); // room for 9, will evict
     
-    LruBlockCache cache = new LruBlockCache(maxSize,100);
+    LruBlockCache cache = new LruBlockCache(maxSize,blockSize);
     
-    Block [] blocks = generateFixedBlocks(10, 10000);
-    
-    long expectedCacheSize = 0;
+    Block [] blocks = generateFixedBlocks(10, blockSize, "block");
     
     // Add all the blocks
     for(Block block : blocks) {
       cache.cacheBlock(block.blockName, block.buf);
-      expectedCacheSize += block.heapSize();
     }
     
     // Let the eviction run
@@ -67,14 +65,15 @@
   
   public void testCacheSimple() throws Exception {
     
-    LruBlockCache cache = new LruBlockCache(1000000,10000);
-    
-    Block [] blocks = generateRandomBlocks(10, 10000);
+    long maxSize = 1000000;
+    long blockSize = calculateBlockSizeDefault(maxSize, 101);
     
-    long emptyCacheSize = cache.heapSize();
-    
-    long expectedCacheSize = emptyCacheSize;
+    LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
+
+    Block [] blocks = generateRandomBlocks(100, blockSize);
     
+    long expectedCacheSize = cache.heapSize();
+        
     // Confirm empty
     for(Block block : blocks) {
       assertTrue(cache.getBlock(block.blockName) == null);
@@ -115,17 +114,21 @@
       assertTrue(buf != null);
       assertEquals(buf.capacity(), block.buf.capacity());
     }
+    
+    // Expect no evictions
+    assertEquals(0, cache.getEvictionCount());
   }
   
   public void testCacheEvictionSimple() throws Exception {
     
     long maxSize = 100000;
+    long blockSize = calculateBlockSizeDefault(maxSize, 10);
+        
+    LruBlockCache cache = new LruBlockCache(maxSize,blockSize,false);
     
-    LruBlockCache cache = new LruBlockCache(maxSize,100,false);
-    
-    Block [] blocks = generateFixedBlocks(10, 10000);
+    Block [] blocks = generateFixedBlocks(10, blockSize, "block");
     
-    long expectedCacheSize = 0;
+    long expectedCacheSize = cache.heapSize();
     
     // Add all the blocks
     for(Block block : blocks) {
@@ -134,17 +137,17 @@
     }
     
     // A single eviction run should have occurred
-    assertEquals(cache.getEvictionCount(), 1);
+    assertEquals(1, cache.getEvictionCount());
     
     // Our expected size overruns acceptable limit
     assertTrue(expectedCacheSize > 
       (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
     
     // But the cache did not grow beyond max
-    assertTrue(cache.cacheSize() < maxSize);
+    assertTrue(cache.heapSize() < maxSize);
     
     // And is still below the acceptable limit
-    assertTrue(cache.cacheSize() < 
+    assertTrue(cache.heapSize() < 
         (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
   
     // All blocks except block 0 and 1 should be in the cache
@@ -159,13 +162,14 @@
   public void testCacheEvictionTwoPriorities() throws Exception {
     
     long maxSize = 100000;
+    long blockSize = calculateBlockSizeDefault(maxSize, 10);
     
-    LruBlockCache cache = new LruBlockCache(maxSize,100,false);
+    LruBlockCache cache = new LruBlockCache(maxSize,blockSize,false);
     
     Block [] singleBlocks = generateFixedBlocks(5, 10000, "single");
     Block [] multiBlocks = generateFixedBlocks(5, 10000, "multi");
     
-    long expectedCacheSize = 0;
+    long expectedCacheSize = cache.heapSize();
     
     // Add and get the multi blocks
     for(Block block : multiBlocks) {
@@ -191,10 +195,10 @@
       (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
     
     // But the cache did not grow beyond max
-    assertTrue(cache.cacheSize() <= maxSize);
+    assertTrue(cache.heapSize() <= maxSize);
     
     // And is now below the acceptable limit
-    assertTrue(cache.cacheSize() <= 
+    assertTrue(cache.heapSize() <= 
         (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
   
     // We expect fairness across the two priorities.
@@ -216,7 +220,7 @@
   public void testCacheEvictionThreePriorities() throws Exception {
     
     long maxSize = 100000;
-    long blockSize = 9800;
+    long blockSize = calculateBlockSize(maxSize, 10);
     
     LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
         (int)Math.ceil(1.2*maxSize/blockSize),
@@ -228,12 +232,12 @@
         0.33f, // multi
         0.34f);// memory
        
-        
+    
     Block [] singleBlocks = generateFixedBlocks(5, blockSize, "single");
     Block [] multiBlocks = generateFixedBlocks(5, blockSize, "multi");
     Block [] memoryBlocks = generateFixedBlocks(5, blockSize, "memory");
     
-    long expectedCacheSize = 0;
+    long expectedCacheSize = cache.heapSize();
     
     // Add 3 blocks from each priority
     for(int i=0;i<3;i++) {
@@ -257,7 +261,7 @@
     assertEquals(0, cache.getEvictionCount());
     
     // Verify cache size
-    assertEquals(expectedCacheSize, cache.cacheSize());
+    assertEquals(expectedCacheSize, cache.heapSize());
     
     // Insert a single block, oldest single should be evicted
     cache.cacheBlock(singleBlocks[3].blockName, singleBlocks[3].buf);
@@ -339,7 +343,7 @@
   public void testScanResistance() throws Exception {
 
     long maxSize = 100000;
-    long blockSize = 9800;
+    long blockSize = calculateBlockSize(maxSize, 10);
     
     LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
         (int)Math.ceil(1.2*maxSize/blockSize),
@@ -401,7 +405,7 @@
   public void testResizeBlockCache() throws Exception {
     
     long maxSize = 300000;
-    long blockSize = 9750;
+    long blockSize = calculateBlockSize(maxSize, 31);
     
     LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
         (int)Math.ceil(1.2*maxSize/blockSize),
@@ -435,7 +439,7 @@
     assertEquals(0, cache.getEvictionCount());
     
     // Resize to half capacity
-    cache.setMaxSize((long)(maxSize * 0.5f) + blockSize);
+    cache.setMaxSize((long)(maxSize * 0.5f));
     
     // Should have run a single eviction
     assertEquals(1, cache.getEvictionCount());
@@ -470,19 +474,40 @@
     return generateFixedBlocks(numBlocks, (int)size, pfx);
   }
   
-  private Block [] generateFixedBlocks(int numBlocks, int size) {
-    return generateFixedBlocks(numBlocks, size, "block");
-  }
-  
-  private Block [] generateRandomBlocks(int numBlocks, int maxSize) {
+  private Block [] generateRandomBlocks(int numBlocks, long maxSize) {
     Block [] blocks = new Block[numBlocks];
     Random r = new Random();
     for(int i=0;i<numBlocks;i++) {
-      blocks[i] = new Block("block" + i, r.nextInt(maxSize)+1);
+      blocks[i] = new Block("block" + i, r.nextInt((int)maxSize)+1);
     }
     return blocks;
   }
   
+  private long calculateBlockSize(long maxSize, int numBlocks) {
+    long roughBlockSize = (long)Math.ceil((double)maxSize / numBlocks);
+    int numEntries = (int)Math.ceil((1.2)*maxSize/roughBlockSize);
+    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
+        ClassSize.CONCURRENT_HASHMAP +
+        (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
+        (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
+    long negateBlockSize = (long)Math.ceil((double)totalOverhead / numEntries);
+    negateBlockSize += CachedBlock.PER_BLOCK_OVERHEAD;
+    return ClassSize.align((long)Math.floor((roughBlockSize - negateBlockSize)*0.99f));
+  }
+  
+  private long calculateBlockSizeDefault(long maxSize, int numBlocks) {
+    long roughBlockSize = (long)Math.ceil((double)maxSize / numBlocks);
+    int numEntries = (int)Math.ceil((1.2)*maxSize/roughBlockSize);
+    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
+        ClassSize.CONCURRENT_HASHMAP +
+        (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
+        (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
+    long negateBlockSize = (long)Math.ceil((double)totalOverhead / numEntries);
+    negateBlockSize += CachedBlock.PER_BLOCK_OVERHEAD;
+    return ClassSize.align((long)Math.floor((roughBlockSize - negateBlockSize)*
+        LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
+  }
+  
   private class Block implements HeapSize {
     String blockName;
     ByteBuffer buf;

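The two new calculateBlockSize helpers invert the cache's bookkeeping: starting from a rough maxSize/numBlocks split, they subtract the cache-level overhead (fixed cache fields, the backing ConcurrentHashMap, its segments and per-entry cost) amortized across the expected number of map entries, plus the per-block CachedBlock wrapper, so that numBlocks blocks of the returned size land just under the target watermark. The same arithmetic with placeholder constants (assumed for illustration; the real values come from ClassSize, LruBlockCache and CachedBlock and are JVM- and version-dependent):

    public class BlockSizeMath {
      public static void main(String[] args) {
        // Placeholder overheads, assumed for illustration only.
        long CACHE_FIXED = 800, MAP_FIXED = 200, MAP_ENTRY = 64, MAP_SEGMENT = 128;
        long PER_BLOCK = 120;
        int CONCURRENCY = 16;

        long maxSize = 100000;
        int numBlocks = 10;
        long rough = (long) Math.ceil((double) maxSize / numBlocks);   // 10000
        int entries = (int) Math.ceil(1.2 * maxSize / rough);          // 12
        long overhead = CACHE_FIXED + MAP_FIXED
            + entries * MAP_ENTRY + CONCURRENCY * MAP_SEGMENT;         // 3816
        long negate = (long) Math.ceil((double) overhead / entries)
            + PER_BLOCK;                                               // 318 + 120 = 438
        // The real helpers finish with ClassSize.align(...) to round the
        // result to the JVM word boundary.
        long blockSize = (long) Math.floor((rough - negate) * 0.99f);  // 9466
        System.out.println("blockSize ~= " + blockSize);
      }
    }

With blocks sized this way, caching numBlocks of them fills the cache to just below its limit, so the tests above can assert exact eviction counts instead of tolerating off-by-one evictions from hand-picked sizes such as the old 9800 and 9750.
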
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java?rev=789592&r1=789591&r2=789592&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
(original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
Tue Jun 30 07:12:58 2009
@@ -24,14 +24,8 @@
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueTestUtil;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.RowInclusiveStopFilter;
-import org.apache.hadoop.hbase.filter.RowWhileMatchFilter;
-import org.apache.hadoop.hbase.filter.RowPrefixFilter;
-import org.apache.hadoop.hbase.filter.RowFilterInterface;
-import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
-import org.apache.hadoop.hbase.filter.PrefixRowFilter;
-import org.apache.hadoop.hbase.filter.InclusiveStopRowFilter;
+import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.filter.*;
 import org.apache.hadoop.hbase.util.Bytes;
 
 import java.io.IOException;
@@ -402,8 +396,8 @@
 
   // Test new and old row prefix filters.
   public void testNewRowPrefixFilter() throws IOException {
-     Filter f = new RowWhileMatchFilter(
-        new RowPrefixFilter(Bytes.toBytes("R:")));
+     Filter f = new WhileMatchFilter(
+        new PrefixFilter(Bytes.toBytes("R:")));
     Scan s = new Scan(Bytes.toBytes("R:7"));
     s.setFilter(f);
 
@@ -442,7 +436,7 @@
 
   // Test new and old row-inclusive stop filter.
   public void testNewRowInclusiveStopFilter() throws IOException {
-    Filter f = new RowWhileMatchFilter(new RowInclusiveStopFilter(Bytes.toBytes("R:3")));
+    Filter f = new WhileMatchFilter(new InclusiveStopFilter(Bytes.toBytes("R:3")));
     Scan scan = new Scan();
     scan.setFilter(f);
 

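The filter hunk tracks a rename in the filter API: RowWhileMatchFilter, RowPrefixFilter and RowInclusiveStopFilter become WhileMatchFilter, PrefixFilter and InclusiveStopFilter, with the decorator composition unchanged. WhileMatchFilter wraps an inner filter and ends the scan as soon as the inner filter stops matching, which lets a prefix scan terminate at the first non-matching row instead of scanning the rest of the table. A minimal usage sketch against the renamed classes (the class and start row are illustrative):

    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.Filter;
    import org.apache.hadoop.hbase.filter.PrefixFilter;
    import org.apache.hadoop.hbase.filter.WhileMatchFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class PrefixScanSketch {
      // Build a scan that keeps only rows whose key starts with "R:";
      // WhileMatchFilter turns the first PrefixFilter miss into end-of-scan
      // rather than a per-row skip, so the scan stops early.
      Scan buildPrefixScan() {
        Filter f = new WhileMatchFilter(new PrefixFilter(Bytes.toBytes("R:")));
        Scan s = new Scan(Bytes.toBytes("R:7")); // start row, illustrative
        s.setFilter(f);
        return s;
      }
    }
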

