hbase-commits mailing list archives

From: te...@apache.org
Subject: hbase git commit: HBASE-13070 Fix TestCacheOnWrite (Duo Zhang)
Date: Fri, 20 Feb 2015 16:40:43 GMT
Repository: hbase
Updated Branches:
  refs/heads/branch-1 0a21b1e22 -> cfc01532e


HBASE-13070 Fix TestCacheOnWrite (Duo Zhang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cfc01532
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cfc01532
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cfc01532

Branch: refs/heads/branch-1
Commit: cfc01532e207064242f7003b566cb8699b81c584
Parents: 0a21b1e
Author: tedyu <yuzhihong@gmail.com>
Authored: Fri Feb 20 08:40:34 2015 -0800
Committer: tedyu <yuzhihong@gmail.com>
Committed: Fri Feb 20 08:40:34 2015 -0800

----------------------------------------------------------------------
 .../hadoop/hbase/io/hfile/TestCacheOnWrite.java | 147 +++++++++++--------
 1 file changed, 82 insertions(+), 65 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/cfc01532/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index 4ff2f80..c7017e2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -20,8 +20,8 @@
 package org.apache.hadoop.hbase.io.hfile;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
@@ -29,7 +29,6 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.EnumMap;
 import java.util.List;
-import java.util.Map;
 import java.util.Random;
 
 import org.apache.commons.logging.Log;
@@ -37,7 +36,6 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -58,6 +56,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.junit.After;
+import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -65,6 +64,8 @@ import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 
+import com.google.common.collect.Lists;
+
 /**
  * Tests {@link HFile} cache-on-write functionality for the following block
  * types: data blocks, non-root index blocks, and Bloom filter blocks.
@@ -169,7 +170,7 @@ public class TestCacheOnWrite {
     this.blockCache = blockCache;
     testDescription = "[cacheOnWrite=" + cowType + ", compress=" + compress +
         ", encoderType=" + encoderType + ", cacheCompressedData=" + cacheCompressedData +
"]";
-    System.out.println(testDescription);
+    LOG.info(testDescription);
   }
 
   private static List<BlockCache> getBlockCaches() throws IOException {
@@ -184,10 +185,10 @@ public class TestCacheOnWrite {
 
     // bucket cache
     FileSystem.get(conf).mkdirs(TEST_UTIL.getDataTestDir());
-    int[] bucketSizes = {INDEX_BLOCK_SIZE, DATA_BLOCK_SIZE, BLOOM_BLOCK_SIZE, 64 * 1024 };
+    int[] bucketSizes =
+        { INDEX_BLOCK_SIZE, DATA_BLOCK_SIZE, BLOOM_BLOCK_SIZE, 64 * 1024, 128 * 1024 };
     BlockCache bucketcache =
-        new BucketCache("file:" + TEST_UTIL.getDataTestDir() + "/bucket.data",
-            128 * 1024 * 1024, 64 * 1024, bucketSizes, 5, 64 * 100, null);
+        new BucketCache("offheap", 128 * 1024 * 1024, 64 * 1024, bucketSizes, 5, 64 * 100,
null);
     blockcaches.add(bucketcache);
     return blockcaches;
   }
@@ -200,7 +201,8 @@ public class TestCacheOnWrite {
        for (Compression.Algorithm compress : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
           for (BlockEncoderTestType encoderType : BlockEncoderTestType.values()) {
             for (boolean cacheCompressedData : new boolean[] { false, true }) {
-              cowTypes.add(new Object[] { cowType, compress, encoderType, cacheCompressedData, blockache});
+              cowTypes.add(new Object[] { cowType, compress, encoderType, cacheCompressedData,
+                  blockache });
             }
           }
         }
@@ -209,6 +211,31 @@ public class TestCacheOnWrite {
     return cowTypes;
   }
 
+  private void clearBlockCache(BlockCache blockCache) throws InterruptedException {
+    if (blockCache instanceof LruBlockCache) {
+      ((LruBlockCache) blockCache).clearCache();
+    } else {
+      // BucketCache may not return all cached blocks (blocks still in the write queue), so check it here.
+      for (int clearCount = 0; blockCache.getBlockCount() > 0; clearCount++) {
+        if (clearCount > 0) {
+          LOG.warn("clear block cache " + blockCache + " " + clearCount + " times, "
+              + blockCache.getBlockCount() + " blocks remaining");
+          Thread.sleep(10);
+        }
+        for (CachedBlock block : Lists.newArrayList(blockCache)) {
+          BlockCacheKey key = new BlockCacheKey(block.getFilename(), block.getOffset());
+          // CombinedBucketCache may need to evict twice.
+          for (int evictCount = 0; blockCache.evictBlock(key); evictCount++) {
+            if (evictCount > 1) {
+              LOG.warn("evict block " + block + " in " + blockCache + " " + evictCount
+                  + " times, maybe a bug here");
+            }
+          }
+        }
+      }
+    }
+  }
+
   @Before
   public void setUp() throws IOException {
     conf = TEST_UTIL.getConfiguration();
@@ -220,25 +247,24 @@ public class TestCacheOnWrite {
     conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, cacheCompressedData);
     cowType.modifyConf(conf);
     fs = HFileSystem.get(conf);
+    CacheConfig.GLOBAL_BLOCK_CACHE_INSTANCE = blockCache;
     cacheConf =
         new CacheConfig(blockCache, true, true, cowType.shouldBeCached(BlockType.DATA),
         cowType.shouldBeCached(BlockType.LEAF_INDEX),
-        cowType.shouldBeCached(BlockType.BLOOM_CHUNK), false, cacheCompressedData, true, false);
+        cowType.shouldBeCached(BlockType.BLOOM_CHUNK), false, cacheCompressedData, false, false);
   }
 
   @After
-  public void tearDown() {
-    cacheConf = new CacheConfig(conf);
-    blockCache = cacheConf.getBlockCache();
+  public void tearDown() throws IOException, InterruptedException {
+    clearBlockCache(blockCache);
   }
 
-  @Test
-  public void testStoreFileCacheOnWrite() throws IOException {
-    testStoreFileCacheOnWriteInternals(false);
-    testStoreFileCacheOnWriteInternals(true);
+  @AfterClass
+  public static void afterClass() throws IOException {
+    TEST_UTIL.cleanupTestDir();
   }
 
-  protected void testStoreFileCacheOnWriteInternals(boolean useTags) throws IOException {
+  private void testStoreFileCacheOnWriteInternals(boolean useTags) throws IOException {
     writeStoreFile(useTags);
     readStoreFile(useTags);
   }
@@ -322,15 +348,15 @@ public class TestCacheOnWrite {
         encoderType.encode ? BlockType.ENCODED_DATA : BlockType.DATA;
     if (useTags) {
       assertEquals("{" + cachedDataBlockType
-          + "=1550, LEAF_INDEX=173, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=20}", countByType);
+          + "=2663, LEAF_INDEX=297, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=34}", countByType);
     } else {
       assertEquals("{" + cachedDataBlockType
-          + "=1379, LEAF_INDEX=154, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=18}", countByType);
+          + "=2498, LEAF_INDEX=278, BLOOM_CHUNK=9, INTERMEDIATE_INDEX=31}", countByType);
     }
 
     // iterate all the keyvalue from hfile
     while (scanner.next()) {
-      Cell cell = scanner.getKeyValue();
+      scanner.getKeyValue();
     }
     reader.close();
   }
@@ -340,18 +366,16 @@ public class TestCacheOnWrite {
       // Let's make half of KVs puts.
       return KeyValue.Type.Put;
     } else {
-      KeyValue.Type keyType =
-          KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];
-      if (keyType == KeyValue.Type.Minimum || keyType == KeyValue.Type.Maximum)
-      {
-        throw new RuntimeException("Generated an invalid key type: " + keyType
-            + ". " + "Probably the layout of KeyValue.Type has changed.");
+      KeyValue.Type keyType = KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];
+      if (keyType == KeyValue.Type.Minimum || keyType == KeyValue.Type.Maximum) {
+        throw new RuntimeException("Generated an invalid key type: " + keyType + ". "
+            + "Probably the layout of KeyValue.Type has changed.");
       }
       return keyType;
     }
   }
 
-  public void writeStoreFile(boolean useTags) throws IOException {
+  private void writeStoreFile(boolean useTags) throws IOException {
     if(useTags) {
       TEST_UTIL.getConfiguration().setInt("hfile.format.version", 3);
     } else {
@@ -367,12 +391,11 @@ public class TestCacheOnWrite {
         .withOutputDir(storeFileParentDir).withComparator(KeyValue.COMPARATOR)
         .withFileContext(meta)
         .withBloomType(BLOOM_TYPE).withMaxKeyCount(NUM_KV).build();
-
-    final int rowLen = 32;
+    byte[] cf = Bytes.toBytes("fam");
     for (int i = 0; i < NUM_KV; ++i) {
-      byte[] k = TestHFileWriterV2.randomOrderedKey(rand, i);
-      byte[] v = TestHFileWriterV2.randomValue(rand);
-      int cfLen = rand.nextInt(k.length - rowLen + 1);
+      byte[] row = TestHFileWriterV2.randomOrderedKey(rand, i);
+      byte[] qualifier = TestHFileWriterV2.randomRowOrQualifier(rand);
+      byte[] value = TestHFileWriterV2.randomValue(rand);
       KeyValue kv;
       if(useTags) {
         Tag t = new Tag((byte) 1, "visibility");
@@ -380,21 +403,13 @@ public class TestCacheOnWrite {
         tagList.add(t);
         Tag[] tags = new Tag[1];
         tags[0] = t;
-        kv = new KeyValue(
-            k, 0, rowLen,
-            k, rowLen, cfLen,
-            k, rowLen + cfLen, k.length - rowLen - cfLen,
-            rand.nextLong(),
-            generateKeyType(rand),
-            v, 0, v.length, tagList);
+        kv =
+            new KeyValue(row, 0, row.length, cf, 0, cf.length, qualifier, 0, qualifier.length,
+                rand.nextLong(), generateKeyType(rand), value, 0, value.length, tagList);
       } else {
-        kv = new KeyValue(
-          k, 0, rowLen,
-          k, rowLen, cfLen,
-          k, rowLen + cfLen, k.length - rowLen - cfLen,
-          rand.nextLong(),
-          generateKeyType(rand),
-          v, 0, v.length);
+        kv =
+            new KeyValue(row, 0, row.length, cf, 0, cf.length, qualifier, 0, qualifier.length,
+                rand.nextLong(), generateKeyType(rand), value, 0, value.length);
       }
       sfw.append(kv);
     }
@@ -403,13 +418,8 @@ public class TestCacheOnWrite {
     storeFilePath = sfw.getPath();
   }
 
-  @Test
-  public void testNotCachingDataBlocksDuringCompaction() throws IOException {
-    testNotCachingDataBlocksDuringCompactionInternals(false);
-    testNotCachingDataBlocksDuringCompactionInternals(true);
-  }
-
-  protected void testNotCachingDataBlocksDuringCompactionInternals(boolean useTags) throws IOException {
+  private void testNotCachingDataBlocksDuringCompactionInternals(boolean useTags)
+      throws IOException, InterruptedException {
     if (useTags) {
       TEST_UTIL.getConfiguration().setInt("hfile.format.version", 3);
     } else {
@@ -449,7 +459,7 @@ public class TestCacheOnWrite {
                   HConstants.LATEST_TIMESTAMP, Bytes.toBytes(valueStr), tags);
               p.add(kv);
             } else {
-              p.add(cfBytes, Bytes.toBytes(qualStr), ts++, Bytes.toBytes(valueStr));
+              p.addColumn(cfBytes, Bytes.toBytes(qualStr), ts++, Bytes.toBytes(valueStr));
             }
           }
         }
@@ -458,20 +468,27 @@ public class TestCacheOnWrite {
       }
       region.flushcache();
     }
-    LruBlockCache blockCache =
-        (LruBlockCache) new CacheConfig(conf).getBlockCache();
-    blockCache.clearCache();
-    assertEquals(0, blockCache.getBlockTypeCountsForTest().size());
+    clearBlockCache(blockCache);
+    assertEquals(0, blockCache.getBlockCount());
     region.compactStores();
     LOG.debug("compactStores() returned");
 
-    Map<BlockType, Integer> blockTypesInCache =
-        blockCache.getBlockTypeCountsForTest();
-    LOG.debug("Block types in cache: " + blockTypesInCache);
-    assertNull(blockTypesInCache.get(BlockType.ENCODED_DATA));
-    assertNull(blockTypesInCache.get(BlockType.DATA));
+    for (CachedBlock block: blockCache) {
+      assertNotEquals(BlockType.ENCODED_DATA, block.getBlockType());
+      assertNotEquals(BlockType.DATA, block.getBlockType());
+    }
     region.close();
-    blockCache.shutdown();
   }
-}
 
+  @Test
+  public void testStoreFileCacheOnWrite() throws IOException {
+    testStoreFileCacheOnWriteInternals(false);
+    testStoreFileCacheOnWriteInternals(true);
+  }
+
+  @Test
+  public void testNotCachingDataBlocksDuringCompaction() throws IOException, InterruptedException {
+    testNotCachingDataBlocksDuringCompactionInternals(false);
+    testNotCachingDataBlocksDuringCompactionInternals(true);
+  }
+}
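
A note for readers of this diff: the new clearBlockCache() helper exists because a BucketCache may briefly hold blocks in its write queue, so a single pass over the cache can miss some blocks, and (per the comment in the diff) a combined cache may need to evict the same key more than once. The sketch below restates that drain-and-retry pattern as a standalone helper. It is illustrative only, not part of the commit; the class and method names are hypothetical, and it assumes, as the diff shows, that BlockCache is Iterable<CachedBlock> and that evictBlock() returns true while a block is still cached under the given key.

    import com.google.common.collect.Lists;
    import org.apache.hadoop.hbase.io.hfile.BlockCache;
    import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
    import org.apache.hadoop.hbase.io.hfile.CachedBlock;

    // Hypothetical helper, not part of this commit.
    final class CacheDrainer {
      static void drainCache(BlockCache cache) throws InterruptedException {
        while (cache.getBlockCount() > 0) {
          // Snapshot first: evicting while iterating the live cache could skip entries.
          for (CachedBlock block : Lists.newArrayList(cache)) {
            BlockCacheKey key = new BlockCacheKey(block.getFilename(), block.getOffset());
            // A tiered cache may hold the same key in more than one tier,
            // so keep evicting until nothing remains under the key.
            while (cache.evictBlock(key)) {
              // keep evicting this key
            }
          }
          if (cache.getBlockCount() > 0) {
            Thread.sleep(10); // let queued BucketCache writes land, then retry
          }
        }
      }
    }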

