hbase-commits mailing list archives

From: te...@apache.org
Subject: svn commit: r1174403 - in /hbase/trunk: CHANGES.txt src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
Date: Thu, 22 Sep 2011 21:55:00 GMT
Author: tedyu
Date: Thu Sep 22 21:55:00 2011
New Revision: 1174403

URL: http://svn.apache.org/viewvc?rev=1174403&view=rev
Log:
HBASE-4449  LoadIncrementalHFiles should be able to handle CFs with blooms
               (David Revell)
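
For context, using the loader against a table whose column family has a bloom
filter enabled looks roughly like the sketch below. This is an illustration
only, not part of the commit: the table name, family name, and HFile directory
are placeholders, and the client calls are assumed from the 0.91-era API that
the modified test exercises (HColumnDescriptor.setBloomFilterType) plus the
LoadIncrementalHFiles.doBulkLoad entry point.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
    import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
    import org.apache.hadoop.hbase.util.Bytes;

    public class BulkLoadWithBloomSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();

        // Create a table whose single column family carries a ROW bloom filter.
        // "mytable" and "cf" are placeholder names for this sketch.
        HTableDescriptor htd = new HTableDescriptor("mytable");
        HColumnDescriptor familyDesc = new HColumnDescriptor(Bytes.toBytes("cf"));
        familyDesc.setBloomFilterType(BloomType.ROW);  // or BloomType.ROWCOL / BloomType.NONE
        htd.addFamily(familyDesc);

        HBaseAdmin admin = new HBaseAdmin(conf);
        admin.createTable(htd);

        // Bulk-load pre-built HFiles laid out as <dir>/<family>/<hfile>.
        // "/path/to/hfiles" is a placeholder directory.
        HTable table = new HTable(conf, "mytable");
        LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
        loader.doBulkLoad(new Path("/path/to/hfiles"), table);
      }
    }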

Modified:
    hbase/trunk/CHANGES.txt
    hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java

Modified: hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hbase/trunk/CHANGES.txt?rev=1174403&r1=1174402&r2=1174403&view=diff
==============================================================================
--- hbase/trunk/CHANGES.txt (original)
+++ hbase/trunk/CHANGES.txt Thu Sep 22 21:55:00 2011
@@ -295,6 +295,8 @@ Release 0.91.0 - Unreleased
   TESTS
    HBASE-4450  test for number of blocks read: to serve as baseline for expected
                blocks read and for catching regressions (Kannan)
+   HBASE-4449  LoadIncrementalHFiles should be able to handle CFs with blooms
+               (David Revell)
   IMPROVEMENTS
    HBASE-3290  Max Compaction Size (Nicolas Spiegelberg via Stack)  
    HBASE-3292  Expose block cache hit/miss/evict counts into region server

Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java?rev=1174403&r1=1174402&r2=1174403&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java Thu Sep 22 21:55:00 2011
@@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.client.HT
 import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
+import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
 
@@ -70,7 +71,7 @@ public class TestLoadIncrementalHFiles {
    */
   @Test
   public void testSimpleLoad() throws Exception {
-    runTest("testSimpleLoad",
+    runTest("testSimpleLoad", BloomType.NONE,
         new byte[][][] {
           new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") },
           new byte[][]{ Bytes.toBytes("ddd"), Bytes.toBytes("ooo") },
@@ -83,15 +84,39 @@ public class TestLoadIncrementalHFiles {
    */
   @Test
   public void testRegionCrossingLoad() throws Exception {
-    runTest("testRegionCrossingLoad",
+    runTest("testRegionCrossingLoad", BloomType.NONE,
         new byte[][][] {
           new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
           new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
     });
   }
 
-  private void runTest(String testName, byte[][][] hfileRanges)
-  throws Exception {
+  /**
+   * Test loading into a column family that has a ROW bloom filter.
+   */
+  @Test
+  public void testRegionCrossingRowBloom() throws Exception {
+    runTest("testRegionCrossingLoadRowBloom", BloomType.ROW,
+        new byte[][][] {
+          new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
+          new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
+    });
+  }
+  
+  /**
+   * Test loading into a column family that has a ROWCOL bloom filter.
+   */
+  @Test
+  public void testRegionCrossingRowColBloom() throws Exception {
+    runTest("testRegionCrossingLoadRowColBloom", BloomType.ROWCOL,
+        new byte[][][] {
+          new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
+          new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
+    });
+  }
+
+  private void runTest(String testName, BloomType bloomType, 
+          byte[][][] hfileRanges) throws Exception {
     Path dir = HBaseTestingUtility.getTestDir(testName);
     FileSystem fs = util.getTestFileSystem();
     dir = dir.makeQualified(fs);
@@ -111,7 +136,9 @@ public class TestLoadIncrementalHFiles {
     try {
       HBaseAdmin admin = new HBaseAdmin(util.getConfiguration());
       HTableDescriptor htd = new HTableDescriptor(TABLE);
-      htd.addFamily(new HColumnDescriptor(FAMILY));
+      HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
+      familyDesc.setBloomFilterType(bloomType);
+      htd.addFamily(familyDesc);
       admin.createTable(htd, SPLIT_KEYS);
 
       HTable table = new HTable(util.getConfiguration(), TABLE);


