hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ramkris...@apache.org
Subject hbase git commit: HBASE-13003 - Get tests in TestHFileBlockIndex back (Zhangduo)
Date Wed, 11 Feb 2015 05:09:42 GMT
Repository: hbase
Updated Branches:
  refs/heads/master 8a6e9827a -> f04850810


HBASE-13003 - Get tests in TestHFileBlockIndex back (Zhangduo)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f0485081
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f0485081
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f0485081

Branch: refs/heads/master
Commit: f04850810e9b5ec97368435a97b947fc0023447d
Parents: 8a6e982
Author: Ramkrishna <ramkrishna.s.vasudevan@intel.com>
Authored: Wed Feb 11 10:38:54 2015 +0530
Committer: Ramkrishna <ramkrishna.s.vasudevan@intel.com>
Committed: Wed Feb 11 10:38:54 2015 +0530

----------------------------------------------------------------------
 .../hbase/io/hfile/TestHFileBlockIndex.java     | 45 ++++++++++++--------
 1 file changed, 27 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/f0485081/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
index 4e79381..939c019 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
@@ -42,9 +42,9 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -54,6 +54,7 @@ import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -138,7 +139,7 @@ public class TestHFileBlockIndex {
     fs = HFileSystem.get(conf);
   }
 
-  protected void testBlockIndexInternals(boolean useTags) throws IOException {
+  private void testBlockIndexInternals(boolean useTags) throws IOException {
     path = new Path(TEST_UTIL.getDataTestDir(), "block_index_" + compr + useTags);
     writeWholeIndex(useTags);
     readIndex(useTags);
@@ -186,7 +187,7 @@ public class TestHFileBlockIndex {
     }
   }
 
-  public void readIndex(boolean useTags) throws IOException {
+  private void readIndex(boolean useTags) throws IOException {
     long fileSize = fs.getFileStatus(path).getLen();
     LOG.info("Size of " + path + ": " + fileSize);
 
@@ -216,10 +217,10 @@ public class TestHFileBlockIndex {
     for (byte[] key : keys) {
       assertTrue(key != null);
       assertTrue(indexReader != null);
-      HFileBlock b = indexReader.seekToDataBlock(new KeyValue.KeyOnlyKeyValue(key, 0, key.length),
-          null,
-          true, true, false, null);
-      if (Bytes.BYTES_RAWCOMPARATOR.compare(key, firstKeyInFile) < 0) {
+      HFileBlock b =
+          indexReader.seekToDataBlock(new KeyValue.KeyOnlyKeyValue(key, 0, key.length), null, true,
+            true, false, null);
+      if (KeyValue.COMPARATOR.compareFlatKey(key, firstKeyInFile) < 0) {
         assertTrue(b == null);
         ++i;
         continue;
@@ -260,21 +261,26 @@ public class TestHFileBlockIndex {
         new HFileBlockIndex.BlockIndexWriter(hbw, null, null);
 
     for (int i = 0; i < NUM_DATA_BLOCKS; ++i) {
-      hbw.startWriting(BlockType.DATA).write(
-          String.valueOf(rand.nextInt(1000)).getBytes());
+      hbw.startWriting(BlockType.DATA).write(String.valueOf(rand.nextInt(1000)).getBytes());
       long blockOffset = outputStream.getPos();
       hbw.writeHeaderAndData(outputStream);
 
       byte[] firstKey = null;
+      byte[] family = Bytes.toBytes("f");
+      byte[] qualifier = Bytes.toBytes("q");
       for (int j = 0; j < 16; ++j) {
-        byte[] k = TestHFileWriterV2.randomOrderedKey(rand, i * 16 + j);
+        byte[] k =
+            new KeyValue(TestHFileWriterV2.randomOrderedKey(rand, i * 16 + j), family, qualifier,
+                EnvironmentEdgeManager.currentTime(), KeyValue.Type.Put).getKey();
         keys.add(k);
-        if (j == 8)
+        if (j == 8) {
           firstKey = k;
+        }
       }
       assertTrue(firstKey != null);
-      if (firstKeyInFile == null)
+      if (firstKeyInFile == null) {
         firstKeyInFile = firstKey;
+      }
       biw.addEntry(firstKey, blockOffset, hbw.getOnDiskSizeWithHeader());
 
       writeInlineBlocks(hbw, outputStream, biw, false);
@@ -439,7 +445,7 @@ public class TestHFileBlockIndex {
 
   }
 
-  //@Test
+  @Test
   public void testBlockIndexChunk() throws IOException {
     BlockIndexChunk c = new BlockIndexChunk();
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -476,7 +482,7 @@ public class TestHFileBlockIndex {
   }
 
   /** Checks if the HeapSize calculator is within reason */
-  //@Test
+  @Test
   public void testHeapSizeForBlockIndex() throws IOException {
     Class<HFileBlockIndex.BlockIndexReader> cl =
         HFileBlockIndex.BlockIndexReader.class;
@@ -504,7 +510,7 @@ public class TestHFileBlockIndex {
    *
    * @throws IOException
    */
-  //@Test
+  @Test
   public void testHFileWriterAndReader() throws IOException {
     Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),
         "hfile_for_block_index");
@@ -536,16 +542,19 @@ public class TestHFileBlockIndex {
                 .withFileContext(meta)
                 .create();
         Random rand = new Random(19231737);
-
+        byte[] family = Bytes.toBytes("f");
+        byte[] qualifier = Bytes.toBytes("q");
         for (int i = 0; i < NUM_KV; ++i) {
           byte[] row = TestHFileWriterV2.randomOrderedKey(rand, i);
 
           // Key will be interpreted by KeyValue.KEY_COMPARATOR
-          KeyValue kv = KeyValueUtil.createFirstOnRow(row, 0, row.length, row, 0, 0,
-              row, 0, 0);
+          KeyValue kv =
+              new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),
+                  TestHFileWriterV2.randomValue(rand));
           byte[] k = kv.getKey();
           writer.append(kv);
           keys[i] = k;
+          values[i] = CellUtil.cloneValue(kv);
           keyStrSet.add(Bytes.toStringBinary(k));
           if (i > 0) {
             assertTrue(KeyValue.COMPARATOR.compareFlatKey(keys[i - 1],


Mime
View raw message