hbase-commits mailing list archives

From raw...@apache.org
Subject svn commit: r1074063 - in /hbase/trunk: CHANGES.txt src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
Date Thu, 24 Feb 2011 08:02:27 GMT
Author: rawson
Date: Thu Feb 24 08:02:27 2011
New Revision: 1074063

URL: http://svn.apache.org/viewvc?rev=1074063&view=rev
Log:
undoing HBASE-3514 due to build breakage

Modified:
    hbase/trunk/CHANGES.txt
    hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java

Modified: hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hbase/trunk/CHANGES.txt?rev=1074063&r1=1074062&r2=1074063&view=diff
==============================================================================
--- hbase/trunk/CHANGES.txt (original)
+++ hbase/trunk/CHANGES.txt Thu Feb 24 08:02:27 2011
@@ -51,7 +51,6 @@ Release 0.91.0 - Unreleased
                unflushable regions.
    HBASE-3550  FilterList reports false positives (Bill Graham via Andrew
                Purtell)
-   HBASE-3514  Speedup HFile.Writer append
 
   IMPROVEMENTS
    HBASE-3290  Max Compaction Size (Nicolas Spiegelberg via Stack)  

Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java?rev=1074063&r1=1074062&r2=1074063&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java Thu Feb 24 08:02:27 2011
@@ -221,6 +221,9 @@ public class HFile {
     // Used to ensure we write in order.
     private final RawComparator<byte []> comparator;
 
+    // A stream made per block written.
+    private DataOutputStream out;
+
     // Number of uncompressed bytes per block.  Reinitialized when we start
     // new block.
     private int blocksize;
@@ -261,9 +264,9 @@ public class HFile {
     // Block cache to optionally fill on write
     private BlockCache blockCache;
 
-    // Byte array output stream made per block written.
-    private ByteArrayOutputStream baos = null;
-    private DataOutputStream baosDos = null;
+    // Additional byte array output stream used to fill block cache
+    private ByteArrayOutputStream baos;
+    private DataOutputStream baosDos;
     private int blockNumber = 0;
 
     /**
@@ -357,7 +360,7 @@ public class HFile {
      * @throws IOException
      */
     private void checkBlockBoundary() throws IOException {
-      if (baosDos != null && baosDos.size() < blocksize) return;
+      if (this.out != null && this.out.size() < blocksize) return;
       finishBlock();
       newBlock();
     }
@@ -367,18 +370,11 @@ public class HFile {
      * @throws IOException
      */
     private void finishBlock() throws IOException {
-      if (baosDos == null) return;
-
-      // Flush Data Output Stream
-      baosDos.flush();
-
-      // Compress Data and write to output stream
-      DataOutputStream compressStream = getCompressingStream();
-      baos.writeTo(compressStream);
-      int size = releaseCompressingStream(compressStream);
-
+      if (this.out == null) return;
       long now = System.currentTimeMillis();
 
+      int size = releaseCompressingStream(this.out);
+      this.out = null;
       blockKeys.add(firstKey);
       blockOffsets.add(Long.valueOf(blockBegin));
       blockDataSizes.add(Integer.valueOf(size));
@@ -388,17 +384,14 @@ public class HFile {
       writeOps++;
 
       if (blockCache != null) {
+        baosDos.flush();
         byte [] bytes = baos.toByteArray();
         ByteBuffer blockToCache = ByteBuffer.wrap(bytes, DATABLOCKMAGIC.length,
             bytes.length - DATABLOCKMAGIC.length);
         String blockName = path.toString() + blockNumber;
         blockCache.cacheBlock(blockName, blockToCache);
+        baosDos.close();
       }
-
-      baosDos.close();
-      baosDos = null;
-      baos = null;
-
       blockNumber++;
     }
 
@@ -409,14 +402,14 @@ public class HFile {
     private void newBlock() throws IOException {
       // This is where the next block begins.
       blockBegin = outputStream.getPos();
-
+      this.out = getCompressingStream();
+      this.out.write(DATABLOCKMAGIC);
       firstKey = null;
-
-      // to avoid too many calls to realloc(),
-      // pre-allocates the byte stream to the block size + 25%
-      baos = new ByteArrayOutputStream(blocksize + (int)(blocksize * 0.25));
-      baosDos = new DataOutputStream(baos);
-      baosDos.write(DATABLOCKMAGIC);
+      if (blockCache != null) {
+        this.baos = new ByteArrayOutputStream();
+        this.baosDos = new DataOutputStream(baos);
+        this.baosDos.write(DATABLOCKMAGIC);
+      }
     }
 
     /*
@@ -474,7 +467,7 @@ public class HFile {
       for (i = 0; i < metaNames.size(); ++i) {
         // stop when the current key is greater than our own
         byte[] cur = metaNames.get(i);
-        if (Bytes.BYTES_RAWCOMPARATOR.compare(cur, 0, cur.length,
+        if (Bytes.BYTES_RAWCOMPARATOR.compare(cur, 0, cur.length, 
             key, 0, key.length) > 0) {
           break;
         }
@@ -570,12 +563,12 @@ public class HFile {
         checkBlockBoundary();
       }
       // Write length of key and value and then actual key and value bytes.
-      this.baosDos.writeInt(klength);
+      this.out.writeInt(klength);
       this.keylength += klength;
-      this.baosDos.writeInt(vlength);
+      this.out.writeInt(vlength);
       this.valuelength += vlength;
-      this.baosDos.write(key, koffset, klength);
-      this.baosDos.write(value, voffset, vlength);
+      this.out.write(key, koffset, klength);
+      this.out.write(value, voffset, vlength);
       // Are we the first key in this block?
       if (this.firstKey == null) {
         // Copy the key.
@@ -586,6 +579,13 @@ public class HFile {
       this.lastKeyOffset = koffset;
       this.lastKeyLength = klength;
       this.entryCount ++;
+      // If we are pre-caching blocks on write, fill byte array stream
+      if (blockCache != null) {
+        this.baosDos.writeInt(klength);
+        this.baosDos.writeInt(vlength);
+        this.baosDos.write(key, koffset, klength);
+        this.baosDos.write(value, voffset, vlength);
+      }
     }
 
     /*



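For context, the hunks above restore the writer's original block-handling flow: each data block is streamed directly into a per-block compressing DataOutputStream (this.out), and only when a block cache is configured is the same data mirrored into a ByteArrayOutputStream so the uncompressed block can be cached in finishBlock(). The sketch below is a minimal, standalone illustration of that pattern, not the HBase class itself: GZIPOutputStream stands in for HBase's pluggable compression codec, the HashMap-backed cache stands in for BlockCache, and the block index bookkeeping (block keys, offsets, data sizes) is omitted.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.GZIPOutputStream;

public class BlockWriterSketch {
  private static final byte[] DATABLOCKMAGIC = "DATABLK*".getBytes();

  private final OutputStream fileOut;          // underlying file stream
  private final int blocksize;                 // target uncompressed bytes per block
  private final Map<String, ByteBuffer> cache; // stand-in for BlockCache, may be null

  private GZIPOutputStream compressor;         // stand-in for the compression codec
  private DataOutputStream out;                // compressing stream, one per block
  private ByteArrayOutputStream baos;          // uncompressed mirror used to fill the cache
  private DataOutputStream baosDos;
  private int blockNumber = 0;

  BlockWriterSketch(OutputStream fileOut, int blocksize, Map<String, ByteBuffer> cache) {
    this.fileOut = fileOut;
    this.blocksize = blocksize;
    this.cache = cache;
  }

  void append(byte[] key, byte[] value) throws IOException {
    // Same boundary test as checkBlockBoundary(): roll the block once the
    // compressing stream has seen 'blocksize' uncompressed bytes.
    if (out == null || out.size() >= blocksize) {
      finishBlock();
      newBlock();
    }
    // Lengths and bytes go straight to the compressing stream.
    out.writeInt(key.length);
    out.writeInt(value.length);
    out.write(key);
    out.write(value);
    if (cache != null) {
      // Mirror the same bytes so the uncompressed block can be cached later.
      baosDos.writeInt(key.length);
      baosDos.writeInt(value.length);
      baosDos.write(key);
      baosDos.write(value);
    }
  }

  private void newBlock() throws IOException {
    compressor = new GZIPOutputStream(fileOut);  // each block becomes its own gzip member
    out = new DataOutputStream(compressor);
    out.write(DATABLOCKMAGIC);
    if (cache != null) {
      baos = new ByteArrayOutputStream();
      baosDos = new DataOutputStream(baos);
      baosDos.write(DATABLOCKMAGIC);
    }
  }

  void finishBlock() throws IOException {
    if (out == null) return;
    out.flush();
    compressor.finish();  // flush the compressed block without closing fileOut
    out = null;
    if (cache != null) {
      baosDos.flush();
      byte[] bytes = baos.toByteArray();
      // Cache the uncompressed block body, skipping the leading magic bytes.
      cache.put("block" + blockNumber,
          ByteBuffer.wrap(bytes, DATABLOCKMAGIC.length, bytes.length - DATABLOCKMAGIC.length));
      baosDos.close();
    }
    blockNumber++;  // real code also records block keys, offsets, and sizes here
  }

  public static void main(String[] args) throws IOException {
    Map<String, ByteBuffer> cache = new HashMap<>();
    try (FileOutputStream fos = new FileOutputStream("blocks.sketch")) {
      BlockWriterSketch writer = new BlockWriterSketch(fos, 64 * 1024, cache);
      writer.append("row1".getBytes(), "value-1".getBytes());
      writer.append("row2".getBytes(), "value-2".getBytes());
      writer.finishBlock();
    }
    System.out.println("cached blocks: " + cache.size());
  }
}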