hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From apurt...@apache.org
Subject hbase git commit: Amend HBASE-8329 Limit compaction speed (zhangduo)
Date Fri, 06 Feb 2015 02:33:07 GMT
Repository: hbase
Updated Branches:
  refs/heads/0.98 3adbcb72d -> 409983a99


Amend HBASE-8329 Limit compaction speed (zhangduo)

Fix compilation issues against Hadoop 1


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/409983a9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/409983a9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/409983a9

Branch: refs/heads/0.98
Commit: 409983a99d21a6a723027e531dfd2b9a0228abb6
Parents: 3adbcb7
Author: Andrew Purtell <apurtell@apache.org>
Authored: Thu Feb 5 18:31:06 2015 -0800
Committer: Andrew Purtell <apurtell@apache.org>
Committed: Thu Feb 5 18:31:08 2015 -0800

----------------------------------------------------------------------
 .../org/apache/hadoop/hbase/regionserver/HStore.java     | 11 +++++------
 .../hbase/regionserver/StripeStoreFileManager.java       | 10 +++++-----
 .../hadoop/hbase/regionserver/compactions/Compactor.java |  4 ++--
 3 files changed, 12 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/409983a9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 105ecfe..92b9f81 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -88,7 +88,6 @@ import org.apache.hadoop.hbase.util.ChecksumType;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
@@ -877,7 +876,7 @@ public class HStore implements Store {
     if (LOG.isInfoEnabled()) {
       LOG.info("Added " + sf + ", entries=" + r.getEntries() +
         ", sequenceid=" + logCacheFlushId +
-        ", filesize=" + TraditionalBinaryPrefix.long2String(r.length(), "", 1));
+        ", filesize=" + StringUtils.humanReadableInt(r.length()));
     }
     return sf;
   }
@@ -1108,7 +1107,7 @@ public class HStore implements Store {
       LOG.info("Starting compaction of " + filesToCompact.size() + " file(s) in "
           + this + " of " + this.getRegionInfo().getRegionNameAsString()
           + " into tmpdir=" + fs.getTempDir() + ", totalSize="
-          + TraditionalBinaryPrefix.long2String(cr.getSize(), "", 1));
+          + StringUtils.humanReadableInt(cr.getSize()));
 
       // Commence the compaction.
       List<Path> newFiles = compaction.compact(throughputController);
@@ -1223,12 +1222,12 @@ public class HStore implements Store {
       for (StoreFile sf: sfs) {
         message.append(sf.getPath().getName());
         message.append("(size=");
-        message.append(TraditionalBinaryPrefix.long2String(sf.getReader().length(), "", 1));
+        message.append(StringUtils.humanReadableInt(sf.getReader().length()));
         message.append("), ");
       }
     }
     message.append("total size for store is ")
-      .append(StringUtils.TraditionalBinaryPrefix.long2String(storeSize, "", 1))
+      .append(StringUtils.humanReadableInt(storeSize))
       .append(". This selection was in queue for ")
       .append(StringUtils.formatTimeDiff(compactionStartTime, cr.getSelectionTime()))
       .append(", and took ").append(StringUtils.formatTimeDiff(now, compactionStartTime))
@@ -1506,7 +1505,7 @@ public class HStore implements Store {
     completeCompaction(delSfs);
     LOG.info("Completed removal of " + delSfs.size() + " unnecessary (expired) file(s) in
"
         + this + " of " + this.getRegionInfo().getRegionNameAsString()
-        + "; total size for store is " + TraditionalBinaryPrefix.long2String(storeSize, "", 1));
+        + "; total size for store is " + StringUtils.humanReadableInt(storeSize));
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/409983a9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
index c18d362..b023b6a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ConcatenatedLists;
-import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
+import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.collect.ImmutableCollection;
 import com.google.common.collect.ImmutableList;
@@ -478,8 +478,8 @@ public class StripeStoreFileManager
         .append(state.level0Files.size())
         .append(
           " files: "
-              + TraditionalBinaryPrefix.long2String(
-                StripeCompactionPolicy.getTotalFileSize(state.level0Files), "", 1) + ";");
+              + StringUtils.humanReadableInt(
+                StripeCompactionPolicy.getTotalFileSize(state.level0Files)) + ";");
     for (int i = 0; i < state.stripeFiles.size(); ++i) {
       String endRow = (i == state.stripeEndRows.length)
           ? "(end)" : "[" + Bytes.toString(state.stripeEndRows[i]) + "]";
@@ -489,8 +489,8 @@ public class StripeStoreFileManager
           .append(state.stripeFiles.get(i).size())
           .append(
             " files: "
-                + TraditionalBinaryPrefix.long2String(
-                  StripeCompactionPolicy.getTotalFileSize(state.stripeFiles.get(i)), "", 1) + ";");
+                + StringUtils.humanReadableInt(
+                  StripeCompactionPolicy.getTotalFileSize(state.stripeFiles.get(i))) + ";");
     }
     sb.append("\n").append(state.stripeFiles.size()).append(" stripes total.");
     sb.append("\n").append(getStorefileCount()).append(" files total.");

http://git-wip-us.apache.org/repos/asf/hbase/blob/409983a9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
index 8581e29..0b434e8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
@@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
 import org.apache.hadoop.hbase.regionserver.StoreScanner;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * A compactor is a compaction algorithm associated a given policy. Base class also contains
@@ -147,7 +147,7 @@ public abstract class Compactor {
         LOG.debug("Compacting " + file +
           ", keycount=" + keyCount +
           ", bloomtype=" + r.getBloomFilterType().toString() +
-          ", size=" + TraditionalBinaryPrefix.long2String(r.length(), "", 1) +
+          ", size=" + StringUtils.humanReadableInt(r.length()) +
           ", encoding=" + r.getHFileReader().getDataBlockEncoding() +
           ", seqNum=" + seqNum +
           (calculatePutTs ? ", earliestPutTs=" + earliestPutTs: ""));


Mime
View raw message