hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mberto...@apache.org
Subject [2/9] hbase git commit: HBASE-13310 Fix high priority findbugs warnings
Date Tue, 24 Mar 2015 08:32:15 GMT
HBASE-13310 Fix high priority findbugs warnings


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6e9ded51
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6e9ded51
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6e9ded51

Branch: refs/heads/hbase-12439
Commit: 6e9ded51fb2fc4c3e63fb0da5030246f3466db71
Parents: 1428a59
Author: zhangduo <zhangduo@wandoujia.com>
Authored: Sun Mar 22 20:41:43 2015 +0800
Committer: zhangduo <zhangduo@wandoujia.com>
Committed: Tue Mar 24 06:50:04 2015 +0800

----------------------------------------------------------------------
 .../apache/hadoop/hbase/HColumnDescriptor.java  | 26 +++---
 .../apache/hadoop/hbase/util/PrettyPrinter.java |  2 +-
 .../org/apache/hadoop/hbase/util/Threads.java   | 92 ++++++++++++++------
 .../apache/hadoop/hbase/rest/RESTServer.java    |  6 +-
 .../apache/hadoop/hbase/rest/RowResource.java   |  4 +-
 .../org/apache/hadoop/hbase/rest/RowSpec.java   |  8 +-
 .../hbase/rest/model/ColumnSchemaModel.java     | 10 +--
 .../hadoop/hbase/rest/model/ScannerModel.java   |  2 +-
 .../rest/model/StorageClusterStatusModel.java   |  4 +-
 .../hbase/rest/model/TableRegionModel.java      |  2 +-
 .../hbase/rest/model/TableSchemaModel.java      | 16 ++--
 .../hadoop/hbase/io/hfile/HFileBlock.java       | 13 +++
 .../hadoop/hbase/master/HMasterCommandLine.java |  8 +-
 .../regionserver/RegionCoprocessorHost.java     |  5 +-
 .../hbase/security/access/TableAuthManager.java | 11 +--
 .../org/apache/hadoop/hbase/util/HBaseFsck.java |  4 +-
 .../hbase/util/hbck/OfflineMetaRepair.java      |  2 +-
 .../apache/hadoop/hbase/util/TestHBaseFsck.java | 16 ++--
 .../hadoop/hbase/util/hbck/HbckTestingUtil.java |  2 +-
 19 files changed, 141 insertions(+), 92 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 5335bef..d4d8ee4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -734,7 +734,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
     String compressTagsStr = getValue(COMPRESS_TAGS);
     boolean compressTags = DEFAULT_COMPRESS_TAGS;
     if (compressTagsStr != null) {
-      compressTags = Boolean.valueOf(compressTagsStr);
+      compressTags = Boolean.parseBoolean(compressTagsStr);
     }
     return compressTags;
   }
@@ -747,7 +747,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
     String compressTagsStr = getValue(COMPRESS_TAGS);
     boolean compressTags = DEFAULT_COMPRESS_TAGS;
     if (compressTagsStr != null) {
-      compressTags = Boolean.valueOf(compressTagsStr);
+      compressTags = Boolean.parseBoolean(compressTagsStr);
     }
     return compressTags;
   }
@@ -778,8 +778,9 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    */
   public boolean isInMemory() {
     String value = getValue(HConstants.IN_MEMORY);
-    if (value != null)
-      return Boolean.valueOf(value).booleanValue();
+    if (value != null) {
+      return Boolean.parseBoolean(value);
+    }
     return DEFAULT_IN_MEMORY;
   }
 
@@ -827,7 +828,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    */
   public int getTimeToLive() {
     String value = getValue(TTL);
-    return (value != null)? Integer.valueOf(value).intValue(): DEFAULT_TTL;
+    return (value != null)? Integer.parseInt(value) : DEFAULT_TTL;
   }
 
   /**
@@ -843,7 +844,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    */
   public int getMinVersions() {
     String value = getValue(MIN_VERSIONS);
-    return (value != null)? Integer.valueOf(value).intValue(): 0;
+    return (value != null)? Integer.parseInt(value) : 0;
   }
 
   /**
@@ -861,8 +862,9 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    */
   public boolean isBlockCacheEnabled() {
     String value = getValue(BLOCKCACHE);
-    if (value != null)
-      return Boolean.valueOf(value).booleanValue();
+    if (value != null) {
+      return Boolean.parseBoolean(value);
+    }
     return DEFAULT_BLOCKCACHE;
   }
 
@@ -900,7 +902,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   public int getScope() {
     byte[] value = getValue(REPLICATION_SCOPE_BYTES);
     if (value != null) {
-      return Integer.valueOf(Bytes.toString(value));
+      return Integer.parseInt(Bytes.toString(value));
     }
     return DEFAULT_REPLICATION_SCOPE;
   }
@@ -966,7 +968,9 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
 
   private boolean setAndGetBoolean(final String key, final boolean defaultSetting) {
     String value = getValue(key);
-    if (value != null) return Boolean.valueOf(value).booleanValue();
+    if (value != null) {
+      return Boolean.parseBoolean(value);
+    }
     return defaultSetting;
   }
 
@@ -1201,7 +1205,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
   @Override
   public int hashCode() {
     int result = Bytes.hashCode(this.name);
-    result ^= Byte.valueOf(COLUMN_DESCRIPTOR_VERSION).hashCode();
+    result ^= (int) COLUMN_DESCRIPTOR_VERSION;
     result ^= values.hashCode();
     result ^= configuration.hashCode();
     return result;

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java
index 8c8f618..7728112 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PrettyPrinter.java
@@ -34,7 +34,7 @@ public class PrettyPrinter {
     StringBuilder human = new StringBuilder();
     switch (unit) {
       case TIME_INTERVAL:
-        human.append(humanReadableTTL(Long.valueOf(value)));
+        human.append(humanReadableTTL(Long.parseLong(value)));
         break;
       default:
         human.append(value);

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
index 81178c4..51a506b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
@@ -35,6 +35,8 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 
+import com.google.common.base.Preconditions;
+
 /**
  * Thread Utility
  */
@@ -266,43 +268,75 @@ public class Threads {
     t.setUncaughtExceptionHandler(LOGGING_EXCEPTION_HANDLER);
   }
 
-  private static Method printThreadInfoMethod = null;
-  private static boolean printThreadInfoMethodWithPrintStream = true;
+  private static interface PrintThreadInfoHelper {
 
-  /**
-   * Print all of the thread's information and stack traces. Wrapper around Hadoop's method.
-   *
-   * @param stream the stream to
-   * @param title a string title for the stack trace
-   */
-  public static void printThreadInfo(PrintStream stream, String title) {
+    void printThreadInfo(PrintStream stream, String title);
 
-    if (printThreadInfoMethod == null) {
+  }
+
+  private static class PrintThreadInfoLazyHolder {
+
+    public static final PrintThreadInfoHelper HELPER = initHelper();
+
+    private static PrintThreadInfoHelper initHelper() {
+      Method method = null;
       try {
         // Hadoop 2.7+ declares printThreadInfo(PrintStream, String)
-        printThreadInfoMethod = ReflectionUtils.class.getMethod("printThreadInfo",
-          PrintStream.class, String.class);
+        method = ReflectionUtils.class.getMethod("printThreadInfo", PrintStream.class,
+          String.class);
+        method.setAccessible(true);
+        final Method hadoop27Method = method;
+        return new PrintThreadInfoHelper() {
+
+          @Override
+          public void printThreadInfo(PrintStream stream, String title) {
+            try {
+              hadoop27Method.invoke(null, stream, title);
+            } catch (IllegalAccessException | IllegalArgumentException e) {
+              throw new RuntimeException(e);
+            } catch (InvocationTargetException e) {
+              throw new RuntimeException(e.getCause());
+            }
+          }
+        };
       } catch (NoSuchMethodException e) {
-        // Hadoop 2.6 and earlier declares printThreadInfo(PrintWriter, String)
-        printThreadInfoMethodWithPrintStream = false;
-        try {
-          printThreadInfoMethod = ReflectionUtils.class.getMethod("printThreadInfo",
-            PrintWriter.class, String.class);
-        } catch (NoSuchMethodException e1) {
-          throw new RuntimeException("Cannot find method. Check hadoop jars linked", e1);
-        }
+        LOG.info(
+          "Can not find hadoop 2.7+ printThreadInfo method, try hadoop hadoop 2.6 and earlier", e);
       }
-      printThreadInfoMethod.setAccessible(true);
-    }
+      try {
+        // Hadoop 2.6 and earlier declares printThreadInfo(PrintWriter, String)
+        method = ReflectionUtils.class.getMethod("printThreadInfo", PrintWriter.class,
+          String.class);
+        method.setAccessible(true);
+        final Method hadoop26Method = method;
+        return new PrintThreadInfoHelper() {
 
-    try {
-      if (printThreadInfoMethodWithPrintStream) {
-        printThreadInfoMethod.invoke(null, stream, title);
-      } else {
-        printThreadInfoMethod.invoke(null, new PrintWriter(stream), title);
+          @Override
+          public void printThreadInfo(PrintStream stream, String title) {
+            try {
+              hadoop26Method.invoke(null, new PrintWriter(stream), title);
+            } catch (IllegalAccessException | IllegalArgumentException e) {
+              throw new RuntimeException(e);
+            } catch (InvocationTargetException e) {
+              throw new RuntimeException(e.getCause());
+            }
+          }
+        };
+      } catch (NoSuchMethodException e) {
+        LOG.warn("Cannot find printThreadInfo method. Check hadoop jars linked", e);
       }
-    } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
-      throw new RuntimeException(e.getCause());
+      return null;
     }
   }
+
+  /**
+   * Print all of the thread's information and stack traces. Wrapper around Hadoop's method.
+   *
+   * @param stream the stream to
+   * @param title a string title for the stack trace
+   */
+  public static void printThreadInfo(PrintStream stream, String title) {
+    Preconditions.checkNotNull(PrintThreadInfoLazyHolder.HELPER,
+      "Cannot find method. Check hadoop jars linked").printThreadInfo(stream, title);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index bb52fdb..370a083 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -127,8 +127,7 @@ public class RESTServer implements Constants {
     // check for user-defined port setting, if so override the conf
     if (commandLine != null && commandLine.hasOption("port")) {
       String val = commandLine.getOptionValue("port");
-      servlet.getConfiguration()
-          .setInt("hbase.rest.port", Integer.valueOf(val));
+      servlet.getConfiguration().setInt("hbase.rest.port", Integer.parseInt(val));
       LOG.debug("port set to " + val);
     }
 
@@ -141,8 +140,7 @@ public class RESTServer implements Constants {
     // check for user-defined info server port setting, if so override the conf
     if (commandLine != null && commandLine.hasOption("infoport")) {
       String val = commandLine.getOptionValue("infoport");
-      servlet.getConfiguration()
-          .setInt("hbase.rest.info.port", Integer.valueOf(val));
+      servlet.getConfiguration().setInt("hbase.rest.info.port", Integer.parseInt(val));
       LOG.debug("Web UI port set to " + val);
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
index dad5a32..ff1345c 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
@@ -76,7 +76,7 @@ public class RowResource extends ResourceBase {
     this.tableResource = tableResource;
     this.rowspec = new RowSpec(rowspec);
     if (versions != null) {
-      this.rowspec.setMaxVersions(Integer.valueOf(versions));
+      this.rowspec.setMaxVersions(Integer.parseInt(versions));
     }
     this.check = check;
   }
@@ -271,7 +271,7 @@ public class RowResource extends ResourceBase {
       }
       vals = headers.getRequestHeader("X-Timestamp");
       if (vals != null && !vals.isEmpty()) {
-        timestamp = Long.valueOf(vals.get(0));
+        timestamp = Long.parseLong(vals.get(0));
       }
       if (column == null) {
         servlet.getMetrics().incrementFailedPutRequests(1);

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java
index b6c1ca8..cc51c85 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java
@@ -164,7 +164,7 @@ public class RowSpec {
         i++;
       }
       try {
-        time0 = Long.valueOf(URLDecoder.decode(stamp.toString(),
+        time0 = Long.parseLong(URLDecoder.decode(stamp.toString(),
           HConstants.UTF8_ENCODING));
       } catch (NumberFormatException e) {
         throw new IllegalArgumentException(e);
@@ -177,7 +177,7 @@ public class RowSpec {
           i++;
         }
         try {
-          time1 = Long.valueOf(URLDecoder.decode(stamp.toString(),
+          time1 = Long.parseLong(URLDecoder.decode(stamp.toString(),
             HConstants.UTF8_ENCODING));
         } catch (NumberFormatException e) {
           throw new IllegalArgumentException(e);
@@ -245,7 +245,7 @@ public class RowSpec {
           }
           sb.append(c);
         }
-        maxVersions = Integer.valueOf(sb.toString());
+        maxVersions = Integer.parseInt(sb.toString());
       } break;
       case 'n': {
         StringBuilder sb = new StringBuilder();
@@ -257,7 +257,7 @@ public class RowSpec {
           }
           sb.append(c);
         }
-        maxValues = Integer.valueOf(sb.toString());
+        maxValues = Integer.parseInt(sb.toString());
       } break;
       default:
         throw new IllegalArgumentException("unknown parameter '" + c + "'");

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
index ba0eed8..8562cde 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
@@ -138,7 +138,7 @@ public class ColumnSchemaModel implements Serializable {
   public boolean __getBlockcache() {
     Object o = attrs.get(BLOCKCACHE);
     return o != null ? 
-      Boolean.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_BLOCKCACHE;
+      Boolean.parseBoolean(o.toString()) : HColumnDescriptor.DEFAULT_BLOCKCACHE;
   }
 
   /**
@@ -147,7 +147,7 @@ public class ColumnSchemaModel implements Serializable {
   public int __getBlocksize() {
     Object o = attrs.get(BLOCKSIZE);
     return o != null ? 
-      Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_BLOCKSIZE;
+      Integer.parseInt(o.toString()) : HColumnDescriptor.DEFAULT_BLOCKSIZE;
   }
 
   /**
@@ -172,7 +172,7 @@ public class ColumnSchemaModel implements Serializable {
   public boolean __getInMemory() {
     Object o = attrs.get(IN_MEMORY);
     return o != null ? 
-      Boolean.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_IN_MEMORY;
+      Boolean.parseBoolean(o.toString()) : HColumnDescriptor.DEFAULT_IN_MEMORY;
   }
 
   /**
@@ -181,7 +181,7 @@ public class ColumnSchemaModel implements Serializable {
   public int __getTTL() {
     Object o = attrs.get(TTL);
     return o != null ? 
-      Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_TTL;
+      Integer.parseInt(o.toString()) : HColumnDescriptor.DEFAULT_TTL;
   }
 
   /**
@@ -190,7 +190,7 @@ public class ColumnSchemaModel implements Serializable {
   public int __getVersions() {
     Object o = attrs.get(VERSIONS);
     return o != null ? 
-      Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_VERSIONS;
+      Integer.parseInt(o.toString()) : HColumnDescriptor.DEFAULT_VERSIONS;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
index 784f7e6..25a6de3 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
@@ -413,7 +413,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable {
         }
       } break;
       case PageFilter:
-        filter = new PageFilter(Long.valueOf(value));
+        filter = new PageFilter(Long.parseLong(value));
         break;
       case PrefixFilter:
         filter = new PrefixFilter(Base64.decode(value));

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
index 3b044e7..2caec66 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
@@ -106,7 +106,9 @@ public class StorageClusterStatusModel
     /**
      * Represents a region hosted on a region server.
      */
-    public static class Region {
+    public static class Region implements Serializable {
+      private static final long serialVersionUID = -1326683840086398193L;
+
       private byte[] name;
       private int stores;
       private int storefiles;

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableRegionModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableRegionModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableRegionModel.java
index d9b2b65..89fe12c 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableRegionModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableRegionModel.java
@@ -142,7 +142,7 @@ public class TableRegionModel implements Serializable {
     this.startKey = Bytes.toBytes(split[1]);
     String tail = split[2];
     split = tail.split("\\.");
-    id = Long.valueOf(split[0]);
+    id = Long.parseLong(split[0]);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
index 9e9fe47..593c3ab 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
@@ -221,7 +221,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
    */
   public boolean __getIsMeta() {
     Object o = attrs.get(IS_META);
-    return o != null ? Boolean.valueOf(o.toString()) : false;
+    return o != null ? Boolean.parseBoolean(o.toString()) : false;
   }
 
   /**
@@ -229,7 +229,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
    */
   public boolean __getIsRoot() {
     Object o = attrs.get(IS_ROOT);
-    return o != null ? Boolean.valueOf(o.toString()) : false;
+    return o != null ? Boolean.parseBoolean(o.toString()) : false;
   }
 
   /**
@@ -237,8 +237,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
    */
   public boolean __getReadOnly() {
     Object o = attrs.get(READONLY);
-    return o != null ? 
-      Boolean.valueOf(o.toString()) : HTableDescriptor.DEFAULT_READONLY;
+    return o != null ? Boolean.parseBoolean(o.toString()) : HTableDescriptor.DEFAULT_READONLY;
   }
 
   /**
@@ -285,12 +284,10 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
         familyBuilder.addAttrs(attrBuilder);
       }
       if (familyAttrs.containsKey(TTL)) {
-        familyBuilder.setTtl(
-          Integer.valueOf(familyAttrs.get(TTL).toString()));
+        familyBuilder.setTtl(Integer.parseInt(familyAttrs.get(TTL).toString()));
       }
       if (familyAttrs.containsKey(VERSIONS)) {
-        familyBuilder.setMaxVersions(
-          Integer.valueOf(familyAttrs.get(VERSIONS).toString()));
+        familyBuilder.setMaxVersions(Integer.parseInt(familyAttrs.get(VERSIONS).toString()));
       }
       if (familyAttrs.containsKey(COMPRESSION)) {
         familyBuilder.setCompression(familyAttrs.get(COMPRESSION).toString());
@@ -298,8 +295,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
       builder.addColumns(familyBuilder);
     }
     if (attrs.containsKey(READONLY)) {
-      builder.setReadOnly(
-        Boolean.valueOf(attrs.get(READONLY).toString()));
+      builder.setReadOnly(Boolean.parseBoolean(attrs.get(READONLY).toString()));
     }
     return builder.build().toByteArray();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index b096185..8f82a63 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -1749,6 +1749,19 @@ public class HFileBlock implements Cacheable {
   }
 
   @Override
+  public int hashCode() {
+    int result = 1;
+    result = result * 31 + blockType.hashCode();
+    result = result * 31 + nextBlockOnDiskSizeWithHeader;
+    result = result * 31 + (int) (offset ^ (offset >>> 32));
+    result = result * 31 + onDiskSizeWithoutHeader;
+    result = result * 31 + (int) (prevBlockOffset ^ (prevBlockOffset >>> 32));
+    result = result * 31 + uncompressedSizeWithoutHeader;
+    result = result * 31 + buf.hashCode();
+    return result;
+  }
+
+  @Override
   public boolean equals(Object comparison) {
     if (this == comparison) {
       return true;

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
index 6ac681e..3aeee40 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
@@ -95,7 +95,7 @@ public class HMasterCommandLine extends ServerCommandLine {
     if (cmd.hasOption("minRegionServers")) {
       String val = cmd.getOptionValue("minRegionServers");
       getConf().setInt("hbase.regions.server.count.min",
-                  Integer.valueOf(val));
+                  Integer.parseInt(val));
       LOG.debug("minRegionServers set to " + val);
     }
 
@@ -103,7 +103,7 @@ public class HMasterCommandLine extends ServerCommandLine {
     if (cmd.hasOption("minServers")) {
       String val = cmd.getOptionValue("minServers");
       getConf().setInt("hbase.regions.server.count.min",
-                  Integer.valueOf(val));
+                  Integer.parseInt(val));
       LOG.debug("minServers set to " + val);
     }
 
@@ -116,13 +116,13 @@ public class HMasterCommandLine extends ServerCommandLine {
     // master when we are in local/standalone mode. Useful testing)
     if (cmd.hasOption("localRegionServers")) {
       String val = cmd.getOptionValue("localRegionServers");
-      getConf().setInt("hbase.regionservers", Integer.valueOf(val));
+      getConf().setInt("hbase.regionservers", Integer.parseInt(val));
       LOG.debug("localRegionServers set to " + val);
     }
     // How many masters to startup inside this process; useful testing
     if (cmd.hasOption("masters")) {
       String val = cmd.getOptionValue("masters");
-      getConf().setInt("hbase.masters", Integer.valueOf(val));
+      getConf().setInt("hbase.masters", Integer.parseInt(val));
       LOG.debug("masters set to " + val);
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index d10141c..d81d96c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -258,8 +258,9 @@ public class RegionCoprocessorHost
                 key + ", spec: " + spec);
               continue;
             }
-            int priority = matcher.group(3).trim().isEmpty() ?
-                Coprocessor.PRIORITY_USER : Integer.valueOf(matcher.group(3));
+            String priorityStr = matcher.group(3).trim();
+            int priority = priorityStr.isEmpty() ?
+                Coprocessor.PRIORITY_USER : Integer.parseInt(priorityStr);
             String cfgSpec = null;
             try {
               cfgSpec = matcher.group(4);

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
index e73b23c..d043735 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
@@ -23,6 +23,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentSkipListMap;
+import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -105,7 +106,7 @@ public class TableAuthManager {
 
   private Configuration conf;
   private ZKPermissionWatcher zkperms;
-  private volatile long mtime;
+  private final AtomicLong mtime = new AtomicLong(0L);
 
   private TableAuthManager(ZooKeeperWatcher watcher, Configuration conf)
       throws IOException {
@@ -212,7 +213,7 @@ public class TableAuthManager {
         }
       }
       globalCache = newCache;
-      mtime++;
+      mtime.incrementAndGet();
     } catch (IOException e) {
       // Never happens
       LOG.error("Error occured while updating the global cache", e);
@@ -240,7 +241,7 @@ public class TableAuthManager {
     }
 
     tableCache.put(table, newTablePerms);
-    mtime++;
+    mtime.incrementAndGet();
   }
 
   /**
@@ -264,7 +265,7 @@ public class TableAuthManager {
     }
 
     nsCache.put(namespace, newTablePerms);
-    mtime++;
+    mtime.incrementAndGet();
   }
 
   private PermissionCache<TablePermission> getTablePermissions(TableName table) {
@@ -741,7 +742,7 @@ public class TableAuthManager {
   }
 
   public long getMTime() {
-    return mtime;
+    return mtime.get();
   }
 
   static Map<ZooKeeperWatcher,TableAuthManager> managerMap =

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 28bae6a..1fb64a2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -3985,7 +3985,7 @@ public class HBaseFsck extends Configured implements Closeable {
    * Display the full report from fsck. This displays all live and dead region
    * servers, and all known regions.
    */
-  public void setDisplayFullReport() {
+  public static void setDisplayFullReport() {
     details = true;
   }
 
@@ -3993,7 +3993,7 @@ public class HBaseFsck extends Configured implements Closeable {
    * Set summary mode.
    * Print only summary of the tables and status (OK or INCONSISTENT)
    */
-  void setSummary() {
+  static void setSummary() {
     summary = true;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
index efc141a..8cda518 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
@@ -81,7 +81,7 @@ public class OfflineMetaRepair {
     for (int i = 0; i < args.length; i++) {
       String cmd = args[i];
       if (cmd.equals("-details")) {
-        fsck.setDisplayFullReport();
+        HBaseFsck.setDisplayFullReport();
       } else if (cmd.equals("-base")) {
         if (i == args.length - 1) {
           System.err.println("OfflineMetaRepair: -base needs an HDFS path.");

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
index ac2d906..ac79656 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
@@ -1001,7 +1001,7 @@ public class TestHBaseFsck {
       // fix the problem.
       HBaseFsck fsck = new HBaseFsck(conf, hbfsckExecutorService);
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
+      HBaseFsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setFixAssignments(true);
       fsck.setFixMeta(true);
@@ -1673,7 +1673,7 @@ public class TestHBaseFsck {
       // fix lingering split parent
       hbck = new HBaseFsck(conf, hbfsckExecutorService);
       hbck.connect();
-      hbck.setDisplayFullReport(); // i.e. -details
+      HBaseFsck.setDisplayFullReport(); // i.e. -details
       hbck.setTimeLag(0);
       hbck.setFixSplitParents(true);
       hbck.onlineHbck();
@@ -1926,7 +1926,7 @@ public class TestHBaseFsck {
       // verify that noHdfsChecking report the same errors
       HBaseFsck fsck = new HBaseFsck(conf, hbfsckExecutorService);
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
+      HBaseFsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setCheckHdfs(false);
       fsck.onlineHbck();
@@ -1937,7 +1937,7 @@ public class TestHBaseFsck {
       // verify that fixAssignments works fine with noHdfsChecking
       fsck = new HBaseFsck(conf, hbfsckExecutorService);
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
+      HBaseFsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setCheckHdfs(false);
       fsck.setFixAssignments(true);
@@ -1979,7 +1979,7 @@ public class TestHBaseFsck {
       // verify that noHdfsChecking report the same errors
       HBaseFsck fsck = new HBaseFsck(conf, hbfsckExecutorService);
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
+      HBaseFsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setCheckHdfs(false);
       fsck.onlineHbck();
@@ -1990,7 +1990,7 @@ public class TestHBaseFsck {
       // verify that fixMeta doesn't work with noHdfsChecking
       fsck = new HBaseFsck(conf, hbfsckExecutorService);
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
+      HBaseFsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setCheckHdfs(false);
       fsck.setFixAssignments(true);
@@ -2045,7 +2045,7 @@ public class TestHBaseFsck {
       // verify that noHdfsChecking can't detect ORPHAN_HDFS_REGION
       HBaseFsck fsck = new HBaseFsck(conf, hbfsckExecutorService);
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
+      HBaseFsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setCheckHdfs(false);
       fsck.onlineHbck();
@@ -2056,7 +2056,7 @@ public class TestHBaseFsck {
       // verify that fixHdfsHoles doesn't work with noHdfsChecking
       fsck = new HBaseFsck(conf, hbfsckExecutorService);
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
+      HBaseFsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setCheckHdfs(false);
       fsck.setFixHdfsHoles(true);

http://git-wip-us.apache.org/repos/asf/hbase/blob/6e9ded51/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java
index bce8938..3691cd7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java
@@ -51,7 +51,7 @@ public class HbckTestingUtil {
     HBaseFsck fsck = new HBaseFsck(conf, exec);
     try {
       fsck.connect();
-      fsck.setDisplayFullReport(); // i.e. -details
+      HBaseFsck.setDisplayFullReport(); // i.e. -details
       fsck.setTimeLag(0);
       fsck.setFixAssignments(fixAssignments);
       fsck.setFixMeta(fixMeta);


Mime
View raw message