hbase-commits mailing list archives

From ecl...@apache.org
Subject hbase git commit: HBASE-15460 Fix infer issues in hbase-common
Date Mon, 21 Mar 2016 19:01:47 GMT
Repository: hbase
Updated Branches:
  refs/heads/master 1123be918 -> 75252af3a


HBASE-15460 Fix infer issues in hbase-common

Summary: Fix issues found by static analysis

Test Plan: Unit tests pass

Differential Revision: https://reviews.facebook.net/D55551


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/75252af3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/75252af3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/75252af3

Branch: refs/heads/master
Commit: 75252af3a983966de9a488389987a773b59ae4da
Parents: 1123be9
Author: Elliott Clark <eclark@apache.org>
Authored: Tue Mar 15 09:52:24 2016 -0700
Committer: Elliott Clark <eclark@apache.org>
Committed: Mon Mar 21 11:51:50 2016 -0700

----------------------------------------------------------------------
 .../hbase/io/encoding/EncodedDataBlock.java     |  20 ++-
 .../HFileBlockDefaultDecodingContext.java       |  76 ++++----
 .../apache/hadoop/hbase/nio/SingleByteBuff.java |   6 +-
 .../org/apache/hadoop/hbase/util/Base64.java    |  61 +++++--
 .../org/apache/hadoop/hbase/util/ClassSize.java |   5 +-
 .../hadoop/hbase/util/DynamicClassLoader.java   |  28 +--
 .../java/org/apache/hadoop/hbase/util/JVM.java  | 172 ++++++++++++-------
 .../hbase/util/test/LoadTestKVGenerator.java    |   5 +-
 8 files changed, 234 insertions(+), 139 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/75252af3/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
index 1f7f8fa..0fc0cb5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
@@ -164,22 +164,28 @@ public class EncodedDataBlock {
    */
   public static int getCompressedSize(Algorithm algo, Compressor compressor,
       byte[] inputBuffer, int offset, int length) throws IOException {
-    DataOutputStream compressedStream = new DataOutputStream(
-        new IOUtils.NullOutputStream());
-    if (compressor != null) {
-      compressor.reset();
-    }
+
+    // Create streams
+    // Storing them so we can close them
+    final IOUtils.NullOutputStream nullOutputStream = new IOUtils.NullOutputStream();
+    final DataOutputStream compressedStream = new DataOutputStream(nullOutputStream);
     OutputStream compressingStream = null;
 
+
     try {
-      compressingStream = algo.createCompressionStream(
-          compressedStream, compressor, 0);
+      if (compressor != null) {
+        compressor.reset();
+      }
+
+      compressingStream = algo.createCompressionStream(compressedStream, compressor, 0);
 
       compressingStream.write(inputBuffer, offset, length);
       compressingStream.flush();
 
       return compressedStream.size();
     } finally {
+      nullOutputStream.close();
+      compressedStream.close();
       if (compressingStream != null) compressingStream.close();
     }
   }

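Note: the explicit closes in the new finally block are what Infer checks for;
on Java 7+ the same guarantee can be written with try-with-resources. A
minimal JDK-only sketch of the pattern, with java.util.zip.GZIPOutputStream
standing in for the HBase compression stream (the helper name and types here
are illustrative, not part of the patch):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.util.zip.GZIPOutputStream;

    /** Measure the compressed size of a byte range. */
    static int compressedSize(byte[] input, int offset, int length) throws IOException {
      ByteArrayOutputStream sink = new ByteArrayOutputStream();
      // try-with-resources closes (and thereby finishes) the gzip stream even
      // if write() throws, so nothing leaks on the error path.
      try (GZIPOutputStream gzip = new GZIPOutputStream(sink)) {
        gzip.write(input, offset, length);
      }
      // Read the size after close so the gzip trailer is counted as well.
      return sink.size();
    }
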
http://git-wip-us.apache.org/repos/asf/hbase/blob/75252af3/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java
index 30382d9..fa57ed7 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultDecodingContext.java
@@ -52,47 +52,53 @@ public class HFileBlockDefaultDecodingContext implements
   @Override
   public void prepareDecoding(int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,
       ByteBuff blockBufferWithoutHeader, ByteBuff onDiskBlock) throws IOException {
-    InputStream in = new DataInputStream(new ByteBuffInputStream(onDiskBlock));
+    final ByteBuffInputStream byteBuffInputStream = new ByteBuffInputStream(onDiskBlock);
+    InputStream dataInputStream = new DataInputStream(byteBuffInputStream);
 
-    Encryption.Context cryptoContext = fileContext.getEncryptionContext();
-    if (cryptoContext != Encryption.Context.NONE) {
+    try {
+      Encryption.Context cryptoContext = fileContext.getEncryptionContext();
+      if (cryptoContext != Encryption.Context.NONE) {
 
-      Cipher cipher = cryptoContext.getCipher();
-      Decryptor decryptor = cipher.getDecryptor();
-      decryptor.setKey(cryptoContext.getKey());
+        Cipher cipher = cryptoContext.getCipher();
+        Decryptor decryptor = cipher.getDecryptor();
+        decryptor.setKey(cryptoContext.getKey());
 
-      // Encrypted block format:
-      // +--------------------------+
-      // | byte iv length           |
-      // +--------------------------+
-      // | iv data ...              |
-      // +--------------------------+
-      // | encrypted block data ... |
-      // +--------------------------+
+        // Encrypted block format:
+        // +--------------------------+
+        // | byte iv length           |
+        // +--------------------------+
+        // | iv data ...              |
+        // +--------------------------+
+        // | encrypted block data ... |
+        // +--------------------------+
 
-      int ivLength = in.read();
-      if (ivLength > 0) {
-        byte[] iv = new byte[ivLength];
-        IOUtils.readFully(in, iv);
-        decryptor.setIv(iv);
-        // All encrypted blocks will have a nonzero IV length. If we see an IV
-        // length of zero, this means the encoding context had 0 bytes of
-        // plaintext to encode.
-        decryptor.reset();
-        in = decryptor.createDecryptionStream(in);
+        int ivLength = dataInputStream.read();
+        if (ivLength > 0) {
+          byte[] iv = new byte[ivLength];
+          IOUtils.readFully(dataInputStream, iv);
+          decryptor.setIv(iv);
+          // All encrypted blocks will have a nonzero IV length. If we see an IV
+          // length of zero, this means the encoding context had 0 bytes of
+          // plaintext to encode.
+          decryptor.reset();
+          dataInputStream = decryptor.createDecryptionStream(dataInputStream);
+        }
+        onDiskSizeWithoutHeader -= Bytes.SIZEOF_BYTE + ivLength;
       }
-      onDiskSizeWithoutHeader -= Bytes.SIZEOF_BYTE + ivLength;
-    }
 
-    Compression.Algorithm compression = fileContext.getCompression();
-    assert blockBufferWithoutHeader.hasArray();
-    if (compression != Compression.Algorithm.NONE) {
-      Compression.decompress(blockBufferWithoutHeader.array(),
-        blockBufferWithoutHeader.arrayOffset(), in, onDiskSizeWithoutHeader,
-        uncompressedSizeWithoutHeader, compression);
-    } else {
-      IOUtils.readFully(in, blockBufferWithoutHeader.array(),
-        blockBufferWithoutHeader.arrayOffset(), onDiskSizeWithoutHeader);
+      Compression.Algorithm compression = fileContext.getCompression();
+      assert blockBufferWithoutHeader.hasArray();
+      if (compression != Compression.Algorithm.NONE) {
+        Compression.decompress(blockBufferWithoutHeader.array(),
+            blockBufferWithoutHeader.arrayOffset(), dataInputStream, onDiskSizeWithoutHeader,
+            uncompressedSizeWithoutHeader, compression);
+      } else {
+        IOUtils.readFully(dataInputStream, blockBufferWithoutHeader.array(),
+            blockBufferWithoutHeader.arrayOffset(), onDiskSizeWithoutHeader);
+      }
+    } finally {
+      byteBuffInputStream.close();
+      dataInputStream.close();
     }
   }
 

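Note: dataInputStream may be re-assigned to the decryption stream inside the
try, so the finally closes whichever wrapper ended up outermost. A JDK-only
sketch of that conditional re-wrap pattern, with GZIPInputStream standing in
for the decryption stream (illustrative names, not part of the patch):

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.zip.GZIPInputStream;

    static byte[] readBlock(byte[] onDisk, boolean wrapped, int plainLength)
        throws IOException {
      InputStream in = new ByteArrayInputStream(onDisk);
      try {
        if (wrapped) {
          in = new GZIPInputStream(in); // conditional re-wrap, as for decryption
        }
        byte[] out = new byte[plainLength];
        int read = 0;
        while (read < plainLength) {
          int n = in.read(out, read, plainLength - read);
          if (n < 0) {
            throw new IOException("EOF after " + read + " of " + plainLength + " bytes");
          }
          read += n;
        }
        return out;
      } finally {
        in.close(); // closing the outermost stream also closes what it wraps
      }
    }
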
http://git-wip-us.apache.org/repos/asf/hbase/blob/75252af3/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
index 946962b..9ad28dc 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java
@@ -200,8 +200,10 @@ public class SingleByteBuff extends ByteBuff {
       // create a copy.
       ObjectIntPair<ByteBuffer> pair = new ObjectIntPair<ByteBuffer>();
       src.asSubByteBuffer(srcOffset, length, pair);
-      ByteBufferUtils.copyFromBufferToBuffer(pair.getFirst(), this.buf, pair.getSecond(), offset,
-          length);
+      if (pair.getFirst() != null) {
+        ByteBufferUtils.copyFromBufferToBuffer(pair.getFirst(), this.buf, pair.getSecond(), offset,
+            length);
+      }
     }
     return this;
   }

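Note: Infer flags pair.getFirst() because nothing in asSubByteBuffer's
signature guarantees the pair was populated; the new guard simply skips the
copy when no buffer is present. A small JDK-only sketch of the same guard,
using Map.Entry in place of ObjectIntPair (illustrative, not HBase code):

    import java.nio.ByteBuffer;
    import java.util.Map;

    /** Copy length bytes from the pair's buffer into dst, or do nothing. */
    static void copyIfPresent(Map.Entry<ByteBuffer, Integer> pair,
        ByteBuffer dst, int dstOffset, int length) {
      ByteBuffer src = pair.getKey();
      if (src == null) {
        return; // mirrors the new null guard above
      }
      ByteBuffer from = src.duplicate(); // leave src's position/limit untouched
      from.limit(pair.getValue() + length);
      from.position(pair.getValue());
      ByteBuffer to = dst.duplicate();
      to.position(dstOffset);
      to.put(from);
    }
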
http://git-wip-us.apache.org/repos/asf/hbase/blob/75252af3/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
index a22133d..b825c0f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
@@ -1092,7 +1092,9 @@ public class Base64 {
    */
   public static byte[] decodeFromFile(String filename) {
     byte[] decodedData = null;
-    Base64InputStream bis = null;
+    BufferedInputStream bufferedInputStream = null;
+    FileInputStream fileInputStream = null;
+    Base64InputStream base64InputStream = null;
     try {
       File file = new File(filename);
       byte[] buffer;
@@ -1107,14 +1109,14 @@ public class Base64 {
       buffer = new byte[(int) file.length()];
 
       // Open a stream
-
-      bis = new Base64InputStream(new BufferedInputStream(
-          new FileInputStream(file)), DECODE);
+      fileInputStream = new FileInputStream(file);
+      bufferedInputStream = new BufferedInputStream(fileInputStream);
+      base64InputStream = new Base64InputStream(bufferedInputStream, DECODE);
 
       // Read until done
 
       int length = 0;
-      for (int numBytes; (numBytes = bis.read(buffer, length, 4096)) >= 0; ) {
+      for (int numBytes; (numBytes = base64InputStream.read(buffer, length, 4096)) >= 0; ) {
         length += numBytes;
       }
 
@@ -1127,9 +1129,23 @@ public class Base64 {
       LOG.error("Error decoding from file " + filename, e);
 
     } finally {
-      if (bis != null) {
+      if (fileInputStream != null) {
+        try {
+          fileInputStream.close();
+        } catch (Exception e) {
+          LOG.error("error closing FileInputStream", e);
+        }
+      }
+      if (bufferedInputStream != null) {
+        try {
+          bufferedInputStream.close();
+        } catch (Exception e) {
+          LOG.error("error closing BufferedInputStream", e);
+        }
+      }
+      if (base64InputStream != null) {
         try {
-          bis.close();
+          base64InputStream.close();
         } catch (Exception e) {
           LOG.error("error closing Base64InputStream", e);
         }
@@ -1149,7 +1165,10 @@ public class Base64 {
    */
   public static String encodeFromFile(String filename) {
     String encodedData = null;
-    Base64InputStream bis = null;
+    FileInputStream fileInputStream = null;
+    BufferedInputStream bufferedInputStream = null;
+    Base64InputStream base64InputStream = null;
+
     try {
       File file = new File(filename);
 
@@ -1159,12 +1178,13 @@ public class Base64 {
 
       // Open a stream
 
-      bis = new Base64InputStream(new BufferedInputStream(
-              new FileInputStream(file)), ENCODE);
+      fileInputStream = new FileInputStream(file);
+      bufferedInputStream = new BufferedInputStream(fileInputStream);
+      base64InputStream = new Base64InputStream(bufferedInputStream, ENCODE);
 
       // Read until done
       int length = 0;
-      for (int numBytes; (numBytes = bis.read(buffer, length, 4096)) >= 0; ) {
+      for (int numBytes; (numBytes = base64InputStream.read(buffer, length, 4096)) >= 0; ) {
         length += numBytes;
       }
 
@@ -1176,9 +1196,24 @@ public class Base64 {
       LOG.error("Error encoding from file " + filename, e);
 
     } finally {
-      if (bis != null) {
+      // Can't leak exceptions but still need to clean things up.
+      if (fileInputStream != null) {
+        try {
+          fileInputStream.close();
+        } catch (Exception e) {
+          LOG.error("error closing FileInputStream", e);
+        }
+      }
+      if (bufferedInputStream != null) {
+        try {
+          bufferedInputStream.close();
+        } catch (Exception e) {
+          LOG.error("error closing BufferedInputStream", e);
+        }
+      }
+      if (base64InputStream != null) {
         try {
-          bis.close();
+          base64InputStream.close();
         } catch (Exception e) {
           LOG.error("error closing Base64InputStream", e);
         }

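Note: naming each stream layer lets all three be closed even if an outer
constructor throws; when that is not a concern, try-with-resources says the
same thing more compactly. A sketch of decodeFromFile's read loop in that
style, reusing this file's Base64InputStream and DECODE (the helper itself
is hypothetical):

    // Hypothetical helper; Base64InputStream and DECODE come from this class.
    private static int readDecoded(String filename, byte[] buffer)
        throws java.io.IOException {
      int length = 0;
      // Resources are closed in reverse order of creation, innermost last.
      try (java.io.FileInputStream fis = new java.io.FileInputStream(filename);
           java.io.BufferedInputStream bis = new java.io.BufferedInputStream(fis);
           Base64InputStream b64 = new Base64InputStream(bis, DECODE)) {
        for (int n; (n = b64.read(buffer, length, 4096)) >= 0; ) {
          length += n;
        }
      }
      return length;
    }
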
http://git-wip-us.apache.org/repos/asf/hbase/blob/75252af3/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
index fdd0fae..3dce955 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
@@ -117,7 +117,7 @@ public class ClassSize {
   static {
     final String version = System.getProperty("java.version");
     // Verify String looks like this: 1.6.0_29
-    if (!version.matches("\\d\\.\\d\\..*")) {
+    if (version == null || !version.matches("\\d\\.\\d\\..*")) {
       throw new RuntimeException("Unexpected version format: " + version);
     }
     // Convert char to int
@@ -331,7 +331,8 @@ public class ClassSize {
    * know this too.
    */
   public static boolean is32BitJVM() {
-    return System.getProperty("sun.arch.data.model").equals("32");
+    final String model = System.getProperty("sun.arch.data.model");
+    return model != null && model.equals("32");
   }
 
 }

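Note: System.getProperty returns null whenever the property is unset, which
is exactly what Infer flags. An equivalent one-line form of the new guard
puts the constant on the left of equals:

    public static boolean is32BitJVM() {
      // "32".equals(null) is simply false, so no explicit null check is needed.
      return "32".equals(System.getProperty("sun.arch.data.model"));
    }
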
http://git-wip-us.apache.org/repos/asf/hbase/blob/75252af3/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
index 214c917..595cc5b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
@@ -179,19 +179,21 @@ public class DynamicClassLoader extends ClassLoaderBase {
 
   private synchronized void loadNewJars() {
     // Refresh local jar file lists
-    for (File file: localDir.listFiles()) {
-      String fileName = file.getName();
-      if (jarModifiedTime.containsKey(fileName)) {
-        continue;
-      }
-      if (file.isFile() && fileName.endsWith(".jar")) {
-        jarModifiedTime.put(fileName, Long.valueOf(file.lastModified()));
-        try {
-          URL url = file.toURI().toURL();
-          addURL(url);
-        } catch (MalformedURLException mue) {
-          // This should not happen, just log it
-          LOG.warn("Failed to load new jar " + fileName, mue);
+    if (localDir != null) {
+      for (File file : localDir.listFiles()) {
+        String fileName = file.getName();
+        if (jarModifiedTime.containsKey(fileName)) {
+          continue;
+        }
+        if (file.isFile() && fileName.endsWith(".jar")) {
+          jarModifiedTime.put(fileName, Long.valueOf(file.lastModified()));
+          try {
+            URL url = file.toURI().toURL();
+            addURL(url);
+          } catch (MalformedURLException mue) {
+            // This should not happen, just log it
+            LOG.warn("Failed to load new jar " + fileName, mue);
+          }
         }
       }
     }

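Note: besides localDir itself, File.listFiles() can also return null (when
the path is not a directory or an I/O error occurs), so its result deserves
the same treatment. A minimal sketch with a hypothetical helper name:

    import java.io.File;

    static File[] jarsIn(File dir) {
      if (dir == null) {
        return new File[0]; // mirrors the new localDir guard above
      }
      File[] files = dir.listFiles();
      // listFiles() is documented to return null on error or non-directories.
      return files != null ? files : new File[0];
    }
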
http://git-wip-us.apache.org/repos/asf/hbase/blob/75252af3/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
index 2d6065b..3625a12 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
@@ -45,24 +45,30 @@ public class JVM {
   private OperatingSystemMXBean osMbean;
 
   private static final boolean ibmvendor =
-    System.getProperty("java.vendor").contains("IBM");
-  private static final boolean windows = 
-    System.getProperty("os.name").startsWith("Windows");
+      System.getProperty("java.vendor") != null &&
+          System.getProperty("java.vendor").contains("IBM");
+  private static final boolean windows =
+      System.getProperty("os.name") != null &&
+          System.getProperty("os.name").startsWith("Windows");
   private static final boolean linux =
-    System.getProperty("os.name").startsWith("Linux");
+      System.getProperty("os.name") != null &&
+          System.getProperty("os.name").startsWith("Linux");
+  private static final boolean amd64 =
+      System.getProperty("os.arch") != null &&
+          System.getProperty("os.arch").contains("amd64");
+
   private static final String JVMVersion = System.getProperty("java.version");
-  private static final boolean amd64 = System.getProperty("os.arch").contains("amd64");
 
   /**
    * Constructor. Get the running Operating System instance
    */
-  public JVM () {
+  public JVM() {
     this.osMbean = ManagementFactory.getOperatingSystemMXBean();
   }
- 
+
   /**
-   * Check if the OS is unix. 
-   * 
+   * Check if the OS is unix.
+   *
    * @return whether this is unix or not.
    */
   public static boolean isUnix() {
@@ -83,15 +89,16 @@ public class JVM {
 
   /**
    * Check if the arch is amd64;
+   *
    * @return whether this is amd64 or not.
    */
   public static boolean isAmd64() {
     return amd64;
   }
-  
+
   /**
    * Check if the finish() method of GZIPOutputStream is broken
-   * 
+   *
    * @return whether GZIPOutputStream.finish() is broken.
    */
   public static boolean isGZIPOutputStreamFinishBroken() {
@@ -100,11 +107,13 @@ public class JVM {
 
   /**
    * Load the implementation of UnixOperatingSystemMXBean for Oracle jvm
-   * and runs the desired method. 
+   * and runs the desired method.
+   *
    * @param mBeanMethodName : method to run from the interface UnixOperatingSystemMXBean
+   *
    * @return the method result
    */
-  private Long runUnixMXBeanMethod (String mBeanMethodName) {  
+  private Long runUnixMXBeanMethod(String mBeanMethodName) {
     Object unixos;
     Class<?> classRef;
     Method mBeanMethod;
@@ -112,12 +121,11 @@ public class JVM {
     try {
       classRef = Class.forName("com.sun.management.UnixOperatingSystemMXBean");
       if (classRef.isInstance(osMbean)) {
-        mBeanMethod = classRef.getMethod(mBeanMethodName, new Class[0]);
+        mBeanMethod = classRef.getMethod(mBeanMethodName);
         unixos = classRef.cast(osMbean);
-        return (Long)mBeanMethod.invoke(unixos);
+        return (Long) mBeanMethod.invoke(unixos);
       }
-    }
-    catch(Exception e) {
+    } catch (Exception e) {
       LOG.warn("Not able to load class or method for" +
           " com.sun.management.UnixOperatingSystemMXBean.", e);
     }
@@ -127,19 +135,20 @@ public class JVM {
   /**
    * Get the number of opened filed descriptor for the runtime jvm.
    * If Oracle java, it will use the com.sun.management interfaces.
-   * Otherwise, this methods implements it (linux only).  
+   * Otherwise, this method implements it (linux only).
+   *
    * @return number of open file descriptors for the jvm
    */
   public long getOpenFileDescriptorCount() {
-
     Long ofdc;
-    
+
     if (!ibmvendor) {
       ofdc = runUnixMXBeanMethod("getOpenFileDescriptorCount");
-      return (ofdc != null ? ofdc.longValue () : -1);
+      return (ofdc != null ? ofdc : -1);
     }
-    InputStream in = null;
-    BufferedReader output = null;
+    InputStream inputStream = null;
+    InputStreamReader inputStreamReader = null;
+    BufferedReader bufferedReader = null;
     try {
       //need to get the PID number of the process first
       RuntimeMXBean rtmbean = ManagementFactory.getRuntimeMXBean();
@@ -148,30 +157,39 @@ public class JVM {
 
       //using linux bash commands to retrieve info
       Process p = Runtime.getRuntime().exec(
-      new String[] { "bash", "-c",
-          "ls /proc/" + pidhost[0] + "/fdinfo | wc -l" });
-      in = p.getInputStream();
-      output = new BufferedReader(new InputStreamReader(in));
+          new String[]{"bash", "-c",
+              "ls /proc/" + pidhost[0] + "/fdinfo | wc -l"});
+      inputStream = p.getInputStream();
+      inputStreamReader = new InputStreamReader(inputStream);
+      bufferedReader = new BufferedReader(inputStreamReader);
       String openFileDesCount;
-      if ((openFileDesCount = output.readLine()) != null)      
-             return Long.parseLong(openFileDesCount);
-     } catch (IOException ie) {
-       LOG.warn("Not able to get the number of open file descriptors", ie);
-     } finally {
-       if (output != null) {
-         try {
-           output.close();
-         } catch (IOException e) {
-           LOG.warn("Not able to close the InputStream", e);
-         }
-       }
-       if (in != null){
-         try {
-           in.close();
-         } catch (IOException e) {
-           LOG.warn("Not able to close the InputStream", e);
-         }
-       }
+      if ((openFileDesCount = bufferedReader.readLine()) != null) {
+        return Long.parseLong(openFileDesCount);
+      }
+    } catch (IOException ie) {
+      LOG.warn("Not able to get the number of open file descriptors", ie);
+    } finally {
+      if (bufferedReader != null) {
+        try {
+          bufferedReader.close();
+        } catch (IOException e) {
+          LOG.warn("Not able to close the BufferedReader", e);
+        }
+      }
+      if (inputStreamReader != null) {
+        try {
+          inputStreamReader.close();
+        } catch (IOException e) {
+          LOG.warn("Not able to close the InputStreamReader", e);
+        }
+      }
+      if (inputStream != null) {
+        try {
+          inputStream.close();
+        } catch (IOException e) {
+          LOG.warn("Not able to close the InputStream", e);
+        }
+      }
     }
     return -1;
   }
@@ -185,15 +203,15 @@ public class JVM {
 
   /**
   * @return the physical free memory (not the JVM one, as it's not very useful as it depends on
-   *  the GC), but the one from the OS as it allows a little bit more to guess if the machine is
-   *  overloaded or not).
+   * the GC), but the one from the OS as it allows a little bit more to guess if the machine is
+   * overloaded or not).
    */
   public long getFreeMemory() {
-    if (ibmvendor){
+    if (ibmvendor) {
       return 0;
     }
 
-    Long r =  runUnixMXBeanMethod("getFreePhysicalMemorySize");
+    Long r = runUnixMXBeanMethod("getFreePhysicalMemorySize");
     return (r != null ? r : -1);
   }
 
@@ -203,28 +221,47 @@ public class JVM {
    * http://stackoverflow.com/questions/54686/how-to-get-a-list-of-current-open-windows-process-with-java
    */
   @edu.umd.cs.findbugs.annotations.SuppressWarnings(
-    value="RV_DONT_JUST_NULL_CHECK_READLINE",
-    justification="used by testing")
-  public int getNumberOfRunningProcess(){
-    if (!isUnix()){
+      value = "RV_DONT_JUST_NULL_CHECK_READLINE",
+      justification = "used by testing")
+  public int getNumberOfRunningProcess() {
+    if (!isUnix()) {
       return 0;
     }
 
-    BufferedReader input = null;
+    InputStream inputStream = null;
+    InputStreamReader inputStreamReader = null;
+    BufferedReader bufferedReader = null;
+
     try {
       int count = 0;
       Process p = Runtime.getRuntime().exec("ps -e");
-      input = new BufferedReader(new InputStreamReader(p.getInputStream()));
-      while (input.readLine() != null) {
+      inputStream = p.getInputStream();
+      inputStreamReader = new InputStreamReader(inputStream);
+      bufferedReader = new BufferedReader(inputStreamReader);
+      while (bufferedReader.readLine() != null) {
         count++;
       }
       return count - 1; //  -1 because there is a headline
     } catch (IOException e) {
       return -1;
-    }  finally {
-      if (input != null){
+    } finally {
+      if (bufferedReader != null) {
         try {
-          input.close();
+          bufferedReader.close();
+        } catch (IOException e) {
+          LOG.warn("Not able to close the BufferedReader", e);
+        }
+      }
+      if (inputStreamReader != null) {
+        try {
+          inputStreamReader.close();
+        } catch (IOException e) {
+          LOG.warn("Not able to close the InputStreamReader", e);
+        }
+      }
+      if (inputStream != null) {
+        try {
+          inputStream.close();
         } catch (IOException e) {
           LOG.warn("Not able to close the InputStream", e);
         }
@@ -235,24 +272,27 @@ public class JVM {
   /**
    * Get the number of the maximum file descriptors the system can use.
    * If Oracle java, it will use the com.sun.management interfaces.
-   * Otherwise, this methods implements it (linux only).  
+   * Otherwise, this method implements it (linux only).
+   *
    * @return max number of file descriptors the operating system can use.
    */
   public long getMaxFileDescriptorCount() {
     Long mfdc;
     if (!ibmvendor) {
       mfdc = runUnixMXBeanMethod("getMaxFileDescriptorCount");
-      return (mfdc != null ? mfdc.longValue () : -1);
+      return (mfdc != null ? mfdc : -1);
     }
     InputStream in = null;
     BufferedReader output = null;
     try {
       //using linux bash commands to retrieve info
-      Process p = Runtime.getRuntime().exec(new String[] { "bash", "-c", "ulimit -n" });
+      Process p = Runtime.getRuntime().exec(new String[]{"bash", "-c", "ulimit -n"});
       in = p.getInputStream();
       output = new BufferedReader(new InputStreamReader(in));
       String maxFileDesCount;
-      if ((maxFileDesCount = output.readLine()) != null) return Long.parseLong(maxFileDesCount);
+      if ((maxFileDesCount = output.readLine()) != null) {
+        return Long.parseLong(maxFileDesCount);
+      }
     } catch (IOException ie) {
       LOG.warn("Not able to get the max number of file descriptors", ie);
     } finally {
@@ -263,7 +303,7 @@ public class JVM {
           LOG.warn("Not able to close the reader", e);
         }
       }
-      if (in != null){
+      if (in != null) {
         try {
           in.close();
         } catch (IOException e) {
@@ -272,5 +312,5 @@ public class JVM {
       }
     }
     return -1;
- }
+  }
 }

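Note: the three per-layer closes satisfy the analyzer, but closing the
outermost BufferedReader already closes the reader and stream it wraps;
try-with-resources gives both brevity and the error-path guarantee. A
JDK-only sketch of the line-counting loop (hypothetical helper name):

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;

    static int countOutputLines(Process p) throws IOException {
      int count = 0;
      // Closing the BufferedReader closes the InputStreamReader and the
      // process InputStream beneath it, on normal and error paths alike.
      try (BufferedReader reader = new BufferedReader(
          new InputStreamReader(p.getInputStream(), StandardCharsets.UTF_8))) {
        while (reader.readLine() != null) {
          count++;
        }
      }
      return count;
    }
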
http://git-wip-us.apache.org/repos/asf/hbase/blob/75252af3/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
index 9e9b507..4edd270 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/LoadTestKVGenerator.java
@@ -107,7 +107,10 @@ public class LoadTestKVGenerator {
   private static byte[] getValueForRowColumn(int dataSize, byte[]... seedStrings) {
     long seed = dataSize;
     for (byte[] str : seedStrings) {
-      seed += Bytes.toString(str).hashCode();
+      final String bytesString = Bytes.toString(str);
+      if (bytesString != null) {
+        seed += bytesString.hashCode();
+      }
     }
     Random seededRandom = new Random(seed);
     byte[] randomBytes = new byte[dataSize];

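Note: Bytes.toString returns null when handed a null array, hence the new
guard. An alternative that avoids the intermediate String entirely is to
hash the bytes directly; note this yields different seeds (and therefore
different generated values), so it is illustrative only, not the patch's
choice:

    import java.util.Arrays;

    static long seedFor(int dataSize, byte[]... seedStrings) {
      long seed = dataSize;
      for (byte[] str : seedStrings) {
        seed += Arrays.hashCode(str); // Arrays.hashCode(null) is 0, so null-safe
      }
      return seed;
    }
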
