hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From raw...@apache.org
Subject svn commit: r920548 [5/5] - in /hadoop/hbase/trunk: ./ core/src/main/java/org/apache/hadoop/hbase/client/ core/src/main/java/org/apache/hadoop/hbase/filter/ core/src/main/java/org/apache/hadoop/hbase/ipc/ core/src/main/java/org/apache/hadoop/hbase/mast...
Date Mon, 08 Mar 2010 22:25:09 GMT
Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2007 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -19,13 +19,6 @@
  */
 package org.apache.hadoop.hbase.util;
 
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.HashMap;
-import java.util.Map;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -43,6 +36,13 @@
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Map;
+
 /**
  * Utility methods for interacting with the underlying file system.
  */
@@ -58,22 +58,22 @@
 
   /**
    * Delete if exists.
-   * @param fs
-   * @param dir
+   * @param fs filesystem object
+   * @param dir directory to delete
    * @return True if deleted <code>dir</code>
-   * @throws IOException
+   * @throws IOException e
    */
   public static boolean deleteDirectory(final FileSystem fs, final Path dir)
   throws IOException {
-    return fs.exists(dir)? fs.delete(dir, true): false;
+    return fs.exists(dir) && fs.delete(dir, true);
   }
 
   /**
    * Check if directory exists.  If it does not, create it.
-   * @param fs 
-   * @param dir
+   * @param fs filesystem object
+   * @param dir path to check
    * @return Path
-   * @throws IOException
+   * @throws IOException e
    */
   public Path checkdir(final FileSystem fs, final Path dir) throws IOException {
     if (!fs.exists(dir)) {
@@ -84,10 +84,10 @@
 
   /**
    * Create file.
-   * @param fs
-   * @param p
+   * @param fs filesystem object
+   * @param p path to create
    * @return Path
-   * @throws IOException
+   * @throws IOException e
    */
   public static Path create(final FileSystem fs, final Path p)
   throws IOException {
@@ -103,8 +103,8 @@
   /**
    * Checks to see if the specified file system is available
    * 
-   * @param fs
-   * @throws IOException
+   * @param fs filesystem
+   * @throws IOException e
    */
   public static void checkFileSystemAvailable(final FileSystem fs) 
   throws IOException {
@@ -133,10 +133,10 @@
   /**
    * Verifies current version of file system
    * 
-   * @param fs
-   * @param rootdir
+   * @param fs filesystem object
+   * @param rootdir root hbase directory
    * @return null if no version file exists, version string otherwise.
-   * @throws IOException
+   * @throws IOException e
    */
   public static String getVersion(FileSystem fs, Path rootdir)
   throws IOException {
@@ -161,7 +161,7 @@
    * @param rootdir root directory of HBase installation
    * @param message if true, issues a message on System.out 
    * 
-   * @throws IOException
+   * @throws IOException e
    */
   public static void checkVersion(FileSystem fs, Path rootdir, 
       boolean message) throws IOException {
@@ -190,9 +190,9 @@
   /**
    * Sets version of file system
    * 
-   * @param fs
-   * @param rootdir
-   * @throws IOException
+   * @param fs filesystem object
+   * @param rootdir hbase root
+   * @throws IOException e
    */
   public static void setVersion(FileSystem fs, Path rootdir) 
   throws IOException {
@@ -202,10 +202,10 @@
   /**
    * Sets version of file system
    * 
-   * @param fs
-   * @param rootdir
-   * @param version
-   * @throws IOException
+   * @param fs filesystem object
+   * @param rootdir hbase root directory
+   * @param version version to set
+   * @throws IOException e
    */
   public static void setVersion(FileSystem fs, Path rootdir, String version) 
   throws IOException {
@@ -241,9 +241,9 @@
 
   /**
    * If DFS, check safe mode and if so, wait until we clear it.
-   * @param conf
+   * @param conf configuration
    * @param wait Sleep between retries
-   * @throws IOException
+   * @throws IOException e
    */
   public static void waitOnSafeMode(final Configuration conf,
     final long wait)
@@ -293,10 +293,10 @@
   }
 
   /**
-   * @param c
+   * @param c configuration
    * @return Path to hbase root directory: i.e. <code>hbase.rootdir</code> from
    * configuration as a Path.
-   * @throws IOException 
+   * @throws IOException e
    */
   public static Path getRootDir(final Configuration c) throws IOException {
     return new Path(c.get(HConstants.HBASE_DIR));
@@ -308,7 +308,7 @@
    * @param fs file system
    * @param rootdir root directory of HBase installation
    * @return true if exists
-   * @throws IOException
+   * @throws IOException e
    */
   public static boolean rootRegionExists(FileSystem fs, Path rootdir)
   throws IOException {
@@ -321,40 +321,40 @@
    * Runs through the hbase rootdir and checks all stores have only
    * one file in them -- that is, they've been major compacted.  Looks
    * at root and meta tables too.
-   * @param fs
-   * @param hbaseRootDir
+   * @param fs filesystem
+   * @param hbaseRootDir hbase root directory
    * @return True if this hbase install is major compacted.
-   * @throws IOException
+   * @throws IOException e
    */
   public static boolean isMajorCompacted(final FileSystem fs,
       final Path hbaseRootDir)
   throws IOException {
     // Presumes any directory under hbase.rootdir is a table.
     FileStatus [] tableDirs = fs.listStatus(hbaseRootDir, new DirFilter(fs));
-    for (int i = 0; i < tableDirs.length; i++) {
+    for (FileStatus tableDir : tableDirs) {
       // Skip the .log directory.  All others should be tables.  Inside a table,
       // there are compaction.dir directories to skip.  Otherwise, all else
       // should be regions.  Then in each region, should only be family
       // directories.  Under each of these, should be one file only.
-      Path d = tableDirs[i].getPath();
+      Path d = tableDir.getPath();
       if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) {
         continue;
       }
-      FileStatus [] regionDirs = fs.listStatus(d, new DirFilter(fs));
-      for (int j = 0; j < regionDirs.length; j++) {
-        Path dd = regionDirs[j].getPath();
+      FileStatus[] regionDirs = fs.listStatus(d, new DirFilter(fs));
+      for (FileStatus regionDir : regionDirs) {
+        Path dd = regionDir.getPath();
         if (dd.getName().equals(HConstants.HREGION_COMPACTIONDIR_NAME)) {
           continue;
         }
         // Else its a region name.  Now look in region for families.
-        FileStatus [] familyDirs = fs.listStatus(dd, new DirFilter(fs));
-        for (int k = 0; k < familyDirs.length; k++) {
-          Path family = familyDirs[k].getPath();
+        FileStatus[] familyDirs = fs.listStatus(dd, new DirFilter(fs));
+        for (FileStatus familyDir : familyDirs) {
+          Path family = familyDir.getPath();
           // Now in family make sure only one file.
-          FileStatus [] familyStatus = fs.listStatus(family);
+          FileStatus[] familyStatus = fs.listStatus(family);
           if (familyStatus.length > 1) {
             LOG.debug(family.toString() + " has " + familyStatus.length +
-              " files.");
+                " files.");
             return false;
           }
         }
@@ -374,7 +374,7 @@
   public static int getTotalTableFragmentation(final HMaster master) 
   throws IOException {
     Map<String, Integer> map = getTableFragmentation(master);
-    return map != null && map.size() > 0 ? map.get("-TOTAL-").intValue() : -1;
+    return map != null && map.size() > 0 ? map.get("-TOTAL-") : -1;
   }
     
   /**
@@ -414,31 +414,31 @@
     DirFilter df = new DirFilter(fs);
     // presumes any directory under hbase.rootdir is a table
     FileStatus [] tableDirs = fs.listStatus(hbaseRootDir, df);
-    for (int i = 0; i < tableDirs.length; i++) {
+    for (FileStatus tableDir : tableDirs) {
       // Skip the .log directory.  All others should be tables.  Inside a table,
       // there are compaction.dir directories to skip.  Otherwise, all else
       // should be regions.  Then in each region, should only be family
       // directories.  Under each of these, should be one file only.
-      Path d = tableDirs[i].getPath();
+      Path d = tableDir.getPath();
       if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) {
         continue;
       }
       int cfCount = 0;
       int cfFrag = 0;
-      FileStatus [] regionDirs = fs.listStatus(d, df);
-      for (int j = 0; j < regionDirs.length; j++) {
-        Path dd = regionDirs[j].getPath();
+      FileStatus[] regionDirs = fs.listStatus(d, df);
+      for (FileStatus regionDir : regionDirs) {
+        Path dd = regionDir.getPath();
         if (dd.getName().equals(HConstants.HREGION_COMPACTIONDIR_NAME)) {
           continue;
         }
         // else its a region name, now look in region for families
-        FileStatus [] familyDirs = fs.listStatus(dd, df);
-        for (int k = 0; k < familyDirs.length; k++) {
+        FileStatus[] familyDirs = fs.listStatus(dd, df);
+        for (FileStatus familyDir : familyDirs) {
           cfCount++;
           cfCountTotal++;
-          Path family = familyDirs[k].getPath();
+          Path family = familyDir.getPath();
           // now in family make sure only one file
-          FileStatus [] familyStatus = fs.listStatus(family);
+          FileStatus[] familyStatus = fs.listStatus(family);
           if (familyStatus.length > 1) {
             cfFrag++;
             cfFragTotal++;
@@ -446,21 +446,19 @@
         }
       }
       // compute percentage per table and store in result list
-      frags.put(d.getName(), Integer.valueOf(
-        Math.round((float) cfFrag / cfCount * 100)));
+      frags.put(d.getName(), Math.round((float) cfFrag / cfCount * 100));
     }
     // set overall percentage for all tables
-    frags.put("-TOTAL-", Integer.valueOf(
-      Math.round((float) cfFragTotal / cfCountTotal * 100)));
+    frags.put("-TOTAL-", Math.round((float) cfFragTotal / cfCountTotal * 100));
     return frags;
   }
 
   /**
    * Expects to find -ROOT- directory.
-   * @param fs
-   * @param hbaseRootDir
+   * @param fs filesystem
+   * @param hbaseRootDir hbase root directory
    * @return True if this a pre020 layout.
-   * @throws IOException
+   * @throws IOException e
    */
   public static boolean isPre020FileLayout(final FileSystem fs,
     final Path hbaseRootDir)
@@ -476,40 +474,40 @@
    * at root and meta tables too.  This version differs from
    * {@link #isMajorCompacted(FileSystem, Path)} in that it expects a
    * pre-0.20.0 hbase layout on the filesystem.  Used migrating.
-   * @param fs
-   * @param hbaseRootDir
+   * @param fs filesystem
+   * @param hbaseRootDir hbase root directory
    * @return True if this hbase install is major compacted.
-   * @throws IOException
+   * @throws IOException e
    */
   public static boolean isMajorCompactedPre020(final FileSystem fs,
       final Path hbaseRootDir)
   throws IOException {
     // Presumes any directory under hbase.rootdir is a table.
     FileStatus [] tableDirs = fs.listStatus(hbaseRootDir, new DirFilter(fs));
-    for (int i = 0; i < tableDirs.length; i++) {
+    for (FileStatus tableDir : tableDirs) {
       // Inside a table, there are compaction.dir directories to skip.
       // Otherwise, all else should be regions.  Then in each region, should
       // only be family directories.  Under each of these, should be a mapfile
       // and info directory and in these only one file.
-      Path d = tableDirs[i].getPath();
+      Path d = tableDir.getPath();
       if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) {
         continue;
       }
-      FileStatus [] regionDirs = fs.listStatus(d, new DirFilter(fs));
-      for (int j = 0; j < regionDirs.length; j++) {
-        Path dd = regionDirs[j].getPath();
+      FileStatus[] regionDirs = fs.listStatus(d, new DirFilter(fs));
+      for (FileStatus regionDir : regionDirs) {
+        Path dd = regionDir.getPath();
         if (dd.getName().equals(HConstants.HREGION_COMPACTIONDIR_NAME)) {
           continue;
         }
         // Else its a region name.  Now look in region for families.
-        FileStatus [] familyDirs = fs.listStatus(dd, new DirFilter(fs));
-        for (int k = 0; k < familyDirs.length; k++) {
-          Path family = familyDirs[k].getPath();
-          FileStatus [] infoAndMapfile = fs.listStatus(family);
+        FileStatus[] familyDirs = fs.listStatus(dd, new DirFilter(fs));
+        for (FileStatus familyDir : familyDirs) {
+          Path family = familyDir.getPath();
+          FileStatus[] infoAndMapfile = fs.listStatus(family);
           // Assert that only info and mapfile in family dir.
           if (infoAndMapfile.length != 0 && infoAndMapfile.length != 2) {
             LOG.debug(family.toString() +
-              " has more than just info and mapfile: " + infoAndMapfile.length);
+                " has more than just info and mapfile: " + infoAndMapfile.length);
             return false;
           }
           // Make sure directory named info or mapfile.
@@ -518,16 +516,16 @@
                 infoAndMapfile[ll].getPath().getName().equals("mapfiles"))
               continue;
             LOG.debug("Unexpected directory name: " +
-              infoAndMapfile[ll].getPath());
+                infoAndMapfile[ll].getPath());
             return false;
           }
           // Now in family, there are 'mapfile' and 'info' subdirs.  Just
           // look in the 'mapfile' subdir.
-          FileStatus [] familyStatus =
-            fs.listStatus(new Path(family, "mapfiles"));
+          FileStatus[] familyStatus =
+              fs.listStatus(new Path(family, "mapfiles"));
           if (familyStatus.length > 1) {
             LOG.debug(family.toString() + " has " + familyStatus.length +
-              " files.");
+                " files.");
             return false;
           }
         }

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/FileSystemVersionException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/FileSystemVersionException.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/FileSystemVersionException.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/FileSystemVersionException.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Hash.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Hash.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Hash.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Hash.java Mon Mar  8 22:25:06 2010
@@ -1,4 +1,6 @@
 /**
+ * Copyright 2010 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/InfoServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/InfoServer.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/InfoServer.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/InfoServer.java Mon Mar  8 22:25:06 2010
@@ -1,4 +1,6 @@
 /**
+ * Copyright 2010 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,17 +17,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.util;
 
-import java.io.IOException;
-import java.net.URL;
-import java.util.Map;
+package org.apache.hadoop.hbase.util;
 
 import org.apache.hadoop.http.HttpServer;
 import org.mortbay.jetty.handler.ContextHandlerCollection;
 import org.mortbay.jetty.servlet.Context;
 import org.mortbay.jetty.servlet.DefaultServlet;
 
+import java.io.IOException;
+import java.net.URL;
+import java.util.Map;
+
 /**
  * Create a Jetty embedded server to answer http requests. The primary goal
  * is to serve up status information for the server.
@@ -39,11 +42,11 @@
    * Create a status server on the given port.
   * The jsp scripts are taken from src/webapps/<code>name</code>.
    * @param name The name of the server
-   * @param bindAddress
+   * @param bindAddress address to bind to
    * @param port The port to use on the server
    * @param findPort whether the server should start at the given port and 
    * increment by 1 until it finds a free port.
-   * @throws IOException
+   * @throws IOException e
    */
   public InfoServer(String name, String bindAddress, int port, boolean findPort)
   throws IOException {
@@ -108,11 +111,11 @@
    * Get the path for this web app
    * @param webappName web app
    * @return path
-   * @throws IOException
+   * @throws IOException e
    */
   public static String getWebAppDir(final String webappName)
   throws IOException {
-    String webappDir = null;
+    String webappDir;
     webappDir = getWebAppsPath("webapps/" + webappName);
     return webappDir;
   }

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2007 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -86,6 +86,7 @@
     a = b = c = (0x00000000deadbeefL + length + initval) & INT_MASK;
     int offset = 0;
     for (; length > 12; offset += 12, length -= 12) {
+      //noinspection PointlessArithmeticExpression
       a = (a + (key[offset + 0]    & BYTE_MASK)) & INT_MASK;
       a = (a + (((key[offset + 1]  & BYTE_MASK) <<  8) & INT_MASK)) & INT_MASK;
       a = (a + (((key[offset + 2]  & BYTE_MASK) << 16) & INT_MASK)) & INT_MASK;
@@ -186,6 +187,7 @@
     case  2:
       a = (a + (((key[offset + 1]  & BYTE_MASK) <<  8) & INT_MASK)) & INT_MASK;
     case  1:
+      //noinspection PointlessArithmeticExpression
       a = (a + (key[offset + 0]    & BYTE_MASK)) & INT_MASK;
       break;
     case  0:
@@ -242,7 +244,7 @@
   /**
    * Compute the hash of the specified file
    * @param args name of file to compute hash of.
-   * @throws IOException
+   * @throws IOException e
    */
   public static void main(String[] args) throws IOException {
     if (args.length != 1) {

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Keying.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Keying.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Keying.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Keying.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2007 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Merge.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Merge.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Merge.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Merge.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -20,9 +20,6 @@
 
 package org.apache.hadoop.hbase.util;
 
-import java.io.IOException;
-import java.util.List;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -44,6 +41,9 @@
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
+import java.io.IOException;
+import java.util.List;
+
 /**
  * Utility that can merge any two regions in the same table: adjacent,
  * overlapping or disjoint.
@@ -64,7 +64,7 @@
   }
 
   /**
-   * @param conf
+   * @param conf configuration
    */
   public Merge(Configuration conf) {
     this.mergeInfo = null;
@@ -219,7 +219,7 @@
           Bytes.toString(region1) + " in " + meta1);
     }
 
-    HRegion metaRegion2 = null;
+    HRegion metaRegion2;
     if (Bytes.equals(meta1.getRegionName(), meta2.getRegionName())) {
       metaRegion2 = metaRegion1;
     } else {
@@ -243,7 +243,7 @@
       throw new IOException("Could not find meta region for " +
           Bytes.toString(merged.getRegionName()));
     }
-    HRegion mergeMeta = null;
+    HRegion mergeMeta;
     if (Bytes.equals(mergedInfo.getRegionName(), meta1.getRegionName())) {
       mergeMeta = metaRegion1;
     } else if (Bytes.equals(mergedInfo.getRegionName(), meta2.getRegionName())) {
@@ -370,13 +370,8 @@
         "Usage: bin/hbase merge <table-name> <region-1> <region-2>\n");
   }
   
-  /**
-   * Main program
-   * 
-   * @param args
-   */
   public static void main(String[] args) {
-    int status = 0;
+    int status;
     try {
       status = ToolRunner.run(HBaseConfiguration.create(), new Merge(), args);
     } catch (Exception e) {

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/MetaUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/MetaUtils.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/MetaUtils.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/MetaUtils.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -20,13 +20,6 @@
 
 package org.apache.hadoop.hbase.util;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -35,8 +28,6 @@
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTable;
@@ -47,6 +38,15 @@
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.KeyValue;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
 
 /**
  * Contains utility methods for manipulating HBase meta tables.
@@ -66,13 +66,16 @@
     new TreeMap<byte [], HRegion>(Bytes.BYTES_COMPARATOR));
   
   /** Default constructor 
-   * @throws IOException */
+   * @throws IOException e
+   */
   public MetaUtils() throws IOException {
     this(HBaseConfiguration.create());
   }
   
-  /** @param conf Configuration 
-   * @throws IOException */
+  /**
+   * @param conf Configuration
+   * @throws IOException e
+   */
   public MetaUtils(Configuration conf) throws IOException {
     this.conf = conf;
     conf.setInt("hbase.client.retries.number", 1);
@@ -82,7 +85,7 @@
 
   /**
    * Verifies that DFS is available and that HBase is off-line.
-   * @throws IOException
+   * @throws IOException e
    */
   private void initialize() throws IOException {
     this.fs = FileSystem.get(this.conf);
@@ -90,8 +93,10 @@
     this.rootdir = FSUtils.getRootDir(this.conf);
   }
 
-  /** @return the HLog 
-   * @throws IOException */
+  /**
+   * @return the HLog
+   * @throws IOException e
+   */
   public synchronized HLog getLog() throws IOException {
     if (this.log == null) {
       Path logdir = new Path(this.fs.getHomeDirectory(),
@@ -105,7 +110,7 @@
   
   /**
    * @return HRegion for root region
-   * @throws IOException
+   * @throws IOException e
    */
   public HRegion getRootRegion() throws IOException {
     if (this.rootRegion == null) {
@@ -119,7 +124,7 @@
    * 
    * @param metaInfo HRegionInfo for meta region
    * @return meta HRegion
-   * @throws IOException
+   * @throws IOException e
    */
   public HRegion getMetaRegion(HRegionInfo metaInfo) throws IOException {
     HRegion meta = metaRegions.get(metaInfo.getRegionName());
@@ -178,7 +183,7 @@
      * 
      * @param info HRegionInfo for row
      * @return false to terminate the scan
-     * @throws IOException
+     * @throws IOException e
      */
     public boolean processRow(HRegionInfo info) throws IOException;
   }
@@ -188,7 +193,7 @@
    * the HRegionInfo of the meta region.
    * 
    * @param listener method to be called for each meta region found
-   * @throws IOException
+   * @throws IOException e
    */
   public void scanRootRegion(ScannerListener listener) throws IOException {
     // Open root region so we can scan it
@@ -201,9 +206,9 @@
   /**
    * Scan the passed in metaregion <code>m</code> invoking the passed
    * <code>listener</code> per row found.
-   * @param r
-   * @param listener
-   * @throws IOException
+   * @param r region
+   * @param listener scanner listener
+   * @throws IOException e
    */
   public void scanMetaRegion(final HRegion r, final ScannerListener listener)
   throws IOException {
@@ -247,7 +252,7 @@
    * 
    * @param metaRegionInfo HRegionInfo for meta region
    * @param listener method to be called for each meta region found
-   * @throws IOException
+   * @throws IOException e
    */
   public void scanMetaRegion(HRegionInfo metaRegionInfo,
     ScannerListener listener)
@@ -285,7 +290,7 @@
    * @param row Row in the catalog .META. table whose HRegionInfo's offline
    * status we want to change.
    * @param onlineOffline Pass <code>true</code> to OFFLINE the region.
-   * @throws IOException
+   * @throws IOException e
    */
   public static void changeOnlineStatus (final Configuration c,
       final byte [] row, final boolean onlineOffline)
@@ -320,9 +325,9 @@
   /**
    * Offline version of the online TableOperation,
    * org.apache.hadoop.hbase.master.AddColumn.
-   * @param tableName
+   * @param tableName table name
    * @param hcd Add this column to <code>tableName</code>
-   * @throws IOException 
+   * @throws IOException e
    */
   public void addColumn(final byte [] tableName,
       final HColumnDescriptor hcd)
@@ -354,9 +359,9 @@
   /**
    * Offline version of the online TableOperation,
    * org.apache.hadoop.hbase.master.DeleteColumn.
-   * @param tableName
+   * @param tableName table name
    * @param columnFamily Name of column name to remove.
-   * @throws IOException
+   * @throws IOException e
    */
   public void deleteColumn(final byte [] tableName,
       final byte [] columnFamily) throws IOException {
@@ -392,9 +397,9 @@
   /**
    * Update COL_REGIONINFO in meta region r with HRegionInfo hri
    * 
-   * @param r
-   * @param hri
-   * @throws IOException
+   * @param r region
+   * @param hri region info
+   * @throws IOException e
    */
   public void updateMETARegionInfo(HRegion r, final HRegionInfo hri) 
   throws IOException {
@@ -445,7 +450,7 @@
    * @return List of {@link HRegionInfo} rows found in the ROOT or META
    * catalog table.
    * @param tableName Name of table to go looking for.
-   * @throws IOException
+   * @throws IOException e
    * @see #getMetaRegion(HRegionInfo)
    */
   public List<HRegionInfo> getMETARows(final byte [] tableName)

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/MurmurHash.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/MurmurHash.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/MurmurHash.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/MurmurHash.java Mon Mar  8 22:25:06 2010
@@ -1,4 +1,6 @@
 /**
+ * Copyright 2010 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -49,6 +51,7 @@
       k = k << 8;
       k = k | (data[i_4 + 1] & 0xff);
       k = k << 8;
+      //noinspection PointlessArithmeticExpression
       k = k | (data[i_4 + 0] & 0xff);
       k *= m;
       k ^= k >>> r;

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Pair.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Pair.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Pair.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Pair.java Mon Mar  8 22:25:06 2010
@@ -1,3 +1,23 @@
+/**
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hbase.util;
 
 import java.io.Serializable;
@@ -22,8 +42,8 @@
 
   /**
    * Constructor
-   * @param a
-   * @param b
+   * @param a operand
+   * @param b operand
    */
   public Pair(T1 a, T2 b)
   {
@@ -33,7 +53,7 @@
 
   /**
    * Replace the first element of the pair.
-   * @param a
+   * @param a operand
    */
   public void setFirst(T1 a)
   {
@@ -42,7 +62,7 @@
 
   /**
    * Replace the second element of the pair.
-   * @param b 
+   * @param b operand
    */
   public void setSecond(T2 b)
   {

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Sleeper.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2007 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -19,11 +19,11 @@
  */
 package org.apache.hadoop.hbase.util;
 
-import java.util.concurrent.atomic.AtomicBoolean;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import java.util.concurrent.atomic.AtomicBoolean;
+
 /**
  * Sleeper for current thread.
  * Sleeps for passed period.  Also checks passed boolean and if interrupted,
@@ -33,11 +33,11 @@
 public class Sleeper {
   private final Log LOG = LogFactory.getLog(this.getClass().getName());
   private final int period;
-  private AtomicBoolean stop;
+  private final AtomicBoolean stop;
   
   /**
-   * @param sleep
-   * @param stop
+   * @param sleep sleep time in milliseconds
+   * @param stop flag for when we stop
    */
   public Sleeper(final int sleep, final AtomicBoolean stop) {
     this.period = sleep;

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/SoftValue.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/SoftValue.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/SoftValue.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/SoftValue.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/SoftValueMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/SoftValueMap.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/SoftValueMap.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/SoftValueMap.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -51,9 +51,10 @@
    * ReferenceQueue.
    * @return How many references cleared.
    */
+  @SuppressWarnings({"unchecked"})
   public int checkReferences() {
     int i = 0;
-    for (Object obj = null; (obj = this.rq.poll()) != null;) {
+    for (Object obj; (obj = this.rq.poll()) != null;) {
       i++;
       this.internalMap.remove(((SoftValue<K,V>)obj).getKey());
     }
@@ -72,6 +73,7 @@
     throw new RuntimeException("Not implemented");
   }
   
+  @SuppressWarnings({"SuspiciousMethodCalls"})
   public V get(Object key) {
     checkReferences();
     SoftValue<K,V> value = this.internalMap.get(key);

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -48,13 +48,15 @@
   
   /**
    * Constructor
-   * @param c
+   * @param c comparator
    */
   public SoftValueSortedMap(final Comparator<K> c) {
     this(new TreeMap<K, SoftValue<K,V>>(c));
   }
   
-  /** For headMap and tailMap support */
+  /** For headMap and tailMap support
+   * @param original object to wrap
+   */
   private SoftValueSortedMap(SortedMap<K,SoftValue<K,V>> original) {
     this.internalMap = original;
   }
@@ -67,8 +69,9 @@
    */
   public int checkReferences() {
     int i = 0;
-    for (Object obj = null; (obj = this.rq.poll()) != null;) {
+    for (Object obj; (obj = this.rq.poll()) != null;) {
       i++;
+      //noinspection unchecked
       this.internalMap.remove(((SoftValue<K,V>)obj).getKey());
     }
     return i;
@@ -86,6 +89,7 @@
     throw new RuntimeException("Not implemented");
   }
   
+  @SuppressWarnings({"SuspiciousMethodCalls"})
   public V get(Object key) {
     checkReferences();
     SoftValue<K,V> value = this.internalMap.get(key);

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Strings.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Strings.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Strings.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Strings.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -20,7 +20,7 @@
 package org.apache.hadoop.hbase.util;
 
 /**
- * Utillity for Strings.
+ * Utility for Strings.
  */
 public class Strings {
   public final static String DEFAULT_SEPARATOR = "=";

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Threads.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Threads.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Threads.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Threads.java Mon Mar  8 22:25:06 2010
@@ -1,4 +1,6 @@
 /**
+ * Copyright 2010 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -17,11 +19,11 @@
  */
 package org.apache.hadoop.hbase.util;
 
-import java.lang.Thread.UncaughtExceptionHandler;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import java.lang.Thread.UncaughtExceptionHandler;
+
 /**
  * Thread Utility
  */
@@ -30,8 +32,8 @@
   
   /**
    * Utility method that sets name, daemon status and starts passed thread.
-   * @param t
-   * @param name
+   * @param t thread to frob
+   * @param name new name
    * @return Returns the passed Thread <code>t</code>.
    */
   public static Thread setDaemonThreadRunning(final Thread t,
@@ -41,8 +43,8 @@
     
   /**
    * Utility method that sets name, daemon status and starts passed thread.
-   * @param t
-   * @param name
+   * @param t thread to frob
+   * @param name new name
    * @param handler A handler to set on the thread.  Pass null if want to
    * use default handler.
    * @return Returns the passed Thread <code>t</code>.

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java Mon Mar  8 22:25:06 2010
@@ -1,4 +1,6 @@
-/*
+/**
+ * Copyright 2010 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -35,7 +37,7 @@
 
   /**
    * Get the meta-data for the hbase package.
-   * @return
+   * @return package
    */
   static Package getPackage() {
     return myPackage;
@@ -81,9 +83,6 @@
     return version != null ? version.url() : "Unknown";
   }
   
-  /**
-   * @param args
-   */
   public static void main(String[] args) {
     System.out.println("HBase " + getVersion());
     System.out.println("Subversion " + getUrl() + " -r " + getRevision());

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Writables.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Writables.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Writables.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/util/Writables.java Mon Mar  8 22:25:06 2010
@@ -1,4 +1,6 @@
 /**
+ * Copyright 2010 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -17,25 +19,25 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.io.DataInputBuffer;
+import org.apache.hadoop.io.Writable;
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.Writable;
-
 /**
  * Utility class with methods for manipulating Writable objects
  */
 public class Writables {
   /**
-   * @param w
+   * @param w writable
    * @return The bytes of <code>w</code> gotten by running its 
    * {@link Writable#write(java.io.DataOutput)} method.
-   * @throws IOException
+   * @throws IOException e
    * @see #getWritable(byte[], Writable)
    */
   public static byte [] getBytes(final Writable w) throws IOException {
@@ -59,13 +61,13 @@
   /**
    * Set bytes into the passed Writable by calling its
    * {@link Writable#readFields(java.io.DataInput)}.
-   * @param bytes
+   * @param bytes serialized bytes
    * @param w An empty Writable (usually made by calling the null-arg
    * constructor).
    * @return The passed Writable after its readFields has been called fed
    * by the passed <code>bytes</code> array or IllegalArgumentException
    * if passed null or an empty <code>bytes</code> array.
-   * @throws IOException
+   * @throws IOException e
    * @throws IllegalArgumentException
    */
   public static Writable getWritable(final byte [] bytes, final Writable w)
@@ -76,15 +78,15 @@
   /**
    * Set bytes into the passed Writable by calling its
    * {@link Writable#readFields(java.io.DataInput)}.
-   * @param bytes
-   * @param offset
-   * @param length
+   * @param bytes serialized bytes
+   * @param offset offset into array
+   * @param length length of data
    * @param w An empty Writable (usually made by calling the null-arg
    * constructor).
    * @return The passed Writable after its readFields has been called fed
    * by the passed <code>bytes</code> array or IllegalArgumentException
    * if passed null or an empty <code>bytes</code> array.
-   * @throws IOException
+   * @throws IOException e
    * @throws IllegalArgumentException
    */
   public static Writable getWritable(final byte [] bytes, final int offset,
@@ -108,9 +110,9 @@
   }
 
   /**
-   * @param bytes
+   * @param bytes serialized bytes
    * @return A HRegionInfo instance built out of passed <code>bytes</code>.
-   * @throws IOException
+   * @throws IOException e
    */
   public static HRegionInfo getHRegionInfo(final byte [] bytes)
   throws IOException {
@@ -118,15 +120,15 @@
   }
  
   /**
-   * @param bytes
+   * @param bytes serialized bytes
    * @return A HRegionInfo instance built out of passed <code>bytes</code>
    * or <code>null</code> if passed bytes are null or an empty array.
-   * @throws IOException
+   * @throws IOException e
    */
   public static HRegionInfo getHRegionInfoOrNull(final byte [] bytes)
   throws IOException {
     return (bytes == null || bytes.length <= 0)?
-      (HRegionInfo)null: getHRegionInfo(bytes);
+        null : getHRegionInfo(bytes);
   }
 
   /**
@@ -134,7 +136,7 @@
    * @param src Source Writable
    * @param tgt Target Writable
    * @return The target Writable.
-   * @throws IOException
+   * @throws IOException e
    */
   public static Writable copyWritable(final Writable src, final Writable tgt)
   throws IOException {
@@ -146,7 +148,7 @@
    * @param bytes Source Writable
    * @param tgt Target Writable
    * @return The target Writable.
-   * @throws IOException
+   * @throws IOException e
    */
   public static Writable copyWritable(final byte [] bytes, final Writable tgt)
   throws IOException {

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -19,19 +19,6 @@
  */
 package org.apache.hadoop.hbase.zookeeper;
 
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.PrintWriter;
-import java.net.InetAddress;
-import java.net.NetworkInterface;
-import java.net.UnknownHostException;
-import java.util.ArrayList;
-import java.util.Enumeration;
-import java.util.List;
-import java.util.Properties;
-import java.util.Map.Entry;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -44,6 +31,19 @@
 import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
 import org.apache.zookeeper.server.quorum.QuorumPeerMain;
 
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintWriter;
+import java.net.InetAddress;
+import java.net.NetworkInterface;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.List;
+import java.util.Map.Entry;
+import java.util.Properties;
+
 /**
  * HBase's version of ZooKeeper's QuorumPeer. When HBase is set to manage
  * ZooKeeper, this class is used to start up QuorumPeer instances. By doing

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKServerTool.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKServerTool.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKServerTool.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKServerTool.java Mon Mar  8 22:25:06 2010
@@ -1,12 +1,32 @@
-package org.apache.hadoop.hbase.zookeeper;
+/**
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
-import java.util.Properties;
-import java.util.Map.Entry;
+package org.apache.hadoop.hbase.zookeeper;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 
+import java.util.Map.Entry;
+import java.util.Properties;
+
 /**
  * Tool for reading ZooKeeper servers from HBase XML configuation and producing
  * a line-by-line list for use by bash scripts.

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWrapper.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWrapper.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWrapper.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWrapper.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -127,6 +127,7 @@
         String host = value.substring(0, value.indexOf(':'));
         servers.add(host);
         try {
+          //noinspection ResultOfMethodCallIgnored
           InetAddress.getByName(host);
           anyValid = true;
         } catch (UnknownHostException e) {
@@ -166,6 +167,7 @@
   }
 
   /** @return String dump of everything in ZooKeeper. */
+  @SuppressWarnings({"ConstantConditions"})
   public String dump() {
     StringBuilder sb = new StringBuilder();
     sb.append("\nHBase tree in ZooKeeper is rooted at ").append(parentZNode);
@@ -201,7 +203,7 @@
    */
   public String[] getServerStats(String server) 
   throws IOException {
-    return getServerStats(server, 1 * 60 * 1000);
+    return getServerStats(server, 60 * 1000);
   }
   
   /**
@@ -392,8 +394,7 @@
 
     String addressString = Bytes.toString(data);
     LOG.debug("Read ZNode " + znode + " got " + addressString);
-    HServerAddress address = new HServerAddress(addressString);
-    return address;
+    return new HServerAddress(addressString);
   }
 
   private boolean ensureExists(final String znode) {



Mime
View raw message