hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From raw...@apache.org
Subject svn commit: r920548 [2/5] - in /hadoop/hbase/trunk: ./ core/src/main/java/org/apache/hadoop/hbase/client/ core/src/main/java/org/apache/hadoop/hbase/filter/ core/src/main/java/org/apache/hadoop/hbase/ipc/ core/src/main/java/org/apache/hadoop/hbase/mast...
Date Mon, 08 Mar 2010 22:25:09 GMT
Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java Mon Mar  8 22:25:06 2010
@@ -1,12 +1,12 @@
 package org.apache.hadoop.hbase.client;
 
-import java.io.IOException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.util.Bytes;
 
+import java.io.IOException;
+
 /**
  * Scanner class that contains the <code>.META.</code> table scanning logic 
  * and uses a Retryable scanner. Provided visitors will be called
@@ -18,9 +18,9 @@
    * Scans the meta table and calls a visitor on each RowResult and uses a empty
    * start row value as table name.
    * 
-   * @param configuration
+   * @param configuration conf
    * @param visitor A custom visitor
-   * @throws IOException
+   * @throws IOException e
    */
   public static void metaScan(Configuration configuration,
       MetaScannerVisitor visitor)
@@ -32,10 +32,10 @@
    * Scans the meta table and calls a visitor on each RowResult. Uses a table
    * name to locate meta regions.
    * 
-   * @param configuration
-   * @param visitor
-   * @param tableName
-   * @throws IOException
+   * @param configuration config
+   * @param visitor visitor object
+   * @param tableName table name
+   * @throws IOException e
    */
   public static void metaScan(Configuration configuration,
       MetaScannerVisitor visitor, byte[] tableName)
@@ -46,7 +46,7 @@
           HRegionInfo.createRegionName(tableName, null, ZEROES);
       
     // Scan over each meta region
-    ScannerCallable callable = null;
+    ScannerCallable callable;
     int rows = configuration.getInt("hbase.meta.scanner.caching", 100); 
     do {
       Scan scan = new Scan(startRow).addFamily(CATALOG_FAMILY);
@@ -59,10 +59,10 @@
           //we have all the rows here 
           Result [] rrs = connection.getRegionServerWithRetries(callable);
           if (rrs == null || rrs.length == 0 || rrs[0].size() == 0) {
-            break done; //exit completely
+            break; //exit completely
           }
-          for (int i = 0; i < rrs.length; i++) {
-            if (!visitor.processRow(rrs[i]))
+          for (Result rr : rrs) {
+            if (!visitor.processRow(rr))
               break done; //exit completely
           }
           //here, we didn't break anywhere. Check if we have more rows
@@ -86,9 +86,9 @@
      * Implementations can return false to stop the region's loop if it becomes
      * unnecessary for some reason.
      * 
-     * @param rowResult
+     * @param rowResult result
      * @return A boolean to know if it should continue to loop in the region
-     * @throws IOException
+     * @throws IOException e
      */
     public boolean processRow(Result rowResult) throws IOException;
   }

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/MultiPut.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/MultiPut.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/MultiPut.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/MultiPut.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -20,27 +20,37 @@
 
 package org.apache.hadoop.hbase.client;
 
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.HServerAddress;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.Writable;
 
+import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.io.DataInput;
-import java.util.List;
-import java.util.Map;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.List;
+import java.util.Map;
 import java.util.TreeMap;
 
+/**
+ * Data type class for putting multiple regions worth of puts in one RPC.
+ */
 public class MultiPut implements Writable {
   public HServerAddress address; // client code ONLY
 
   // map of regions to lists of puts for that region.
   public Map<byte[], List<Put> > puts = new TreeMap<byte[], List<Put>>(Bytes.BYTES_COMPARATOR);
 
+  /**
+   * Writable constructor only.
+   */
   public MultiPut() {}
 
+  /**
+   * MultiPut for putting multiple regions worth of puts in one RPC.
+   * @param a address
+   */
   public MultiPut(HServerAddress a) {
     address = a;
   }

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/MultiPutResponse.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/MultiPutResponse.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/MultiPutResponse.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/MultiPutResponse.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -20,22 +20,23 @@
 
 package org.apache.hadoop.hbase.client;
 
-import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.Writable;
 
+import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.io.DataInput;
 import java.util.Map;
-import java.util.List;
-import java.util.ArrayList;
 import java.util.TreeMap;
 
+/**
+ * Response class for MultiPut.
+ */
 public class MultiPutResponse implements Writable {
 
-  public MultiPut request; // used in client code ONLY
+  protected MultiPut request; // used in client code ONLY
 
-  public Map<byte[], Integer> answers = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
+  protected Map<byte[], Integer> answers = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
 
   public MultiPutResponse() {}
 

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2007 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Put.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Put.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Put.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Put.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -20,6 +20,13 @@
 
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.io.HeapSize;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClassSize;
+import org.apache.hadoop.io.Writable;
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
@@ -29,14 +36,6 @@
 import java.util.Map;
 import java.util.TreeMap;
 
-import org.apache.hadoop.io.Writable;
-
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.io.HeapSize;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.ClassSize;
-
 
 /** 
  * Used to perform Put operations for a single row.
@@ -128,6 +127,7 @@
    * @param family family name
    * @param qualifier column qualifier
    * @param value column value
+   * @return this
    */
   public Put add(byte [] family, byte [] qualifier, byte [] value) {
     return add(family, qualifier, this.timestamp, value);
@@ -140,6 +140,7 @@
    * @param qualifier column qualifier
    * @param ts version timestamp
    * @param value column value
+   * @return this
    */
   public Put add(byte [] family, byte [] qualifier, long ts, byte [] value) {
     List<KeyValue> list = getKeyValueList(family);
@@ -153,7 +154,9 @@
    * Add the specified KeyValue to this Put operation.  Operation assumes that 
    * the passed KeyValue is immutable and its backing array will not be modified
    * for the duration of this Put.
-   * @param kv
+   * @param kv individual KeyValue
+   * @return this
+   * @throws java.io.IOException e
    */
   public Put add(KeyValue kv) throws IOException{
     byte [] family = kv.getFamily();
@@ -172,13 +175,9 @@
     return this;
   }
 
-  /**
+  /*
    * Create a KeyValue with this objects row key and the Put identifier.
    * 
-   * @param family
-   * @param qualifier
-   * @param ts
-   * @param value
    * @return a KeyValue with this objects row key and the Put identifier.
    */
   private KeyValue createPutKeyValue(byte[] family, byte[] qualifier, long ts,
@@ -192,8 +191,8 @@
    * a value assigned to the given family & qualifier.
    * Both given arguments must match the KeyValue object to return true.
    * 
-   * @param family
-   * @param qualifier
+   * @param family column family
+   * @param qualifier column qualifier
    * @return returns true if the given family and qualifier already has an
    * existing KeyValue object in the family map.
    */
@@ -206,9 +205,9 @@
    * a value assigned to the given family, qualifier and timestamp.
    * All 3 given arguments must match the KeyValue object to return true.
    * 
-   * @param family
-   * @param qualifier
-   * @param ts
+   * @param family column family
+   * @param qualifier column qualifier
+   * @param ts timestamp
    * @return returns true if the given family, qualifier and timestamp already has an
    * existing KeyValue object in the family map.
    */
@@ -221,9 +220,9 @@
    * a value assigned to the given family, qualifier and timestamp.
    * All 3 given arguments must match the KeyValue object to return true.
    * 
-   * @param family
-   * @param qualifier
-   * @param value
+   * @param family column family
+   * @param qualifier column qualifier
+   * @param value value to check
    * @return returns true if the given family, qualifier and value already has an
    * existing KeyValue object in the family map.
    */
@@ -236,10 +235,10 @@
    * the given value assigned to the given family, qualifier and timestamp.
    * All 4 given arguments must match the KeyValue object to return true.
    * 
-   * @param family
-   * @param qualifier
-   * @param ts
-   * @param value
+   * @param family column family
+   * @param qualifier column qualifier
+   * @param ts timestamp
+   * @param value value to check
    * @return returns true if the given family, qualifier timestamp and value 
    * already has an existing KeyValue object in the family map.
    */
@@ -247,7 +246,7 @@
       return has(family, qualifier, ts, value, false, false);
   }
   
-  /**
+  /*
    * Private method to determine if this object's familyMap contains 
    * the given value assigned to the given family, qualifier and timestamp
    * respecting the 2 boolean arguments
@@ -264,9 +263,14 @@
   private boolean has(byte [] family, byte [] qualifier, long ts, byte [] value, 
       boolean ignoreTS, boolean ignoreValue) {
     List<KeyValue> list = getKeyValueList(family);
-    if (list.size() == 0 ) {
+    if (list.size() == 0) {
       return false;
     }
+    // Boolean analysis of ignoreTS/ignoreValue.
+    // T T => 2
+    // T F => 3 (first is always true)
+    // F T => 2
+    // F F => 1
     if (!ignoreTS && !ignoreValue) {
       KeyValue kv = createPutKeyValue(family, qualifier, ts, value);
       return (list.contains(kv));
@@ -277,20 +281,14 @@
           return true;
         }
       }
-    } else if (ignoreTS) {
+    } else {
+      // ignoreTS is always true
       for (KeyValue kv: list) {
       if (Arrays.equals(kv.getFamily(), family) && Arrays.equals(kv.getQualifier(), qualifier)
               && Arrays.equals(kv.getValue(), value)) {
           return true;
         }
       }
-    } else {
-      for (KeyValue kv: list) {
-      if (Arrays.equals(kv.getFamily(), family) && Arrays.equals(
-          kv.getQualifier(), qualifier)) {
-          return true;
-        }
-    }
     }
     return false;
   }
@@ -298,8 +296,8 @@
   /**
    * Returns a list of all KeyValue objects with matching column family and qualifier.
    * 
-   * @param family
-   * @param qualifier
+   * @param family column family
+   * @param qualifier column qualifier
    * @return a list of KeyValue objects with the matching family and qualifier, 
    * returns an empty list if one doesnt exist for the given family.
    */
@@ -317,7 +315,7 @@
    * Creates an empty list if one doesnt exist for the given column family
    * or else it returns the associated list of KeyValue objects.
    * 
-   * @param family
+   * @param family column family
    * @return a list of KeyValue objects, returns an empty list if one doesnt exist.
    */
   private List<KeyValue> getKeyValueList(byte[] family) {
@@ -538,6 +536,7 @@
    * @param ts version timestamp
    * @param value column value
    * @deprecated use {@link #add(byte[], byte[], long, byte[])} instead
+   * @return true
    */
   public Put add(byte [] column, long ts, byte [] value) {
     byte [][] parts = KeyValue.parseColumn(column);

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -24,7 +24,7 @@
 /** Thrown when a table can not be located */
 public class RegionOfflineException extends RegionException {
   private static final long serialVersionUID = 466008402L;
-/** default constructor */
+  /** default constructor */
   public RegionOfflineException() {
     super();
   }

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Result.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Result.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Result.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Result.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -20,6 +20,12 @@
 
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.SplitKeyValue;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.Writable;
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
@@ -31,12 +37,6 @@
 import java.util.NavigableMap;
 import java.util.TreeMap;
 
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.SplitKeyValue;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.Writable;
-
 /**
  * Single row result of a {@link Get} or {@link Scan} query.<p>
  * 
@@ -152,7 +152,7 @@
     if (isEmpty()) {
       return null;
     }
-    Arrays.sort(kvs, (Comparator<KeyValue>)KeyValue.COMPARATOR);
+    Arrays.sort(kvs, KeyValue.COMPARATOR);
     return kvs;
   }
 
@@ -238,6 +238,7 @@
    * Map of qualifiers to values.
    * <p>
    * Returns a Map of the form: <code>Map&lt;qualifier,value></code>
+   * @param family column family to get
    * @return map of qualifiers to values
    */
   public NavigableMap<byte[], byte[]> getFamilyMap(byte [] family) {
@@ -319,10 +320,7 @@
       return false;
     }
     NavigableMap<Long, byte[]> versionMap = getVersionMap(qualifierMap, qualifier);
-    if(versionMap == null) {
-      return false;
-    }
-    return true;
+    return versionMap != null;
   }
     
   /**

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -22,8 +22,6 @@
 import java.io.Closeable;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.client.Result;
-
 /**
  * Interface for client-side scanning.
  * Go to {@link HTable} to obtain instances.
@@ -34,14 +32,14 @@
    * Grab the next row's worth of values. The scanner will return a Result.
    * @return Result object if there is another row, null if the scanner is
    * exhausted.
-   * @throws IOException
+   * @throws IOException e
    */  
   public Result next() throws IOException;
  
   /**
    * @param nbRows number of rows to return
    * @return Between zero and <param>nbRows</param> Results
-   * @throws IOException
+   * @throws IOException e
    */
   public Result [] next(int nbRows) throws IOException;
  

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -15,11 +15,11 @@
  */
 package org.apache.hadoop.hbase.client;
 
+import org.apache.hadoop.hbase.util.Bytes;
+
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.hadoop.hbase.util.Bytes;
-
 /** 
  * Exception thrown by HTable methods when an attempt to do something (like
  * commit changes) fails after a bunch of retries. 

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Row.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Row.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Row.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Row.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/RowLock.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/RowLock.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/RowLock.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/RowLock.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -28,8 +28,8 @@
 
   /**
    * Creates a RowLock from a row and lock id
-   * @param row
-   * @param lockId
+   * @param row row to lock on
+   * @param lockId the lock id
    */
   public RowLock(final byte [] row, final long lockId) {
     this.row = row;
@@ -38,7 +38,7 @@
   
   /**
    * Creates a RowLock with only a lock id
-   * @param lockId
+   * @param lockId lock id
    */
   public RowLock(final long lockId) {
     this.lockId = lockId;

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Scan.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Scan.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Scan.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/Scan.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -20,14 +20,6 @@
 
 package org.apache.hadoop.hbase.client;
 
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.Map;
-import java.util.NavigableSet;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
@@ -37,6 +29,14 @@
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Map;
+import java.util.NavigableSet;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
 /**
  * Used to perform Scan operations.
  * <p>
@@ -154,6 +154,7 @@
    * <p>
    * Overrides previous calls to addColumn for this family.
    * @param family family name
+   * @return this
    */
   public Scan addFamily(byte [] family) {
     familyMap.remove(family);
@@ -167,6 +168,7 @@
    * Overrides previous calls to addFamily for this family.
    * @param family family name
    * @param qualifier column qualifier
+   * @return this
    */
   public Scan addColumn(byte [] family, byte [] qualifier) {
     NavigableSet<byte []> set = familyMap.get(family);
@@ -189,6 +191,7 @@
    * @throws IOException if invalid time range
    * @see #setMaxVersions()
    * @see #setMaxVersions(int)
+   * @return this
    */
   public Scan setTimeRange(long minStamp, long maxStamp)
   throws IOException {
@@ -204,6 +207,7 @@
    * @param timestamp version timestamp
    * @see #setMaxVersions()
    * @see #setMaxVersions(int)
+   * @return this
    */
   public Scan setTimeStamp(long timestamp) {
     try {
@@ -215,8 +219,9 @@
   }
 
   /**
-   * Set the start row.
-   * @param startRow
+   * Set the start row of the scan.
+   * @param startRow row to start scan on, inclusive
+   * @return this
    */
   public Scan setStartRow(byte [] startRow) {
     this.startRow = startRow;
@@ -225,7 +230,8 @@
   
   /**
    * Set the stop row.
-   * @param stopRow
+   * @param stopRow row to end at (exclusive)
+   * @return this
    */
   public Scan setStopRow(byte [] stopRow) {
     this.stopRow = stopRow;
@@ -234,6 +240,7 @@
   
   /**
    * Get all available versions.
+   * @return this
    */
   public Scan setMaxVersions() {
     this.maxVersions = Integer.MAX_VALUE;
@@ -243,6 +250,7 @@
   /**
    * Get up to the specified number of versions of each column.
    * @param maxVersions maximum versions for each column
+   * @return this
    */
   public Scan setMaxVersions(int maxVersions) {
     this.maxVersions = maxVersions;
@@ -270,6 +278,7 @@
   /**
    * Apply the specified server-side filter when performing the Scan.
    * @param filter filter to run on the server
+   * @return this
    */
   public Scan setFilter(Filter filter) {
     this.filter = filter;
@@ -278,7 +287,8 @@
 
   /**
    * Setting the familyMap
-   * @param familyMap
+   * @param familyMap map of family to qualifier
+   * @return this
    */
   public Scan setFamilyMap(Map<byte [], NavigableSet<byte []>> familyMap) {
     this.familyMap = familyMap;
@@ -410,15 +420,16 @@
     sb.append(", stopRow=");
     sb.append(Bytes.toString(this.stopRow));
     sb.append(", maxVersions=");
-    sb.append("" + this.maxVersions);
+    sb.append(this.maxVersions);
     sb.append(", batch=");
-    sb.append("" + this.batch);
+    sb.append(this.batch);
     sb.append(", caching=");
-    sb.append("" + this.caching);
+    sb.append(this.caching);
     sb.append(", cacheBlocks=");
-    sb.append("" + this.cacheBlocks);
+    sb.append(this.cacheBlocks);
     sb.append(", timeRange=");
-    sb.append("[" + this.tr.getMin() + "," + this.tr.getMax() + ")");
+    sb.append("[").append(this.tr.getMin()).append(",");
+    sb.append(this.tr.getMax()).append(")");
     sb.append(", families=");
     if(this.familyMap.size() == 0) {
       sb.append("ALL");
@@ -539,7 +550,7 @@
    * <p>
    * Note: It will through an error when the colon is missing.
    *
-   * @param familyAndQualifier
+   * @param familyAndQualifier family and qualifier
    * @return A reference to this instance.
    * @throws IllegalArgumentException When the colon is missing.
    * @deprecated use {@link #addColumn(byte[], byte[])} instead
@@ -561,10 +572,11 @@
    *
    * @param columns array of columns, formatted as <pre>family:qualifier</pre>
    * @deprecated issue multiple {@link #addColumn(byte[], byte[])} instead
+   * @return this
    */
   public Scan addColumns(byte [][] columns) {
-    for (int i = 0; i < columns.length; i++) {
-      addColumn(columns[i]);
+    for (byte[] column : columns) {
+      addColumn(column);
     }
     return this;
   }
@@ -608,12 +620,12 @@
         for (byte[] qual : quals) {
           if (cs.length() > 0) cs.append(" ");
           // encode values to make parsing easier later
-          cs.append(Bytes.toStringBinary(fam) + ":" + Bytes.toStringBinary(qual));
+          cs.append(Bytes.toStringBinary(fam)).append(":").append(Bytes.toStringBinary(qual));
         }
         cols.append(cs);
       } else {
         // only add the family but with old style delimiter
-        cols.append(Bytes.toStringBinary(fam) + ":");
+        cols.append(Bytes.toStringBinary(fam)).append(":");
       }
     }
     return cols.toString();

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java Mon Mar  8 22:25:06 2010
@@ -1,6 +1,5 @@
-
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -21,8 +20,6 @@
 
 package org.apache.hadoop.hbase.client;
 
-import java.io.IOException;
-
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.NotServingRegionException;
@@ -30,6 +27,8 @@
 import org.apache.hadoop.ipc.RemoteException;
 import org.mortbay.log.Log;
 
+import java.io.IOException;
+
 
 /**
  * Retries scanner operations such as create, next, etc.
@@ -43,9 +42,9 @@
   private int caching = 1;
 
   /**
-   * @param connection
-   * @param tableName
-   * @param scan
+   * @param connection which connection
+   * @param tableName table callable is on
+   * @param scan the scan to execute
    */
   public ScannerCallable (HConnection connection, byte [] tableName, Scan scan) {
     super(connection, tableName, scan.getStartRow());
@@ -53,7 +52,7 @@
   }
   
   /**
-   * @param reload
+   * @param reload force reload of server location
    * @throws IOException
    */
   @Override

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -20,12 +20,12 @@
 
 package org.apache.hadoop.hbase.client;
 
-import java.io.IOException;
-import java.util.concurrent.Callable;
-
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.ipc.HRegionInterface;
 
+import java.io.IOException;
+import java.util.concurrent.Callable;
+
 /**
  * Abstract class that implements Callable, used by retryable actions.
  * @param <T> the class that the ServerCallable handles
@@ -38,9 +38,9 @@
   protected HRegionInterface server;
 
   /**
-   * @param connection
-   * @param tableName
-   * @param row
+   * @param connection connection callable is on
+   * @param tableName table name callable is on
+   * @param row row we are querying
    */
   public ServerCallable(HConnection connection, byte [] tableName, byte [] row) {
     this.connection = connection;
@@ -51,7 +51,7 @@
   /**
    * 
    * @param reload set this to true if connection should re-find the region
-   * @throws IOException
+   * @throws IOException e
    */
   public void instantiateServer(boolean reload) throws IOException {
     this.location = connection.getRegionLocation(tableName, row, reload);

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ServerConnection.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ServerConnection.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ServerConnection.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ServerConnection.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -29,7 +29,7 @@
 public interface ServerConnection extends HConnection {
   /**
    * Set root region location in connection
-   * @param rootRegion
+   * @param rootRegion region location for root region
    */
   public void setRootRegionLocation(HRegionLocation rootRegion);
   

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ServerConnectionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ServerConnectionManager.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ServerConnectionManager.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/ServerConnectionManager.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -36,7 +36,7 @@
   /**
    * Get the connection object for the instance specified by the configuration
    * If no current connection exists, create a new connection for that instance
-   * @param conf
+   * @param conf configuration
    * @return HConnection object for the instance specified by the configuration
    */
   public static ServerConnection getConnection(Configuration conf) {

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java Mon Mar  8 22:25:06 2010
@@ -1,3 +1,23 @@
+/**
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hbase.client;
 
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -9,7 +29,7 @@
 public class UnmodifyableHColumnDescriptor extends HColumnDescriptor {
 
   /**
-   * @param desc
+   * @param desc wrapped
    */
   public UnmodifyableHColumnDescriptor (final HColumnDescriptor desc) {
     super(desc);

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -22,7 +22,6 @@
 
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
-//import org.apache.hadoop.hbase.client.tableindexed.IndexSpecification;
 
 /**
  * Read-only table descriptor.

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/package-info.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/package-info.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/client/package-info.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -17,6 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 /**
 Provides HBase Client
 

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -22,7 +22,7 @@
 
 /**
  * A binary comparator which lexicographically compares against the specified 
- * byte array using {@link Bytes#compareTo(byte[], byte[])}.
+ * byte array using {@link org.apache.hadoop.hbase.util.Bytes#compareTo(byte[], byte[])}.
  */
 public class BinaryComparator extends WritableByteArrayComparable {
 
@@ -31,7 +31,7 @@
 
   /**
    * Constructor
-   * @param value
+   * @param value value
    */
   public BinaryComparator(byte[] value) {
     super(value);

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -34,7 +34,7 @@
 
   /**
    * Constructor
-   * @param value
+   * @param value value
    */
   public BinaryPrefixComparator(byte[] value) {
     super(value);

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java Mon Mar  8 22:25:06 2010
@@ -1,11 +1,31 @@
+/**
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.hbase.KeyValue;
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.KeyValue;
-
 /**
  * Simple filter that returns first N columns on row only.
  * This filter was written to test filters in Get and as soon as it gets

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -20,14 +20,14 @@
 
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.io.HbaseObjectWritable;
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.util.Arrays;
 
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.io.HbaseObjectWritable;
-
 /**
  * This is a generic filter to be used to filter by comparison.  It takes an 
  * operator (equal, greater, not equal, etc) and a byte [] comparator.
@@ -58,7 +58,7 @@
     /** greater than or equal to */
     GREATER_OR_EQUAL,
     /** greater than */
-    GREATER;
+    GREATER,
   }
   
   protected CompareOp compareOp;

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/Filter.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/Filter.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/Filter.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -20,8 +20,8 @@
 
 package org.apache.hadoop.hbase.filter;
 
-import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.io.Writable;
 
 /**
  * Interface for row and column filters directly applied within the regionserver.

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -19,18 +19,18 @@
  */
 package org.apache.hadoop.hbase.filter;
 
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 import org.apache.hadoop.io.Writable;
 
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * Implementation of {@link Filter} that represents an ordered List of Filters
  * which will be evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL} 
@@ -64,7 +64,7 @@
    * Constructor that takes a set of {@link Filter}s. The default operator
    * MUST_PASS_ALL is assumed.
    * 
-   * @param rowFilters
+   * @param rowFilters list of filters
    */
   public FilterList(final List<Filter> rowFilters) {
     this.filters = rowFilters;
@@ -111,7 +111,7 @@
   /**
    * Add a filter.
    * 
-   * @param filter
+   * @param filter another filter
    */
   public void addFilter(Filter filter) {
     this.filters.add(filter);
@@ -178,7 +178,7 @@
           return ReturnCode.INCLUDE;
         case NEXT_ROW:
         case SKIP:
-          continue;
+          // continue;
         }
       }
     }

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -23,9 +23,9 @@
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.util.Bytes;
 
+import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.io.DataInput;
 
 /**
  * A Filter that stops after the given row.  There is no "RowStopFilter" because
@@ -55,6 +55,7 @@
 
   public boolean filterRowKey(byte[] buffer, int offset, int length) {
     if (buffer == null) {
+      //noinspection RedundantIfStatement
       if (this.stopRowKey == null) {
         return true; //filter...
       }

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2007 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -19,12 +19,12 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.hbase.KeyValue;
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.KeyValue;
-
 /**
  * Implementation of Filter interface that limits results to a specific page
  * size. It terminates scanning once the number of filter-passed rows is >

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -19,13 +19,13 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.hbase.util.Bytes;
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.util.regex.Pattern;
 
-import org.apache.hadoop.hbase.util.Bytes;
-
 /**
  * This comparator is for use with {@link CompareFilter} implementations, such 
  * as {@link RowFilter}, {@link QualifierFilter}, and {@link ValueFilter}, for 

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -20,11 +20,6 @@
 
 package org.apache.hadoop.hbase.filter;
 
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.Arrays;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.KeyValue;
@@ -33,6 +28,11 @@
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Arrays;
+
 /**
  * This filter is used to filter cells based on value. It takes a {@link CompareFilter.CompareOp} 
  * operator (equal, greater, not equal, etc), and either a byte [] value or 
@@ -228,6 +228,7 @@
    * If true, the entire row will be skipped if the column is not found.
    * <p>
    * If false, the row will pass if the column is not found.  This is default.
+   * @param filterIfMissing flag
    */
   public void setFilterIfMissing(boolean filterIfMissing) {
     this.filterIfMissing = filterIfMissing;
@@ -238,6 +239,7 @@
    * If true, the row will be returned if only the latest version of the column
    * value matches. If false, the row will be returned if any version of the
    * column value matches. The default is true.
+   * @return return value
    */
   public boolean getLatestVersionOnly() {
     return latestVersionOnly;
@@ -248,6 +250,7 @@
    * If true, the row will be returned if only the latest version of the column
    * value matches. If false, the row will be returned if any version of the
    * column value matches. The default is true.
+   * @param latestVersionOnly flag
    */
   public void setLatestVersionOnly(boolean latestVersionOnly) {
     this.latestVersionOnly = latestVersionOnly;

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -22,9 +22,9 @@
 
 import org.apache.hadoop.hbase.KeyValue;
 
+import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.io.DataInput;
 
 /**
  * A wrapper filter that filters an entire row if any of the KeyValue checks do 

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -19,12 +19,12 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.hbase.util.Bytes;
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.util.Bytes;
-
 /**
  * This comparator is for use with ColumnValueFilter, for filtering based on
  * the value of a given column. Use it to test if a given substring appears

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -22,9 +22,9 @@
 
 import org.apache.hadoop.hbase.KeyValue;
 
+import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.io.DataInput;
 
 /**
  * A wrapper filter that returns true from {@link #filterAllRemaining()} as soon

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/WritableByteArrayComparable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/WritableByteArrayComparable.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/WritableByteArrayComparable.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/WritableByteArrayComparable.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /**
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -19,13 +19,13 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.Writable;
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.Writable;
-
 /** Base class, combines Comparable<byte []> and Writable. */
 public abstract class WritableByteArrayComparable implements Writable, Comparable<byte[]> {
 

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/package-info.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/package-info.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/filter/package-info.java Mon Mar  8 22:25:06 2010
@@ -1,5 +1,5 @@
 /*
- * Copyright 2008 The Apache Software Foundation
+ * Copyright 2010 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -17,7 +17,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-/**Provides row-level filters applied to HRegion scan results during calls to
+
+/**
+ * Provides row-level filters applied to HRegion scan results during calls to
  * {@link org.apache.hadoop.hbase.client.ResultScanner#next()}. 
 
 <p>

Modified: hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java?rev=920548&r1=920547&r2=920548&view=diff
==============================================================================
--- hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java (original)
+++ hadoop/hbase/trunk/core/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java Mon Mar  8 22:25:06 2010
@@ -1,4 +1,6 @@
 /**
+ * Copyright 2010 The Apache Software Foundation
+ *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,41 +20,38 @@
 
 package org.apache.hadoop.hbase.ipc;
 
-import java.net.Socket;
-import java.net.InetSocketAddress;
-import java.net.SocketTimeoutException;
-import java.net.UnknownHostException;
-import java.net.ConnectException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.ObjectWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.ReflectionUtils;
 
-import java.io.IOException;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
+import javax.net.SocketFactory;
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
 import java.io.FilterInputStream;
+import java.io.IOException;
 import java.io.InputStream;
-
+import java.net.ConnectException;
+import java.net.InetSocketAddress;
+import java.net.Socket;
+import java.net.SocketTimeoutException;
+import java.net.UnknownHostException;
 import java.util.Hashtable;
 import java.util.Iterator;
 import java.util.Map.Entry;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 
-import javax.net.SocketFactory;
-
-import org.apache.commons.logging.*;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.ObjectWritable;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.ReflectionUtils;
-
 /** A client for an IPC service.  IPC calls take a single {@link Writable} as a
  * parameter, and return a {@link Writable} as their value.  A service runs on
  * a port and is defined by a parameter class and a value class.
@@ -64,24 +63,24 @@
  */
 public class HBaseClient {
   
-  public static final Log LOG =
+  private static final Log LOG =
     LogFactory.getLog("org.apache.hadoop.ipc.HBaseClient");
-  protected Hashtable<ConnectionId, Connection> connections =
+  protected final Hashtable<ConnectionId, Connection> connections =
     new Hashtable<ConnectionId, Connection>();
 
-  protected Class<? extends Writable> valueClass;   // class of call values
+  protected final Class<? extends Writable> valueClass;   // class of call values
   protected int counter;                            // counter for call ids
-  protected AtomicBoolean running = new AtomicBoolean(true); // if client runs
+  protected final AtomicBoolean running = new AtomicBoolean(true); // if client runs
   final protected Configuration conf;
-  final protected int maxIdleTime; //connections will be culled if it was idle for 
-                           //maxIdleTime msecs
+  final protected int maxIdleTime; // connections will be culled if idle for
+                           // maxIdleTime msecs
   final protected int maxRetries; //the max. no. of retries for socket connections
   final protected long failureSleep; // Time to sleep before retry on failure.
-  protected boolean tcpNoDelay; // if T then disable Nagle's Algorithm
-  protected boolean tcpKeepAlive; // if T then use keepalives
-  protected int pingInterval; // how often sends ping to the server in msecs
+  protected final boolean tcpNoDelay; // if T then disable Nagle's Algorithm
+  protected final boolean tcpKeepAlive; // if T then use keepalives
+  protected final int pingInterval; // how often sends ping to the server in msecs
 
-  protected SocketFactory socketFactory;           // how to create sockets
+  protected final SocketFactory socketFactory;           // how to create sockets
   private int refCount = 1;
   
   final private static String PING_INTERVAL_NAME = "ipc.ping.interval";
@@ -94,7 +93,8 @@
    * @param conf Configuration
    * @param pingInterval the ping interval
    */
-  final public static void setPingInterval(Configuration conf, int pingInterval) {
+  @SuppressWarnings({"UnusedDeclaration"})
+  public static void setPingInterval(Configuration conf, int pingInterval) {
     conf.setInt(PING_INTERVAL_NAME, pingInterval);
   }
 
@@ -105,7 +105,7 @@
    * @param conf Configuration
    * @return the ping interval
    */
-  final static int getPingInterval(Configuration conf) {
+  static int getPingInterval(Configuration conf) {
     return conf.getInt(PING_INTERVAL_NAME, DEFAULT_PING_INTERVAL);
   }
   
@@ -136,8 +136,8 @@
 
   /** A call waiting for a value. */
   private class Call {
-    int id;                                       // call id
-    Writable param;                               // parameter
+    final int id;                                       // call id
+    final Writable param;                               // parameter
     Writable value;                               // value, null if error
     IOException error;                            // exception, null if value
     boolean done;                                 // true when call is done
@@ -187,9 +187,9 @@
     private DataOutputStream out;
     
     // currently active calls
-    private Hashtable<Integer, Call> calls = new Hashtable<Integer, Call>();
-    private AtomicLong lastActivity = new AtomicLong();// last I/O activity time
-    protected AtomicBoolean shouldCloseConnection = new AtomicBoolean();  // indicate if the connection is closed
+    private final Hashtable<Integer, Call> calls = new Hashtable<Integer, Call>();
+    private final AtomicLong lastActivity = new AtomicLong();// last I/O activity time
+    protected final AtomicBoolean shouldCloseConnection = new AtomicBoolean();  // indicate if the connection is closed
     private IOException closeException; // close reason
 
     public Connection(InetSocketAddress address) throws IOException {
@@ -287,6 +287,7 @@
     /** Connect to the server and set up the I/O streams. It then sends
      * a header to the server and starts
      * the connection thread that waits for responses.
+     * @throws java.io.IOException e
      */
     protected synchronized void setupIOstreams() throws IOException {
       if (socket != null || shouldCloseConnection.get()) {
@@ -395,6 +396,7 @@
      * 
      * Return true if it is time to read a response; false otherwise.
      */
+    @SuppressWarnings({"ThrowableInstanceNeverThrown"})
     private synchronized boolean waitForWork() {
       if (calls.isEmpty() && !shouldCloseConnection.get()  && running.get())  {
         long timeout = maxIdleTime-
@@ -402,7 +404,7 @@
         if (timeout>0) {
           try {
             wait(timeout);
-          } catch (InterruptedException e) {}
+          } catch (InterruptedException ignored) {}
         }
       }
       
@@ -431,7 +433,8 @@
       long curTime = System.currentTimeMillis();
       if ( curTime - lastActivity.get() >= pingInterval) {
         lastActivity.set(curTime);
-        synchronized (out) {
+        //noinspection SynchronizeOnNonFinalField
+        synchronized (this.out) {
           out.writeInt(PING_CALL_ID);
           out.flush();
         }
@@ -455,18 +458,18 @@
             + connections.size());
     }
 
-    /** Initiates a call by sending the parameter to the remote server.
+    /* Initiates a call by sending the parameter to the remote server.
      * Note: this is not called from the Connection thread, but by other
      * threads.
-     * @param call
      */
-    public void sendParam(Call call) {
+    protected void sendParam(Call call) {
       if (shouldCloseConnection.get()) {
         return;
       }
 
       DataOutputBuffer d=null;
       try {
+        //noinspection SynchronizeOnNonFinalField
         synchronized (this.out) { // FindBugs IS2_INCONSISTENT_SYNC
           if (LOG.isDebugEnabled())
             LOG.debug(getName() + " sending #" + call.id);
@@ -510,6 +513,7 @@
 
         boolean isError = in.readBoolean();     // read if error
         if (isError) {
+          //noinspection ThrowableInstanceNeverThrown
           call.setException(new RemoteException( WritableUtils.readString(in),
               WritableUtils.readString(in)));
         } else {
@@ -585,8 +589,8 @@
 
   /** Call implementation used for parallel calls. */
   private class ParallelCall extends Call {
-    private ParallelResults results;
-    protected int index;
+    private final ParallelResults results;
+    protected final int index;
     
     public ParallelCall(Writable param, ParallelResults results, int index) {
       super(param);
@@ -603,7 +607,7 @@
 
   /** Result collector for parallel calls. */
   private static class ParallelResults {
-    protected Writable[] values;
+    protected final Writable[] values;
     protected int size;
     protected int count;
 
@@ -612,11 +616,10 @@
       this.size = size;
     }
 
-    /**
+    /*
      * Collect a result.
-     * @param call
      */
-    public synchronized void callComplete(ParallelCall call) {
+    synchronized void callComplete(ParallelCall call) {
       // FindBugs IS2_INCONSISTENT_SYNC
       values[call.index] = call.value;            // store the value
       count++;                                    // count it
@@ -628,9 +631,9 @@
   /**
    * Construct an IPC client whose values are of the given {@link Writable}
    * class.
-   * @param valueClass
-   * @param conf
-   * @param factory
+   * @param valueClass value class
+   * @param conf configuration
+   * @param factory socket factory
    */
   public HBaseClient(Class<? extends Writable> valueClass, Configuration conf, 
       SocketFactory factory) {
@@ -651,8 +654,8 @@
 
   /**
    * Construct an IPC client with the default SocketFactory
-   * @param valueClass
-   * @param conf
+   * @param valueClass value class
+   * @param conf configuration
    */
   public HBaseClient(Class<? extends Writable> valueClass, Configuration conf) {
     this(valueClass, conf, NetUtils.getDefaultSocketFactory(conf));
@@ -688,7 +691,7 @@
     while (!connections.isEmpty()) {
       try {
         Thread.sleep(100);
-      } catch (InterruptedException e) {
+      } catch (InterruptedException ignored) {
       }
     }
   }
@@ -696,10 +699,10 @@
   /** Make a call, passing <code>param</code>, to the IPC server running at
    * <code>address</code>, returning the value.  Throws exceptions if there are
    * network problems or if the remote code threw an exception. 
-   * @param param 
-   * @param address 
-   * @return Writable 
-   * @throws IOException
+   * @param param writable parameter
+   * @param address network address
+   * @return Writable
+   * @throws IOException e
    */
   public Writable call(Writable param, InetSocketAddress address)
   throws IOException {
@@ -712,6 +715,7 @@
     Call call = new Call(param);
     Connection connection = getConnection(addr, ticket, call);
     connection.sendParam(call);                 // send the parameter
+    //noinspection SynchronizationOnLocalVariableOrMethodParameter
     synchronized (call) {
       while (!call.done) {
         try {
@@ -743,6 +747,7 @@
    * @param exception the relevant exception
    * @return an exception to throw
    */
+  @SuppressWarnings({"ThrowableInstanceNeverThrown"})
   private IOException wrapException(InetSocketAddress addr,
                                          IOException exception) {
     if (exception instanceof ConnectException) {
@@ -766,16 +771,18 @@
    * corresponding address.  When all values are available, or have timed out
    * or errored, the collected results are returned in an array.  The array
    * contains nulls for calls that timed out or errored.  
-   * @param params 
-   * @param addresses 
+   * @param params writable parameters
+   * @param addresses socket addresses
    * @return  Writable[]
-   * @throws IOException
+   * @throws IOException e
    */
   public Writable[] call(Writable[] params, InetSocketAddress[] addresses)
     throws IOException {
     if (addresses.length == 0) return new Writable[0];
 
     ParallelResults results = new ParallelResults(params.length);
+    // TODO this synchronization block doesn't make any sense, we should possibly fix it
+    //noinspection SynchronizationOnLocalVariableOrMethodParameter
     synchronized (results) {
       for (int i = 0; i < params.length; i++) {
         ParallelCall call = new ParallelCall(params[i], results, i);
@@ -792,14 +799,14 @@
       while (results.count != results.size) {
         try {
           results.wait();                    // wait for all results
-        } catch (InterruptedException e) {}
+        } catch (InterruptedException ignored) {}
       }
 
       return results.values;
     }
   }
 
-  /** Get a connection from the pool, or create a new one and add it to the
+  /* Get a connection from the pool, or create a new one and add it to the
    * pool.  Connections to a given host/port are reused. */
   private Connection getConnection(InetSocketAddress addr, 
                                    UserGroupInformation ticket,
@@ -838,8 +845,8 @@
    * to servers are uniquely identified by <remoteAddress, ticket>
    */
   private static class ConnectionId {
-    InetSocketAddress address;
-    UserGroupInformation ticket;
+    final InetSocketAddress address;
+    final UserGroupInformation ticket;
     
     ConnectionId(InetSocketAddress address, UserGroupInformation ticket) {
       this.address = address;



Mime
View raw message