hbase-commits mailing list archives

From st...@apache.org
Subject svn commit: r787318 - in /hadoop/hbase/trunk: ./ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/filter/
Date Mon, 22 Jun 2009 17:22:52 GMT
Author: stack
Date: Mon Jun 22 17:22:52 2009
New Revision: 787318

URL: http://svn.apache.org/viewvc?rev=787318&view=rev
Log:
HBASE-1561 HTable Mismatch between javadoc and what it actually does

Modified:
    hadoop/hbase/trunk/CHANGES.txt
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/Delete.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/Get.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RowWhileMatchFilter.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/package-info.java

Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=787318&r1=787317&r2=787318&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Mon Jun 22 17:22:52 2009
@@ -211,6 +211,7 @@
    HBASE-1545  atomicIncrements creating new values with Long.MAX_VALUE
    HBASE-1547  atomicIncrement doesnt increase hregion.memcacheSize
    HBASE-1553  ClassSize missing in trunk
+   HBASE-1561  HTable Mismatch between javadoc and what it actually does
 
   IMPROVEMENTS
    HBASE-1089  Add count of regions on filesystem to master UI; add percentage

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/Delete.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/Delete.java?rev=787318&r1=787317&r2=787318&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/Delete.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/Delete.java Mon Jun 22 17:22:52 2009
@@ -87,13 +87,16 @@
 
   /**
    * Create a Delete operation for the specified row and timestamp, using
-   * an optional row lock.
-   * <p>
+   * an optional row lock.<p>
+   * 
    * If no further operations are done, this will delete all columns in all
    * families of the specified row with a timestamp less than or equal to the 
-   * specified timestamp.
+   * specified timestamp.<p>
+   * 
+   * This timestamp is ONLY used for a delete row operation.  If specifying 
+   * families or columns, you must specify each timestamp individually.
    * @param row row key
-   * @param timestamp maximum version timestamp
+   * @param timestamp maximum version timestamp (only for delete row)
    * @param rowLock previously acquired row lock, or null
    */
   public Delete(byte [] row, long timestamp, RowLock rowLock) {
@@ -170,6 +173,18 @@
   }
   
   /**
+   * Delete all versions of the specified column, given in 
+   * <code>family:qualifier</code> notation, and with a timestamp less than
+   * or equal to the specified timestamp. 
+   * @param column colon-delimited family and qualifier
+   * @param timestamp maximum version timestamp 
+   */
+  public void deleteColumns(byte [] column, long timestamp) {
+    byte [][] parts = KeyValue.parseColumn(column);
+    this.deleteColumns(parts[0], parts[1], timestamp);
+  }
+  
+  /**
    * Delete the latest version of the specified column.
    * This is an expensive call in that on the server-side, it first does a
    * get to find the latest versions timestamp.  Then it adds a delete using

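The clarified Delete javadoc above draws a distinction worth illustrating: the constructor timestamp only bounds a whole-row delete, while per-column deletes carry their own timestamps. A minimal sketch against the client API shown in this diff; the row key, family, qualifier, and timestamp below are made up for illustration.

import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteTimestampSketch {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("row1");              // hypothetical row key
    long ts = 1245691372000L;                        // hypothetical timestamp

    // Whole-row delete: the constructor timestamp applies because no
    // families or columns are added afterwards.
    Delete wholeRow = new Delete(row, ts, null);

    // Column delete: the constructor timestamp is NOT used; the timestamp
    // must be passed to deleteColumns itself.
    Delete oneColumn = new Delete(row);
    oneColumn.deleteColumns(Bytes.toBytes("info"),   // hypothetical family
        Bytes.toBytes("name"),                       // hypothetical qualifier
        ts);

    // Either Delete would then be handed to HTable#delete(Delete).
  }
}
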
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/Get.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/Get.java?rev=787318&r1=787317&r2=787318&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/Get.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/Get.java Mon Jun 22 17:22:52 2009
@@ -57,7 +57,7 @@
  * To limit the number of versions of each column to be returned, execute
  * {@link #setMaxVersions(int) setMaxVersions}.
  * <p>
- * To add a filter, execute {@link #setFilter(RowFilterInterface) setFilter}.
+ * To add a filter, execute {@link #setFilter(Filter) setFilter}.
  */
 public class Get implements Writable {
   private byte [] row = null;
@@ -402,4 +402,4 @@
       }
     }
   }
-}
\ No newline at end of file
+}

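The Get javadoc now points at the new-style setFilter(Filter). A small sketch of how a caller might combine it with setMaxVersions, assuming the 0.20-era client API; the helper method and its arguments are illustrative only.

import java.io.IOException;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.filter.Filter;

public class GetFilterSketch {
  // Illustrative helper, not part of HBase: runs a Get with any new-style Filter.
  static Result getWithFilter(HTable table, byte[] row, Filter filter)
  throws IOException {
    Get get = new Get(row);
    get.setMaxVersions(3);     // return up to three versions per column
    get.setFilter(filter);     // takes Filter now, not the deprecated RowFilterInterface
    return table.get(get);
  }
}
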
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java?rev=787318&r1=787317&r2=787318&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java Mon Jun 22 17:22:52 2009
@@ -1411,7 +1411,9 @@
       final RowLock rl)
   throws IOException {
     Delete d = new Delete(row, ts, rl);
-    d.deleteColumn(column);
+    if(column != null) {
+      d.deleteColumns(column, ts);
+    }
     delete(d);
   }
   
@@ -1544,9 +1546,8 @@
   public void deleteFamily(final byte [] row, final byte [] family, 
     final long timestamp, final RowLock rl)
   throws IOException {
-    // Is this right?  LATEST_TS? St.Ack
     Delete d = new Delete(row, HConstants.LATEST_TIMESTAMP, rl);
-    d.deleteFamily(family);
+    d.deleteFamily(stripColon(family), timestamp);
     delete(d);
   }
   
@@ -2071,4 +2072,14 @@
       };
     }
   }
+  
+  private static byte [] stripColon(final byte [] n) {
+    byte col = n[n.length-1];
+    if (col == ':') {
+      byte [] res = new byte[n.length-1];
+      System.arraycopy(n, 0, res, 0, n.length-1);
+      return res;
+    }
+    return n;
+  }
 }

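The private stripColon helper introduced above lets deleteFamily accept old-style, colon-terminated family names. A standalone sketch of the same trimming logic; the family name is made up.

import java.util.Arrays;
import org.apache.hadoop.hbase.util.Bytes;

public class StripColonSketch {
  // Same idea as the new private HTable helper: drop a trailing ':' if present.
  static byte[] stripColon(byte[] n) {
    if (n.length > 0 && n[n.length - 1] == ':') {
      return Arrays.copyOf(n, n.length - 1);
    }
    return n;
  }

  public static void main(String[] args) {
    byte[] family = Bytes.toBytes("info:");                   // hypothetical family name
    System.out.println(Bytes.toString(stripColon(family)));   // prints "info"
  }
}
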
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RowWhileMatchFilter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RowWhileMatchFilter.java?rev=787318&r1=787317&r2=787318&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RowWhileMatchFilter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/RowWhileMatchFilter.java Mon Jun 22 17:22:52 2009
@@ -93,4 +93,4 @@
       throw new RuntimeException("Failed deserialize.", e);
     }
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/package-info.java?rev=787318&r1=787317&r2=787318&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/package-info.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/filter/package-info.java Mon Jun 22 17:22:52 2009
@@ -20,11 +20,11 @@
 /**Provides row-level filters applied to HRegion scan results during calls to
  * {@link org.apache.hadoop.hbase.client.ResultScanner#next()}. 
 
-<p>Since HBase 0.20.0, {@link Filter} is the new Interface used filtering.
-It replaces the deprecated {@link RowFilterInterface}.
+<p>Since HBase 0.20.0, {@link org.apache.hadoop.hbase.filter.Filter} is the new Interface
used filtering.
+It replaces the deprecated {@link org.apache.hadoop.hbase.filter.RowFilterInterface}.
 Filters run the extent of a table unless you wrap your filter in a
-{@link RowWhileMatchFilter}.  The latter returns as soon as the filter
-stops matching.
+{@link org.apache.hadoop.hbase.filter.RowWhileMatchFilter}.
+The latter returns as soon as the filter stops matching.
 </p>
 */
 package org.apache.hadoop.hbase.filter;
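
As the package javadoc above notes, a filter otherwise runs the full extent of a table unless wrapped in a RowWhileMatchFilter. A sketch of such a wrapped scan, assuming RowWhileMatchFilter takes the wrapped Filter in its constructor; the helper method and its arguments are illustrative, not part of this commit.

import java.io.IOException;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.RowWhileMatchFilter;

public class WhileMatchScanSketch {
  // Illustrative helper: scan until 'inner' stops matching, then quit early.
  static void scanWhileMatching(HTable table, Filter inner) throws IOException {
    Scan scan = new Scan();
    scan.setFilter(new RowWhileMatchFilter(inner));   // assumed wrapping constructor
    ResultScanner scanner = table.getScanner(scan);
    try {
      for (Result r : scanner) {
        // process r
      }
    } finally {
      scanner.close();
    }
  }
}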


