hadoop-common-commits mailing list archives

From: st...@apache.org
Subject: svn commit: r584844 - in /lucene/hadoop/trunk/src/contrib/hbase: CHANGES.txt src/java/org/apache/hadoop/hbase/HTable.java
Date: Mon, 15 Oct 2007 18:42:28 GMT
Author: stack
Date: Mon Oct 15 11:42:28 2007
New Revision: 584844

URL: http://svn.apache.org/viewvc?rev=584844&view=rev
Log:
HADOOP-2401 Add convenience put method that takes writable

Modified:
    lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
    lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java

Modified: lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt?rev=584844&r1=584843&r2=584844&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt Mon Oct 15 11:42:28 2007
@@ -2,6 +2,19 @@
 
 
 Trunk (unreleased changes)
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
+  IMPROVEMENTS
+    HADOOP-2401 Add convenience put method that takes writable
+                (Johan Oskarsson via Stack)
+
+Branch 0.15 (unreleased changes)
 
   INCOMPATIBLE CHANGES
     HADOOP-1931 Hbase scripts take --ARG=ARG_VALUE when should be like hadoop

Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java?rev=584844&r1=584843&r2=584844&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTable.java Mon Oct 15 11:42:28 2007
@@ -35,6 +35,7 @@
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -543,6 +544,20 @@
     }
     updateInProgress(true);
     batch.get().put(lockid, column, val);
+  }
+  
+  /** 
+   * Change a value for the specified column.
+   * Runs {@link #abort(long)} if exception thrown.
+   *
+   * @param lockid lock id returned from startUpdate
+   * @param column column whose value is being set
+   * @param val new value for column
+   * @throws IOException throws this if the writable can't be
+   * converted into a byte array 
+   */
+  public void put(long lockid, Text column, Writable val) throws IOException {    
+    put(lockid, column, Writables.getBytes(val));
   }
   
   /** 


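For reference, a minimal caller sketch of the new overload, assuming the pre-0.20 HTable update cycle (startUpdate/commit/abort, as referenced in the javadoc above) and an already-open table; the table, row, and column names below are illustrative only, and the types used (Text, MapWritable) come from the packages HTable.java already imports:

  // Sketch only: "table" is an open HTable; row and column names are made up.
  // The new put(long, Text, Writable) serializes the value with
  // Writables.getBytes(val) and delegates to put(long, Text, byte[]).
  long lockid = table.startUpdate(new Text("row1"));
  try {
    MapWritable value = new MapWritable();            // any Writable works here
    table.put(lockid, new Text("info:payload"), value);
    table.commit(lockid);                             // assumed commit(long) on this API
  } catch (IOException e) {
    table.abort(lockid);                              // abort on failure, per the javadoc
    throw e;
  }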
