hbase-commits mailing list archives

From: st...@apache.org
Subject: svn commit: r656868 [3/10] - in /hadoop/hbase/trunk: ./ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/filter/ src/java/org/apache/hadoop/hbase/hql/ src/java/org/apache/hadoop/hbase/io/ src/j...
Date: Thu, 15 May 2008 22:10:50 GMT
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/AlterCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/AlterCommand.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/AlterCommand.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/AlterCommand.java Thu May 15 15:10:47 2008
@@ -31,6 +31,7 @@
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Text;
 
 import org.apache.hadoop.hbase.BloomFilterDescriptor;
@@ -57,7 +58,7 @@
   public ReturnMsg execute(HBaseConfiguration conf) {
     try {
       HConnection conn = HConnectionManager.getConnection(conf);
-      if (!conn.tableExists(new Text(this.tableName))) {
+      if (!conn.tableExists(Bytes.toBytes(this.tableName))) {
         return new ReturnMsg(0, "'" + this.tableName + "'" + TABLE_NOT_FOUND);
       }
 
@@ -95,7 +96,7 @@
 
           // get the table descriptor so we can get the old column descriptor
           HTableDescriptor tDesc = getTableDescByName(admin, tableName);
-          HColumnDescriptor oldColumnDesc = tDesc.families().get(columnName);
+          HColumnDescriptor oldColumnDesc = tDesc.getFamily(columnName.getBytes());
 
           // combine the options specified in the shell with the options
           // from the existing descriptor to produce the new descriptor
@@ -168,11 +169,11 @@
     return CommandType.DDL;
   }
 
-  private HTableDescriptor getTableDescByName(HBaseAdmin admin, String tableName)
+  private HTableDescriptor getTableDescByName(HBaseAdmin admin, String tn)
       throws IOException {
     HTableDescriptor[] tables = admin.listTables();
     for (HTableDescriptor tDesc : tables) {
-      if (tDesc.getName().toString().equals(tableName)) {
+      if (tDesc.getName().toString().equals(tn)) {
         return tDesc;
       }
     }
@@ -184,7 +185,7 @@
    * instance of HColumnDescriptor representing the column spec, with empty
    * values drawn from the original as defaults
    */
-  protected HColumnDescriptor getColumnDescriptor(String column,
+  protected HColumnDescriptor getColumnDescriptor(String c,
       Map<String, Object> columnSpec, HColumnDescriptor original)
       throws IllegalArgumentException {
     initOptions(original);
@@ -230,9 +231,10 @@
       }
     }
 
-    column = appendDelimiter(column);
+    c = appendDelimiter(c);
 
-    HColumnDescriptor columnDesc = new HColumnDescriptor(new Text(column),
+    HColumnDescriptor columnDesc =
+      new HColumnDescriptor(Bytes.toBytes(c),
         maxVersions, compression, inMemory, blockCacheEnabled,
         maxLength, timeToLive, bloomFilterDesc);
 

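The AlterCommand changes above show the commit's core pattern: drop the org.apache.hadoop.io.Text wrapper and pass raw UTF-8 byte arrays built with Bytes.toBytes(). One subtlety for call sites converted via Text.getBytes(), as several files below are: Text returns its backing buffer, whose valid data only runs to getLength(), so the array can carry trailing slack. A minimal sketch of the two conversion routes, not part of the commit (the table name is hypothetical):

    import java.util.Arrays;

    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.io.Text;

    public class TextMigrationSketch {
      public static void main(String[] args) {
        String tableName = "mytable";            // hypothetical table name
        // Post-commit style: encode the String straight to UTF-8 bytes.
        byte[] viaBytes = Bytes.toBytes(tableName);
        // Pre-commit style: Text.getBytes() exposes the backing buffer,
        // so trim to getLength() before treating it as the encoded value.
        Text t = new Text(tableName);
        byte[] viaText = Arrays.copyOf(t.getBytes(), t.getLength());
        System.out.println(Bytes.equals(viaBytes, viaText));   // true
      }
    }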
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/CreateCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/CreateCommand.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/CreateCommand.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/CreateCommand.java Thu May 15 15:10:47 2008
@@ -24,12 +24,10 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -50,7 +48,7 @@
        return new ReturnMsg(0, "'" + tableName + "' table already exists.");
       }
 
-      HTableDescriptor tableDesc = new HTableDescriptor(tableName.toString());
+      HTableDescriptor tableDesc = new HTableDescriptor(tableName.getBytes());
       HColumnDescriptor columnDesc = null;
       Set<String> columns = columnSpecMap.keySet();
       for (String column : columns) {

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/DeleteCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/DeleteCommand.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/DeleteCommand.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/DeleteCommand.java Thu May 15 15:10:47 2008
@@ -56,9 +56,9 @@
       HTable hTable = new HTable(conf, tableName);
 
       if (rowKey != null) {
-        BatchUpdate bu = new BatchUpdate(rowKey);
+        BatchUpdate bu = new BatchUpdate(rowKey.getBytes());
         for (Text column : getColumnList(admin, hTable)) {
-          bu.delete(new Text(column));
+          bu.delete(column.getBytes());
         }
         hTable.commit(bu);
       } else {

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/DescCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/DescCommand.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/DescCommand.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/DescCommand.java Thu May 15 15:10:47 2008
@@ -59,8 +59,7 @@
       HColumnDescriptor[] columns = null;
       for (int i = 0; i < tables.length; i++) {
         if (tables[i].getName().equals(tableName)) {
-          columns = tables[i].getFamilies().values().toArray(
-              new HColumnDescriptor[] {});
+          columns = tables[i].getFamilies().toArray(new HColumnDescriptor[] {});
           break;
         }
       }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/InsertCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/InsertCommand.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/InsertCommand.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/InsertCommand.java Thu May 15 15:10:47 2008
@@ -61,8 +61,8 @@
       try {
         HTable table = new HTable(conf, tableName);
         BatchUpdate batchUpdate = timestamp == null ? 
-          new BatchUpdate(getRow()) 
-          : new BatchUpdate(getRow(), Long.parseLong(timestamp));
+          new BatchUpdate(getRow().getBytes()) 
+          : new BatchUpdate(getRow().getBytes(), Long.parseLong(timestamp));
 
         for (int i = 0; i < values.size(); i++) {
           Text column = null;
@@ -70,7 +70,7 @@
             column = getColumn(i);
           else
             column = new Text(getColumn(i) + ":");
-          batchUpdate.put(column, getValue(i));
+          batchUpdate.put(column.getBytes(), getValue(i));
         }
 
         table.commit(batchUpdate);

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SchemaModificationCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SchemaModificationCommand.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SchemaModificationCommand.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SchemaModificationCommand.java Thu May 15 15:10:47 2008
@@ -110,7 +110,7 @@
 
     column = appendDelimiter(column);
 
-    HColumnDescriptor columnDesc = new HColumnDescriptor(new Text(column),
+    HColumnDescriptor columnDesc = new HColumnDescriptor(column.getBytes(),
         maxVersions, compression, inMemory, blockCacheEnabled,
         maxLength, timeToLive, bloomFilterDesc);
 

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java Thu May 15 15:10:47 2008
@@ -26,24 +26,24 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
-import java.util.TreeMap;
 
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.Shell;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Scanner;
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.filter.StopRowFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
 import org.apache.hadoop.hbase.hql.generated.HQLParser;
-import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.Scanner;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.RowResult;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Writables;
+import org.apache.hadoop.io.Text;
 
 /**
  * Selects values from tables.
@@ -106,8 +106,8 @@
   }
 
   private boolean isMetaTable() {
-    return (tableName.equals(HConstants.ROOT_TABLE_NAME) || tableName
-        .equals(HConstants.META_TABLE_NAME)) ? true : false;
+    return (tableName.equals(new Text(HConstants.ROOT_TABLE_NAME)) ||
+      tableName.equals(new Text(HConstants.META_TABLE_NAME))) ? true : false;
   }
 
   private int compoundWherePrint(HTable table, HBaseAdmin admin) {
@@ -118,14 +118,14 @@
         Cell[] result = null;
         ParsedColumns parsedColumns = getColumns(admin, false);
         boolean multiple = parsedColumns.isMultiple() || version > 1;
-        for (Text column : parsedColumns.getColumns()) {
+        for (byte [] column : parsedColumns.getColumns()) {
           if (count == 0) {
             formatter.header(multiple ? HEADER_COLUMN_CELL : null);
           }
           if (timestamp != 0) {
-            result = table.get(rowKey, column, timestamp, version);
+            result = table.get(rowKey.getBytes(), column, timestamp, version);
           } else {
-            result = table.get(rowKey, column, version);
+            result = table.get(rowKey.getBytes(), column, version);
           }
           for (int ii = 0; result != null && ii < result.length; ii++) {
             if (multiple) {
@@ -138,11 +138,11 @@
           }
         }
       } else {
-        for (Map.Entry<Text, Cell> e : table.getRow(rowKey).entrySet()) {
+        for (Map.Entry<byte [], Cell> e : table.getRow(rowKey).entrySet()) {
           if (count == 0) {
             formatter.header(isMultiple() ? HEADER_COLUMN_CELL : null);
           }
-          Text key = e.getKey();
+          byte [] key = e.getKey();
           String keyStr = key.toString();
           if (!columns.contains(ASTERISK) && !columns.contains(keyStr)) {
             continue;
@@ -167,28 +167,27 @@
     return 1;
   }
 
-  private String toString(final Text columnName, final byte[] cell)
+  private String toString(final byte [] columnName, final byte[] cell)
       throws IOException {
     String result = null;
-    if (columnName.equals(HConstants.COL_REGIONINFO)
-        || columnName.equals(HConstants.COL_SPLITA)
-        || columnName.equals(HConstants.COL_SPLITA)) {
+    if (Bytes.equals(columnName, HConstants.COL_REGIONINFO)
+        || Bytes.equals(columnName, HConstants.COL_SPLITA)
+        || Bytes.equals(columnName, HConstants.COL_SPLITB)) {
       result = Writables.getHRegionInfoOrNull(cell).toString();
-    } else if (columnName.equals(HConstants.COL_STARTCODE)) {
-      result = Long.toString(Writables.bytesToLong(cell));
+    } else if (Bytes.equals(columnName, HConstants.COL_STARTCODE)) {
+      result = Long.toString(Bytes.toLong(cell));
     } else {
-      result = Writables.bytesToString(cell);
+      result = Bytes.toString(cell);
     }
     return result;
   }
 
-  private String toString(final Text columnName, final Cell cell) 
+  private String toString(final byte [] columnName, final Cell cell) 
   throws IOException {
     if (cell == null) {
       return null;
-    } else {
-      return toString(columnName, cell.getValue());
     }
+    return toString(columnName, cell.getValue());
   }
 
   /**
@@ -196,19 +195,19 @@
    * could return more than one column.
    */
   class ParsedColumns {
-    private final List<Text> cols;
+    private final List<byte []> cols;
     private final boolean isMultiple;
 
-    ParsedColumns(final List<Text> columns) {
+    ParsedColumns(final List<byte []> columns) {
       this(columns, true);
     }
 
-    ParsedColumns(final List<Text> columns, final boolean isMultiple) {
+    ParsedColumns(final List<byte []> columns, final boolean isMultiple) {
       this.cols = columns;
       this.isMultiple = isMultiple;
     }
 
-    public List<Text> getColumns() {
+    public List<byte []> getColumns() {
       return this.cols;
     }
 
@@ -226,13 +225,14 @@
       if (timestamp == 0) {
         scan = table.getScanner(cols, rowKey);
       } else {
-        scan = table.getScanner(cols, rowKey, timestamp);
+        scan = table.getScanner(Bytes.toByteArrays(cols), rowKey.getBytes(),
+          timestamp);
       }
 
       if (this.stopRow.toString().length() > 0) {
         RowFilterInterface filter = new WhileMatchRowFilter(new StopRowFilter(
-            stopRow));
-        scan = table.getScanner(cols, rowKey, filter);
+            stopRow.getBytes()));
+        scan = table.getScanner(Bytes.toByteArrays(cols), rowKey.getBytes(), filter);
       }
 
       RowResult results = scan.next();
@@ -243,10 +243,10 @@
           formatter.header((parsedColumns.isMultiple()) ? HEADER : HEADER_ROW_CELL);
         }
 
-        Text r = results.getRow();
+        byte [] r = results.getRow();
 
         if (!countFunction) {
-          for (Text columnKey : results.keySet()) {
+          for (byte [] columnKey : results.keySet()) {
             String cellData = toString(columnKey, results.get(columnKey));
             if (parsedColumns.isMultiple()) {
               formatter.row(new String[] { r.toString(), columnKey.toString(),
@@ -287,23 +287,26 @@
     ParsedColumns result = null;
     try {
       if (columns.contains(ASTERISK)) {
-        if (tableName.equals(HConstants.ROOT_TABLE_NAME)
-            || tableName.equals(HConstants.META_TABLE_NAME)) {
+        if (tableName.equals(new Text(HConstants.ROOT_TABLE_NAME))
+            || tableName.equals(new Text(HConstants.META_TABLE_NAME))) {
           result = new ParsedColumns(Arrays.asList(HConstants.COLUMN_FAMILY_ARRAY));
         } else {
           HTableDescriptor[] tables = admin.listTables();
           for (int i = 0; i < tables.length; i++) {
-            if (tables[i].getName().equals(tableName)) {
-              result = new ParsedColumns(new ArrayList<Text>(tables[i].families()
-                  .keySet()));
+            if (tables[i].getNameAsString().equals(tableName.toString())) {
+              List<byte []> cols = new ArrayList<byte []>();
+              for (HColumnDescriptor h: tables[i].getFamilies()) {
+                cols.add(h.getName());
+              }
+              result = new ParsedColumns(cols);
               break;
             }
           }
         }
       } else {
-        List<Text> tmpList = new ArrayList<Text>();
+        List<byte []> tmpList = new ArrayList<byte []>();
         for (int i = 0; i < columns.size(); i++) {
-          Text column = null;
+          byte [] column = null;
           // Add '$' to column name if we are scanning. Scanners support
           // regex column names. Adding '$', the column becomes a
           // regex that does an explicit match on the supplied column name.
@@ -311,8 +314,8 @@
           // default behavior is to fetch all columns that have a matching
           // column family.
           column = (columns.get(i).contains(":")) ? new Text(columns.get(i)
-              + (scanning ? "$" : "")) : new Text(columns.get(i) + ":"
-              + (scanning ? "$" : ""));
+              + (scanning ? "$" : "")).getBytes() : new Text(columns.get(i) + ":"
+              + (scanning ? "$" : "")).getBytes();
           tmpList.add(column);
         }
         result = new ParsedColumns(tmpList, tmpList.size() > 1);

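Besides the type change, the rewritten toString() above fixes a real bug: the old code tested HConstants.COL_SPLITA twice and never matched COL_SPLITB. Note also that every column comparison now goes through Bytes.equals(), since == and Object.equals() on arrays check identity rather than content. A quick illustration, not from the commit (the column name is hypothetical):

    import org.apache.hadoop.hbase.util.Bytes;

    public class ByteArrayEquality {
      public static void main(String[] args) {
        byte[] a = Bytes.toBytes("info:splitA");   // hypothetical column name
        byte[] b = Bytes.toBytes("info:splitA");
        System.out.println(a == b);                 // false: two distinct arrays
        System.out.println(a.equals(b));            // false: identity, not content
        System.out.println(Bytes.equals(a, b));     // true: byte-by-byte comparison
      }
    }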
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/TruncateCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/TruncateCommand.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/TruncateCommand.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/TruncateCommand.java Thu May 15 15:10:47 2008
@@ -51,8 +51,8 @@
       HTableDescriptor[] tables = admin.listTables();
       HColumnDescriptor[] columns = null;
       for (int i = 0; i < tables.length; i++) {
-        if (tables[i].getName().equals(tableName)) {
-          columns = tables[i].getFamilies().values().toArray(
+        if (tables[i].getNameAsString().equals(tableName.toString())) {
+          columns = tables[i].getFamilies().toArray(
               new HColumnDescriptor[] {});
           break;
         }
@@ -60,7 +60,7 @@
       println("Truncating a '" + tableName + "' table ... Please wait.");
 
       admin.deleteTable(tableName); // delete the table
-      HTableDescriptor tableDesc = new HTableDescriptor(tableName.toString());
+      HTableDescriptor tableDesc = new HTableDescriptor(tableName.getBytes());
       for (int i = 0; i < columns.length; i++) {
         tableDesc.addFamily(columns[i]);
       }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java Thu May 15 15:10:47 2008
@@ -23,7 +23,7 @@
 import java.io.DataOutput;
 import java.io.IOException;
 
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
 
 /**
@@ -36,20 +36,23 @@
  * @see BatchUpdate 
  */
 public class BatchOperation implements Writable {
-  private Text column;
+  private byte [] column = null;
   
   // A null value defines DELETE operations.
-  private byte[] value;
-
-  /** Default constructor used by Writable */
+  private byte[] value = null;
+  
+  /**
+   * Default constructor
+   */
   public BatchOperation() {
-    this(new Text());
+    this(null);
   }
+
   /**
    * Creates a DELETE batch operation.
    * @param column column name
    */
-  public BatchOperation(final Text column) {
+  public BatchOperation(final byte [] column) {
     this(column, null);
   }
 
@@ -58,7 +61,7 @@
    * @param column column name
    * @param value column value.  If non-null, this is a PUT operation.
    */
-  public BatchOperation(final Text column, final byte [] value) {
+  public BatchOperation(final byte [] column, final byte [] value) {
     this.column = column;
     this.value = value;
   }
@@ -66,7 +69,7 @@
   /**
    * @return the column
    */
-  public Text getColumn() {
+  public byte [] getColumn() {
     return this.column;
   }
 
@@ -90,7 +93,7 @@
   // In Performance Evaluation sequentialWrite, 70% of object allocations are
   // done in here.
   public void readFields(final DataInput in) throws IOException {
-    this.column.readFields(in);
+    this.column = Bytes.readByteArray(in);
     // Is there a value to read?
     if (in.readBoolean()) {
       this.value = new byte[in.readInt()];
@@ -99,7 +102,7 @@
   }
 
   public void write(final DataOutput out) throws IOException {
-    this.column.write(out);
+    Bytes.writeByteArray(out, this.column);
     boolean p = isPut();
     out.writeBoolean(p);
     if (p) {

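BatchOperation's column now serializes through the length-prefixed helpers Bytes.writeByteArray() and Bytes.readByteArray() instead of Text's own Writable methods. A self-contained round trip, assuming only the two signatures visible in this diff (the column name is made up):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.hbase.util.Bytes;

    public class ByteArrayRoundTrip {
      public static void main(String[] args) throws IOException {
        byte[] column = Bytes.toBytes("colfam:qualifier");   // hypothetical column
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Bytes.writeByteArray(new DataOutputStream(baos), column); // length, then bytes
        DataInputStream in =
          new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
        byte[] back = Bytes.readByteArray(in);   // fresh, exactly-sized array
        System.out.println(Bytes.toString(back));   // colfam:qualifier
      }
    }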
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java Thu May 15 15:10:47 2008
@@ -25,9 +25,10 @@
 import java.util.ArrayList;
 import java.util.Iterator;
 
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.hbase.HConstants;
 
 /**
  * A Writable object that contains a series of BatchOperations
@@ -39,42 +40,82 @@
 public class BatchUpdate implements Writable, Iterable<BatchOperation> {
   
   // the row being updated
-  private Text row;
+  private byte [] row = null;
     
   // the batched operations
-  private ArrayList<BatchOperation> operations;
+  private ArrayList<BatchOperation> operations =
+    new ArrayList<BatchOperation>();
   
-  private long timestamp;
+  private long timestamp = HConstants.LATEST_TIMESTAMP;
   
-  /** Default constructor - used by Writable. */
+  /**
+   * Default constructor used when serializing.
+   */
   public BatchUpdate() {
-    this(new Text());
+    this ((byte [])null);
   }
-  
+
   /**
    * Initialize a BatchUpdate operation on a row. Timestamp is assumed to be
    * now.
    * 
    * @param row
    */
-  public BatchUpdate(Text row) {
+  public BatchUpdate(final Text row) {
     this(row, HConstants.LATEST_TIMESTAMP);
   }
   
   /**
+   * Initialize a BatchUpdate operation on a row. Timestamp is assumed to be
+   * now.
+   * 
+   * @param row
+   */
+  public BatchUpdate(final String row) {
+    this(Bytes.toBytes(row), HConstants.LATEST_TIMESTAMP);
+  }
+
+  /**
+   * Initialize a BatchUpdate operation on a row. Timestamp is assumed to be
+   * now.
+   * 
+   * @param row
+   */
+  public BatchUpdate(final byte [] row) {
+    this(row, HConstants.LATEST_TIMESTAMP);
+  }
+
+  /**
+   * Initialize a BatchUpdate operation on a row with a specific timestamp.
+   * 
+   * @param row
+   */
+  public BatchUpdate(final String row, long timestamp){
+    this(Bytes.toBytes(row), timestamp);
+  }
+
+  /**
+   * Initialize a BatchUpdate operation on a row with a specific timestamp.
+   * 
+   * @param row
+   */
+  public BatchUpdate(final Text row, long timestamp){
+    this(row.getBytes(), timestamp);
+  }
+
+  /**
    * Initialize a BatchUpdate operation on a row with a specific timestamp.
    * 
    * @param row
    */
-  public BatchUpdate(Text row, long timestamp){
+  public BatchUpdate(final byte [] row, long timestamp){
     this.row = row;
     this.timestamp = timestamp;
     this.operations = new ArrayList<BatchOperation>();
   }
 
-  
   /** @return the row */
-  public Text getRow() {
+  public byte [] getRow() {
     return row;
   }
 
@@ -91,7 +132,7 @@
   public void setTimestamp(long timestamp) {
     this.timestamp = timestamp;
   }
-  
+
   /** 
    * Change a value for the specified column
    *
@@ -99,20 +140,60 @@
    * @param val new value for column.  Cannot be null (can be empty).
    */
   public synchronized void put(final Text column, final byte val[]) {
+    put(column.getBytes(), val);
+  }
+  
+  /** 
+   * Change a value for the specified column
+   *
+   * @param column column whose value is being set
+   * @param val new value for column.  Cannot be null (can be empty).
+   */
+  public synchronized void put(final String column, final byte val[]) {
+    put(Bytes.toBytes(column), val);
+  }
+
+  /** 
+   * Change a value for the specified column
+   *
+   * @param column column whose value is being set
+   * @param val new value for column.  Cannot be null (can be empty).
+   */
+  public synchronized void put(final byte [] column, final byte val[]) {
     if (val == null) {
       // If null, the PUT becomes a DELETE operation.
       throw new IllegalArgumentException("Passed value cannot be null");
     }
     operations.add(new BatchOperation(column, val));
   }
-  
+
+  /** 
+   * Delete the value for a column
+   * Deletes the cell whose row/column/commit-timestamp match those of the
+   * delete.
+   * @param column name of column whose value is to be deleted
+   */
+  public void delete(final Text column) {
+    delete(column.getBytes());
+  }
+ 
+  /** 
+   * Delete the value for a column
+   * Deletes the cell whose row/column/commit-timestamp match those of the
+   * delete.
+   * @param column name of column whose value is to be deleted
+   */
+  public void delete(final String column) {
+    delete(Bytes.toBytes(column));
+  }
+
   /** 
    * Delete the value for a column
    * Deletes the cell whose row/column/commit-timestamp match those of the
    * delete.
    * @param column name of column whose value is to be deleted
    */
-  public synchronized void delete(final Text column) {
+  public synchronized void delete(final byte [] column) {
     operations.add(new BatchOperation(column));
   }
 
@@ -137,18 +218,18 @@
     if (this.operations.size() != 0) {
       this.operations.clear();
     }
-    row.readFields(in);
+    this.row = Bytes.readByteArray(in);
     timestamp = in.readLong();
     int nOps = in.readInt();
     for (int i = 0; i < nOps; i++) {
       BatchOperation op = new BatchOperation();
       op.readFields(in);
-      operations.add(op);
+      this.operations.add(op);
     }
   }
 
   public void write(final DataOutput out) throws IOException {
-    row.write(out);
+    Bytes.writeByteArray(out, this.row);
     out.writeLong(timestamp);
     out.writeInt(operations.size());
     for (BatchOperation op: operations) {

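The String, Text, and byte [] overloads added to BatchUpdate above let callers stay in whichever type they already hold while everything converges on byte [] internally. A usage sketch with hypothetical row and column names:

    import org.apache.hadoop.hbase.io.BatchUpdate;
    import org.apache.hadoop.hbase.util.Bytes;

    public class BatchUpdateUsage {
      public static void main(String[] args) {
        BatchUpdate bu = new BatchUpdate("row1");   // String overload; timestamp = now
        bu.put("colfam1:name", Bytes.toBytes("a value"));   // String column overload
        bu.delete("colfam1:stale");   // a delete is a BatchOperation with a null value
        // An HTable then applies the whole batch, as the HQL commands above do:
        // table.commit(bu);
      }
    }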
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/Cell.java Thu May 15 15:10:47 2008
@@ -23,6 +23,7 @@
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
 
 /**
@@ -69,6 +70,11 @@
     return timestamp;
   }
   
+  @Override
+  public String toString() {
+    return "timestamp=" + this.timestamp + ", value=" +
+      Bytes.toString(this.value);
+  }
   //
   // Writable
   //
@@ -76,15 +82,12 @@
   /** {@inheritDoc} */
   public void readFields(final DataInput in) throws IOException {
     timestamp = in.readLong();
-    int valueSize = in.readInt();
-    value = new byte[valueSize];
-    in.readFully(value, 0, valueSize);
+    this.value = Bytes.readByteArray(in);
   }
 
   /** {@inheritDoc} */
   public void write(final DataOutput out) throws IOException {
     out.writeLong(timestamp);
-    out.writeInt(value.length);
-    out.write(value);
+    Bytes.writeByteArray(out, this.value);
   } 
 }
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java Thu May 15 15:10:47 2008
@@ -26,34 +26,37 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import org.apache.hadoop.hbase.HStoreKey;
-import org.apache.hadoop.hbase.io.Cell;
-
 /**
  * A Writable Map.
  * Like {@link org.apache.hadoop.io.MapWritable} but dumb. It will fail
 * if passed a Writable it has not already been told about. It's also been
- * primed with hbase Writables.
+ * primed with hbase Writables.  Keys are always byte arrays.  That's the other
+ * difference from MapWritable.
+ * TODO: Have generics enforce V is a subclass of Writable and K is a byte []
+ * only.
  */
-public class HbaseMapWritable implements Map<Writable, Writable>, Writable,
-    Configurable {
+public class HbaseMapWritable <K, V>
+implements Map<byte [], V>, Writable, Configurable {
   private AtomicReference<Configuration> conf =
     new AtomicReference<Configuration>();
   
   // Static maps of code to class and vice versa.  Includes types used in hbase
   // only.
-  static final Map<Byte, Class<? extends Writable>> CODE_TO_CLASS =
-    new HashMap<Byte, Class<? extends Writable>>();
-  static final Map<Class<? extends Writable>, Byte> CLASS_TO_CODE =
-    new HashMap<Class<? extends Writable>, Byte>();
+  static final Map<Byte, Class<?>> CODE_TO_CLASS =
+    new HashMap<Byte, Class<?>>();
+  static final Map<Class<?>, Byte> CLASS_TO_CODE =
+    new HashMap<Class<?>, Byte>();
 
   static {
     byte code = 0;
@@ -61,22 +64,18 @@
     addToMap(ImmutableBytesWritable.class, code++);
     addToMap(Text.class, code++);
     addToMap(Cell.class, code++);
+    addToMap(byte [].class, code++);
   }
 
   @SuppressWarnings("boxing")
-  private static void addToMap(final Class<? extends Writable> clazz,
+  private static void addToMap(final Class<?> clazz,
       final byte code) {
     CLASS_TO_CODE.put(clazz, code);
     CODE_TO_CLASS.put(code, clazz);
   }
   
-  private Map<Writable, Writable> instance;
-  
-  /** Default constructor. */
-  public HbaseMapWritable() {
-    super();
-    this.instance = new HashMap<Writable, Writable>();
-  }
+  private Map<byte [], V> instance =
+    new TreeMap<byte [], V>(Bytes.BYTES_COMPARATOR);
 
   /** @return the conf */
   public Configuration getConf() {
@@ -104,12 +103,12 @@
   }
 
   /** {@inheritDoc} */
-  public Set<Map.Entry<Writable, Writable>> entrySet() {
+  public Set<Entry<byte [], V>> entrySet() {
     return instance.entrySet();
   }
 
   /** {@inheritDoc} */
-  public Writable get(Object key) {
+  public V get(Object key) {
     return instance.get(key);
   }
   
@@ -119,43 +118,17 @@
   }
 
   /** {@inheritDoc} */
-  public Set<Writable> keySet() {
+  public Set<byte []> keySet() {
     return instance.keySet();
   }
 
   /** {@inheritDoc} */
-  @SuppressWarnings("unchecked")
-  public Writable put(Writable key, Writable value) {
-    if (!CLASS_TO_CODE.containsKey(key.getClass())) {
-      throw new NullPointerException("Unsupported class " + 
-        key.getClass() + " cannot be used as a key.");
-    }
-    if (!CLASS_TO_CODE.containsKey(value.getClass())) {
-      throw new NullPointerException("Unsupported class " + 
-        value.getClass() + " cannot be used as a value.");
-    }
-    return instance.put(key, value);
-  }
-
-  /** {@inheritDoc} */
-  public void putAll(Map<? extends Writable, ? extends Writable> t) {
-    for (Map.Entry<? extends Writable, ? extends Writable> e: t.entrySet()) {
-      instance.put(e.getKey(), e.getValue());
-    }
-  }
-
-  /** {@inheritDoc} */
-  public Writable remove(Object key) {
-    return instance.remove(key);
-  }
-
-  /** {@inheritDoc} */
   public int size() {
     return instance.size();
   }
 
   /** {@inheritDoc} */
-  public Collection<Writable> values() {
+  public Collection<V> values() {
     return instance.values();
   }
   
@@ -176,18 +149,22 @@
     }
     return b;
   }
+  
+  @Override
+  public String toString() {
+    return this.instance.toString();
+  }
 
   /** {@inheritDoc} */
   public void write(DataOutput out) throws IOException {
     // Write out the number of entries in the map
-    out.writeInt(instance.size());
+    out.writeInt(this.instance.size());
 
     // Then write out each key/value pair
-    for (Map.Entry<Writable, Writable> e: instance.entrySet()) {
-      out.writeByte(getId(e.getKey().getClass()));
-      e.getKey().write(out);
+    for (Map.Entry<byte [], V> e: instance.entrySet()) {
+      Bytes.writeByteArray(out, e.getKey());
       out.writeByte(getId(e.getValue().getClass()));
-      e.getValue().write(out);
+      ((Writable)e.getValue()).write(out);
     }
   }
 
@@ -202,16 +179,24 @@
     
     // Then read each key/value pair
     for (int i = 0; i < entries; i++) {
-      Writable key = (Writable) ReflectionUtils.newInstance(getClass(
-          in.readByte()), getConf());
-      
-      key.readFields(in);
-      
-      Writable value = (Writable) ReflectionUtils.newInstance(getClass(
-          in.readByte()), getConf());
-      
+      byte [] key = Bytes.readByteArray(in);
+      Writable value = (Writable)ReflectionUtils.
+        newInstance(getClass(in.readByte()), getConf());
       value.readFields(in);
-      instance.put(key, value);
+      V v = (V)value;
+      this.instance.put(key, v);
     }
   }
-}
+
+  public void putAll(Map<? extends byte [], ? extends V> m) {
+    this.instance.putAll(m);
+  }
+
+  public V remove(Object key) {
+    return this.instance.remove(key);
+  }
+
+  public V put(byte [] key, V value) {
+    return this.instance.put(key, value);
+  }
+}
\ No newline at end of file

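The switch to a TreeMap keyed on Bytes.BYTES_COMPARATOR is what makes byte [] usable as a map key at all: arrays inherit identity-based hashCode() and equals() from Object, so the old HashMap approach would never find a key presented as a different array with the same contents. A small demonstration, not part of the commit (the column family is hypothetical):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    import org.apache.hadoop.hbase.util.Bytes;

    public class ByteArrayKeys {
      public static void main(String[] args) {
        byte[] key = Bytes.toBytes("colfam:");   // hypothetical column family
        Map<byte[], String> hash = new HashMap<byte[], String>();
        hash.put(key, "x");
        // Lookup with an equal-content but distinct array misses:
        System.out.println(hash.get(Bytes.toBytes("colfam:")));   // null

        Map<byte[], String> tree =
          new TreeMap<byte[], String>(Bytes.BYTES_COMPARATOR);
        tree.put(key, "x");
        System.out.println(tree.get(Bytes.toBytes("colfam:")));   // x
      }
    }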
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java Thu May 15 15:10:47 2008
@@ -39,12 +39,14 @@
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.filter.RowFilterSet;
 import org.apache.hadoop.hbase.io.HbaseMapWritable;
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.ObjectWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.RowResult;
+import org.apache.hadoop.hbase.util.Bytes;
 
 /** 
  * This is a customized version of the polymorphic hadoop
@@ -123,6 +125,7 @@
     }
     addToMap(RowResult.class, code++);
     addToMap(HRegionInfo[].class, code++);
+    addToMap(MapWritable.class, code++);
   }
   
   private Class<?> declaredClass;
@@ -210,17 +213,24 @@
                                  Class declaredClass, 
                                  Configuration conf)
   throws IOException {
+
     if (instance == null) {                       // null
       instance = new NullInstance(declaredClass, conf);
       declaredClass = Writable.class;
     }
     writeClassCode(out, declaredClass);
     if (declaredClass.isArray()) {                // array
-      int length = Array.getLength(instance);
-      out.writeInt(length);
-      for (int i = 0; i < length; i++) {
-        writeObject(out, Array.get(instance, i),
+      // If bytearray, just dump it out -- avoid the recursion and
+      // byte-at-a-time we were previously doing.
+      if (declaredClass.equals(byte [].class)) {
+        Bytes.writeByteArray(out, (byte [])instance);
+      } else {
+        int length = Array.getLength(instance);
+        out.writeInt(length);
+        for (int i = 0; i < length; i++) {
+          writeObject(out, Array.get(instance, i),
                     declaredClass.getComponentType(), conf);
+        }
       }
     } else if (declaredClass == String.class) {   // String
       Text.writeString(out, (String)instance);
@@ -301,10 +311,14 @@
         throw new IllegalArgumentException("Not a primitive: "+declaredClass);
       }
     } else if (declaredClass.isArray()) {              // array
-      int length = in.readInt();
-      instance = Array.newInstance(declaredClass.getComponentType(), length);
-      for (int i = 0; i < length; i++) {
-        Array.set(instance, i, readObject(in, conf));
+      if (declaredClass.equals(byte [].class)) {
+        instance = Bytes.readByteArray(in);
+      } else {
+        int length = in.readInt();
+        instance = Array.newInstance(declaredClass.getComponentType(), length);
+        for (int i = 0; i < length; i++) {
+          Array.set(instance, i, readObject(in, conf));
+        }
       }
     } else if (declaredClass == String.class) {        // String
       instance = Text.readString(in);
@@ -353,4 +367,4 @@
     return this.conf;
   }
   
-}
+}
\ No newline at end of file

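In HbaseObjectWritable, byte [] now gets a single bulk write instead of recursing through writeObject() once per element via reflection. A minimal sketch of that dispatch, not the real implementation; the slow path here handles int [] only as a stand-in for the reflective recursion:

    import java.io.DataOutput;
    import java.io.IOException;
    import java.lang.reflect.Array;

    import org.apache.hadoop.hbase.util.Bytes;

    public class ArrayFastPathSketch {
      // Sketch only: the real HbaseObjectWritable recurses into writeObject()
      // for each element of a non-byte[] array.
      static void writeArray(DataOutput out, Object instance, Class<?> declaredClass)
          throws IOException {
        if (declaredClass.equals(byte[].class)) {
          Bytes.writeByteArray(out, (byte[]) instance);   // one length-prefixed write
        } else {
          int length = Array.getLength(instance);
          out.writeInt(length);
          for (int i = 0; i < length; i++) {
            out.writeInt(Array.getInt(instance, i));   // element at a time, int[] only here
          }
        }
      }
    }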
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/RowResult.java Thu May 15 15:10:47 2008
@@ -23,39 +23,41 @@
 import java.io.DataOutput;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Iterator;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.Map;
 import java.util.Set;
-import java.util.HashSet;
-import java.util.Collection;
+import java.util.TreeSet;
 
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.Writable;
 
-public class RowResult implements Writable, Map<Text, Cell> {
-  protected Text row;
-  protected HbaseMapWritable cells;
-   
-  /**
-   * Used by Writable
-   */
-  public RowResult () {
-    row = new Text();
-    cells = new HbaseMapWritable();
+/**
+ * Holds row name and then a map of columns to cells.
+ */
+public class RowResult implements Writable, Map<byte [], Cell> {
+  private byte [] row = null;
+  private final HbaseMapWritable<byte [], Cell> cells;
+
+  public RowResult() {
+    this(null, new HbaseMapWritable<byte [], Cell>());
   }
-  
+
   /**
    * Create a RowResult from a row and Cell map
    */
-  public RowResult (final Text row, final HbaseMapWritable hbw) {
+  public RowResult (final byte [] row,
+      final HbaseMapWritable<byte [], Cell> m) {
     this.row = row;
-    this.cells = hbw;
+    this.cells = m;
   }
   
   /**
    * Get the row for this RowResult
    */
-  public Text getRow() {
+  public byte [] getRow() {
     return row;
   }
 
@@ -63,19 +65,21 @@
   // Map interface
   // 
   
-  public Cell put(Text key, Cell value) {
+  public Cell put(@SuppressWarnings("unused") byte [] key,
+    @SuppressWarnings("unused") Cell value) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
-  public void putAll(Map map) {
+  @SuppressWarnings("unchecked")
+  public void putAll(@SuppressWarnings("unused") Map map) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
   public Cell get(Object key) {
-    return (Cell)cells.get(key);
+    return (Cell)this.cells.get(key);
   }
 
-  public Cell remove(Object key) {
+  public Cell remove(@SuppressWarnings("unused") Object key) {
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
@@ -83,7 +87,7 @@
     return cells.containsKey(key);
   }
 
-  public boolean containsValue(Object value) {
+  public boolean containsValue(@SuppressWarnings("unused") Object value) {
     throw new UnsupportedOperationException("Don't support containsValue!");
   }
 
@@ -99,20 +103,16 @@
     throw new UnsupportedOperationException("RowResult is read-only!");
   }
 
-  public Set<Text> keySet() {
-    Set<Text> result = new HashSet<Text>();
-    for (Writable w : cells.keySet()) {
-      result.add((Text)w);
+  public Set<byte []> keySet() {
+    Set<byte []> result = new TreeSet<byte []>(Bytes.BYTES_COMPARATOR);
+    for (byte [] w : cells.keySet()) {
+      result.add(w);
     }
     return result;
   }
 
-  public Set<Map.Entry<Text, Cell>> entrySet() {
-    Set<Map.Entry<Text, Cell>> result = new HashSet<Map.Entry<Text, Cell>>();
-    for (Map.Entry<Writable, Writable> e : cells.entrySet()) {
-      result.add(new Entry((Text)e.getKey(), (Cell)e.getValue()));
-    }
-    return result;
+  public Set<Map.Entry<byte [], Cell>> entrySet() {
+    return Collections.unmodifiableSet(this.cells.entrySet());
   }
 
   public Collection<Cell> values() {
@@ -126,25 +126,28 @@
   /**
    * Get the Cell that corresponds to column
    */
-  public Cell get(Text column) {
-    return (Cell)cells.get(column);
+  public Cell get(byte [] column) {
+    return this.cells.get(column);
   }
   
-  public class Entry implements Map.Entry<Text, Cell> {
-    private Text row;
-    private Cell cell;
+  /**
+   * Row entry.
+   */
+  public class Entry implements Map.Entry<byte [], Cell> {
+    private final byte [] column;
+    private final Cell cell;
     
-    Entry(Text row, Cell cell) {
-      this.row = row;
+    Entry(byte [] row, Cell cell) {
+      this.column = row;
       this.cell = cell;
     }
     
-    public Cell setValue(Cell c) {
+    public Cell setValue(@SuppressWarnings("unused") Cell c) {
       throw new UnsupportedOperationException("RowResult is read-only!");
     }
     
-    public Text getKey() {
-      return row;
+    public byte [] getKey() {
+      return column;
     }
     
     public Cell getValue() {
@@ -152,17 +155,51 @@
     }
   }
   
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("row=");
+    sb.append(Bytes.toString(this.row));
+    sb.append(", cells={");
+    boolean moreThanOne = false;
+    for (Map.Entry<byte [], Cell> e: this.cells.entrySet()) {
+      if (moreThanOne) {
+        sb.append(", ");
+      } else {
+        moreThanOne = true;
+      }
+      sb.append("(column=");
+      sb.append(Bytes.toString(e.getKey()));
+      sb.append(", timestamp=");
+      sb.append(Long.toString(e.getValue().getTimestamp()));
+      sb.append(", value=");
+      byte [] v = e.getValue().getValue();
+      if (Bytes.equals(e.getKey(), HConstants.COL_REGIONINFO)) {
+        try {
+          sb.append(Writables.getHRegionInfo(v).toString());
+        } catch (IOException ioe) {
+          sb.append(ioe.toString());
+        }
+      } else {
+        sb.append(v); 
+      }
+      sb.append(")");
+    }
+    sb.append("}");
+    return sb.toString();
+  }
+  
   //
   // Writable
   //
-
+  
   public void readFields(final DataInput in) throws IOException {
-    row.readFields(in);
-    cells.readFields(in);
+    this.row = Bytes.readByteArray(in);
+    this.cells.readFields(in);
   }
 
   public void write(final DataOutput out) throws IOException {
-    row.write(out);
-    cells.write(out);
-  }  
-}
+    Bytes.writeByteArray(out, this.row);
+    this.cells.write(out);
+  }
+}
\ No newline at end of file

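RowResult keeps its read-only Map contract but is now keyed on byte []. A usage sketch built on the constructors in this diff; the Cell(value, timestamp) constructor is an assumption here, and the row and column names are made up:

    import java.util.Map;

    import org.apache.hadoop.hbase.io.Cell;
    import org.apache.hadoop.hbase.io.HbaseMapWritable;
    import org.apache.hadoop.hbase.io.RowResult;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RowResultUsage {
      public static void main(String[] args) {
        HbaseMapWritable<byte[], Cell> cells = new HbaseMapWritable<byte[], Cell>();
        // Assumed Cell(byte [] value, long timestamp) constructor:
        cells.put(Bytes.toBytes("colfam:name"),
          new Cell(Bytes.toBytes("a value"), System.currentTimeMillis()));
        RowResult rr = new RowResult(Bytes.toBytes("row1"), cells);
        for (Map.Entry<byte[], Cell> e : rr.entrySet()) {
          System.out.println(Bytes.toString(e.getKey()) + " => "
            + Bytes.toString(e.getValue().getValue()));
        }
      }
    }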
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java Thu May 15 15:10:47 2008
@@ -19,13 +19,12 @@
  */
 package org.apache.hadoop.hbase.ipc;
 
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.ipc.VersionedProtocol;
-import org.apache.hadoop.hbase.HTableDescriptor;
+import java.io.IOException;
+
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HServerAddress;
-
-import java.io.IOException;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.ipc.VersionedProtocol;
 
 /**
  * Clients interact with the HMasterInterface to gain access to meta-level
@@ -38,8 +37,9 @@
    * Version was incremented to 2 when we brought the hadoop RPC local to hbase
    * -- HADOOP-2495 and then to 3 when we changed the RPC to send codes instead
    * of actual class names (HADOOP-2519).
+   * <p>Version 4 when we moved to all byte arrays (HBASE-42).
    */
-  public static final long versionID = 3L;
+  public static final long versionID = 4L;
 
   /** @return true if master is available */
   public boolean isMasterRunning();
@@ -58,7 +58,7 @@
    * @param tableName
    * @throws IOException
    */
-  public void deleteTable(Text tableName) throws IOException;
+  public void deleteTable(final byte [] tableName) throws IOException;
   
   /**
    * Adds a column to the specified table
@@ -66,7 +66,8 @@
    * @param column column descriptor
    * @throws IOException
    */
-  public void addColumn(Text tableName, HColumnDescriptor column) throws IOException;
+  public void addColumn(final byte [] tableName, HColumnDescriptor column)
+  throws IOException;
 
   /**
    * Modifies an existing column on the specified table
@@ -75,7 +76,7 @@
    * @param descriptor new column descriptor
    * @throws IOException
    */
-  public void modifyColumn(Text tableName, Text columnName, 
+  public void modifyColumn(final byte [] tableName, final byte [] columnName, 
     HColumnDescriptor descriptor) 
   throws IOException;
 
@@ -86,14 +87,15 @@
    * @param columnName
    * @throws IOException
    */
-  public void deleteColumn(Text tableName, Text columnName) throws IOException;
+  public void deleteColumn(final byte [] tableName, final byte [] columnName)
+  throws IOException;
   
   /**
    * Puts the table on-line (only needed if table has been previously taken offline)
    * @param tableName
    * @throws IOException
    */
-  public void enableTable(Text tableName) throws IOException;
+  public void enableTable(final byte [] tableName) throws IOException;
   
   /**
    * Take table offline
@@ -101,7 +103,7 @@
    * @param tableName
    * @throws IOException
    */
-  public void disableTable(Text tableName) throws IOException;
+  public void disableTable(final byte [] tableName) throws IOException;
   
   /**
    * Shutdown an HBase cluster.
@@ -114,4 +116,4 @@
    * @return address of server that serves the root region
    */
   public HServerAddress findRootRegion();
-}
+}
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HMasterRegionInterface.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HMasterRegionInterface.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HMasterRegionInterface.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HMasterRegionInterface.java Thu May 15 15:10:47 2008
@@ -21,7 +21,7 @@
 
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.io.HbaseMapWritable;
+import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.ipc.VersionedProtocol;
 import org.apache.hadoop.hbase.HServerInfo;
 import org.apache.hadoop.hbase.HMsg;
@@ -32,8 +32,11 @@
  * goings-on and to obtain data-handling instructions from the HMaster.
  */
 public interface HMasterRegionInterface extends VersionedProtocol {
-  /** Interface version number */
-  public static final long versionID = 1L;
+  /** Interface version number.
+   * Version 2 was when the regionServerStartup was changed to return a
+   * MapWritable instead of a HbaseMapWritable.
+   */
+  public static final long versionID = 2L;
   
   /**
    * Called when a region server first starts
@@ -42,7 +45,7 @@
    * @return Configuration for the regionserver to use: e.g. filesystem,
    * hbase rootdir, etc.
    */
-  public HbaseMapWritable regionServerStartup(HServerInfo info) throws IOException;
+  public MapWritable regionServerStartup(HServerInfo info) throws IOException;
   
   /**
    * Called to renew lease, tell master what the region server is doing and to
@@ -59,4 +62,4 @@
   public HMsg[] regionServerReport(HServerInfo info, HMsg msgs[], 
     HRegionInfo mostLoadedRegions[])
   throws IOException;
-}
+}
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java Thu May 15 15:10:47 2008
@@ -26,7 +26,6 @@
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.RowResult;
 
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.VersionedProtocol;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.NotServingRegionException;
@@ -35,8 +34,11 @@
  * Clients interact with HRegionServers using a handle to the HRegionInterface.
  */
 public interface HRegionInterface extends VersionedProtocol {
-  /** initial version */
-  public static final long versionID = 2L;
+  /**
+   * Protocol version.
+   * Upped to 3 when we went from Text to byte arrays for row and column names.
+   */
+  public static final long versionID = 3L;
 
   /** 
    * Get metainfo about an HRegion
@@ -45,7 +47,7 @@
    * @return HRegionInfo object for region
    * @throws NotServingRegionException
    */
-  public HRegionInfo getRegionInfo(final Text regionName)
+  public HRegionInfo getRegionInfo(final byte [] regionName)
   throws NotServingRegionException;
 
   /**
@@ -58,7 +60,7 @@
   * @return value for that region/row/column
    * @throws IOException
    */
-  public Cell get(final Text regionName, final Text row, final Text column)
+  public Cell get(final byte [] regionName, final byte [] row, final byte [] column)
   throws IOException;
 
   /**
@@ -71,8 +73,8 @@
    * @return array of values
    * @throws IOException
    */
-  public Cell[] get(final Text regionName, final Text row,
-    final Text column, final int numVersions)
+  public Cell[] get(final byte [] regionName, final byte [] row,
+    final byte [] column, final int numVersions)
   throws IOException;
   
   /**
@@ -87,8 +89,8 @@
    * @return array of values
    * @throws IOException
    */
-  public Cell[] get(final Text regionName, final Text row,
-    final Text column, final long timestamp, final int numVersions)
+  public Cell[] get(final byte [] regionName, final byte [] row,
+    final byte [] column, final long timestamp, final int numVersions)
   throws IOException;
   
   /**
@@ -99,7 +101,8 @@
    * @return map of values
    * @throws IOException
    */
-  public RowResult getRow(final Text regionName, final Text row, final long ts)
+  public RowResult getRow(final byte [] regionName, final byte [] row,
+    final long ts)
   throws IOException;
 
   /**
@@ -111,7 +114,8 @@
    * @return map of values
    * @throws IOException
    */
-  public RowResult getClosestRowBefore(final Text regionName, final Text row)
+  public RowResult getClosestRowBefore(final byte [] regionName,
+    final byte [] row)
   throws IOException;
 
   /**
@@ -122,8 +126,8 @@
    * @return map of values
    * @throws IOException
    */
-  public RowResult getRow(final Text regionName, final Text row, 
-    final Text[] columns, final long ts)
+  public RowResult getRow(final byte [] regionName, final byte [] row, 
+    final byte[][] columns, final long ts)
   throws IOException;
 
   /**
@@ -134,8 +138,8 @@
    * @return map of values
    * @throws IOException
    */
-  public RowResult getRow(final Text regionName, final Text row, 
-    final Text[] columns)
+  public RowResult getRow(final byte [] regionName, final byte [] row, 
+    final byte[][] columns)
   throws IOException;
 
   /**
@@ -145,7 +149,7 @@
    * @param b BatchUpdate
    * @throws IOException
    */
-  public void batchUpdate(Text regionName, BatchUpdate b)
+  public void batchUpdate(final byte [] regionName, final BatchUpdate b)
   throws IOException;
   
   /**
@@ -158,7 +162,8 @@
    * @param timestamp Delete all entries that have this timestamp or older
    * @throws IOException
    */
-  public void deleteAll(Text regionName, Text row, Text column, long timestamp)
+  public void deleteAll(byte [] regionName, byte [] row, byte [] column,
+    long timestamp)
   throws IOException;
 
   /**
@@ -170,7 +175,7 @@
    * @param timestamp Delete all entries that have this timestamp or older
    * @throws IOException
    */
-  public void deleteAll(Text regionName, Text row, long timestamp)
+  public void deleteAll(byte [] regionName, byte [] row, long timestamp)
   throws IOException;
 
   /**
@@ -182,7 +187,7 @@
    * @param family The column family to match
    * @param timestamp Timestamp to match
    */
-  public void deleteFamily(Text regionName, Text row, Text family, 
+  public void deleteFamily(byte [] regionName, byte [] row, byte [] family, 
     long timestamp)
   throws IOException;
 
@@ -207,13 +212,12 @@
    * @return scannerId scanner identifier used in other calls
    * @throws IOException
    */
-  public long openScanner(Text regionName, Text[] columns, Text startRow,
-      long timestamp, RowFilterInterface filter)
+  public long openScanner(final byte [] regionName, final byte [][] columns,
+      final byte []startRow, long timestamp, RowFilterInterface filter)
   throws IOException;
 
   /**
    * Get the next set of values
-   * 
    * @param scannerId clientId passed to openScanner
    * @return map of values
    * @throws IOException
@@ -227,4 +231,4 @@
    * @throws IOException
    */
   public void close(long scannerId) throws IOException;
-}
+}
\ No newline at end of file

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java Thu May 15 15:10:47 2008
@@ -26,8 +26,9 @@
 
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.io.Cell;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
@@ -36,7 +37,7 @@
 /**
  * Extract grouping columns from input record
  */
-public class GroupingTableMap extends TableMap<Text,RowResult> {
+public class GroupingTableMap extends TableMap<ImmutableBytesWritable,RowResult> {
 
   /**
    * JobConf parameter to specify the columns used to produce the key passed to 
@@ -45,7 +46,7 @@
   public static final String GROUP_COLUMNS =
     "hbase.mapred.groupingtablemap.columns";
   
-  protected Text[] m_columns;
+  protected byte [][] m_columns;
 
   /**
    * Use this before submitting a TableMap job. It will appropriately set up the
@@ -62,7 +63,7 @@
   public static void initJob(String table, String columns, String groupColumns, 
     Class<? extends TableMap> mapper, JobConf job) {
     
-    initJob(table, columns, mapper, Text.class, RowResult.class, job);
+    initJob(table, columns, mapper, ImmutableBytesWritable.class, RowResult.class, job);
     job.set(GROUP_COLUMNS, groupColumns);
   }
 
@@ -71,9 +72,9 @@
   public void configure(JobConf job) {
     super.configure(job);
     String[] cols = job.get(GROUP_COLUMNS, "").split(" ");
-    m_columns = new Text[cols.length];
+    m_columns = new byte[cols.length][];
     for(int i = 0; i < cols.length; i++) {
-      m_columns[i] = new Text(cols[i]);
+      m_columns[i] = Bytes.toBytes(cols[i]);
     }
   }
 
@@ -84,13 +85,13 @@
    * If any of the grouping columns are not found in the value, the record is skipped.
    */
   @Override
-  public void map(@SuppressWarnings("unused") Text key,
-      RowResult value, OutputCollector<Text,RowResult> output,
+  public void map(@SuppressWarnings("unused") ImmutableBytesWritable key,
+      RowResult value, OutputCollector<ImmutableBytesWritable,RowResult> output,
       @SuppressWarnings("unused") Reporter reporter) throws IOException {
     
     byte[][] keyVals = extractKeyValues(value);
     if(keyVals != null) {
-      Text tKey = createGroupKey(keyVals);
+      ImmutableBytesWritable tKey = createGroupKey(keyVals);
       output.collect(tKey, value);
     }
   }
@@ -109,10 +110,10 @@
     ArrayList<byte[]> foundList = new ArrayList<byte[]>();
     int numCols = m_columns.length;
     if(numCols > 0) {
-      for (Map.Entry<Text, Cell> e: r.entrySet()) {
-        Text column = e.getKey();
+      for (Map.Entry<byte [], Cell> e: r.entrySet()) {
+        byte [] column = e.getKey();
         for (int i = 0; i < numCols; i++) {
-          if (column.equals(m_columns[i])) {
+          if (Bytes.equals(column, m_columns[i])) {
             foundList.add(e.getValue().getValue());
             break;
           }
@@ -132,7 +133,7 @@
    * @param vals
    * @return key generated by concatenating multiple column values
    */
-  protected Text createGroupKey(byte[][] vals) {
+  protected ImmutableBytesWritable createGroupKey(byte[][] vals) {
     if(vals == null) {
       return null;
     }
@@ -147,6 +148,6 @@
         throw new RuntimeException(e);
       }
     }
-    return new Text(sb.toString());
+    return new ImmutableBytesWritable(Bytes.toBytes(sb.toString()));
   }
 }
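For orientation, a hedged sketch of driver wiring after this change; the table,
column, and driver class names below are invented for illustration:

    // Hypothetical driver snippet: keys flowing out of the map are now
    // ImmutableBytesWritable rather than Text.
    JobConf job = new JobConf(new HBaseConfiguration(), ExampleDriver.class);
    GroupingTableMap.initJob("webtable", "contents: anchor:",
      "contents:", GroupingTableMap.class, job);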

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java Thu May 15 15:10:47 2008
@@ -21,8 +21,8 @@
 
 import java.io.IOException;
 
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
@@ -30,7 +30,7 @@
 /**
  * Pass the given key and record as-is to reduce
  */
-public class IdentityTableMap extends TableMap<Text, RowResult> {
+public class IdentityTableMap extends TableMap<ImmutableBytesWritable, RowResult> {
 
   /** constructor */
   public IdentityTableMap() {
@@ -49,15 +49,16 @@
   @SuppressWarnings("unchecked")
   public static void initJob(String table, String columns,
     Class<? extends TableMap> mapper, JobConf job) {
-    TableMap.initJob(table, columns, mapper, Text.class, RowResult.class, job);
+    TableMap.initJob(table, columns, mapper, ImmutableBytesWritable.class,
+      RowResult.class, job);
   }
 
   /**
    * Pass the key, value to reduce
    */
   @Override
-  public void map(Text key, RowResult value,
-      OutputCollector<Text,RowResult> output,
+  public void map(ImmutableBytesWritable key, RowResult value,
+      OutputCollector<ImmutableBytesWritable,RowResult> output,
       @SuppressWarnings("unused") Reporter reporter) throws IOException {
     
     // convert 
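Job setup for the identity map stays a one-liner; a hypothetical invocation
(table and column names are placeholders, 'job' a JobConf prepared as in the
GroupingTableMap sketch above):

    // Pass-through scan job keyed on ImmutableBytesWritable.
    IdentityTableMap.initJob("webtable", "contents:", IdentityTableMap.class, job);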

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java Thu May 15 15:10:47 2008
@@ -22,19 +22,17 @@

 import java.io.IOException;
 import java.util.Iterator;
 
-import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.hbase.io.BatchUpdate;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.io.BatchUpdate;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
 
 /**
  * Write to table each key, record pair
  */
-public class IdentityTableReduce extends TableReduce<Text, BatchUpdate> {
+public class IdentityTableReduce extends TableReduce<ImmutableBytesWritable, BatchUpdate> {
   private static final Log LOG =
     LogFactory.getLog(IdentityTableReduce.class.getName());
   
@@ -44,8 +41,8 @@
    * @see org.apache.hadoop.hbase.mapred.TableReduce#reduce(org.apache.hadoop.io.WritableComparable, java.util.Iterator, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter)
    */
   @Override
-  public void reduce(Text key, Iterator<BatchUpdate> values,
-      OutputCollector<Text, BatchUpdate> output,
+  public void reduce(ImmutableBytesWritable key, Iterator<BatchUpdate> values,
+      OutputCollector<ImmutableBytesWritable, BatchUpdate> output,
       @SuppressWarnings("unused") Reporter reporter)
       throws IOException {
     
@@ -53,4 +50,4 @@
       output.collect(key, values.next());
     }
   }
-}
+}
\ No newline at end of file
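Wiring the identity reduce into a job presumably goes through
TableReduce.initJob (see the TableReduce diff further down); a hedged sketch,
with the output table name invented:

    // Assumes TableReduce.initJob(String, Class, JobConf) as the TableReduce
    // changes below suggest; "targetTable" is a placeholder name.
    TableReduce.initJob("targetTable", IdentityTableReduce.class, job);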

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexOutputFormat.java Thu May 15 15:10:47 2008
@@ -26,7 +26,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormatBase;
 import org.apache.hadoop.mapred.RecordWriter;
@@ -42,11 +42,11 @@
  * the index, and copy the index to the destination.
  */
 public class IndexOutputFormat extends
-    OutputFormatBase<Text, LuceneDocumentWrapper> {
+    OutputFormatBase<ImmutableBytesWritable, LuceneDocumentWrapper> {
   static final Log LOG = LogFactory.getLog(IndexOutputFormat.class);
 
   @Override
-  public RecordWriter<Text, LuceneDocumentWrapper> getRecordWriter(
+  public RecordWriter<ImmutableBytesWritable, LuceneDocumentWrapper> getRecordWriter(
     final FileSystem fs, JobConf job, String name, final Progressable progress)
   throws IOException {
 
@@ -97,11 +97,11 @@
     }
     writer.setUseCompoundFile(indexConf.isUseCompoundFile());
 
-    return new RecordWriter<Text, LuceneDocumentWrapper>() {
+    return new RecordWriter<ImmutableBytesWritable, LuceneDocumentWrapper>() {
       private boolean closed;
       private long docCount = 0;
 
-      public void write(@SuppressWarnings("unused") Text key,
+      public void write(@SuppressWarnings("unused") ImmutableBytesWritable key,
         LuceneDocumentWrapper value)
       throws IOException {
         // unwrap and index doc

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexTableReduce.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexTableReduce.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexTableReduce.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/IndexTableReduce.java Thu May 15 15:10:47 2008
@@ -24,11 +24,10 @@
 import java.util.Map;
 
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.hbase.io.Cell;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MapReduceBase;
 import org.apache.hadoop.mapred.OutputCollector;
@@ -43,7 +42,7 @@
  * to build a Lucene index
  */
 public class IndexTableReduce extends MapReduceBase implements
-    Reducer<Text, RowResult, Text, LuceneDocumentWrapper> {
+    Reducer<ImmutableBytesWritable, RowResult, ImmutableBytesWritable, LuceneDocumentWrapper> {
   private static final Logger LOG = Logger.getLogger(IndexTableReduce.class);
 
   private IndexConfiguration indexConf;
@@ -64,9 +63,10 @@
     super.close();
   }
 
-  public void reduce(Text key, Iterator<RowResult> values,
-      OutputCollector<Text, LuceneDocumentWrapper> output, Reporter reporter)
-      throws IOException {
+  public void reduce(ImmutableBytesWritable key, Iterator<RowResult> values,
+      OutputCollector<ImmutableBytesWritable, LuceneDocumentWrapper> output,
+      Reporter reporter)
+  throws IOException {
     if (!values.hasNext()) {
       return;
     }
@@ -74,7 +74,8 @@
     Document doc = new Document();
 
     // index and store row key, row key already UTF-8 encoded
-    Field keyField = new Field(indexConf.getRowkeyName(), key.toString(),
+    Field keyField = new Field(indexConf.getRowkeyName(),
+      Bytes.toString(key.get()),
       Field.Store.YES, Field.Index.UN_TOKENIZED);
     keyField.setOmitNorms(true);
     doc.add(keyField);
@@ -83,7 +84,7 @@
       RowResult value = values.next();
 
       // each column (name-value pair) is a field (name-value pair)
-      for (Map.Entry<Text, Cell> entry : value.entrySet()) {
+      for (Map.Entry<byte [], Cell> entry : value.entrySet()) {
         // name is already UTF-8 encoded
-        String column = entry.getKey().toString();
+        String column = Bytes.toString(entry.getKey());
         byte[] columnValue = entry.getValue().getValue();

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/RowCounter.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/RowCounter.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/RowCounter.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/RowCounter.java Thu May 15 15:10:47 2008
@@ -26,8 +26,8 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.io.Cell;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputCollector;
@@ -41,7 +41,7 @@
  * Map outputs table rows IF the input row has columns that have content.  
  * Uses an {@link IdentityReducer}
  */
-public class RowCounter extends TableMap<Text, RowResult> implements Tool {
+public class RowCounter extends TableMap<ImmutableBytesWritable, RowResult> implements Tool {
   /* Name of this 'program'
    */
   static final String NAME = "rowcounter";
@@ -51,12 +51,12 @@
   private static enum Counters {ROWS}
   
   @Override
-  public void map(Text row, RowResult value,
-    OutputCollector<Text, RowResult> output,
+  public void map(ImmutableBytesWritable row, RowResult value,
+    OutputCollector<ImmutableBytesWritable, RowResult> output,
     @SuppressWarnings("unused") Reporter reporter)
   throws IOException {
     boolean content = false;
-    for (Map.Entry<Text, Cell> e: value.entrySet()) {
+    for (Map.Entry<byte [], Cell> e: value.entrySet()) {
       Cell cell = e.getValue();
       if (cell != null && cell.getValue().length > 0) {
         content = true;
@@ -85,8 +85,8 @@
       sb.append(args[i]);
     }
     // Second argument is the table name.
-    TableMap.initJob(args[1], sb.toString(), this.getClass(), Text.class,
-      RowResult.class, c);
+    TableMap.initJob(args[1], sb.toString(), this.getClass(),
+      ImmutableBytesWritable.class, RowResult.class, c);
     c.setReducerClass(IdentityReducer.class);
     // First arg is the output directory.
     c.setOutputPath(new Path(args[0]));

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java Thu May 15 15:10:47 2008
@@ -26,6 +26,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.JobConfigurable;
@@ -50,14 +51,13 @@
     Path[] tableNames = job.getInputPaths();
     String colArg = job.get(COLUMN_LIST);
     String[] colNames = colArg.split(" ");
-    Text[] m_cols = new Text[colNames.length];
+    byte [][] m_cols = new byte[colNames.length][];
     for (int i = 0; i < m_cols.length; i++) {
-      m_cols[i] = new Text(colNames[i]);
+      m_cols[i] = Bytes.toBytes(colNames[i]);
     }
     setInputColums(m_cols);
     try {
-      setHTable(new HTable(new HBaseConfiguration(job), new Text(tableNames[0]
-          .getName())));
+      setHTable(new HTable(new HBaseConfiguration(job), tableNames[0].getName()));
     } catch (Exception e) {
       LOG.error(e);
     }
@@ -66,7 +66,7 @@
   /** {@inheritDoc} */
   public void validateInput(JobConf job) throws IOException {
     // expecting exactly one path
-    Path[] tableNames = job.getInputPaths();
+    Path [] tableNames = job.getInputPaths();
     if (tableNames == null || tableNames.length > 1) {
       throw new IOException("expecting one table name");
     }
@@ -77,4 +77,4 @@
       throw new IOException("expecting at least one column");
     }
   }
-}
\ No newline at end of file
+}
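Job setup for this format keeps using the space-separated COLUMN_LIST property;
a hedged one-liner (column names invented, and assuming the COLUMN_LIST
constant is visible to driver code, as its unqualified use above suggests):

    // Each space-separated name is converted with Bytes.toBytes() in
    // configure(); "contents:" and "anchor:" are placeholder families.
    job.set(TableInputFormat.COLUMN_LIST, "contents: anchor:");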

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java Thu May 15 15:10:47 2008
@@ -1,14 +1,21 @@
-/*
- * $Id$
+/**
+ * Copyright 2008 The Apache Software Foundation
  *
- * Copyright   Critical Software S.A., All Rights Reserved.
- * (www.criticalsoftware.com)
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  *
- * This software is the proprietary information of Critical Software S.A.
- * Use is subject to license terms.
+ *     http://www.apache.org/licenses/LICENSE-2.0
  *
- * Last changed on : $Date$
- * Last changed by : $Author$
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 package org.apache.hadoop.hbase.mapred;
 
@@ -18,11 +25,13 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Scanner;
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.filter.RowFilterSet;
 import org.apache.hadoop.hbase.filter.StopRowFilter;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.io.RowResult;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.util.Writables;
@@ -36,7 +45,7 @@
 /**
  * A Base for {@link TableInputFormat}s. Receives a {@link HTable}, a
  * {@link Text}[] of input columns and optionally a {@link RowFilterInterface}.
- * Subclasses may use other {@link TableRecordReader} implementations.
+ * Subclasses may use other TableRecordReader implementations.
  * <p>
  * An example of a subclass:
  * <code>
@@ -44,11 +53,11 @@
  *
  *     public void configure(JobConf job) {
  *       HTable exampleTable = new HTable(new HBaseConfiguration(job),
- *         new Text("exampleTable"));
+ *         Bytes.toBytes("exampleTable"));
  *       // mandatory
  *       setHTable(exampleTable);
- *       Text[] inputColumns = new Text[] { new Text("columnA"),
- *         new Text("columnB") };
+ *       byte [][] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
+ *         Bytes.toBytes("columnB") };
  *       // mandatory
  *       setInputColums(inputColumns);
  *       RowFilterInterface exampleFilter = new RegExpRowFilter("keyPrefix.*");
@@ -62,9 +71,9 @@
  * </code>
  */
 public abstract class TableInputFormatBase
-implements InputFormat<Text, RowResult> {
+implements InputFormat<ImmutableBytesWritable, RowResult> {
   private final Log LOG = LogFactory.getLog(TableInputFormatBase.class);
-  private Text[] inputColumns;
+  private byte [][] inputColumns;
   private HTable table;
   private TableRecordReader tableRecordReader;
   private RowFilterInterface rowFilter;
@@ -72,14 +81,14 @@
   /**
-   * Iterate over an HBase table data, return (Text, RowResult) pairs
+   * Iterate over HBase table data, returning (ImmutableBytesWritable, RowResult) pairs
    */
-  protected class TableRecordReader implements RecordReader<Text, RowResult> {
-
-    private Text startRow;
-    private Text endRow;
+  protected class TableRecordReader
+  implements RecordReader<ImmutableBytesWritable, RowResult> {
+    private byte [] startRow;
+    private byte [] endRow;
     private RowFilterInterface trrRowFilter;
     private Scanner scanner;
     private HTable htable;
-    private Text[] trrInputColumns;
+    private byte [][] trrInputColumns;
 
     /**
      * Build the scanner. Not done in constructor to allow for extension.
@@ -87,7 +96,7 @@
      * @throws IOException
      */
     public void init() throws IOException {
-      if ((endRow != null) && (endRow.getLength() > 0)) {
+      if ((endRow != null) && (endRow.length > 0)) {
         if (trrRowFilter != null) {
           final Set<RowFilterInterface> rowFiltersSet =
             new HashSet<RowFilterInterface>();
@@ -116,14 +125,14 @@
     /**
      * @param inputColumns the columns to be placed in {@link RowResult}.
      */
-    public void setInputColumns(Text[] inputColumns) {
+    public void setInputColumns(final byte [][] inputColumns) {
       this.trrInputColumns = inputColumns;
     }
 
     /**
      * @param startRow the first row in the split
      */
-    public void setStartRow(Text startRow) {
+    public void setStartRow(final byte [] startRow) {
       this.startRow = startRow;
     }
 
@@ -131,7 +140,7 @@
      *
      * @param endRow the last row in the split
      */
-    public void setEndRow(Text endRow) {
+    public void setEndRow(final byte [] endRow) {
       this.endRow = endRow;
     }
 
@@ -148,12 +157,12 @@
     }
 
     /**
-     * @return Text
+     * @return ImmutableBytesWritable
      *
      * @see org.apache.hadoop.mapred.RecordReader#createKey()
      */
-    public Text createKey() {
-      return new Text();
+    public ImmutableBytesWritable createKey() {
+      return new ImmutableBytesWritable();
     }
 
     /**
@@ -188,26 +197,26 @@
      * @throws IOException
      */
     @SuppressWarnings("unchecked")
-    public boolean next(Text key, RowResult value) throws IOException {
+    public boolean next(ImmutableBytesWritable key, RowResult value)
+    throws IOException {
       RowResult result = this.scanner.next();
-      boolean hasMore = result != null;
+      boolean hasMore = result != null && result.size() > 0;
       if (hasMore) {
-        Writables.copyWritable(result.getRow(), key);
+        key.set(result.getRow());
         Writables.copyWritable(result, value);
       }
       return hasMore;
     }
-
   }
 
   /**
-   * Builds a {@link TableRecordReader}. If no {@link TableRecordReader} was
-   * provided uses the default.
+   * Builds a TableRecordReader. If no TableRecordReader was provided, uses
+   * the default.
    *
    * @see org.apache.hadoop.mapred.InputFormat#getRecordReader(InputSplit,
    *      JobConf, Reporter)
    */
-  public RecordReader<Text, RowResult> getRecordReader(InputSplit split,
+  public RecordReader<ImmutableBytesWritable, RowResult> getRecordReader(InputSplit split,
       @SuppressWarnings("unused")
       JobConf job, @SuppressWarnings("unused")
       Reporter reporter)
@@ -245,7 +254,7 @@
    * @see org.apache.hadoop.mapred.InputFormat#getSplits(org.apache.hadoop.mapred.JobConf, int)
    */
   public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
-    Text[] startKeys = this.table.getStartKeys();
+    byte [][] startKeys = this.table.getStartKeys();
     if (startKeys == null || startKeys.length == 0) {
       throw new IOException("Expecting at least one region");
     }
@@ -265,7 +274,7 @@
       lastPos = startKeys.length % realNumSplits > i ? lastPos + 1 : lastPos;
       splits[i] = new TableSplit(this.table.getTableName(),
           startKeys[startPos], ((i + 1) < realNumSplits) ? startKeys[lastPos]
-              : new Text());
+              : HConstants.EMPTY_START_ROW);
       if (LOG.isDebugEnabled()) {
         LOG.debug("split: " + i + "->" + splits[i]);
       }
@@ -278,7 +287,7 @@
   /**
    * @param inputColumns to be passed in {@link RowResult} to the map task.
    */
-  protected void setInputColums(Text[] inputColumns) {
+  protected void setInputColums(byte [][] inputColumns) {
     this.inputColumns = inputColumns;
   }
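Pulling the (corrected) Javadoc example together, a self-contained subclass
sketch under the new API. ExampleTIF is a hypothetical name, and the optional
row-filter setter is omitted since its exact name is not shown in this hunk:

    import java.io.IOException;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.JobConfigurable;

    public class ExampleTIF extends TableInputFormatBase
    implements JobConfigurable {
      public void configure(JobConf job) {
        try {
          // mandatory: hand the format an open table
          setHTable(new HTable(new HBaseConfiguration(job),
            Bytes.toBytes("exampleTable")));
        } catch (IOException e) {
          throw new RuntimeException(e);
        }
        // mandatory: the columns each RowResult should carry
        setInputColums(new byte [][] {
          Bytes.toBytes("columnA"), Bytes.toBytes("columnB") });
      }
    }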
 

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableMap.java Thu May 15 15:10:47 2008
@@ -22,9 +22,8 @@
 import java.io.IOException;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HStoreKey;
-import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.io.RowResult;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.JobConf;
@@ -32,8 +31,6 @@
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.hbase.io.BatchUpdate;
 
 /**
  * Scan an HBase table to sort by a specified sort column.
@@ -44,7 +41,7 @@
  */
 @SuppressWarnings("unchecked")
 public abstract class TableMap<K extends WritableComparable, V extends Writable>
-    extends MapReduceBase implements Mapper<Text, RowResult, K, V> {
+    extends MapReduceBase implements Mapper<ImmutableBytesWritable, RowResult, K, V> {
   /**
    * Use this before submitting a TableMap job. It will
    * appropriately set up the JobConf.
@@ -77,6 +74,6 @@
    * @param reporter
    * @throws IOException
    */
-  public abstract void map(Text key, RowResult value,
+  public abstract void map(ImmutableBytesWritable key, RowResult value,
       OutputCollector<K, V> output, Reporter reporter) throws IOException;
 }
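A minimal concrete TableMap under the new signature might look as follows
(class name invented for illustration):

    // Hypothetical subclass: emits each row key with its unchanged RowResult.
    public class ExampleMap
    extends TableMap<ImmutableBytesWritable, RowResult> {
      @Override
      public void map(ImmutableBytesWritable key, RowResult value,
          OutputCollector<ImmutableBytesWritable, RowResult> output,
          Reporter reporter) throws IOException {
        output.collect(key, value);
      }
    }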

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java Thu May 15 15:10:47 2008
@@ -27,7 +27,7 @@
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.io.BatchUpdate;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.mapred.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapred.JobConf;
@@ -39,7 +39,7 @@
 /**
  * Convert Map/Reduce output and write it to an HBase table
  */
-public class TableOutputFormat extends OutputFormatBase<Text, BatchUpdate> {
+public class TableOutputFormat extends OutputFormatBase<ImmutableBytesWritable, BatchUpdate> {
 
   /** JobConf parameter that specifies the output table */
   public static final String OUTPUT_TABLE = "hbase.mapred.outputtable";
@@ -50,7 +50,7 @@
    * and write to an HBase table
    */
   protected class TableRecordWriter
-    implements RecordWriter<Text, BatchUpdate> {
+    implements RecordWriter<ImmutableBytesWritable, BatchUpdate> {
     private HTable m_table;
 
     /**
@@ -68,7 +68,7 @@
     }
 
     /** {@inheritDoc} */
-    public void write(Text key, BatchUpdate value) throws IOException {
+    public void write(ImmutableBytesWritable key, BatchUpdate value) throws IOException {
       m_table.commit(value);
     }
   }
@@ -84,7 +84,7 @@
     
     // expecting exactly one path
     
-    Text tableName = new Text(job.get(OUTPUT_TABLE));
+    String tableName = job.get(OUTPUT_TABLE);
     HTable table = null;
     try {
       table = new HTable(new HBaseConfiguration(job), tableName);
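Since OUTPUT_TABLE is now consumed as a plain String, driver-side setup stays a
one-liner (table name invented):

    // checkOutputSpecs() reads this back with job.get(OUTPUT_TABLE).
    job.set(TableOutputFormat.OUTPUT_TABLE, "targetTable");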

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableReduce.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableReduce.java?rev=656868&r1=656867&r2=656868&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableReduce.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/mapred/TableReduce.java Thu May 15 15:10:47 2008
@@ -22,8 +22,8 @@
 import java.io.IOException;
 import java.util.Iterator;
 
-import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.io.BatchUpdate;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.JobConf;
@@ -31,14 +31,13 @@
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.hbase.io.BatchUpdate;
 
 /**
  * Write a table, sorting by the input key
  */
 @SuppressWarnings("unchecked")
 public abstract class TableReduce<K extends WritableComparable, V extends Writable>
-    extends MapReduceBase implements Reducer<K, V, Text, BatchUpdate> {
+    extends MapReduceBase implements Reducer<K, V, ImmutableBytesWritable, BatchUpdate> {
   /**
    * Use this before submitting a TableReduce job. It will
    * appropriately set up the JobConf.
@@ -52,7 +51,7 @@
     job.setOutputFormat(TableOutputFormat.class);
     job.setReducerClass(reducer);
     job.set(TableOutputFormat.OUTPUT_TABLE, table);
-    job.setOutputKeyClass(Text.class);
+    job.setOutputKeyClass(ImmutableBytesWritable.class);
     job.setOutputValueClass(BatchUpdate.class);
   }
 
@@ -65,6 +64,6 @@
    * @throws IOException
    */
   public abstract void reduce(K key, Iterator<V> values,
-      OutputCollector<Text, BatchUpdate> output, Reporter reporter)
-      throws IOException;
-}
+    OutputCollector<ImmutableBytesWritable, BatchUpdate> output, Reporter reporter)
+  throws IOException;
+}
\ No newline at end of file
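Finally, a hedged sketch of a concrete TableReduce. Note the BatchUpdate row
constructor and put(column, value) call are assumptions about the current
io.BatchUpdate API, not shown in this commit:

    // Hypothetical reducer: wraps the first value's bytes into a BatchUpdate
    // keyed on the incoming row. BatchUpdate(byte []) and
    // put(byte [], byte []) are assumed, not confirmed by this diff.
    public class ExampleReduce
    extends TableReduce<ImmutableBytesWritable, ImmutableBytesWritable> {
      @Override
      public void reduce(ImmutableBytesWritable key,
          Iterator<ImmutableBytesWritable> values,
          OutputCollector<ImmutableBytesWritable, BatchUpdate> output,
          Reporter reporter) throws IOException {
        BatchUpdate bu = new BatchUpdate(key.get());               // assumed ctor
        bu.put(Bytes.toBytes("info:value"), values.next().get());  // assumed put
        output.collect(key, bu);
      }
    }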


