hbase-commits mailing list archives

From raw...@apache.org
Subject svn commit: r782178 [9/16] - in /hadoop/hbase/trunk: bin/ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/client/tableindexed/ src/java/org/apache/hadoop/hbase/client/transactional/ src/java/o...
Date Sat, 06 Jun 2009 01:26:27 GMT
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/ScannerModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/ScannerModel.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/ScannerModel.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/ScannerModel.java Sat Jun  6 01:26:21 2009
@@ -1,5 +1,5 @@
 /**
- * Copyright 2007 The Apache Software Foundation
+ * Copyright 2009 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -29,9 +29,10 @@
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Scanner;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
-import org.apache.hadoop.hbase.io.RowResult;
 import org.apache.hadoop.hbase.rest.descriptors.ScannerIdentifier;
 import org.apache.hadoop.hbase.rest.exception.HBaseRestException;
 
@@ -49,20 +50,20 @@
   //
   protected static class ScannerMaster {
 
-    protected static final Map<Integer, Scanner> scannerMap = new ConcurrentHashMap<Integer, Scanner>();
+    protected static final Map<Integer, ResultScanner> scannerMap = new ConcurrentHashMap<Integer, ResultScanner>();
     protected static final AtomicInteger nextScannerId = new AtomicInteger(1);
 
-    public Integer addScanner(Scanner scanner) {
+    public Integer addScanner(ResultScanner scanner) {
       Integer i = Integer.valueOf(nextScannerId.getAndIncrement());
       scannerMap.put(i, scanner);
       return i;
     }
 
-    public Scanner getScanner(Integer id) {
+    public ResultScanner getScanner(Integer id) {
       return scannerMap.get(id);
     }
 
-    public Scanner removeScanner(Integer id) {
+    public ResultScanner removeScanner(Integer id) {
       return scannerMap.remove(id);
     }
 
@@ -71,7 +72,7 @@
      *          id of scanner to close
      */
     public void scannerClose(Integer id) {
-      Scanner s = scannerMap.remove(id);
+      ResultScanner s = scannerMap.remove(id);
       s.close();
     }
   }
@@ -79,7 +80,7 @@
   protected static final ScannerMaster scannerMaster = new ScannerMaster();
 
   /**
-   * returns the next numResults RowResults from the Scaner mapped to Integer
+   * returns the next numResults Results from the Scanner mapped to Integer
    * id. If the end of the table is reached, the scanner is closed and all
    * successfully retrieved rows are returned.
    * 
@@ -90,14 +91,14 @@
    * @return all successfully retrieved rows.
    * @throws org.apache.hadoop.hbase.rest.exception.HBaseRestException
    */
-  public RowResult[] scannerGet(Integer id, Long numRows)
+  public Result[] scannerGet(Integer id, Long numRows)
       throws HBaseRestException {
     try {
-      ArrayList<RowResult> a;
-      Scanner s;
-      RowResult r;
+      ArrayList<Result> a;
+      ResultScanner s;
+      Result r;
 
-      a = new ArrayList<RowResult>();
+      a = new ArrayList<Result>();
       s = scannerMaster.getScanner(id);
 
       if (s == null) {
@@ -114,7 +115,7 @@
         }
       }
 
-      return a.toArray(new RowResult[0]);
+      return a.toArray(new Result[0]);
     } catch (IOException e) {
       throw new HBaseRestException(e);
     }
@@ -129,13 +130,13 @@
    * @return all rows till end of table
    * @throws org.apache.hadoop.hbase.rest.exception.HBaseRestException
    */
-  public RowResult[] scannerGet(Integer id) throws HBaseRestException {
+  public Result[] scannerGet(Integer id) throws HBaseRestException {
     try {
-      ArrayList<RowResult> a;
-      Scanner s;
-      RowResult r;
+      ArrayList<Result> a;
+      ResultScanner s;
+      Result r;
 
-      a = new ArrayList<RowResult>();
+      a = new ArrayList<Result>();
       s = scannerMaster.getScanner(id);
 
       while ((r = s.next()) != null) {
@@ -144,14 +145,14 @@
 
       scannerMaster.scannerClose(id);
 
-      return a.toArray(new RowResult[0]);
+      return a.toArray(new Result[0]);
     } catch (IOException e) {
       throw new HBaseRestException(e);
     }
   }
 
   public boolean scannerClose(Integer id) throws HBaseRestException {
-    Scanner s = scannerMaster.removeScanner(id);
+    ResultScanner s = scannerMaster.removeScanner(id);
 
     if (s == null) {
       throw new HBaseRestException("Scanner id: " + id + " does not exist");
@@ -208,8 +209,11 @@
     try {
       HTable table;
       table = new HTable(tableName);
+      Scan scan = new Scan();
+      scan.addColumns(columns);
+      scan.setTimeRange(0, timestamp);
       return new ScannerIdentifier(scannerMaster.addScanner(table.getScanner(
-          columns, HConstants.EMPTY_START_ROW, timestamp)));
+          scan)));
     } catch (IOException e) {
       throw new HBaseRestException(e);
     }
@@ -225,8 +229,11 @@
     try {
       HTable table;
       table = new HTable(tableName);
+      Scan scan = new Scan(startRow);
+      scan.addColumns(columns);
+      scan.setTimeRange(0, timestamp);
       return new ScannerIdentifier(scannerMaster.addScanner(table.getScanner(
-          columns, startRow, timestamp)));
+          scan)));
     } catch (IOException e) {
       throw new HBaseRestException(e);
     }
@@ -243,8 +250,12 @@
     try {
       HTable table;
       table = new HTable(tableName);
+      Scan scan = new Scan();
+      scan.addColumns(columns);
+      scan.setTimeRange(0, timestamp);
+//      scan.setFilter(filter);
       return new ScannerIdentifier(scannerMaster.addScanner(table.getScanner(
-          columns, HConstants.EMPTY_START_ROW, timestamp, filter)));
+          scan)));
     } catch (IOException e) {
       throw new HBaseRestException(e);
     }
@@ -261,8 +272,12 @@
     try {
       HTable table;
       table = new HTable(tableName);
+      Scan scan = new Scan(startRow);
+      scan.addColumns(columns);
+      scan.setTimeRange(0, timestamp);
+//      scan.setFilter(filter);
       return new ScannerIdentifier(scannerMaster.addScanner(table.getScanner(
-          columns, startRow, timestamp, filter)));
+          scan)));
     } catch (IOException e) {
       throw new HBaseRestException(e);
     }
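
A minimal usage sketch (not part of the commit) of the scanner path ScannerModel now
follows: the old Scanner/RowResult pair is replaced by Scan, ResultScanner and Result.
The table name, column and start row below are invented for illustration.

import java.io.IOException;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanSketch {
  public static void main(String[] args) throws IOException {
    HTable table = new HTable(Bytes.toBytes("mytable"));
    // Build the Scan the way the scanner-open methods now do: explicit start row,
    // requested columns, and an upper bound on the time range.
    Scan scan = new Scan(Bytes.toBytes("row-000"));
    scan.addColumns(new byte[][] { Bytes.toBytes("info:status") });
    scan.setTimeRange(0, System.currentTimeMillis());
    ResultScanner scanner = table.getScanner(scan);
    try {
      Result r;
      while ((r = scanner.next()) != null) {   // Result replaces the old RowResult
        System.out.println(Bytes.toString(r.getRow()));
      }
    } finally {
      scanner.close();                         // mirrors ScannerMaster.scannerClose()
    }
  }
}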

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/TableModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/TableModel.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/TableModel.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/TableModel.java Sat Jun  6 01:26:21 2009
@@ -29,8 +29,9 @@
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Scanner;
-import org.apache.hadoop.hbase.io.RowResult;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.rest.exception.HBaseRestException;
 import org.apache.hadoop.hbase.rest.serializer.IRestSerializer;
 import org.apache.hadoop.hbase.rest.serializer.ISerializable;
@@ -48,7 +49,7 @@
   }
 
   // Get Methods
-  public RowResult[] get(byte[] tableName) throws HBaseRestException {
+  public Result[] get(byte [] tableName) throws HBaseRestException {
     return get(tableName, getColumns(tableName));
   }
 
@@ -63,26 +64,28 @@
    * @return resultant rows
    * @throws org.apache.hadoop.hbase.rest.exception.HBaseRestException
    */
-  public RowResult[] get(byte[] tableName, byte[][] columnNames)
+  public Result[] get(byte [] tableName, byte[][] columnNames)
       throws HBaseRestException {
     try {
-      ArrayList<RowResult> a = new ArrayList<RowResult>();
+      ArrayList<Result> a = new ArrayList<Result>();
       HTable table = new HTable(tableName);
 
-      Scanner s = table.getScanner(columnNames);
-      RowResult r;
+      Scan scan = new Scan();
+      scan.addColumns(columnNames);
+      ResultScanner s = table.getScanner(scan);
+      Result r;
 
       while ((r = s.next()) != null) {
         a.add(r);
       }
 
-      return a.toArray(new RowResult[0]);
+      return a.toArray(new Result[0]);
     } catch (Exception e) {
       throw new HBaseRestException(e);
     }
   }
 
-  protected boolean doesTableExist(byte[] tableName) throws HBaseRestException {
+  protected boolean doesTableExist(byte [] tableName) throws HBaseRestException {
     try {
       return this.admin.tableExists(tableName);
     } catch (IOException e) {
@@ -90,7 +93,7 @@
     }
   }
   
-  protected void disableTable(byte[] tableName) throws HBaseRestException {
+  protected void disableTable(byte [] tableName) throws HBaseRestException {
     try {
       this.admin.disableTable(tableName);
     } catch (IOException e) {
@@ -98,7 +101,7 @@
     }
   }
   
-  protected void enableTable(byte[] tableName) throws HBaseRestException {
+  protected void enableTable(byte [] tableName) throws HBaseRestException {
     try {
       this.admin.enableTable(tableName);
     } catch (IOException e) {
@@ -110,7 +113,7 @@
       ArrayList<HColumnDescriptor> columns) throws HBaseRestException {
     HTableDescriptor htc = null;
     try {
-      htc = this.admin.getTableDescriptor(tableName);
+      htc = this.admin.getTableDescriptor(Bytes.toBytes(tableName));
     } catch (IOException e) {
       throw new HBaseRestException("Table does not exist");
     }
@@ -204,7 +207,7 @@
    *         tableName not existing.
    * @throws org.apache.hadoop.hbase.rest.exception.HBaseRestException
    */
-  public boolean post(byte[] tableName, HTableDescriptor htd)
+  public boolean post(byte [] tableName, HTableDescriptor htd)
       throws HBaseRestException {
     try {
       if (!this.admin.tableExists(tableName)) {
@@ -225,7 +228,7 @@
    * @return true if table exists and deleted, false if table does not exist.
    * @throws org.apache.hadoop.hbase.rest.exception.HBaseRestException
    */
-  public boolean delete(byte[] tableName) throws HBaseRestException {
+  public boolean delete(byte [] tableName) throws HBaseRestException {
     try {
       if (this.admin.tableExists(tableName)) {
         this.admin.disableTable(tableName);
@@ -241,7 +244,7 @@
   public static class Regions implements ISerializable {
     byte[][] regionKey;
 
-    public Regions(byte[][] bs) {
+    public Regions(byte [][] bs) {
       super();
       this.regionKey = bs;
     }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/TimestampModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/TimestampModel.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/TimestampModel.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/TimestampModel.java Sat Jun  6 01:26:21 2009
@@ -20,11 +20,19 @@
 package org.apache.hadoop.hbase.rest;
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.RowResult;
@@ -39,56 +47,80 @@
     super.initialize(conf, admin);
   }
 
-  public void delete(byte[] tableName, byte[] rowName, long timestamp)
-      throws HBaseRestException {
+  public void delete(byte [] tableName, Delete delete)
+  throws HBaseRestException {
     try {
       HTable table = new HTable(tableName);
-      table.deleteAll(rowName, timestamp);
+      table.delete(delete);
     } catch (IOException e) {
       throw new HBaseRestException(e);
     }
   }
-
+  
+  @Deprecated
+  public void delete(byte[] tableName, byte[] rowName, long timestamp)
+      throws HBaseRestException {
+    Delete delete = new Delete(rowName, timestamp, null);
+    delete(tableName, delete);
+  }
+  
+  @Deprecated
   public void delete(byte[] tableName, byte[] rowName, byte[][] columns,
       long timestamp) throws HBaseRestException {
-    try {
-      HTable table = new HTable(tableName);
-      for (byte[] column : columns) {
-        table.deleteAll(rowName, column, timestamp);
-      }
-    } catch (IOException e) {
-      throw new HBaseRestException(e);
+    Delete delete  = new Delete(rowName, timestamp, null);
+    for(byte [] column : columns) {
+      byte [][] famAndQf = KeyValue.parseColumn(column);
+      delete.deleteColumn(famAndQf[0], famAndQf[1]);
     }
+    delete(tableName, delete);
   }
 
-  public Cell get(byte[] tableName, byte[] rowName, byte[] columnName,
-      long timestamp) throws HBaseRestException {
+  public Result get(final byte [] tableName, final Get get)
+  throws HBaseRestException {
     try {
       HTable table = new HTable(tableName);
-      return table.get(rowName, columnName, timestamp, 1)[0];
+      return table.get(get);
     } catch (IOException e) {
       throw new HBaseRestException(e);
     }
   }
+  
+  @Deprecated
+  public Cell get(byte[] tableName, byte[] rowName, byte[] columnName,
+      long timestamp) throws HBaseRestException {
+    Get get = new Get(rowName);
+    byte [][] famAndQf = KeyValue.parseColumn(columnName); 
+    get.addColumn(famAndQf[0], famAndQf[1]);
+    get.setTimeStamp(timestamp);
+    return get(tableName, get).getCellValue(famAndQf[0], famAndQf[1]);
+  }
 
+  @Deprecated
   public Cell[] get(byte[] tableName, byte[] rowName, byte[] columnName,
-      long timestamp, int numVersions) throws HBaseRestException {
-    try {
-      HTable table = new HTable(tableName);
-      return table.get(rowName, columnName, timestamp, numVersions);
-    } catch (IOException e) {
-      throw new HBaseRestException(e);
+      long timestamp, int numVersions) throws IOException, HBaseRestException {
+    Get get = new Get(rowName);
+    byte [][] famAndQf = KeyValue.parseColumn(columnName); 
+    get.addColumn(famAndQf[0], famAndQf[1]);
+    get.setTimeStamp(timestamp);
+    get.setMaxVersions(numVersions);
+    Result result = get(tableName, get);
+    List<Cell> cells = new ArrayList<Cell>();
+    for(KeyValue kv : result.sorted()) {
+      cells.add(new Cell(kv.getValue(), kv.getTimestamp()));
     }
+    return cells.toArray(new Cell [0]);
   }
 
+  @Deprecated
   public RowResult get(byte[] tableName, byte[] rowName, byte[][] columns,
       long timestamp) throws HBaseRestException {
-    try {
-      HTable table = new HTable(tableName);
-      return table.getRow(rowName, columns, timestamp);
-    } catch (IOException e) {
-      throw new HBaseRestException(e);
+    Get get = new Get(rowName);
+    for(byte [] column : columns) {
+      byte [][] famAndQf = KeyValue.parseColumn(column);
+      get.addColumn(famAndQf[0], famAndQf[1]);
     }
+    get.setTimeStamp(timestamp);
+    return get(tableName, get).getRowResult();
   }
 
   /**
@@ -100,25 +132,20 @@
    */
   public RowResult get(byte[] tableName, byte[] rowName, long timestamp)
       throws HBaseRestException {
-    try {
-      HTable table = new HTable(tableName);
-      return table.getRow(rowName, timestamp);
-    } catch (IOException e) {
-      throw new HBaseRestException(e);
-    }
+    Get get = new Get(rowName);
+    get.setTimeStamp(timestamp);
+    return get(tableName, get).getRowResult();
   }
 
   public void post(byte[] tableName, byte[] rowName, byte[] columnName,
       long timestamp, byte[] value) throws HBaseRestException {
     try {
-      HTable table;
-      BatchUpdate b;
-
-      table = new HTable(tableName);
-      b = new BatchUpdate(rowName, timestamp);
-
-      b.put(columnName, value);
-      table.commit(b);
+      HTable table = new HTable(tableName);
+      Put put = new Put(rowName);
+      put.setTimeStamp(timestamp);
+      byte [][] famAndQf = KeyValue.parseColumn(columnName);
+      put.add(famAndQf[0], famAndQf[1], value);
+      table.put(put);
     } catch (IOException e) {
       throw new HBaseRestException(e);
     }
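
For reference, a hedged sketch (not from the commit itself) of how the deprecated
TimestampModel signatures map an old "family:qualifier" column name plus timestamp onto
the new Get and Delete objects. The table, row, column and timestamp values are invented.

import java.io.IOException;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.Cell;
import org.apache.hadoop.hbase.util.Bytes;

public class GetDeleteSketch {
  public static void main(String[] args) throws IOException {
    HTable table = new HTable(Bytes.toBytes("mytable"));
    byte[] row = Bytes.toBytes("row1");
    long timestamp = 1244250381000L;
    // Old-style column name split into family and qualifier, as the deprecated methods do.
    byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes("info:status"));
    Get get = new Get(row);
    get.addColumn(famAndQf[0], famAndQf[1]);
    get.setTimeStamp(timestamp);
    Result result = table.get(get);
    Cell cell = result.getCellValue(famAndQf[0], famAndQf[1]);
    System.out.println(cell == null ? "no cell" : Bytes.toString(cell.getValue()));
    // The deprecated delete(tableName, rowName, columns, timestamp) now builds:
    Delete delete = new Delete(row, timestamp, null);
    delete.deleteColumn(famAndQf[0], famAndQf[1]);
    table.delete(delete);
  }
}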

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/parser/XMLRestParser.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/parser/XMLRestParser.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/parser/XMLRestParser.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/parser/XMLRestParser.java Sat Jun  6 01:26:21 2009
@@ -96,7 +96,6 @@
     String compression = HColumnDescriptor.DEFAULT_COMPRESSION;
     boolean in_memory = HColumnDescriptor.DEFAULT_IN_MEMORY;
     boolean block_cache = HColumnDescriptor.DEFAULT_BLOCKCACHE;
-    int max_cell_size = HColumnDescriptor.DEFAULT_LENGTH;
     int ttl = HColumnDescriptor.DEFAULT_TTL;
     boolean bloomfilter = HColumnDescriptor.DEFAULT_BLOOMFILTER;
 
@@ -108,7 +107,6 @@
         // compression = currentCDesp.getCompression();
         in_memory = currentCDesp.isInMemory();
         block_cache = currentCDesp.isBlockCacheEnabled();
-        max_cell_size = currentCDesp.getMaxValueLength();
         ttl = currentCDesp.getTimeToLive();
         bloomfilter = currentCDesp.isBloomfilter();
       }
@@ -141,13 +139,6 @@
           .getNodeValue());
     }
 
-    NodeList max_cell_size_list = columnfamily
-        .getElementsByTagName("max-cell-size");
-    if (max_cell_size_list.getLength() > 0) {
-      max_cell_size = Integer.valueOf(max_cell_size_list.item(0)
-          .getFirstChild().getNodeValue());
-    }
-
     NodeList ttl_list = columnfamily.getElementsByTagName("time-to-live");
     if (ttl_list.getLength() > 0) {
       ttl = Integer.valueOf(ttl_list.item(0).getFirstChild().getNodeValue());
@@ -162,7 +153,7 @@
 
     HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes(colname),
         max_versions, compression, in_memory, block_cache,
-        max_cell_size, ttl, bloomfilter);
+        ttl, bloomfilter);
 
     NodeList metadataList = columnfamily.getElementsByTagName("metadata");
     for (int i = 0; i < metadataList.getLength(); i++) {

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/serializer/SimpleXMLSerializer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/serializer/SimpleXMLSerializer.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/serializer/SimpleXMLSerializer.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/rest/serializer/SimpleXMLSerializer.java Sat Jun  6 01:26:21 2009
@@ -163,10 +163,6 @@
     printer.print("<max-versions>");
     printer.print(column.getMaxVersions());
     printer.print("</max-versions>");
-    // max-length
-    printer.print("<max-length>");
-    printer.print(column.getMaxValueLength());
-    printer.print("</max-length>");
     printer.print("</columnfamily>");
     printer.flush();
   }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java Sat Jun  6 01:26:21 2009
@@ -33,10 +33,16 @@
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HServerAddress;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Scanner;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.RowResult;
@@ -76,7 +82,7 @@
 
     // nextScannerId and scannerMap are used to manage scanner state
     protected int nextScannerId = 0;
-    protected HashMap<Integer, Scanner> scannerMap = null;
+    protected HashMap<Integer, ResultScanner> scannerMap = null;
     
     /**
      * Returns a list of all the column families for a given htable.
@@ -115,7 +121,7 @@
      * @param scanner
      * @return integer scanner id
      */
-    protected synchronized int addScanner(Scanner scanner) {
+    protected synchronized int addScanner(ResultScanner scanner) {
       int id = nextScannerId++;
       scannerMap.put(id, scanner);
       return id;
@@ -127,7 +133,7 @@
      * @param id
      * @return a Scanner, or null if ID was invalid.
      */
-    protected synchronized Scanner getScanner(int id) {
+    protected synchronized ResultScanner getScanner(int id) {
       return scannerMap.get(id);
     }
     
@@ -138,7 +144,7 @@
      * @param id
      * @return a Scanner, or null if ID was invalid.
      */
-    protected synchronized Scanner removeScanner(int id) {
+    protected synchronized ResultScanner removeScanner(int id) {
       return scannerMap.remove(id);
     }
     
@@ -150,7 +156,7 @@
     HBaseHandler() throws MasterNotRunningException {
       conf = new HBaseConfiguration();
       admin = new HBaseAdmin(conf);
-      scannerMap = new HashMap<Integer, Scanner>();
+      scannerMap = new HashMap<Integer, ResultScanner>();
     }
     
     public void enableTable(final byte[] tableName) throws IOError {
@@ -228,35 +234,78 @@
       }
     }
     
+    @Deprecated
     public List<TCell> get(byte[] tableName, byte[] row, byte[] column)
         throws IOError {
+      byte [][] famAndQf = KeyValue.parseColumn(column);
+      return get(tableName, row, famAndQf[0], famAndQf[1]);
+    }
+
+    public List<TCell> get(byte [] tableName, byte [] row, byte [] family,
+        byte [] qualifier) throws IOError {
       try {
         HTable table = getTable(tableName);
-        Cell cell = table.get(row, column);
+        Get get = new Get(row);
+        if (qualifier == null || qualifier.length == 0) {
+          get.addFamily(family);
+        } else {
+          get.addColumn(family, qualifier);
+        }
+        Result result = table.get(get);
+        Cell cell = result.getCellValue(family, qualifier);
         return ThriftUtilities.cellFromHBase(cell);
       } catch (IOException e) {
         throw new IOError(e.getMessage());
       }
     }
     
+    @Deprecated
     public List<TCell> getVer(byte[] tableName, byte[] row,
         byte[] column, int numVersions) throws IOError {
+      byte [][] famAndQf = KeyValue.parseColumn(column);
+      return getVer(tableName, row, famAndQf[0], famAndQf[1], numVersions);
+    }
+
+    public List<TCell> getVer(byte [] tableName, byte [] row, byte [] family, 
+        byte [] qualifier, int numVersions) throws IOError {
       try {
         HTable table = getTable(tableName);
-        Cell[] cells = 
-          table.get(row, column, numVersions);
-        return ThriftUtilities.cellFromHBase(cells);
+        Get get = new Get(row);
+        get.addColumn(family, qualifier);
+        get.setMaxVersions(numVersions);
+        Result result = table.get(get);
+        List<Cell> cells = new ArrayList<Cell>();
+        for(KeyValue kv : result.sorted()) {
+          cells.add(new Cell(kv.getValue(), kv.getTimestamp()));
+        }
+        return ThriftUtilities.cellFromHBase(cells.toArray(new Cell[0]));
       } catch (IOException e) {
         throw new IOError(e.getMessage());
       }
     }
     
+    @Deprecated
     public List<TCell> getVerTs(byte[] tableName, byte[] row,
         byte[] column, long timestamp, int numVersions) throws IOError {
+      byte [][] famAndQf = KeyValue.parseColumn(column);
+      return getVerTs(tableName, row, famAndQf[0], famAndQf[1], timestamp, 
+          numVersions);
+    }
+
+    public List<TCell> getVerTs(byte [] tableName, byte [] row, byte [] family,
+        byte [] qualifier, long timestamp, int numVersions) throws IOError {
       try {
         HTable table = getTable(tableName);
-        Cell[] cells = table.get(row, column, timestamp, numVersions);
-        return ThriftUtilities.cellFromHBase(cells);
+        Get get = new Get(row);
+        get.addColumn(family, qualifier);
+        get.setTimeStamp(timestamp);
+        get.setMaxVersions(numVersions);
+        Result result = table.get(get);
+        List<Cell> cells = new ArrayList<Cell>();
+        for(KeyValue kv : result.sorted()) {
+          cells.add(new Cell(kv.getValue(), kv.getTimestamp()));
+        }
+        return ThriftUtilities.cellFromHBase(cells.toArray(new Cell[0]));
       } catch (IOException e) {
         throw new IOError(e.getMessage());
       }
@@ -285,12 +334,20 @@
       try {
         HTable table = getTable(tableName);
         if (columns == null) {
-          return ThriftUtilities.rowResultFromHBase(table.getRow(row,
-                                                        timestamp));
+          Get get = new Get(row);
+          get.setTimeStamp(timestamp);
+          Result result = table.get(get);
+          return ThriftUtilities.rowResultFromHBase(result.getRowResult());
         }
         byte[][] columnArr = columns.toArray(new byte[columns.size()][]);
-        return ThriftUtilities.rowResultFromHBase(table.getRow(row,
-                                                      columnArr, timestamp));
+        Get get = new Get(row);
+        for(byte [] column : columnArr) {
+          byte [][] famAndQf = KeyValue.parseColumn(column);
+          get.addColumn(famAndQf[0], famAndQf[1]);
+        }
+        get.setTimeStamp(timestamp);
+        Result result = table.get(get);
+        return ThriftUtilities.rowResultFromHBase(result.getRowResult());
       } catch (IOException e) {
         throw new IOError(e.getMessage());
       }
@@ -305,7 +362,15 @@
         long timestamp) throws IOError {
       try {
         HTable table = getTable(tableName);
-        table.deleteAll(row, column, timestamp);
+        Delete delete  = new Delete(row, timestamp, null);
+        byte [][] famAndQf = KeyValue.parseColumn(column);
+        if(famAndQf[1].length == 0){
+          delete.deleteFamily(famAndQf[0]);
+        } else {
+          delete.deleteColumns(famAndQf[0], famAndQf[1]);
+        }
+        table.delete(delete);
+        
       } catch (IOException e) {
         throw new IOError(e.getMessage());
       }
@@ -319,7 +384,8 @@
         throws IOError {
       try {
         HTable table = getTable(tableName);
-        table.deleteAll(row, timestamp);
+        Delete delete  = new Delete(row, timestamp, null);
+        table.delete(delete);
       } catch (IOException e) {
         throw new IOError(e.getMessage());
       }
@@ -369,15 +435,13 @@
       HTable table = null;
       try {
         table = getTable(tableName);
-        BatchUpdate batchUpdate = new BatchUpdate(row, timestamp);
+        Put put = new Put(row);
+        put.setTimeStamp(timestamp);
         for (Mutation m : mutations) {
-          if (m.isDelete) {
-            batchUpdate.delete(m.column);
-          } else {
-            batchUpdate.put(m.column, m.value);
-          }
+          byte [][] famAndQf = KeyValue.parseColumn(m.column);
+          put.add(famAndQf[0], famAndQf[1], m.value);
         }
-        table.commit(batchUpdate);
+        table.put(put);
       } catch (IOException e) {
         throw new IOError(e.getMessage());
       } catch (IllegalArgumentException e) {
@@ -392,26 +456,24 @@
  
     public void mutateRowsTs(byte[] tableName, List<BatchMutation> rowBatches, long timestamp)
         throws IOError, IllegalArgument, TException {
-      List<BatchUpdate> batchUpdates = new ArrayList<BatchUpdate>();
+      List<Put> puts = new ArrayList<Put>();
        
       for (BatchMutation batch : rowBatches) {
         byte[] row = batch.row;
         List<Mutation> mutations = batch.mutations;
-        BatchUpdate batchUpdate = new BatchUpdate(row, timestamp);
+        Put put = new Put(row);
+        put.setTimeStamp(timestamp);
         for (Mutation m : mutations) {
-          if (m.isDelete) {
-            batchUpdate.delete(m.column);
-          } else {
-            batchUpdate.put(m.column, m.value);
-          }
+          byte [][] famAndQf = KeyValue.parseColumn(m.column);
+          put.add(famAndQf[0], famAndQf[1], m.value);
         }
-        batchUpdates.add(batchUpdate);
+        puts.add(put);
       }
 
       HTable table = null;
       try {
         table = getTable(tableName);
-        table.commit(batchUpdates);
+        table.put(puts);
       } catch (IOException e) {
         throw new IOError(e.getMessage());
       } catch (IllegalArgumentException e) {
@@ -419,19 +481,28 @@
       }
     }
 
-    public long atomicIncrement(byte[] tableName, byte[] row, byte[] column, long amount) throws IOError, IllegalArgument, TException {
+    @Deprecated
+    public long atomicIncrement(byte[] tableName, byte[] row, byte[] column, 
+        long amount) throws IOError, IllegalArgument, TException {
+      byte [][] famAndQf = KeyValue.parseColumn(column);
+      return atomicIncrement(tableName, row, famAndQf[0], famAndQf[1], amount);
+    }
+
+    public long atomicIncrement(byte [] tableName, byte [] row, byte [] family,
+        byte [] qualifier, long amount) 
+    throws IOError, IllegalArgument, TException {
       HTable table;
       try {
         table = getTable(tableName);
-        return table.incrementColumnValue(row, column, amount);
+        return table.incrementColumnValue(row, family, qualifier, amount);
       } catch (IOException e) {
         throw new IOError(e.getMessage());
       }
     }
-
+    
     public void scannerClose(int id) throws IOError, IllegalArgument {
       LOG.debug("scannerClose: id=" + id);
-      Scanner scanner = getScanner(id);
+      ResultScanner scanner = getScanner(id);
       if (scanner == null) {
         throw new IllegalArgument("scanner ID is invalid");
       }
@@ -441,12 +512,12 @@
     
     public List<TRowResult> scannerGetList(int id,int nbRows) throws IllegalArgument, IOError {
         LOG.debug("scannerGetList: id=" + id);
-        Scanner scanner = getScanner(id);
+        ResultScanner scanner = getScanner(id);
         if (null == scanner) {
             throw new IllegalArgument("scanner ID is invalid");
         }
 
-        RowResult [] results = null;
+        Result [] results = null;
         try {
             results = scanner.next(nbRows);
             if (null == results) {
@@ -470,7 +541,9 @@
         } else {
           columnsArray = columns.toArray(new byte[0][]);
         }
-        return addScanner(table.getScanner(columnsArray, startRow));
+        Scan scan = new Scan(startRow);
+        scan.addColumns(columnsArray);
+        return addScanner(table.getScanner(scan));
       } catch (IOException e) {
         throw new IOError(e.getMessage());
       }
@@ -486,7 +559,9 @@
         } else {
           columnsArray = columns.toArray(new byte[0][]);
         }
-        return addScanner(table.getScanner(columnsArray, startRow, stopRow));
+        Scan scan = new Scan(startRow, stopRow);
+        scan.addColumns(columnsArray);
+        return addScanner(table.getScanner(scan));
       } catch (IOException e) {
         throw new IOError(e.getMessage());
       }
@@ -502,7 +577,10 @@
         } else {
           columnsArray = columns.toArray(new byte[0][]);
         }
-        return addScanner(table.getScanner(columnsArray, startRow, timestamp));
+        Scan scan = new Scan(startRow);
+        scan.addColumns(columnsArray);
+        scan.setTimeRange(0, timestamp);
+        return addScanner(table.getScanner(scan));
       } catch (IOException e) {
         throw new IOError(e.getMessage());
       }
@@ -519,8 +597,10 @@
         } else {
           columnsArray = columns.toArray(new byte[0][]);
         }
-        return addScanner(table.getScanner(columnsArray, startRow, stopRow,
-            timestamp));
+        Scan scan = new Scan(startRow, stopRow);
+        scan.addColumns(columnsArray);
+        scan.setTimeRange(0, timestamp);
+        return addScanner(table.getScanner(scan));
       } catch (IOException e) {
         throw new IOError(e.getMessage());
       }
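
A small sketch (not in the commit) of the delegation pattern the Thrift handler now
uses: the deprecated single-column methods split "family:qualifier" once with
KeyValue.parseColumn and forward to the family/qualifier overloads. Table, row, column
and the increment amount are illustrative.

import java.io.IOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;

public class ThriftDelegationSketch {
  public static void main(String[] args) throws IOException {
    HTable table = new HTable(Bytes.toBytes("mytable"));
    byte[] row = Bytes.toBytes("row1");
    byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes("counters:hits"));
    // atomicIncrement now goes through incrementColumnValue(row, family, qualifier, amount).
    long hits = table.incrementColumnValue(row, famAndQf[0], famAndQf[1], 1);
    System.out.println("hits = " + hits);
    // deleteAllTs: an empty qualifier means "drop the whole family", otherwise drop
    // all versions of that one column up to the given timestamp.
    Delete delete = new Delete(row, HConstants.LATEST_TIMESTAMP, null);
    if (famAndQf[1].length == 0) {
      delete.deleteFamily(famAndQf[0]);
    } else {
      delete.deleteColumns(famAndQf[0], famAndQf[1]);
    }
    table.delete(delete);
  }
}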

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java Sat Jun  6 01:26:21 2009
@@ -24,6 +24,7 @@
 import java.util.TreeMap;
 
 import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.RowResult;
 import org.apache.hadoop.hbase.io.hfile.Compression;
@@ -58,7 +59,7 @@
     }
     HColumnDescriptor col = new HColumnDescriptor(in.name,
         in.maxVersions, comp.getName(), in.inMemory, in.blockCacheEnabled,
-        in.maxValueLength, in.timeToLive, bloom);
+        in.timeToLive, bloom);
     return col;
   }
   
@@ -77,7 +78,6 @@
     col.compression = in.getCompression().toString();
     col.inMemory = in.isInMemory();
     col.blockCacheEnabled = in.isBlockCacheEnabled();
-    col.maxValueLength = in.getMaxValueLength();
     col.bloomFilterType = Boolean.toString(in.isBloomfilter());
     return col;
   }
@@ -150,5 +150,38 @@
     return rowResultFromHBase(result);
   }
 
+  /**
+   * This utility method creates a list of Thrift TRowResult "struct" based on
+   * an Hbase RowResult object. The empty list is returned if the input is
+   * null.
+   * 
+   * @param in
+   *          Hbase RowResult object
+   * @return Thrift TRowResult array
+   */
+  static public List<TRowResult> rowResultFromHBase(Result[] in) {
+    List<TRowResult> results = new ArrayList<TRowResult>();
+    for ( Result result_ : in) {
+        if(null == result_) {
+            continue;
+        }
+        RowResult rowResult_ = result_.getRowResult();
+        TRowResult result = new TRowResult();
+        result.row = rowResult_.getRow();
+        result.columns = new TreeMap<byte[], TCell>(Bytes.BYTES_COMPARATOR);
+        for (Map.Entry<byte[], Cell> entry : rowResult_.entrySet()){
+            Cell cell = entry.getValue();
+            result.columns.put(entry.getKey(),
+                new TCell(cell.getValue(), cell.getTimestamp()));
+
+        }
+        results.add(result);
+    }
+    return results;
+  }
+  static public List<TRowResult> rowResultFromHBase(Result in) {
+    Result [] result = { in };
+    return rowResultFromHBase(result);
+  }
 }
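
Below, a hedged sketch of feeding a batch of scan results through the new
rowResultFromHBase(Result[]) overload, roughly what ThriftServer.scannerGetList now
does. The package declaration and table name are assumptions made only for illustration.

package org.apache.hadoop.hbase.thrift;

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.thrift.generated.TRowResult;
import org.apache.hadoop.hbase.util.Bytes;

public class ThriftConvertSketch {
  public static void main(String[] args) throws IOException {
    HTable table = new HTable(Bytes.toBytes("mytable"));
    ResultScanner scanner = table.getScanner(new Scan());
    try {
      // next(n) returns a Result[]; the new overload converts the whole batch,
      // silently skipping null entries.
      Result[] batch = scanner.next(10);
      if (batch != null) {
        List<TRowResult> rows = ThriftUtilities.rowResultFromHBase(batch);
        System.out.println(rows.size() + " rows converted");
      }
    } finally {
      scanner.close();
    }
  }
}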
 

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java Sat Jun  6 01:26:21 2009
@@ -45,12 +45,11 @@
   private static final TField MAX_VERSIONS_FIELD_DESC = new TField("maxVersions", TType.I32, (short)2);
   private static final TField COMPRESSION_FIELD_DESC = new TField("compression", TType.STRING, (short)3);
   private static final TField IN_MEMORY_FIELD_DESC = new TField("inMemory", TType.BOOL, (short)4);
-  private static final TField MAX_VALUE_LENGTH_FIELD_DESC = new TField("maxValueLength", TType.I32, (short)5);
-  private static final TField BLOOM_FILTER_TYPE_FIELD_DESC = new TField("bloomFilterType", TType.STRING, (short)6);
-  private static final TField BLOOM_FILTER_VECTOR_SIZE_FIELD_DESC = new TField("bloomFilterVectorSize", TType.I32, (short)7);
-  private static final TField BLOOM_FILTER_NB_HASHES_FIELD_DESC = new TField("bloomFilterNbHashes", TType.I32, (short)8);
-  private static final TField BLOCK_CACHE_ENABLED_FIELD_DESC = new TField("blockCacheEnabled", TType.BOOL, (short)9);
-  private static final TField TIME_TO_LIVE_FIELD_DESC = new TField("timeToLive", TType.I32, (short)10);
+  private static final TField BLOOM_FILTER_TYPE_FIELD_DESC = new TField("bloomFilterType", TType.STRING, (short)5);
+  private static final TField BLOOM_FILTER_VECTOR_SIZE_FIELD_DESC = new TField("bloomFilterVectorSize", TType.I32, (short)6);
+  private static final TField BLOOM_FILTER_NB_HASHES_FIELD_DESC = new TField("bloomFilterNbHashes", TType.I32, (short)7);
+  private static final TField BLOCK_CACHE_ENABLED_FIELD_DESC = new TField("blockCacheEnabled", TType.BOOL, (short)8);
+  private static final TField TIME_TO_LIVE_FIELD_DESC = new TField("timeToLive", TType.I32, (short)9);
 
   public byte[] name;
   public static final int NAME = 1;
@@ -60,24 +59,21 @@
   public static final int COMPRESSION = 3;
   public boolean inMemory;
   public static final int INMEMORY = 4;
-  public int maxValueLength;
-  public static final int MAXVALUELENGTH = 5;
   public String bloomFilterType;
-  public static final int BLOOMFILTERTYPE = 6;
+  public static final int BLOOMFILTERTYPE = 5;
   public int bloomFilterVectorSize;
-  public static final int BLOOMFILTERVECTORSIZE = 7;
+  public static final int BLOOMFILTERVECTORSIZE = 6;
   public int bloomFilterNbHashes;
-  public static final int BLOOMFILTERNBHASHES = 8;
+  public static final int BLOOMFILTERNBHASHES = 7;
   public boolean blockCacheEnabled;
-  public static final int BLOCKCACHEENABLED = 9;
+  public static final int BLOCKCACHEENABLED = 8;
   public int timeToLive;
-  public static final int TIMETOLIVE = 10;
+  public static final int TIMETOLIVE = 9;
 
   private final Isset __isset = new Isset();
   private static final class Isset implements java.io.Serializable {
     public boolean maxVersions = false;
     public boolean inMemory = false;
-    public boolean maxValueLength = false;
     public boolean bloomFilterVectorSize = false;
     public boolean bloomFilterNbHashes = false;
     public boolean blockCacheEnabled = false;
@@ -93,8 +89,6 @@
         new FieldValueMetaData(TType.STRING)));
     put(INMEMORY, new FieldMetaData("inMemory", TFieldRequirementType.DEFAULT, 
         new FieldValueMetaData(TType.BOOL)));
-    put(MAXVALUELENGTH, new FieldMetaData("maxValueLength", TFieldRequirementType.DEFAULT, 
-        new FieldValueMetaData(TType.I32)));
     put(BLOOMFILTERTYPE, new FieldMetaData("bloomFilterType", TFieldRequirementType.DEFAULT, 
         new FieldValueMetaData(TType.STRING)));
     put(BLOOMFILTERVECTORSIZE, new FieldMetaData("bloomFilterVectorSize", TFieldRequirementType.DEFAULT, 
@@ -118,8 +112,6 @@
 
     this.inMemory = false;
 
-    this.maxValueLength = 2147483647;
-
     this.bloomFilterType = "NONE";
 
     this.bloomFilterVectorSize = 0;
@@ -151,8 +143,6 @@
     this.compression = compression;
     this.inMemory = inMemory;
     this.__isset.inMemory = true;
-    this.maxValueLength = maxValueLength;
-    this.__isset.maxValueLength = true;
     this.bloomFilterType = bloomFilterType;
     this.bloomFilterVectorSize = bloomFilterVectorSize;
     this.__isset.bloomFilterVectorSize = true;
@@ -178,8 +168,6 @@
     }
     __isset.inMemory = other.__isset.inMemory;
     this.inMemory = other.inMemory;
-    __isset.maxValueLength = other.__isset.maxValueLength;
-    this.maxValueLength = other.maxValueLength;
     if (other.isSetBloomFilterType()) {
       this.bloomFilterType = other.bloomFilterType;
     }
@@ -288,28 +276,6 @@
     this.__isset.inMemory = value;
   }
 
-  public int getMaxValueLength() {
-    return this.maxValueLength;
-  }
-
-  public void setMaxValueLength(int maxValueLength) {
-    this.maxValueLength = maxValueLength;
-    this.__isset.maxValueLength = true;
-  }
-
-  public void unsetMaxValueLength() {
-    this.__isset.maxValueLength = false;
-  }
-
-  // Returns true if field maxValueLength is set (has been asigned a value) and false otherwise
-  public boolean isSetMaxValueLength() {
-    return this.__isset.maxValueLength;
-  }
-
-  public void setMaxValueLengthIsSet(boolean value) {
-    this.__isset.maxValueLength = value;
-  }
-
   public String getBloomFilterType() {
     return this.bloomFilterType;
   }
@@ -455,14 +421,6 @@
       }
       break;
 
-    case MAXVALUELENGTH:
-      if (value == null) {
-        unsetMaxValueLength();
-      } else {
-        setMaxValueLength((Integer)value);
-      }
-      break;
-
     case BLOOMFILTERTYPE:
       if (value == null) {
         unsetBloomFilterType();
@@ -522,9 +480,6 @@
     case INMEMORY:
       return new Boolean(isInMemory());
 
-    case MAXVALUELENGTH:
-      return new Integer(getMaxValueLength());
-
     case BLOOMFILTERTYPE:
       return getBloomFilterType();
 
@@ -556,8 +511,6 @@
       return isSetCompression();
     case INMEMORY:
       return isSetInMemory();
-    case MAXVALUELENGTH:
-      return isSetMaxValueLength();
     case BLOOMFILTERTYPE:
       return isSetBloomFilterType();
     case BLOOMFILTERVECTORSIZE:
@@ -622,15 +575,6 @@
         return false;
     }
 
-    boolean this_present_maxValueLength = true;
-    boolean that_present_maxValueLength = true;
-    if (this_present_maxValueLength || that_present_maxValueLength) {
-      if (!(this_present_maxValueLength && that_present_maxValueLength))
-        return false;
-      if (this.maxValueLength != that.maxValueLength)
-        return false;
-    }
-
     boolean this_present_bloomFilterType = true && this.isSetBloomFilterType();
     boolean that_present_bloomFilterType = true && that.isSetBloomFilterType();
     if (this_present_bloomFilterType || that_present_bloomFilterType) {
@@ -725,14 +669,6 @@
             TProtocolUtil.skip(iprot, field.type);
           }
           break;
-        case MAXVALUELENGTH:
-          if (field.type == TType.I32) {
-            this.maxValueLength = iprot.readI32();
-            this.__isset.maxValueLength = true;
-          } else { 
-            TProtocolUtil.skip(iprot, field.type);
-          }
-          break;
         case BLOOMFILTERTYPE:
           if (field.type == TType.STRING) {
             this.bloomFilterType = iprot.readString();
@@ -805,9 +741,6 @@
     oprot.writeFieldBegin(IN_MEMORY_FIELD_DESC);
     oprot.writeBool(this.inMemory);
     oprot.writeFieldEnd();
-    oprot.writeFieldBegin(MAX_VALUE_LENGTH_FIELD_DESC);
-    oprot.writeI32(this.maxValueLength);
-    oprot.writeFieldEnd();
     if (this.bloomFilterType != null) {
       oprot.writeFieldBegin(BLOOM_FILTER_TYPE_FIELD_DESC);
       oprot.writeString(this.bloomFilterType);
@@ -858,10 +791,6 @@
     sb.append(this.inMemory);
     first = false;
     if (!first) sb.append(", ");
-    sb.append("maxValueLength:");
-    sb.append(this.maxValueLength);
-    first = false;
-    if (!first) sb.append(", ");
     sb.append("bloomFilterType:");
     if (this.bloomFilterType == null) {
       sb.append("null");

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Bytes.java Sat Jun  6 01:26:21 2009
@@ -39,6 +39,7 @@
  * HashSets, etc.
  */
 public class Bytes {
+  
   /**
    * Size of long in bytes
    */
@@ -81,6 +82,9 @@
    * Byte array comparator class.
    */
   public static class ByteArrayComparator implements RawComparator<byte []> {
+    /**
+     * Constructor
+     */
     public ByteArrayComparator() {
       super();
     }
@@ -143,13 +147,19 @@
    */
   public static void writeByteArray(final DataOutput out, final byte [] b)
   throws IOException {
-    writeByteArray(out, b, 0, b.length);
+    if(b == null) {
+      WritableUtils.writeVInt(out, 0);
+    } else {
+      writeByteArray(out, b, 0, b.length);
+    }
   }
 
   /**
    * Write byte-array to out with a vint length prefix.
    * @param out
    * @param b
+   * @param offset
+   * @param length
    * @throws IOException
    */
   public static void writeByteArray(final DataOutput out, final byte [] b,
@@ -182,6 +192,8 @@
    * @param tgtBytes the byte array
    * @param tgtOffset position in the array
    * @param srcBytes byte to write out
+   * @param srcOffset
+   * @param srcLength
    * @return incremented offset
    */
   public static int putBytes(byte[] tgtBytes, int tgtOffset, byte[] srcBytes,
@@ -219,9 +231,18 @@
    * @return String made from <code>b</code>
    */
   public static String toString(final byte [] b) {
+    if(b == null) {
+      return null;
+    }
     return toString(b, 0, b.length);
   }
 
+  public static String toString(final byte [] b1,
+                                String sep,
+                                final byte [] b2) {
+    return toString(b1, 0, b1.length) + sep + toString(b2, 0, b2.length);
+  }
+
   /**
    * @param b Presumed UTF-8 encoded byte array.
    * @param off
@@ -229,6 +250,12 @@
    * @return String made from <code>b</code>
    */
   public static String toString(final byte [] b, int off, int len) {
+    if(b == null) {
+      return null;
+    }
+    if(len == 0) {
+      return "";
+    }
     String result = null;
     try {
       result = new String(b, off, len, HConstants.UTF8_ENCODING);
@@ -382,6 +409,10 @@
     return putInt(bytes, offset, i);
   }
 
+  /**
+   * @param f
+   * @return the float represented as byte []
+   */
   public static byte [] toBytes(final float f) {
     // Encode it as int
     int i = Float.floatToRawIntBits(f);
@@ -417,6 +448,10 @@
     return putLong(bytes, offset, l);
   }
 
+  /**
+   * @param d
+   * @return the double represented as byte []
+   */
   public static byte [] toBytes(final double d) {
     // Encode it as a long
     long l = Double.doubleToRawLongBits(d);
@@ -521,6 +556,7 @@
   /**
    * Converts a byte array to a short value
    * @param bytes
+   * @param offset
    * @return the short value
    */
   public static short toShort(byte[] bytes, int offset) {
@@ -530,6 +566,8 @@
   /**
    * Converts a byte array to a short value
    * @param bytes
+   * @param offset
+   * @param length
    * @return the short value
    */
   public static short toShort(byte[] bytes, int offset, final int length) {
@@ -899,12 +937,17 @@
     
     while (low <= high) {
       int mid = (low+high) >>> 1;
-      int cmp = comparator.compare(arr[mid], 0, arr[mid].length, key, offset,
-        length);
-      if (cmp < 0) 
+      // we have to compare in this order, because the comparator order
+      // has special logic when the 'left side' is a special key.
+      int cmp = comparator.compare(key, offset, length,
+          arr[mid], 0, arr[mid].length);
+      // key lives above the midpoint
+      if (cmp > 0)
         low = mid + 1;
-      else if (cmp > 0)
+      // key lives below the midpoint
+      else if (cmp < 0)
         high = mid - 1;
+      // BAM. how often does this really happen?
       else 
         return mid;
     }
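
A short sketch (not part of the commit) of the behavior this change adds to Bytes:
toString now guards against null and empty input, and a two-array join overload was
added. The literals are illustrative.

import org.apache.hadoop.hbase.util.Bytes;

public class BytesSketch {
  public static void main(String[] args) {
    // Null and empty byte arrays no longer blow up; they map to null and "" respectively.
    System.out.println(Bytes.toString(null));
    System.out.println(Bytes.toString(new byte[0]));
    // New convenience overload joining two byte arrays with a String separator.
    System.out.println(Bytes.toString(Bytes.toBytes("info"), ":", Bytes.toBytes("regioninfo")));
  }
}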

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Merge.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Merge.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Merge.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Merge.java Sat Jun  6 01:26:21 2009
@@ -33,6 +33,8 @@
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.regionserver.HLog;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -69,7 +71,7 @@
     this.conf = conf;
     this.mergeInfo = null;
   }
-
+  
   public int run(String[] args) throws Exception {
     if (parseArgs(args) != 0) {
       return -1;
@@ -140,11 +142,14 @@
    */
   private void mergeTwoMetaRegions() throws IOException {
     HRegion rootRegion = utils.getRootRegion();
-    List<KeyValue> cells1 =
-      rootRegion.get(region1, HConstants.COL_REGIONINFO, -1, -1);
+    Get get = new Get(region1);
+    get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
+    List<KeyValue> cells1 =  rootRegion.get(get, null).list();
     HRegionInfo info1 = Writables.getHRegionInfo((cells1 == null)? null: cells1.get(0).getValue());
-    List<KeyValue> cells2 =
-      rootRegion.get(region2, HConstants.COL_REGIONINFO, -1, -1);
+
+    get = new Get(region2);
+    get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
+    List<KeyValue> cells2 =  rootRegion.get(get, null).list();
     HRegionInfo info2 = Writables.getHRegionInfo((cells2 == null)? null: cells2.get(0).getValue());
     HRegion merged = merge(info1, rootRegion, info2, rootRegion); 
     LOG.info("Adding " + merged.getRegionInfo() + " to " +
@@ -206,7 +211,9 @@
     LOG.info("Found meta for region1 " + Bytes.toString(meta1.getRegionName()) +
       ", meta for region2 " + Bytes.toString(meta2.getRegionName()));
     HRegion metaRegion1 = this.utils.getMetaRegion(meta1);
-    List<KeyValue> cells1 = metaRegion1.get(region1, HConstants.COL_REGIONINFO, -1, -1);
+    Get get = new Get(region1);
+    get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
+    List<KeyValue> cells1 =  metaRegion1.get(get, null).list();
     HRegionInfo info1 = Writables.getHRegionInfo((cells1 == null)? null: cells1.get(0).getValue());
     if (info1== null) {
       throw new NullPointerException("info1 is null using key " +
@@ -219,7 +226,9 @@
     } else {
       metaRegion2 = utils.getMetaRegion(meta2);
     }
-    List<KeyValue> cells2 = metaRegion2.get(region2, HConstants.COL_REGIONINFO, -1, -1);
+    get = new Get(region2);
+    get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
+    List<KeyValue> cells2 =  metaRegion2.get(get, null).list();
     HRegionInfo info2 = Writables.getHRegionInfo((cells2 == null)? null: cells2.get(0).getValue());
     if (info2 == null) {
       throw new NullPointerException("info2 is null using key " + meta2);
@@ -309,7 +318,10 @@
     if (LOG.isDebugEnabled()) {
       LOG.debug("Removing region: " + regioninfo + " from " + meta);
     }
-    meta.deleteAll(regioninfo.getRegionName(), System.currentTimeMillis(), null);
+    
+    Delete delete  = new Delete(regioninfo.getRegionName(), 
+        System.currentTimeMillis(), null);
+    meta.delete(delete, null, true);
   }
 
   /*
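
As a hedged illustration (not from the commit), the catalog lookups that Merge and
MetaUtils now perform address the region info cell through CATALOG_FAMILY plus
REGIONINFO_QUALIFIER instead of the old COL_REGIONINFO constant; this sketch does the
equivalent read through a client-side HTable, with an invented .META. row key.

import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;

public class CatalogReadSketch {
  public static void main(String[] args) throws IOException {
    HTable meta = new HTable(new HBaseConfiguration(), HConstants.META_TABLE_NAME);
    Get get = new Get(Bytes.toBytes("mytable,,1244250381000"));
    get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
    Result res = meta.get(get);
    KeyValue[] kvs = res.raw();
    if (kvs.length > 0) {
      HRegionInfo info = Writables.getHRegionInfo(kvs[0].getValue());
      System.out.println(info);
    }
  }
}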

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/MetaUtils.java Sat Jun  6 01:26:21 2009
@@ -36,7 +36,12 @@
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.regionserver.HLog;
@@ -188,28 +193,31 @@
       openRootRegion();
     }
 
-    InternalScanner rootScanner = rootRegion.getScanner(
-        HConstants.COL_REGIONINFO_ARRAY, HConstants.EMPTY_START_ROW,
-        HConstants.LATEST_TIMESTAMP, null);
+    Scan scan = new Scan();
+    scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
+    InternalScanner rootScanner = 
+      rootRegion.getScanner(scan);
 
     try {
       List<KeyValue> results = new ArrayList<KeyValue>();
-      while (rootScanner.next(results)) {
+      boolean hasNext = true;
+      do {
+        hasNext = rootScanner.next(results);
         HRegionInfo info = null;
         for (KeyValue kv: results) {
           info = Writables.getHRegionInfoOrNull(kv.getValue());
           if (info == null) {
             LOG.warn("region info is null for row " +
-              Bytes.toString(kv.getRow()) + " in table " +
+                Bytes.toString(kv.getRow()) + " in table " +
                 HConstants.ROOT_TABLE_NAME);
-            }
-            continue;
-          }
-          if (!listener.processRow(info)) {
-            break;
           }
-          results.clear();
-       }
+          continue;
+        }
+        if (!listener.processRow(info)) {
+          break;
+        }
+        results.clear();
+      } while (hasNext);
     } finally {
       rootScanner.close();
     }
@@ -243,16 +251,19 @@
    */
   public void scanMetaRegion(final HRegion m, final ScannerListener listener)
   throws IOException {
-    InternalScanner metaScanner = m.getScanner(HConstants.COL_REGIONINFO_ARRAY,
-      HConstants.EMPTY_START_ROW, HConstants.LATEST_TIMESTAMP, null);
+    
+    Scan scan = new Scan();
+    scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
+    InternalScanner metaScanner = 
+      m.getScanner(scan);
+    
     try {
       List<KeyValue> results = new ArrayList<KeyValue>();
       while (metaScanner.next(results)) {
         HRegionInfo info = null;
         for (KeyValue kv: results) {
-          if (KeyValue.META_COMPARATOR.compareColumns(kv,
-            HConstants.COL_REGIONINFO, 0, HConstants.COL_REGIONINFO.length,
-              HConstants.COLUMN_FAMILY_STR.length()) == 0) {
+          if(kv.matchingColumn(HConstants.CATALOG_FAMILY,
+              HConstants.REGIONINFO_QUALIFIER)) {
             info = Writables.getHRegionInfoOrNull(kv.getValue());
             if (info == null) {
               LOG.warn("region info is null for row " +
@@ -306,18 +317,30 @@
       final byte [] row, final boolean onlineOffline)
   throws IOException {
     HTable t = new HTable(c, HConstants.META_TABLE_NAME);
-    Cell cell = t.get(row, HConstants.COL_REGIONINFO);
-    if (cell == null) {
+    Get get = new Get(row);
+    get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
+    Result res = t.get(get);
+    KeyValue [] kvs = res.raw();
+    if(kvs.length <= 0) {
       throw new IOException("no information for row " + Bytes.toString(row));
     }
-    // Throws exception if null.
-    HRegionInfo info = Writables.getHRegionInfo(cell);
-    BatchUpdate b = new BatchUpdate(row);
+    byte [] value = kvs[0].getValue();
+    if (value == null) {
+      throw new IOException("no information for row " + Bytes.toString(row));
+    }
+    HRegionInfo info = Writables.getHRegionInfo(value);
+    Put put = new Put(row);
     info.setOffline(onlineOffline);
-    b.put(HConstants.COL_REGIONINFO, Writables.getBytes(info));
-    b.delete(HConstants.COL_SERVER);
-    b.delete(HConstants.COL_STARTCODE);
-    t.commit(b);
+    put.add(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER, 
+        Writables.getBytes(info));
+    t.put(put);
+    
+    Delete delete = new Delete(row);
+    delete.deleteColumns(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER);
+    delete.deleteColumns(HConstants.CATALOG_FAMILY,
+        HConstants.STARTCODE_QUALIFIER);
+    
+    t.delete(delete);
   }
   
   /**
@@ -402,21 +425,45 @@
   public void updateMETARegionInfo(HRegion r, final HRegionInfo hri) 
   throws IOException {
     if (LOG.isDebugEnabled()) {
-      HRegionInfo h = Writables.getHRegionInfoOrNull(
-        r.get(hri.getRegionName(), HConstants.COL_REGIONINFO, -1, -1).get(0).getValue());
-      LOG.debug("Old " + Bytes.toString(HConstants.COL_REGIONINFO) +
-        " for " + hri.toString() + " in " + r.toString() + " is: " +
-        h.toString());
-    }
-    BatchUpdate b = new BatchUpdate(hri.getRegionName());
-    b.put(HConstants.COL_REGIONINFO, Writables.getBytes(hri));
-    r.batchUpdate(b, null);
+      Get get = new Get(hri.getRegionName());
+      get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
+      Result res = r.get(get, null);
+      KeyValue [] kvs = res.raw();
+      if(kvs.length <= 0) {
+        return;
+      }
+      byte [] value = kvs[0].getValue();
+      if (value == null) {
+        return;
+      }
+      HRegionInfo h = Writables.getHRegionInfoOrNull(value);
+      
+      LOG.debug("Old " + Bytes.toString(HConstants.CATALOG_FAMILY) + ":" + 
+          Bytes.toString(HConstants.REGIONINFO_QUALIFIER) + " for " +
+          hri.toString() + " in " + r.toString() + " is: " + h.toString());
+    }
+    
+    Put put = new Put(hri.getRegionName());
+    put.add(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER, 
+        Writables.getBytes(hri));
+    r.put(put);
+
     if (LOG.isDebugEnabled()) {
-      HRegionInfo h = Writables.getHRegionInfoOrNull(
-          r.get(hri.getRegionName(), HConstants.COL_REGIONINFO, -1, -1).get(0).getValue());
-        LOG.debug("New " + Bytes.toString(HConstants.COL_REGIONINFO) +
-          " for " + hri.toString() + " in " + r.toString() + " is: " +
-          h.toString());
+      Get get = new Get(hri.getRegionName());
+      get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
+      Result res = r.get(get, null);
+      KeyValue [] kvs = res.raw();
+      if(kvs.length <= 0) {
+        return;
+      }
+      byte [] value = kvs[0].getValue();
+      if (value == null) {
+        return;
+      }
+      HRegionInfo h = Writables.getHRegionInfoOrNull(value);
+        LOG.debug("New " + Bytes.toString(HConstants.CATALOG_FAMILY) + ":" + 
+            Bytes.toString(HConstants.REGIONINFO_QUALIFIER) + " for " + 
+            hri.toString() + " in " + r.toString() + " is: " +  h.toString());
     }
   }
 

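The MetaUtils hunks above replace the old column-array getScanner calls with a Scan object and drive InternalScanner.next(List) in a do/while loop so the final batch of KeyValues is still processed. A condensed sketch of that pattern, with illustrative variable names, is:

    Scan scan = new Scan();
    scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
    InternalScanner scanner = rootRegion.getScanner(scan);
    try {
      List<KeyValue> results = new ArrayList<KeyValue>();
      boolean hasNext;
      do {
        hasNext = scanner.next(results);
        for (KeyValue kv : results) {
          HRegionInfo info = Writables.getHRegionInfoOrNull(kv.getValue());
          // null means the regioninfo cell could not be decoded; skip it
        }
        results.clear();
      } while (hasNext);
    } finally {
      scanner.close();
    }
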
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Migrate.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Migrate.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Migrate.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Migrate.java Sat Jun  6 01:26:21 2009
@@ -34,6 +34,7 @@
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.util.GenericOptionsParser;
@@ -249,9 +250,10 @@
     if (!enableBlockCache(oldHri)) {
       return;
     }
-    BatchUpdate b = new BatchUpdate(oldHri.getRegionName());
-    b.put(HConstants.COL_REGIONINFO, Writables.getBytes(oldHri));
-    mr.batchUpdate(b);
+    Put put = new Put(oldHri.getRegionName());
+    put.add(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER, 
+        Writables.getBytes(oldHri));
+    mr.put(put);
     LOG.info("Enabled blockcache on " + oldHri.getRegionNameAsString());
   }
 
@@ -262,7 +264,7 @@
   private boolean enableBlockCache(final HRegionInfo hri) {
     boolean result = false;
     HColumnDescriptor hcd =
-      hri.getTableDesc().getFamily(HConstants.COLUMN_FAMILY);
+      hri.getTableDesc().getFamily(HConstants.CATALOG_FAMILY);
     if (hcd == null) {
       LOG.info("No info family in: " + hri.getRegionNameAsString());
       return result;
@@ -283,9 +285,10 @@
     if (!updateVersions(oldHri)) {
       return;
     }
-    BatchUpdate b = new BatchUpdate(oldHri.getRegionName());
-    b.put(HConstants.COL_REGIONINFO, Writables.getBytes(oldHri));
-    mr.batchUpdate(b);
+    Put put = new Put(oldHri.getRegionName());
+    put.add(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER, 
+        Writables.getBytes(oldHri));
+    mr.put(put);
     LOG.info("Upped versions on " + oldHri.getRegionNameAsString());
   }
 
@@ -296,7 +299,7 @@
   private boolean updateVersions(final HRegionInfo hri) {
     boolean result = false;
     HColumnDescriptor hcd =
-      hri.getTableDesc().getFamily(HConstants.COLUMN_FAMILY_HISTORIAN);
+      hri.getTableDesc().getFamily(HConstants.CATALOG_HISTORIAN_FAMILY);
     if (hcd == null) {
       LOG.info("No region historian family in: " + hri.getRegionNameAsString());
       return result;
@@ -307,7 +310,7 @@
       result = true;
     }
     // Set the versions up to 10 from old default of 1.
-    hcd = hri.getTableDesc().getFamily(HConstants.COLUMN_FAMILY);
+    hcd = hri.getTableDesc().getFamily(HConstants.CATALOG_FAMILY);
     if (hcd.getMaxVersions() == 1) {
       // Set it to 10, an arbitrary high number
       hcd.setMaxVersions(10);

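Both Migrate.java hunks make the same substitution, so the before/after shape is worth seeing side by side; the names oldHri and mr are taken from the surrounding code:

    // before: single-column catalog update through BatchUpdate
    BatchUpdate b = new BatchUpdate(oldHri.getRegionName());
    b.put(HConstants.COL_REGIONINFO, Writables.getBytes(oldHri));
    mr.batchUpdate(b);

    // after: explicit family/qualifier pair through Put
    Put put = new Put(oldHri.getRegionName());
    put.add(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER,
        Writables.getBytes(oldHri));
    mr.put(put);
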
Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/AbstractMergeTestBase.java Sat Jun  6 01:26:21 2009
@@ -23,6 +23,7 @@
 import java.io.UnsupportedEncodingException;
 import java.util.Random;
 
+import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -34,7 +35,7 @@
 public abstract class AbstractMergeTestBase extends HBaseClusterTestCase {
   static final Log LOG =
     LogFactory.getLog(AbstractMergeTestBase.class.getName());
-  static final byte [] COLUMN_NAME = Bytes.toBytes("contents:");
+  static final byte [] COLUMN_NAME = Bytes.toBytes("contents");
   protected final Random rand = new Random();
   protected HTableDescriptor desc;
   protected ImmutableBytesWritable value;
@@ -126,11 +127,10 @@
 
     HRegionIncommon r = new HRegionIncommon(region);
     for(int i = firstRow; i < firstRow + nrows; i++) {
-      BatchUpdate batchUpdate = new BatchUpdate(Bytes.toBytes("row_"
+      Put put = new Put(Bytes.toBytes("row_"
           + String.format("%1$05d", i)));
-
-      batchUpdate.put(COLUMN_NAME, value.get());
-      region.batchUpdate(batchUpdate, null);
+      put.add(COLUMN_NAME, null,  value.get());
+      region.put(put);
       if(i % 10000 == 0) {
         System.out.println("Flushing write #" + i);
         r.flushcache();

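The test change above also illustrates how family-only column names migrate: the trailing ':' is dropped from the family name and the qualifier slot of Put.add is passed as null. A minimal sketch, with an illustrative row and value:

    byte [] family = Bytes.toBytes("contents");          // was "contents:" in the old API
    Put put = new Put(Bytes.toBytes("row_00001"));
    put.add(family, null, Bytes.toBytes("somevalue"));    // null qualifier: family-only column
    region.put(put);
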
Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/DFSAbort.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/DFSAbort.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/DFSAbort.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/DFSAbort.java Sat Jun  6 01:26:21 2009
@@ -40,7 +40,7 @@
     try {
       super.setUp();
       HTableDescriptor desc = new HTableDescriptor(getName());
-      desc.addFamily(new HColumnDescriptor(HConstants.COLUMN_FAMILY_STR));
+      desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
       HBaseAdmin admin = new HBaseAdmin(conf);
       admin.createTable(desc);
     } catch (Exception e) {

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java Sat Jun  6 01:26:21 2009
@@ -25,6 +25,7 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.NavigableMap;
 import java.util.SortedMap;
 
 import junit.framework.TestCase;
@@ -33,8 +34,13 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Scanner;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.RowResult;
@@ -52,11 +58,11 @@
   /** configuration parameter name for test directory */
   public static final String TEST_DIRECTORY_KEY = "test.build.data";
 
-  protected final static byte [] COLFAMILY_NAME1 = Bytes.toBytes("colfamily1:");
-  protected final static byte [] COLFAMILY_NAME2 = Bytes.toBytes("colfamily2:");
-  protected final static byte [] COLFAMILY_NAME3 = Bytes.toBytes("colfamily3:");
-  protected static final byte [][] COLUMNS = {COLFAMILY_NAME1,
-    COLFAMILY_NAME2, COLFAMILY_NAME3};
+  protected final static byte [] fam1 = Bytes.toBytes("colfamily1");
+  protected final static byte [] fam2 = Bytes.toBytes("colfamily2");
+  protected final static byte [] fam3 = Bytes.toBytes("colfamily3");
+  protected static final byte [][] COLUMNS = {fam1,
+    fam2, fam3};
 
   private boolean localfs = false;
   protected Path testDir = null;
@@ -189,13 +195,13 @@
   protected HTableDescriptor createTableDescriptor(final String name,
       final int versions) {
     HTableDescriptor htd = new HTableDescriptor(name);
-    htd.addFamily(new HColumnDescriptor(COLFAMILY_NAME1, versions,
+    htd.addFamily(new HColumnDescriptor(fam1, versions,
       HColumnDescriptor.DEFAULT_COMPRESSION, false, false,
       Integer.MAX_VALUE, HConstants.FOREVER, false));
-    htd.addFamily(new HColumnDescriptor(COLFAMILY_NAME2, versions,
+    htd.addFamily(new HColumnDescriptor(fam2, versions,
         HColumnDescriptor.DEFAULT_COMPRESSION, false, false,
         Integer.MAX_VALUE, HConstants.FOREVER, false));
-    htd.addFamily(new HColumnDescriptor(COLFAMILY_NAME3, versions,
+    htd.addFamily(new HColumnDescriptor(fam3, versions,
         HColumnDescriptor.DEFAULT_COMPRESSION, false, false,
         Integer.MAX_VALUE,  HConstants.FOREVER, false));
     return htd;
@@ -284,11 +290,13 @@
             break EXIT;
           }
           try {
-            BatchUpdate batchUpdate = ts == -1 ? 
-              new BatchUpdate(t) : new BatchUpdate(t, ts);
+            Put put = new Put(t);
+            if(ts != -1) {
+              put.setTimeStamp(ts);
+            }
             try {
-              batchUpdate.put(column, t);
-              updater.commit(batchUpdate);
+              put.add(Bytes.toBytes(column), null, t);
+              updater.put(put);
               count++;
             } catch (RuntimeException ex) {
               ex.printStackTrace();
@@ -331,44 +339,23 @@
    */
   public static interface Incommon {
     /**
-     * @param row
-     * @param column
-     * @return value for row/column pair
+     * 
+     * @param delete
+     * @param lockid
+     * @param writeToWAL
      * @throws IOException
      */
-    public Cell get(byte [] row, byte [] column) throws IOException;
-    /**
-     * @param row
-     * @param column
-     * @param versions
-     * @return value for row/column pair for number of versions requested
-     * @throws IOException
-     */
-    public Cell[] get(byte [] row, byte [] column, int versions) throws IOException;
-    /**
-     * @param row
-     * @param column
-     * @param ts
-     * @param versions
-     * @return value for row/column/timestamp tuple for number of versions
-     * @throws IOException
-     */
-    public Cell[] get(byte [] row, byte [] column, long ts, int versions)
+    public void delete(Delete delete,  Integer lockid, boolean writeToWAL)
     throws IOException;
-    /**
-     * @param row
-     * @param column
-     * @param ts
-     * @throws IOException
-     */
-    public void deleteAll(byte [] row, byte [] column, long ts) throws IOException;
 
     /**
-     * @param batchUpdate
+     * @param put
      * @throws IOException
      */
-    public void commit(BatchUpdate batchUpdate) throws IOException;
+    public void put(Put put) throws IOException;
 
+    public Result get(Get get) throws IOException;
+    
     /**
      * @param columns
      * @param firstRow
@@ -393,48 +380,46 @@
       this.region = HRegion;
     }
     
-    public void commit(BatchUpdate batchUpdate) throws IOException {
-      region.batchUpdate(batchUpdate, null);
+    public void put(Put put) throws IOException {
+      region.put(put);
     }
     
-    public void deleteAll(byte [] row, byte [] column, long ts)
+    public void delete(Delete delete,  Integer lockid, boolean writeToWAL)
     throws IOException {
-      this.region.deleteAll(row, column, ts, null);
+      this.region.delete(delete, lockid, writeToWAL);
     }
-
+    
+    public Result get(Get get) throws IOException {
+      return region.get(get, null);
+    }
+    
     public ScannerIncommon getScanner(byte [][] columns, byte [] firstRow,
       long ts) 
     throws IOException {
+      Scan scan = new Scan(firstRow);
+      scan.addColumns(columns);
+      scan.setTimeRange(0, ts);
       return new 
-        InternalScannerIncommon(region.getScanner(columns, firstRow, ts, null));
-    }
-
-    public Cell get(byte [] row, byte [] column) throws IOException {
-      // TODO: Fix profligacy converting from List to Cell [].
-      Cell[] result = Cell.createSingleCellArray(this.region.get(row, column, -1, -1));
-      return (result == null)? null : result[0];
-    }
-
-    public Cell[] get(byte [] row, byte [] column, int versions)
-    throws IOException {
-      // TODO: Fix profligacy converting from List to Cell [].
-      return Cell.createSingleCellArray(this.region.get(row, column, -1, versions));
-    }
-
-    public Cell[] get(byte [] row, byte [] column, long ts, int versions)
-    throws IOException {
-      // TODO: Fix profligacy converting from List to Cell [].
-      return Cell.createSingleCellArray(this.region.get(row, column, ts, versions));
+        InternalScannerIncommon(region.getScanner(scan));
     }
-
-    /**
-     * @param row
-     * @return values for each column in the specified row
-     * @throws IOException
-     */
-    public Map<byte [], Cell> getFull(byte [] row) throws IOException {
-      return region.getFull(row, null, HConstants.LATEST_TIMESTAMP, 1, null);
+    
+    //New
+    public ScannerIncommon getScanner(byte [] family, byte [][] qualifiers,
+        byte [] firstRow, long ts) 
+      throws IOException {
+        Scan scan = new Scan(firstRow);
+        for(int i=0; i<qualifiers.length; i++){
+          scan.addColumn(HConstants.CATALOG_FAMILY, qualifiers[i]);
+        }
+        scan.setTimeRange(0, ts);
+        return new 
+          InternalScannerIncommon(region.getScanner(scan));
+      }
+    
+    public Result get(Get get, Integer lockid) throws IOException{
+      return this.region.get(get, lockid);
     }
+    
 
     public void flushcache() throws IOException {
       this.region.flushcache();
@@ -455,33 +440,27 @@
       this.table = table;
     }
     
-    public void commit(BatchUpdate batchUpdate) throws IOException {
-      table.commit(batchUpdate);
+    public void put(Put put) throws IOException {
+      table.put(put);
     }
     
-    public void deleteAll(byte [] row, byte [] column, long ts)
-    throws IOException {
-      this.table.deleteAll(row, column, ts);
-    }
     
-    public ScannerIncommon getScanner(byte [][] columns, byte [] firstRow, long ts) 
+    public void delete(Delete delete,  Integer lockid, boolean writeToWAL)
     throws IOException {
-      return new 
-        ClientScannerIncommon(table.getScanner(columns, firstRow, ts, null));
+      this.table.delete(delete);
     }
     
-    public Cell get(byte [] row, byte [] column) throws IOException {
-      return this.table.get(row, column);
+    public Result get(Get get) throws IOException {
+      return table.get(get);
     }
     
-    public Cell[] get(byte [] row, byte [] column, int versions)
-    throws IOException {
-      return this.table.get(row, column, versions);
-    }
-    
-    public Cell[] get(byte [] row, byte [] column, long ts, int versions)
+    public ScannerIncommon getScanner(byte [][] columns, byte [] firstRow, long ts) 
     throws IOException {
-      return this.table.get(row, column, ts, versions);
+      Scan scan = new Scan(firstRow);
+      scan.addColumns(columns);
+      scan.setTimeStamp(ts);
+      return new 
+        ClientScannerIncommon(table.getScanner(scan));
     }
   }
   
@@ -494,22 +473,19 @@
   }
   
   public static class ClientScannerIncommon implements ScannerIncommon {
-    Scanner scanner;
-    public ClientScannerIncommon(Scanner scanner) {
+    ResultScanner scanner;
+    public ClientScannerIncommon(ResultScanner scanner) {
       this.scanner = scanner;
     }
     
     public boolean next(List<KeyValue> values)
     throws IOException {
-      RowResult results = scanner.next();
+      Result results = scanner.next();
       if (results == null) {
         return false;
       }
       values.clear();
-      for (Map.Entry<byte [], Cell> entry : results.entrySet()) {
-        values.add(new KeyValue(results.getRow(), entry.getKey(),
-          entry.getValue().getTimestamp(), entry.getValue().getValue()));
-      }
+      values.addAll(results.list());
       return true;
     }
     
@@ -544,25 +520,53 @@
     }
   }
   
-  protected void assertCellEquals(final HRegion region, final byte [] row,
-    final byte [] column, final long timestamp, final String value)
-  throws IOException {
-    Map<byte [], Cell> result = region.getFull(row, null, timestamp, 1, null);
-    Cell cell_value = result.get(column);
-    if (value == null) {
-      assertEquals(Bytes.toString(column) + " at timestamp " + timestamp, null,
-        cell_value);
-    } else {
-      if (cell_value == null) {
-        fail(Bytes.toString(column) + " at timestamp " + timestamp + 
-          "\" was expected to be \"" + value + " but was null");
-      }
-      if (cell_value != null) {
-        assertEquals(Bytes.toString(column) + " at timestamp " 
-            + timestamp, value, new String(cell_value.getValue()));
+//  protected void assertCellEquals(final HRegion region, final byte [] row,
+//    final byte [] column, final long timestamp, final String value)
+//  throws IOException {
+//    Map<byte [], Cell> result = region.getFull(row, null, timestamp, 1, null);
+//    Cell cell_value = result.get(column);
+//    if (value == null) {
+//      assertEquals(Bytes.toString(column) + " at timestamp " + timestamp, null,
+//        cell_value);
+//    } else {
+//      if (cell_value == null) {
+//        fail(Bytes.toString(column) + " at timestamp " + timestamp + 
+//          "\" was expected to be \"" + value + " but was null");
+//      }
+//      if (cell_value != null) {
+//        assertEquals(Bytes.toString(column) + " at timestamp " 
+//            + timestamp, value, new String(cell_value.getValue()));
+//      }
+//    }
+//  }
+
+  protected void assertResultEquals(final HRegion region, final byte [] row,
+      final byte [] family, final byte [] qualifier, final long timestamp,
+      final byte [] value)
+    throws IOException {
+      Get get = new Get(row);
+      get.setTimeStamp(timestamp);
+      Result res = region.get(get, null);
+      NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map = 
+        res.getMap();
+      byte [] res_value = map.get(family).get(qualifier).get(timestamp);
+    
+      if (value == null) {
+        assertEquals(Bytes.toString(family) + " " + Bytes.toString(qualifier) +
+            " at timestamp " + timestamp, null, res_value);
+      } else {
+        if (res_value == null) {
+          fail(Bytes.toString(family) + " " + Bytes.toString(qualifier) + 
+              " at timestamp " + timestamp + "\" was expected to be \"" + 
+              value + " but was null");
+        }
+        if (res_value != null) {
+          assertEquals(Bytes.toString(family) + " " + Bytes.toString(qualifier) +
+              " at timestamp " + 
+              timestamp, value, new String(res_value));
+        }
       }
     }
-  }
   
   /**
    * Initializes parameters used in the test environment:

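After this rewrite the Incommon helpers in HBaseTestCase expose only the new client objects. A test would drive them roughly as in the following sketch; the row and value bytes are illustrative:

    Incommon store = new HRegionIncommon(region);    // HTableIncommon behaves the same way
    byte [] row = Bytes.toBytes("row1");

    Put put = new Put(row);
    put.add(fam1, null, Bytes.toBytes("value1"));
    store.put(put);                                   // replaces commit(BatchUpdate)

    Result result = store.get(new Get(row));          // replaces the Cell-returning gets

    Delete delete = new Delete(row);                  // whole-row delete
    store.delete(delete, null, true);                 // replaces deleteAll(row, column, ts)
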
Added: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/KeyValueTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/KeyValueTestUtil.java?rev=782178&view=auto
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/KeyValueTestUtil.java (added)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/KeyValueTestUtil.java Sat Jun  6 01:26:21 2009
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase;
+
+import org.apache.hadoop.hbase.util.Bytes;
+
+public class KeyValueTestUtil {
+
+  public static KeyValue create(
+      String row,
+      String family,
+      String qualifier,
+      long timestamp,
+      String value)
+  {
+    return create(row, family, qualifier, timestamp, KeyValue.Type.Put, value);
+  }
+
+  public static KeyValue create(
+      String row,
+      String family,
+      String qualifier,
+      long timestamp,
+      KeyValue.Type type,
+      String value)
+  {
+      return new KeyValue(
+          Bytes.toBytes(row),
+          Bytes.toBytes(family),
+          Bytes.toBytes(qualifier),
+          timestamp,
+          type,
+          Bytes.toBytes(value)
+      );
+  }
+}

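Usage of the new helper is a one-liner; for example, with illustrative values:

    KeyValue kv = KeyValueTestUtil.create("row1", "colfamily1", "qual1", 1234L, "value1");
    // the longer overload additionally takes a type, e.g. KeyValue.Type.Delete
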
Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/MiniHBaseCluster.java Sat Jun  6 01:26:21 2009
@@ -62,7 +62,7 @@
         } catch (BindException e) {
           //this port is already in use. try to use another (for multiple testing)
           int port = conf.getInt("hbase.master.port", DEFAULT_MASTER_PORT);
-          LOG.info("MiniHBaseCluster: Failed binding Master to port: " + port);
+          LOG.info("Failed binding Master to port: " + port, e);
           port++;
           conf.setInt("hbase.master.port", port);
           continue;

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java Sat Jun  6 01:26:21 2009
@@ -37,13 +37,15 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Scanner;
-import org.apache.hadoop.hbase.filter.PageRowFilter;
-import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
-import org.apache.hadoop.hbase.io.BatchUpdate;
-import org.apache.hadoop.hbase.io.RowResult;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.filter.PageFilter;
+import org.apache.hadoop.hbase.filter.RowWhileMatchFilter;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Hash;
@@ -86,12 +88,13 @@
   private static final int ONE_GB = 1024 * 1024 * 1000;
   private static final int ROWS_PER_GB = ONE_GB / ROW_LENGTH;
   
-  static final byte [] COLUMN_NAME = Bytes.toBytes(COLUMN_FAMILY_STR + "data");
+  static final byte [] FAMILY_NAME = Bytes.toBytes("info");
+  static final byte [] QUALIFIER_NAME = Bytes.toBytes("data");
   
   protected static final HTableDescriptor TABLE_DESCRIPTOR;
   static {
     TABLE_DESCRIPTOR = new HTableDescriptor("TestTable");
-    TABLE_DESCRIPTOR.addFamily(new HColumnDescriptor(COLUMN_FAMILY));
+    TABLE_DESCRIPTOR.addFamily(new HColumnDescriptor(CATALOG_FAMILY));
   }
   
   private static final String RANDOM_READ = "randomRead";
@@ -431,11 +434,12 @@
     
     @Override
     void testRow(final int i) throws IOException {
-      Scanner s = this.table.getScanner(new byte [][] {COLUMN_NAME},
-        getRandomRow(this.rand, this.totalRows),
-        new WhileMatchRowFilter(new PageRowFilter(120)));
+      Scan scan = new Scan(getRandomRow(this.rand, this.totalRows));
+      scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+      scan.setFilter(new RowWhileMatchFilter(new PageFilter(120)));
+      ResultScanner s = this.table.getScanner(scan);
       //int count = 0;
-      for (RowResult rr = null; (rr = s.next()) != null;) {
+      for (Result rr = null; (rr = s.next()) != null;) {
         // LOG.info("" + count++ + " " + rr.toString());
       }
       s.close();
@@ -461,7 +465,9 @@
     
     @Override
     void testRow(final int i) throws IOException {
-      this.table.get(getRandomRow(this.rand, this.totalRows), COLUMN_NAME);
+      Get get = new Get(getRandomRow(this.rand, this.totalRows));
+      get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+      this.table.get(get);
     }
 
     @Override
@@ -485,9 +491,9 @@
     @Override
     void testRow(final int i) throws IOException {
       byte [] row = getRandomRow(this.rand, this.totalRows);
-      BatchUpdate b = new BatchUpdate(row);
-      b.put(COLUMN_NAME, generateValue(this.rand));
-      table.commit(b);
+      Put put = new Put(row);
+      put.add(FAMILY_NAME, QUALIFIER_NAME, generateValue(this.rand));
+      table.put(put);
     }
 
     @Override
@@ -497,7 +503,7 @@
   }
   
   class ScanTest extends Test {
-    private Scanner testScanner;
+    private ResultScanner testScanner;
     
     ScanTest(final HBaseConfiguration conf, final int startRow,
         final int perClientRunRows, final int totalRows, final Status status) {
@@ -507,8 +513,9 @@
     @Override
     void testSetup() throws IOException {
       super.testSetup();
-      this.testScanner = table.getScanner(new byte [][] {COLUMN_NAME},
-        format(this.startRow));
+      Scan scan = new Scan(format(this.startRow));
+      scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+      this.testScanner = table.getScanner(scan);
     }
     
     @Override
@@ -539,7 +546,9 @@
     
     @Override
     void testRow(final int i) throws IOException {
-      table.get(format(i), COLUMN_NAME);
+      Get get = new Get(format(i));
+      get.addColumn(FAMILY_NAME, QUALIFIER_NAME);
+      table.get(get);
     }
 
     @Override
@@ -556,9 +565,9 @@
     
     @Override
     void testRow(final int i) throws IOException {
-      BatchUpdate b = new BatchUpdate(format(i));
-      b.put(COLUMN_NAME, generateValue(this.rand));
-      table.commit(b);
+      Put put = new Put(format(i));
+      put.add(FAMILY_NAME, QUALIFIER_NAME, generateValue(this.rand));
+      table.put(put);
     }
 
     @Override

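The PerformanceEvaluation changes show the filtered-scan form of the new API. A stripped-down version of the random scan test's setup and loop looks like the sketch below; the 120-row page limit is the value used in the patch, and the try/finally around close() is added here for safety:

    Scan scan = new Scan(getRandomRow(this.rand, this.totalRows));
    scan.addColumn(FAMILY_NAME, QUALIFIER_NAME);
    scan.setFilter(new RowWhileMatchFilter(new PageFilter(120)));
    ResultScanner s = this.table.getScanner(scan);
    try {
      for (Result rr = s.next(); rr != null; rr = s.next()) {
        // consume rr
      }
    } finally {
      s.close();
    }
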
Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestEmptyMetaInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestEmptyMetaInfo.java?rev=782178&r1=782177&r2=782178&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestEmptyMetaInfo.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestEmptyMetaInfo.java Sat Jun  6 01:26:21 2009
@@ -23,9 +23,10 @@
 import java.io.IOException;
 
 import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Scanner;
-import org.apache.hadoop.hbase.io.BatchUpdate;
-import org.apache.hadoop.hbase.io.RowResult;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -44,9 +45,10 @@
       byte [] regionName = HRegionInfo.createRegionName(tableName,
         Bytes.toBytes(i == 0? "": Integer.toString(i)),
         Long.toString(System.currentTimeMillis()));
-      BatchUpdate b = new BatchUpdate(regionName);
-      b.put(HConstants.COL_SERVER, Bytes.toBytes("localhost:1234"));
-      t.commit(b);
+      Put put = new Put(regionName);
+      put.add(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
+          Bytes.toBytes("localhost:1234"));
+      t.put(put);
     }
     long sleepTime =
       conf.getLong("hbase.master.meta.thread.rescanfrequency", 10000);
@@ -59,11 +61,18 @@
       } catch (InterruptedException e) {
         // ignore
       }
-      Scanner scanner = t.getScanner(HConstants.ALL_META_COLUMNS, tableName);
+      Scan scan = new Scan();
+      scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
+      scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER);
+      scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER);
+      scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITA_QUALIFIER);
+      scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.SPLITB_QUALIFIER);
+      ResultScanner scanner = t.getScanner(scan);
       try {
         count = 0;
-        for (RowResult r: scanner) {
-          if (r.size() > 0) {
+        Result r;
+        while((r = scanner.next()) != null) {
+          if (!r.isEmpty()) {
             count += 1;
           }
         }

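The five addColumn calls in the TestEmptyMetaInfo hunk enumerate the catalog columns one by one; an equivalent, more compact construction (an editorial sketch, not part of the patch) would be:

    Scan scan = new Scan();
    byte [][] qualifiers = {
        HConstants.REGIONINFO_QUALIFIER, HConstants.SERVER_QUALIFIER,
        HConstants.STARTCODE_QUALIFIER, HConstants.SPLITA_QUALIFIER,
        HConstants.SPLITB_QUALIFIER };
    for (byte [] qualifier : qualifiers) {
      scan.addColumn(HConstants.CATALOG_FAMILY, qualifier);
    }
    ResultScanner scanner = t.getScanner(scan);
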

