hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From apurt...@apache.org
Subject svn commit: r782445 [11/17] - in /hadoop/hbase/trunk_on_hadoop-0.18.3: ./ bin/ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/client/tableindexed/ src/java/org/apache/hadoop/hbase/client/tran...
Date Sun, 07 Jun 2009 19:57:43 GMT
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestKeyValue.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestKeyValue.java?rev=782445&r1=782444&r2=782445&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestKeyValue.java
(original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestKeyValue.java
Sun Jun  7 19:57:37 2009
@@ -30,6 +30,7 @@
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
+import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.util.Bytes;
 
 public class TestKeyValue extends TestCase {
@@ -39,13 +40,21 @@
     final byte [] a = Bytes.toBytes("aaa");
     byte [] column1 = Bytes.toBytes("abc:def");
     byte [] column2 = Bytes.toBytes("abcd:ef");
-    KeyValue aaa = new KeyValue(a, column1, a);
-    assertFalse(KeyValue.COMPARATOR.
-      compareColumns(aaa, column2, 0, column2.length, 4) == 0);
+    byte [] family2 = Bytes.toBytes("abcd");
+    byte [] qualifier2 = Bytes.toBytes("ef"); 
+    KeyValue aaa = new KeyValue(a, column1, 0L, Type.Put, a);
+    assertFalse(aaa.matchingColumn(column2));
+    assertTrue(aaa.matchingColumn(column1));
+    aaa = new KeyValue(a, column2, 0L, Type.Put, a);
+    assertFalse(aaa.matchingColumn(column1));
+    assertTrue(aaa.matchingColumn(family2,qualifier2));
     column1 = Bytes.toBytes("abcd:");
-    aaa = new KeyValue(a, column1, a);
-    assertFalse(KeyValue.COMPARATOR.
-      compareColumns(aaa, column1, 0, column1.length, 4) == 0);
+    aaa = new KeyValue(a, column1, 0L, Type.Put, a);
+    assertTrue(aaa.matchingColumn(family2,null));
+    assertFalse(aaa.matchingColumn(family2,qualifier2));
+    // Previous test had an assertFalse that I don't understand
+    //    assertFalse(KeyValue.COMPARATOR.
+    //    compareColumns(aaa, column1, 0, column1.length, 4) == 0);
   }
 
   public void testBasics() throws Exception {
@@ -111,31 +120,31 @@
   public void testMoreComparisons() throws Exception {
     // Root compares
     long now = System.currentTimeMillis();
-    KeyValue a = new KeyValue(".META.,,99999999999999", now);
-    KeyValue b = new KeyValue(".META.,,1", now);
+    KeyValue a = new KeyValue(Bytes.toBytes(".META.,,99999999999999"), now);
+    KeyValue b = new KeyValue(Bytes.toBytes(".META.,,1"), now);
     KVComparator c = new KeyValue.RootComparator();
     assertTrue(c.compare(b, a) < 0);
-    KeyValue aa = new KeyValue(".META.,,1", now);
-    KeyValue bb = new KeyValue(".META.,,1", "info:regioninfo",
-      1235943454602L);
+    KeyValue aa = new KeyValue(Bytes.toBytes(".META.,,1"), now);
+    KeyValue bb = new KeyValue(Bytes.toBytes(".META.,,1"), 
+        Bytes.toBytes("info:regioninfo"), 1235943454602L);
     assertTrue(c.compare(aa, bb) < 0);
     
     // Meta compares
-    KeyValue aaa =
-      new KeyValue("TestScanMultipleVersions,row_0500,1236020145502", now);
-    KeyValue bbb = new KeyValue("TestScanMultipleVersions,,99999999999999",
-      now);
+    KeyValue aaa = new KeyValue(
+        Bytes.toBytes("TestScanMultipleVersions,row_0500,1236020145502"), now);
+    KeyValue bbb = new KeyValue(
+        Bytes.toBytes("TestScanMultipleVersions,,99999999999999"), now);
     c = new KeyValue.MetaComparator();
     assertTrue(c.compare(bbb, aaa) < 0);
     
-    KeyValue aaaa = new KeyValue("TestScanMultipleVersions,,1236023996656",
-      "info:regioninfo", 1236024396271L);
+    KeyValue aaaa = new KeyValue(Bytes.toBytes("TestScanMultipleVersions,,1236023996656"),
+        Bytes.toBytes("info:regioninfo"), 1236024396271L);
     assertTrue(c.compare(aaaa, bbb) < 0);
     
-    KeyValue x = new KeyValue("TestScanMultipleVersions,row_0500,1236034574162",
-      "", 9223372036854775807L);
-    KeyValue y = new KeyValue("TestScanMultipleVersions,row_0500,1236034574162",
-      "info:regioninfo", 1236034574912L);
+    KeyValue x = new KeyValue(Bytes.toBytes("TestScanMultipleVersions,row_0500,1236034574162"),
+        Bytes.toBytes(""), 9223372036854775807L);
+    KeyValue y = new KeyValue(Bytes.toBytes("TestScanMultipleVersions,row_0500,1236034574162"),
+        Bytes.toBytes("info:regioninfo"), 1236034574912L);
     assertTrue(c.compare(x, y) < 0);
     comparisons(new KeyValue.MetaComparator());
     comparisons(new KeyValue.KVComparator());
@@ -151,53 +160,53 @@
   public void testKeyValueBorderCases() throws IOException {
     // % sorts before , so if we don't do special comparator, rowB would
     // come before rowA.
-    KeyValue rowA = new KeyValue("testtable,www.hbase.org/,1234",
-      "", Long.MAX_VALUE);
-    KeyValue rowB = new KeyValue("testtable,www.hbase.org/%20,99999",
-      "", Long.MAX_VALUE);
+    KeyValue rowA = new KeyValue(Bytes.toBytes("testtable,www.hbase.org/,1234"),
+      Bytes.toBytes(""), Long.MAX_VALUE);
+    KeyValue rowB = new KeyValue(Bytes.toBytes("testtable,www.hbase.org/%20,99999"),
+      Bytes.toBytes(""), Long.MAX_VALUE);
     assertTrue(KeyValue.META_COMPARATOR.compare(rowA, rowB) < 0);
 
-    rowA = new KeyValue("testtable,,1234", "", Long.MAX_VALUE);
-    rowB = new KeyValue("testtable,$www.hbase.org/,99999", "", Long.MAX_VALUE);
+    rowA = new KeyValue(Bytes.toBytes("testtable,,1234"), Bytes.toBytes(""), Long.MAX_VALUE);
+    rowB = new KeyValue(Bytes.toBytes("testtable,$www.hbase.org/,99999"), Bytes.toBytes(""),
Long.MAX_VALUE);
     assertTrue(KeyValue.META_COMPARATOR.compare(rowA, rowB) < 0);
 
-    rowA = new KeyValue(".META.,testtable,www.hbase.org/,1234,4321", "",
+    rowA = new KeyValue(Bytes.toBytes(".META.,testtable,www.hbase.org/,1234,4321"), Bytes.toBytes(""),
       Long.MAX_VALUE);
-    rowB = new KeyValue(".META.,testtable,www.hbase.org/%20,99999,99999", "",
+    rowB = new KeyValue(Bytes.toBytes(".META.,testtable,www.hbase.org/%20,99999,99999"),
Bytes.toBytes(""),
       Long.MAX_VALUE);
     assertTrue(KeyValue.ROOT_COMPARATOR.compare(rowA, rowB) < 0);
   }
 
   private void metacomparisons(final KeyValue.MetaComparator c) {
     long now = System.currentTimeMillis();
-    assertTrue(c.compare(new KeyValue(".META.,a,,0,1", now),
-      new KeyValue(".META.,a,,0,1", now)) == 0);
-    KeyValue a = new KeyValue(".META.,a,,0,1", now);
-    KeyValue b = new KeyValue(".META.,a,,0,2", now);
+    assertTrue(c.compare(new KeyValue(Bytes.toBytes(".META.,a,,0,1"), now),
+      new KeyValue(Bytes.toBytes(".META.,a,,0,1"), now)) == 0);
+    KeyValue a = new KeyValue(Bytes.toBytes(".META.,a,,0,1"), now);
+    KeyValue b = new KeyValue(Bytes.toBytes(".META.,a,,0,2"), now);
     assertTrue(c.compare(a, b) < 0);
-    assertTrue(c.compare(new KeyValue(".META.,a,,0,2", now),
-      new KeyValue(".META.,a,,0,1", now)) > 0);
+    assertTrue(c.compare(new KeyValue(Bytes.toBytes(".META.,a,,0,2"), now),
+      new KeyValue(Bytes.toBytes(".META.,a,,0,1"), now)) > 0);
   }
 
   private void comparisons(final KeyValue.KVComparator c) {
     long now = System.currentTimeMillis();
-    assertTrue(c.compare(new KeyValue(".META.,,1", now),
-      new KeyValue(".META.,,1", now)) == 0);
-    assertTrue(c.compare(new KeyValue(".META.,,1", now),
-      new KeyValue(".META.,,2", now)) < 0);
-    assertTrue(c.compare(new KeyValue(".META.,,2", now),
-      new KeyValue(".META.,,1", now)) > 0);
+    assertTrue(c.compare(new KeyValue(Bytes.toBytes(".META.,,1"), now),
+      new KeyValue(Bytes.toBytes(".META.,,1"), now)) == 0);
+    assertTrue(c.compare(new KeyValue(Bytes.toBytes(".META.,,1"), now),
+      new KeyValue(Bytes.toBytes(".META.,,2"), now)) < 0);
+    assertTrue(c.compare(new KeyValue(Bytes.toBytes(".META.,,2"), now),
+      new KeyValue(Bytes.toBytes(".META.,,1"), now)) > 0);
   }
 
   public void testBinaryKeys() throws Exception {
     Set<KeyValue> set = new TreeSet<KeyValue>(KeyValue.COMPARATOR);
-    String column = "col:umn";
-    KeyValue [] keys = {new KeyValue("aaaaa,\u0000\u0000,2", column, 2),
-      new KeyValue("aaaaa,\u0001,3", column, 3),
-      new KeyValue("aaaaa,,1", column, 1),
-      new KeyValue("aaaaa,\u1000,5", column, 5),
-      new KeyValue("aaaaa,a,4", column, 4),
-      new KeyValue("a,a,0", column, 0),
+    byte [] column = Bytes.toBytes("col:umn");
+    KeyValue [] keys = {new KeyValue(Bytes.toBytes("aaaaa,\u0000\u0000,2"), column, 2),
+      new KeyValue(Bytes.toBytes("aaaaa,\u0001,3"), column, 3),
+      new KeyValue(Bytes.toBytes("aaaaa,,1"), column, 1),
+      new KeyValue(Bytes.toBytes("aaaaa,\u1000,5"), column, 5),
+      new KeyValue(Bytes.toBytes("aaaaa,a,4"), column, 4),
+      new KeyValue(Bytes.toBytes("a,a,0"), column, 0),
     };
     // Add to set with bad comparator
     for (int i = 0; i < keys.length; i++) {
@@ -226,12 +235,12 @@
     }
     // Make up -ROOT- table keys.
     KeyValue [] rootKeys = {
-        new KeyValue(".META.,aaaaa,\u0000\u0000,0,2", column, 2),
-        new KeyValue(".META.,aaaaa,\u0001,0,3", column, 3),
-        new KeyValue(".META.,aaaaa,,0,1", column, 1),
-        new KeyValue(".META.,aaaaa,\u1000,0,5", column, 5),
-        new KeyValue(".META.,aaaaa,a,0,4", column, 4),
-        new KeyValue(".META.,,0", column, 0),
+        new KeyValue(Bytes.toBytes(".META.,aaaaa,\u0000\u0000,0,2"), column, 2),
+        new KeyValue(Bytes.toBytes(".META.,aaaaa,\u0001,0,3"), column, 3),
+        new KeyValue(Bytes.toBytes(".META.,aaaaa,,0,1"), column, 1),
+        new KeyValue(Bytes.toBytes(".META.,aaaaa,\u1000,0,5"), column, 5),
+        new KeyValue(Bytes.toBytes(".META.,aaaaa,a,0,4"), column, 4),
+        new KeyValue(Bytes.toBytes(".META.,,0"), column, 0),
       };
     // This will output the keys incorrectly.
     set = new TreeSet<KeyValue>(new KeyValue.MetaComparator());
@@ -260,4 +269,11 @@
       assertTrue(count++ == k.getTimestamp());
     }
   }
+
+  public void testStackedUpKeyValue() {
+    // Test multiple KeyValues in a single blob.
+
+    // TODO actually write this test!
+    
+  }
 }
\ No newline at end of file

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestRegionRebalancing.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestRegionRebalancing.java?rev=782445&r1=782444&r2=782445&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestRegionRebalancing.java
(original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestRegionRebalancing.java
Sun Jun  7 19:57:37 2009
@@ -28,6 +28,7 @@
 
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
 
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -223,9 +224,10 @@
   throws IOException {
     HRegion region = createNewHRegion(desc, startKey, endKey);
     byte [] keyToWrite = startKey == null ? Bytes.toBytes("row_000") : startKey;
-    BatchUpdate bu = new BatchUpdate(keyToWrite);
-    bu.put(COLUMN_NAME, "test".getBytes());
-    region.batchUpdate(bu, null);
+    Put put = new Put(keyToWrite);
+    byte [][] famAndQf = KeyValue.parseColumn(COLUMN_NAME);
+    put.add(famAndQf[0], famAndQf[1], Bytes.toBytes("test"));
+    region.put(put);
     region.close();
     region.getLog().closeAndDelete();
     return region;

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestScanMultipleVersions.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestScanMultipleVersions.java?rev=782445&r1=782444&r2=782445&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestScanMultipleVersions.java
(original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestScanMultipleVersions.java
Sun Jun  7 19:57:37 2009
@@ -21,11 +21,12 @@
 package org.apache.hadoop.hbase;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Scanner;
-import org.apache.hadoop.hbase.io.BatchUpdate;
-import org.apache.hadoop.hbase.io.Cell;
-import org.apache.hadoop.hbase.io.RowResult;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -53,7 +54,7 @@
     // Create table description
     
     this.desc = new HTableDescriptor(TABLE_NAME);
-    this.desc.addFamily(new HColumnDescriptor(HConstants.COLUMN_FAMILY));
+    this.desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
 
     // Region 0 will contain the key range [,row_0500)
     INFOS[0] = new HRegionInfo(this.desc, HConstants.EMPTY_START_ROW,
@@ -70,9 +71,11 @@
         HRegion.createHRegion(this.INFOS[i], this.testDir, this.conf);
       // Insert data
       for (int j = 0; j < TIMESTAMPS.length; j++) {
-        BatchUpdate b = new BatchUpdate(ROWS[i], TIMESTAMPS[j]);
-        b.put(HConstants.COLUMN_FAMILY, Bytes.toBytes(TIMESTAMPS[j]));
-        REGIONS[i].batchUpdate(b, null);
+        Put put = new Put(ROWS[i]);
+        put.setTimeStamp(TIMESTAMPS[j]);
+        put.add(HConstants.CATALOG_FAMILY, null, TIMESTAMPS[j], 
+            Bytes.toBytes(TIMESTAMPS[j]));
+        REGIONS[i].put(put);
       }
       // Insert the region we created into the meta
       HRegion.addRegionToMETA(meta, REGIONS[i]);
@@ -93,19 +96,25 @@
     HTable t = new HTable(conf, TABLE_NAME);
     for (int i = 0; i < ROWS.length; i++) {
       for (int j = 0; j < TIMESTAMPS.length; j++) {
-        Cell [] cells =
-          t.get(ROWS[i], HConstants.COLUMN_FAMILY, TIMESTAMPS[j], 1);
-        assertTrue(cells != null && cells.length == 1);
-        System.out.println("Row=" + Bytes.toString(ROWS[i]) + ", cell=" +
-          cells[0]);
+        Get get = new Get(ROWS[i]);
+        get.addFamily(HConstants.CATALOG_FAMILY);
+        get.setTimeStamp(TIMESTAMPS[j]);
+        Result result = t.get(get);
+        int cellCount = 0;
+        for(@SuppressWarnings("unused")KeyValue kv : result.sorted()) {
+          cellCount++;
+        }
+        assertTrue(cellCount == 1);
       }
     }
     
     // Case 1: scan with LATEST_TIMESTAMP. Should get two rows
     int count = 0;
-    Scanner s = t.getScanner(HConstants.COLUMN_FAMILY_ARRAY);
+    Scan scan = new Scan();
+    scan.addFamily(HConstants.CATALOG_FAMILY);
+    ResultScanner s = t.getScanner(scan);
     try {
-      for (RowResult rr = null; (rr = s.next()) != null;) {
+      for (Result rr = null; (rr = s.next()) != null;) {
         System.out.println(rr.toString());
         count += 1;
       }
@@ -118,8 +127,11 @@
     // (in this case > 1000 and < LATEST_TIMESTAMP. Should get 2 rows.
     
     count = 0;
-    s = t.getScanner(HConstants.COLUMN_FAMILY_ARRAY, HConstants.EMPTY_START_ROW,
-        10000L);
+    scan = new Scan();
+    scan.setTimeRange(1000L, Long.MAX_VALUE);
+    scan.addFamily(HConstants.CATALOG_FAMILY);
+
+    s = t.getScanner(scan);
     try {
       while (s.next() != null) {
         count += 1;
@@ -133,8 +145,11 @@
     // (in this case == 1000. Should get 2 rows.
     
     count = 0;
-    s = t.getScanner(HConstants.COLUMN_FAMILY_ARRAY, HConstants.EMPTY_START_ROW,
-        1000L);
+    scan = new Scan();
+    scan.setTimeStamp(1000L);
+    scan.addFamily(HConstants.CATALOG_FAMILY);
+
+    s = t.getScanner(scan);
     try {
       while (s.next() != null) {
         count += 1;
@@ -148,8 +163,11 @@
     // second timestamp (100 < timestamp < 1000). Should get 2 rows.
     
     count = 0;
-    s = t.getScanner(HConstants.COLUMN_FAMILY_ARRAY, HConstants.EMPTY_START_ROW,
-        500L);
+    scan = new Scan();
+    scan.setTimeRange(100L, 1000L);
+    scan.addFamily(HConstants.CATALOG_FAMILY);
+
+    s = t.getScanner(scan);
     try {
       while (s.next() != null) {
         count += 1;
@@ -163,8 +181,11 @@
     // Should get 2 rows.
     
     count = 0;
-    s = t.getScanner(HConstants.COLUMN_FAMILY_ARRAY, HConstants.EMPTY_START_ROW,
-        100L);
+    scan = new Scan();
+    scan.setTimeStamp(100L);
+    scan.addFamily(HConstants.CATALOG_FAMILY);
+
+    s = t.getScanner(scan);
     try {
       while (s.next() != null) {
         count += 1;

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestSerialization.java?rev=782445&r1=782444&r2=782445&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestSerialization.java
(original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestSerialization.java
Sun Jun  7 19:57:37 2009
@@ -1,5 +1,5 @@
 /**
- * Copyright 2007 The Apache Software Foundation
+ * Copyright 2009 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -20,13 +20,31 @@
 package org.apache.hadoop.hbase;
 
 
+import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.NavigableSet;
+
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.RowLock;
+import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.BatchOperation;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.hbase.io.RowResult;
+import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Writables;
+import org.apache.hadoop.io.DataInputBuffer;
+import org.apache.hadoop.io.Writable;
 
 /**
  * Test HBase Writables serializations
@@ -52,6 +70,7 @@
     assertTrue(KeyValue.COMPARATOR.compare(original, newone) == 0);
   }
 
+  @SuppressWarnings("unchecked")
   public void testHbaseMapWritable() throws Exception {
     HbaseMapWritable<byte [], byte []> hmw =
       new HbaseMapWritable<byte[], byte[]>();
@@ -157,7 +176,7 @@
     assertTrue(Bytes.equals(bu.getRow(), bubu.getRow()));
     // Assert has same number of BatchOperations.
     int firstCount = 0;
-    for (BatchOperation bo: bubu) {
+    for (@SuppressWarnings("unused")BatchOperation bo: bubu) {
       firstCount++;
     }
     // Now deserialize again into same instance to ensure we're not
@@ -166,9 +185,358 @@
     // Assert rows are same again.
     assertTrue(Bytes.equals(bu.getRow(), bububu.getRow()));
     int secondCount = 0;
-    for (BatchOperation bo: bububu) {
+    for (@SuppressWarnings("unused")BatchOperation bo: bububu) {
       secondCount++;
     }
     assertEquals(firstCount, secondCount);
   }
+  
+  
+  //
+  // HBASE-880
+  //
+  
+  public void testPut() throws Exception{
+    byte[] row = "row".getBytes();
+    byte[] fam = "fam".getBytes();
+    byte[] qf1 = "qf1".getBytes();
+    byte[] qf2 = "qf2".getBytes();
+    byte[] qf3 = "qf3".getBytes();
+    byte[] qf4 = "qf4".getBytes();
+    byte[] qf5 = "qf5".getBytes();
+    byte[] qf6 = "qf6".getBytes();
+    byte[] qf7 = "qf7".getBytes();
+    byte[] qf8 = "qf8".getBytes();
+    
+    long ts = System.currentTimeMillis();
+    byte[] val = "val".getBytes();
+    
+    Put put = new Put(row);
+    put.add(fam, qf1, ts, val);
+    put.add(fam, qf2, ts, val);
+    put.add(fam, qf3, ts, val);
+    put.add(fam, qf4, ts, val);
+    put.add(fam, qf5, ts, val);
+    put.add(fam, qf6, ts, val);
+    put.add(fam, qf7, ts, val);
+    put.add(fam, qf8, ts, val);
+    
+    byte[] sb = Writables.getBytes(put);
+    Put desPut = (Put)Writables.getWritable(sb, new Put());
+
+    //Timing test
+//    long start = System.nanoTime();
+//    desPut = (Put)Writables.getWritable(sb, new Put());
+//    long stop = System.nanoTime();
+//    System.out.println("timer " +(stop-start));
+    
+    assertTrue(Bytes.equals(put.getRow(), desPut.getRow()));
+    List<KeyValue> list = null;
+    List<KeyValue> desList = null;
+    for(Map.Entry<byte[], List<KeyValue>> entry : put.getFamilyMap().entrySet()){
+      assertTrue(desPut.getFamilyMap().containsKey(entry.getKey()));
+      list = entry.getValue();
+      desList = desPut.getFamilyMap().get(entry.getKey());
+      for(int i=0; i<list.size(); i++){
+        assertTrue(list.get(i).equals(desList.get(i)));
+      }
+    }
+  }
+
+  
+  public void testPut2() throws Exception{
+    byte[] row = "testAbort,,1243116656250".getBytes();
+    byte[] fam = "historian".getBytes();
+    byte[] qf1 = "creation".getBytes();
+    
+    long ts = 9223372036854775807L;
+    byte[] val = "dont-care".getBytes();
+    
+    Put put = new Put(row);
+    put.add(fam, qf1, ts, val);
+    
+    byte[] sb = Writables.getBytes(put);
+    Put desPut = (Put)Writables.getWritable(sb, new Put());
+
+    assertTrue(Bytes.equals(put.getRow(), desPut.getRow()));
+    List<KeyValue> list = null;
+    List<KeyValue> desList = null;
+    for(Map.Entry<byte[], List<KeyValue>> entry : put.getFamilyMap().entrySet()){
+      assertTrue(desPut.getFamilyMap().containsKey(entry.getKey()));
+      list = entry.getValue();
+      desList = desPut.getFamilyMap().get(entry.getKey());
+      for(int i=0; i<list.size(); i++){
+        assertTrue(list.get(i).equals(desList.get(i)));
+      }
+    }
+  }
+  
+  
+  public void testDelete() throws Exception{
+    byte[] row = "row".getBytes();
+    byte[] fam = "fam".getBytes();
+    byte[] qf1 = "qf1".getBytes();
+    
+    long ts = System.currentTimeMillis();
+    
+    Delete delete = new Delete(row);
+    delete.deleteColumn(fam, qf1, ts);
+    
+    byte[] sb = Writables.getBytes(delete);
+    Delete desDelete = (Delete)Writables.getWritable(sb, new Delete());
+
+    assertTrue(Bytes.equals(delete.getRow(), desDelete.getRow()));
+    List<KeyValue> list = null;
+    List<KeyValue> desList = null;
+    for(Map.Entry<byte[], List<KeyValue>> entry :
+        delete.getFamilyMap().entrySet()){
+      assertTrue(desDelete.getFamilyMap().containsKey(entry.getKey()));
+      list = entry.getValue();
+      desList = desDelete.getFamilyMap().get(entry.getKey());
+      for(int i=0; i<list.size(); i++){
+        assertTrue(list.get(i).equals(desList.get(i)));
+      }
+    }
+  }
+ 
+  public void testGet() throws Exception{
+    byte[] row = "row".getBytes();
+    byte[] fam = "fam".getBytes();
+    byte[] qf1 = "qf1".getBytes();
+    
+    long ts = System.currentTimeMillis();
+    int maxVersions = 2;
+    long lockid = 5;
+    RowLock rowLock = new RowLock(lockid);
+    
+    Get get = new Get(row, rowLock);
+    get.addColumn(fam, qf1);
+    get.setTimeRange(ts, ts+1);
+    get.setMaxVersions(maxVersions);
+    
+    byte[] sb = Writables.getBytes(get);
+    Get desGet = (Get)Writables.getWritable(sb, new Get());
+
+    assertTrue(Bytes.equals(get.getRow(), desGet.getRow()));
+    Set<byte[]> set = null;
+    Set<byte[]> desSet = null;
+    
+    for(Map.Entry<byte[], NavigableSet<byte[]>> entry :
+        get.getFamilyMap().entrySet()){
+      assertTrue(desGet.getFamilyMap().containsKey(entry.getKey()));
+      set = entry.getValue();
+      desSet = desGet.getFamilyMap().get(entry.getKey());
+      for(byte [] qualifier : set){
+        assertTrue(desSet.contains(qualifier));
+      }
+    }
+    
+    assertEquals(get.getLockId(), desGet.getLockId());
+    assertEquals(get.getMaxVersions(), desGet.getMaxVersions());
+    TimeRange tr = get.getTimeRange();
+    TimeRange desTr = desGet.getTimeRange();
+    assertEquals(tr.getMax(), desTr.getMax());
+    assertEquals(tr.getMin(), desTr.getMin());
+  }
+  
+
+  public void testScan() throws Exception{
+    byte[] startRow = "startRow".getBytes();
+    byte[] stopRow  = "stopRow".getBytes();
+    byte[] fam = "fam".getBytes();
+    byte[] qf1 = "qf1".getBytes();
+    
+    long ts = System.currentTimeMillis();
+    int maxVersions = 2;
+    
+    Scan scan = new Scan(startRow, stopRow);
+    scan.addColumn(fam, qf1);
+    scan.setTimeRange(ts, ts+1);
+    scan.setMaxVersions(maxVersions);
+    
+    byte[] sb = Writables.getBytes(scan);
+    Scan desScan = (Scan)Writables.getWritable(sb, new Scan());
+
+    assertTrue(Bytes.equals(scan.getStartRow(), desScan.getStartRow()));
+    assertTrue(Bytes.equals(scan.getStopRow(), desScan.getStopRow()));
+    Set<byte[]> set = null;
+    Set<byte[]> desSet = null;
+    
+    for(Map.Entry<byte[], NavigableSet<byte[]>> entry :
+        scan.getFamilyMap().entrySet()){
+      assertTrue(desScan.getFamilyMap().containsKey(entry.getKey()));
+      set = entry.getValue();
+      desSet = desScan.getFamilyMap().get(entry.getKey());
+      for(byte[] column : set){
+        assertTrue(desSet.contains(column));
+      }
+    }
+    
+    assertEquals(scan.getMaxVersions(), desScan.getMaxVersions());
+    TimeRange tr = scan.getTimeRange();
+    TimeRange desTr = desScan.getTimeRange();
+    assertEquals(tr.getMax(), desTr.getMax());
+    assertEquals(tr.getMin(), desTr.getMin());
+  }
+  
+  public void testResultEmpty() throws Exception {
+    List<KeyValue> keys = new ArrayList<KeyValue>();
+    Result r = new Result(keys);
+    assertTrue(r.isEmpty());
+    byte [] rb = Writables.getBytes(r);
+    Result deserializedR = (Result)Writables.getWritable(rb, new Result());
+    assertTrue(deserializedR.isEmpty());
+  }
+  
+  
+  public void testResult() throws Exception {
+    byte [] rowA = Bytes.toBytes("rowA");
+    byte [] famA = Bytes.toBytes("famA");
+    byte [] qfA = Bytes.toBytes("qfA");
+    byte [] valueA = Bytes.toBytes("valueA");
+    
+    byte [] rowB = Bytes.toBytes("rowB");
+    byte [] famB = Bytes.toBytes("famB");
+    byte [] qfB = Bytes.toBytes("qfB");
+    byte [] valueB = Bytes.toBytes("valueB");
+    
+    KeyValue kvA = new KeyValue(rowA, famA, qfA, valueA);
+    KeyValue kvB = new KeyValue(rowB, famB, qfB, valueB);
+    
+    Result result = new Result(new KeyValue[]{kvA, kvB});
+    
+    byte [] rb = Writables.getBytes(result);
+    Result deResult = (Result)Writables.getWritable(rb, new Result());
+    
+    assertTrue("results are not equivalent, first key mismatch",
+        result.sorted()[0].equals(deResult.sorted()[0]));
+    
+    assertTrue("results are not equivalent, second key mismatch",
+        result.sorted()[1].equals(deResult.sorted()[1]));
+    
+    // Test empty Result
+    Result r = new Result();
+    byte [] b = Writables.getBytes(r);
+    Result deserialized = (Result)Writables.getWritable(b, new Result());
+    assertEquals(r.size(), deserialized.size());
+  }
+  
+  public void testResultArray() throws Exception {
+    byte [] rowA = Bytes.toBytes("rowA");
+    byte [] famA = Bytes.toBytes("famA");
+    byte [] qfA = Bytes.toBytes("qfA");
+    byte [] valueA = Bytes.toBytes("valueA");
+    
+    byte [] rowB = Bytes.toBytes("rowB");
+    byte [] famB = Bytes.toBytes("famB");
+    byte [] qfB = Bytes.toBytes("qfB");
+    byte [] valueB = Bytes.toBytes("valueB");
+    
+    KeyValue kvA = new KeyValue(rowA, famA, qfA, valueA);
+    KeyValue kvB = new KeyValue(rowB, famB, qfB, valueB);
+
+    
+    Result result1 = new Result(new KeyValue[]{kvA, kvB});
+    Result result2 = new Result(new KeyValue[]{kvB});
+    Result result3 = new Result(new KeyValue[]{kvB});
+    
+    Result [] results = new Result [] {result1, result2, result3};
+    
+    ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
+    DataOutputStream out = new DataOutputStream(byteStream);
+    Result.writeArray(out, results);
+    
+    byte [] rb = byteStream.toByteArray();
+    
+    DataInputBuffer in = new DataInputBuffer();
+    in.reset(rb, 0, rb.length);
+    
+    Result [] deResults = Result.readArray(in);
+    
+    assertTrue(results.length == deResults.length);
+    
+    for(int i=0;i<results.length;i++) {
+      KeyValue [] keysA = results[i].sorted();
+      KeyValue [] keysB = deResults[i].sorted();
+      assertTrue(keysA.length == keysB.length);
+      for(int j=0;j<keysA.length;j++) {
+        assertTrue("Expected equivalent keys but found:\n" +
+            "KeyA : " + keysA[j].toString() + "\n" +
+            "KeyB : " + keysB[j].toString() + "\n" + 
+            keysA.length + " total keys, " + i + "th so far"
+            ,keysA[j].equals(keysB[j]));
+      }
+    }
+    
+  }
+  
+  public void testResultArrayEmpty() throws Exception {
+    List<KeyValue> keys = new ArrayList<KeyValue>();
+    Result r = new Result(keys);
+    Result [] results = new Result [] {r};
+
+    ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
+    DataOutputStream out = new DataOutputStream(byteStream);
+    
+    Result.writeArray(out, results);
+    
+    results = null;
+    
+    byteStream = new ByteArrayOutputStream();
+    out = new DataOutputStream(byteStream);
+    Result.writeArray(out, results);
+    
+    byte [] rb = byteStream.toByteArray();
+    
+    DataInputBuffer in = new DataInputBuffer();
+    in.reset(rb, 0, rb.length);
+    
+    Result [] deResults = Result.readArray(in);
+    
+    assertTrue(deResults.length == 0);
+    
+    results = new Result[0];
+
+    byteStream = new ByteArrayOutputStream();
+    out = new DataOutputStream(byteStream);
+    Result.writeArray(out, results);
+    
+    rb = byteStream.toByteArray();
+    
+    in = new DataInputBuffer();
+    in.reset(rb, 0, rb.length);
+    
+    deResults = Result.readArray(in);
+    
+    assertTrue(deResults.length == 0);
+    
+  }
+  
+  public void testTimeRange(String[] args) throws Exception{
+    TimeRange tr = new TimeRange(0,5);
+    byte [] mb = Writables.getBytes(tr);
+    TimeRange deserializedTr =
+      (TimeRange)Writables.getWritable(mb, new TimeRange());
+    
+    assertEquals(tr.getMax(), deserializedTr.getMax());
+    assertEquals(tr.getMin(), deserializedTr.getMin());
+    
+  }
+  
+  public void testKeyValue2() throws Exception {
+    byte[] row = getName().getBytes();
+    byte[] fam = "fam".getBytes();
+    byte[] qf = "qf".getBytes();
+    long ts = System.currentTimeMillis();
+    byte[] val = "val".getBytes();
+    
+    KeyValue kv = new KeyValue(row, fam, qf, ts, val);
+    
+    byte [] mb = Writables.getBytes(kv);
+    KeyValue deserializedKv =
+      (KeyValue)Writables.getWritable(mb, new KeyValue());
+    assertTrue(Bytes.equals(kv.getBuffer(), deserializedKv.getBuffer()));
+    assertEquals(kv.getOffset(), deserializedKv.getOffset());
+    assertEquals(kv.getLength(), deserializedKv.getLength());
+  }
 }
\ No newline at end of file

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestTable.java?rev=782445&r1=782444&r2=782445&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestTable.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestTable.java Sun
Jun  7 19:57:37 2009
@@ -23,6 +23,7 @@
 
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -57,7 +58,7 @@
     // Try doing a duplicate database create.
     msg = null;
     HTableDescriptor desc = new HTableDescriptor(getName());
-    desc.addFamily(new HColumnDescriptor(HConstants.COLUMN_FAMILY));
+    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
     admin.createTable(desc);
     assertTrue("First table creation completed", admin.listTables().length == 1);
     boolean gotException = false;
@@ -74,7 +75,7 @@
     // Now try and do concurrent creation with a bunch of threads.
     final HTableDescriptor threadDesc =
       new HTableDescriptor("threaded_" + getName());
-    threadDesc.addFamily(new HColumnDescriptor(HConstants.COLUMN_FAMILY));
+    threadDesc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
     int count = 10;
     Thread [] threads = new Thread [count];
     final AtomicInteger successes = new AtomicInteger(0);
@@ -109,8 +110,8 @@
     }
     // All threads are now dead.  Count up how many tables were created and
     // how many failed w/ appropriate exception.
-    assertTrue(successes.get() == 1);
-    assertTrue(failures.get() == (count - 1));
+    assertEquals(1, successes.get());
+    assertEquals(count - 1, failures.get());
   }
   
   /**
@@ -140,10 +141,12 @@
     HTable table = new HTable(conf, getName());
     try {
       byte[] value = Bytes.toBytes("somedata");
-      BatchUpdate update = new BatchUpdate();
-      update.put(colName, value);
-      table.commit(update);
-      fail("BatchUpdate on read only table succeeded");  
+      // This used to use an empty row... That must have been a bug
+      Put put = new Put(value);
+      byte [][] famAndQf = KeyValue.parseColumn(colName);
+      put.add(famAndQf[0], famAndQf[1], value);
+      table.put(put);
+      fail("Put on read only table succeeded");  
     } catch (Exception e) {
       // expected
     }

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestZooKeeper.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestZooKeeper.java?rev=782445&r1=782444&r2=782445&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestZooKeeper.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TestZooKeeper.java Sun Jun  7 19:57:37 2009
@@ -25,6 +25,7 @@
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
@@ -141,9 +142,9 @@
       admin.createTable(desc);
   
       HTable table = new HTable("test");
-      BatchUpdate batchUpdate = new BatchUpdate("testrow");
-      batchUpdate.put("fam:col", Bytes.toBytes("testdata"));
-      table.commit(batchUpdate);
+      Put put = new Put(Bytes.toBytes("testrow"));
+      put.add(Bytes.toBytes("fam"), Bytes.toBytes("col"), Bytes.toBytes("testdata"));
+      table.put(put);
     } catch (Exception e) {
       e.printStackTrace();
       fail();

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TimestampTestBase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TimestampTestBase.java?rev=782445&r1=782444&r2=782445&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TimestampTestBase.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/TimestampTestBase.java Sun Jun  7 19:57:37 2009
@@ -19,8 +19,13 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 
-import org.apache.hadoop.hbase.io.BatchUpdate;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -34,7 +39,7 @@
   private static final long T1 = 100L;
   private static final long T2 = 200L;
   
-  private static final String COLUMN_NAME = "contents:";
+  private static final String COLUMN_NAME = "contents:contents";
   
   private static final byte [] COLUMN = Bytes.toBytes(COLUMN_NAME);
   private static final byte [] ROW = Bytes.toBytes("row");
@@ -55,6 +60,7 @@
     put(incommon);
     // Verify that returned versions match passed timestamps.
     assertVersions(incommon, new long [] {HConstants.LATEST_TIMESTAMP, T2, T1});
+
     // If I delete w/o specifying a timestamp, this means I'm deleting the
     // latest.
     delete(incommon);
@@ -74,14 +80,23 @@
     // Flush everything out to disk and then retry
     flusher.flushcache();
     assertVersions(incommon, new long [] {HConstants.LATEST_TIMESTAMP, T1, T0});
-    
+
     // Now try deleting all from T2 back inclusive (We first need to add T2
     // back into the mix and to make things a little interesting, delete and
     // then readd T1.
     put(incommon, T2);
     delete(incommon, T1);
     put(incommon, T1);
-    incommon.deleteAll(ROW, COLUMN, T2);
+
+    Delete delete = new Delete(ROW);
+    byte [][] famAndQf = KeyValue.parseColumn(COLUMN);
+    if (famAndQf[1].length == 0){
+      delete.deleteFamily(famAndQf[0], T2);
+    } else {
+      delete.deleteColumns(famAndQf[0], famAndQf[1], T2);
+    }
+    incommon.delete(delete, null, true);
+ 
     // Should only be current value in set.  Assert this is so
     assertOnlyLatest(incommon, HConstants.LATEST_TIMESTAMP);
     
@@ -93,12 +108,16 @@
   private static void assertOnlyLatest(final Incommon incommon,
     final long currentTime)
   throws IOException {
-    Cell [] cellValues = incommon.get(ROW, COLUMN, 3/*Ask for too much*/);
-    assertEquals(1, cellValues.length);
-    long time = Bytes.toLong(cellValues[0].getValue());
+    Get get = null;
+    byte [][] famAndQf = null;
+    get = new Get(ROW);
+    famAndQf = KeyValue.parseColumn(COLUMN);
+    get.addColumn(famAndQf[0], famAndQf[1]);
+    get.setMaxVersions(3);
+    Result result = incommon.get(get);
+    assertEquals(1, result.size());
+    long time = Bytes.toLong(result.sorted()[0].getValue());
     assertEquals(time, currentTime);
-    assertNull(incommon.get(ROW, COLUMN, T1, 3 /*Too many*/));
-    assertTrue(assertScanContentTimestamp(incommon, T1) == 0);
   }
   
   /*
@@ -112,22 +131,49 @@
   public static void assertVersions(final Incommon incommon, final long [] tss)
   throws IOException {
     // Assert that 'latest' is what we expect.
-    byte [] bytes = incommon.get(ROW, COLUMN).getValue();
-    assertEquals(Bytes.toLong(bytes), tss[0]);
+    Get get = null;
+    byte [][] famAndQf = null;
+    get = new Get(ROW);
+    famAndQf = KeyValue.parseColumn(COLUMN);
+    get.addColumn(famAndQf[0], famAndQf[1]);
+    Result r = incommon.get(get);
+    byte [] bytes = r.getValue(famAndQf[0], famAndQf[1]);
+    long t = Bytes.toLong(bytes);
+    assertEquals(tss[0], t);
+
     // Now assert that if we ask for multiple versions, that they come out in
     // order.
-    Cell[] cellValues = incommon.get(ROW, COLUMN, tss.length);
-    assertEquals(tss.length, cellValues.length);
-    for (int i = 0; i < cellValues.length; i++) {
-      long ts = Bytes.toLong(cellValues[i].getValue());
+    get = new Get(ROW);
+    famAndQf = KeyValue.parseColumn(COLUMN);
+    get.addColumn(famAndQf[0], famAndQf[1]);
+    get.setMaxVersions(tss.length);
+    Result result = incommon.get(get);
+    List<Cell> cells = new ArrayList<Cell>();
+    for(KeyValue kv : result.sorted()) {
+      cells.add(new Cell(kv.getValue(), kv.getTimestamp()));
+    }
+    assertEquals(tss.length, cells.size());
+    for (int i = 0; i < cells.size(); i++) {
+      long ts = Bytes.toLong(cells.get(i).getValue());
       assertEquals(ts, tss[i]);
     }
+    
     // Specify a timestamp get multiple versions.
-    cellValues = incommon.get(ROW, COLUMN, tss[0], cellValues.length - 1);
-    for (int i = 1; i < cellValues.length; i++) {
-      long ts = Bytes.toLong(cellValues[i].getValue());
+    get = new Get(ROW);
+    famAndQf = KeyValue.parseColumn(COLUMN);
+    get.addColumn(famAndQf[0], famAndQf[1]);
+    get.setTimeStamp(tss[0]);
+    get.setMaxVersions(cells.size() - 1);
+    result = incommon.get(get);
+    cells = new ArrayList<Cell>();
+    for(KeyValue kv : result.sorted()) {
+      cells.add(new Cell(kv.getValue(), kv.getTimestamp()));
+    }
+    for (int i = 1; i < cells.size(); i++) {
+      long ts = Bytes.toLong(cells.get(i).getValue());
       assertEquals(ts, tss[i]);
     }
+    
     // Test scanner returns expected version
     assertScanContentTimestamp(incommon, tss[0]);
   }
@@ -211,20 +257,44 @@
   public static void put(final Incommon loader, final byte [] bytes,
     final long ts)
   throws IOException {
-    BatchUpdate batchUpdate = ts == HConstants.LATEST_TIMESTAMP ? 
-      new BatchUpdate(ROW) : new BatchUpdate(ROW, ts);
-    batchUpdate.put(COLUMN, bytes);
-    loader.commit(batchUpdate);
+    Put put = new Put(ROW);
+    if(ts != HConstants.LATEST_TIMESTAMP) {
+      put.setTimeStamp(ts);
+    }
+    byte [][] famAndQf = KeyValue.parseColumn(COLUMN);
+    put.add(famAndQf[0], famAndQf[1], bytes);
+    loader.put(put);
   }
   
   public static void delete(final Incommon loader) throws IOException {
-    delete(loader, HConstants.LATEST_TIMESTAMP);
+    delete(loader, null);
+  }
+
+  public static void delete(final Incommon loader, final byte [] column)
+  throws IOException {
+    delete(loader, column, HConstants.LATEST_TIMESTAMP);
+  }
+
+  public static void delete(final Incommon loader, final long ts)
+  throws IOException {
+    delete(loader, null, ts);
+  }
+
+  public static void delete(final Incommon loader, final byte [] column,
+      final long ts)
+  throws IOException {
+    Delete delete = ts == HConstants.LATEST_TIMESTAMP?
+      new Delete(ROW): new Delete(ROW, ts, null);
+    byte [][] famAndQf = KeyValue.parseColumn(column == null? COLUMN: column);
+    if (famAndQf[1].length == 0) {
+      delete.deleteFamily(famAndQf[0], ts);
+    } else {
+      delete.deleteColumn(famAndQf[0], famAndQf[1], ts);
+    }
+    loader.delete(delete, null, true);
   }
 
-  public static void delete(final Incommon loader, final long ts) throws IOException {
-    BatchUpdate batchUpdate = ts == HConstants.LATEST_TIMESTAMP ? 
-      new BatchUpdate(ROW) : new BatchUpdate(ROW, ts);
-    batchUpdate.delete(COLUMN);
-    loader.commit(batchUpdate);  
+  public static Result get(final Incommon loader) throws IOException {
+    return loader.get(new Get(ROW));
   }
-}
+}
\ No newline at end of file

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java?rev=782445&r1=782444&r2=782445&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java Sun Jun  7 19:57:37 2009
@@ -75,85 +75,6 @@
     table = new HTable(conf, desc.getName());
   }
 
-  /**
-   * @throws IOException
-   */
-  public void testBatchUpdate() throws IOException {
-    BatchUpdate bu = new BatchUpdate("row1");
-    bu.put(CONTENTS, value);
-    bu.delete(CONTENTS);
-    table.commit(bu);
-
-    bu = new BatchUpdate("row2");
-    bu.put(CONTENTS, value);
-    byte[][] getColumns = bu.getColumns();
-    assertEquals(getColumns.length, 1);
-    assertTrue(Arrays.equals(getColumns[0], CONTENTS));
-    assertTrue(bu.hasColumn(CONTENTS));
-    assertFalse(bu.hasColumn(new byte[] {}));
-    byte[] getValue = bu.get(getColumns[0]);
-    assertTrue(Arrays.equals(getValue, value));
-    table.commit(bu);
-
-    byte [][] columns = { CONTENTS };
-    Scanner scanner = table.getScanner(columns, HConstants.EMPTY_START_ROW);
-    for (RowResult r : scanner) {
-      for(Map.Entry<byte [], Cell> e: r.entrySet()) {
-        System.out.println(Bytes.toString(r.getRow()) + ": row: " + e.getKey() + " value: " + 
-            new String(e.getValue().getValue(), HConstants.UTF8_ENCODING));
-      }
-    }
-  }
-  
-  public void testBatchUpdateMaxLength() {
-    // Test for a single good value
-    BatchUpdate batchUpdate = new BatchUpdate("row1");
-    batchUpdate.put(SMALLFAM, value);
-    try {
-      table.commit(batchUpdate);
-      fail("Value is too long, should throw exception");
-    } catch (IOException e) {
-      // This is expected
-    }
-    // Try to see if it's still inserted
-    try {
-      Cell cell = table.get("row1", SMALLFAM_STR);
-      assertNull(cell);
-    } catch (IOException e) {
-      e.printStackTrace();
-      fail("This is unexpected");
-    }
-    // Try to put a good value
-    batchUpdate = new BatchUpdate("row1");
-    batchUpdate.put(SMALLFAM, smallValue);
-    try {
-      table.commit(batchUpdate);
-    } catch (IOException e) {
-      fail("Value is long enough, should not throw exception");
-    }
-  }
-  
-  public void testRowsBatchUpdate() {
-    ArrayList<BatchUpdate> rowsUpdate = new ArrayList<BatchUpdate>();
-    for(int i = 0; i < NB_BATCH_ROWS; i++) {
-      BatchUpdate batchUpdate = new BatchUpdate("row"+i);
-      batchUpdate.put(CONTENTS, value);
-      rowsUpdate.add(batchUpdate);
-    }
-    try {
-      table.commit(rowsUpdate);  
-    
-      byte [][] columns = { CONTENTS };
-      Scanner scanner = table.getScanner(columns, HConstants.EMPTY_START_ROW);
-      int nbRows = 0;
-      for(@SuppressWarnings("unused") RowResult row : scanner)
-        nbRows++;
-      assertEquals(NB_BATCH_ROWS, nbRows);
-    } catch (IOException e) {
-      fail("This is unexpected : " + e);
-    }
-  }
-  
   public void testRowsBatchUpdateBufferedOneFlush() {
     table.setAutoFlush(false);
     ArrayList<BatchUpdate> rowsUpdate = new ArrayList<BatchUpdate>();
@@ -168,17 +89,15 @@
       byte [][] columns = { CONTENTS };
       Scanner scanner = table.getScanner(columns, HConstants.EMPTY_START_ROW);
       int nbRows = 0;
-      for(@SuppressWarnings("unused") RowResult row : scanner)
-        nbRows++;
+      for(@SuppressWarnings("unused") RowResult row : scanner) nbRows++;
       assertEquals(0, nbRows);  
       scanner.close();
-      
+
       table.flushCommits();
       
       scanner = table.getScanner(columns, HConstants.EMPTY_START_ROW);
       nbRows = 0;
-      for(@SuppressWarnings("unused") RowResult row : scanner)
-        nbRows++;
+      for(@SuppressWarnings("unused") RowResult row : scanner) nbRows++;
       assertEquals(NB_BATCH_ROWS*10, nbRows);
     } catch (IOException e) {
       fail("This is unexpected : " + e);
@@ -209,6 +128,55 @@
       fail("This is unexpected : " + e);
     }
   }
+
+  /**
+   * @throws IOException
+   */
+  public void testBatchUpdate() throws IOException {
+    BatchUpdate bu = new BatchUpdate("row1");
+    bu.put(CONTENTS, value);
+    // Can't mix and match put and delete in the same BatchUpdate in 0.20.0 -- bu.delete(CONTENTS);
+    table.commit(bu);
+
+    bu = new BatchUpdate("row2");
+    bu.put(CONTENTS, value);
+    byte[][] getColumns = bu.getColumns();
+    assertEquals(getColumns.length, 1);
+    assertTrue(Arrays.equals(getColumns[0], CONTENTS));
+    assertTrue(bu.hasColumn(CONTENTS));
+    assertFalse(bu.hasColumn(new byte[] {}));
+    byte[] getValue = bu.get(getColumns[0]);
+    assertTrue(Arrays.equals(getValue, value));
+    table.commit(bu);
+
+    byte [][] columns = { CONTENTS };
+    Scanner scanner = table.getScanner(columns, HConstants.EMPTY_START_ROW);
+    for (RowResult r : scanner) {
+      for(Map.Entry<byte [], Cell> e: r.entrySet()) {
+        System.out.println(Bytes.toString(r.getRow()) + ": row: " + e.getKey() + " value: " + 
+            new String(e.getValue().getValue(), HConstants.UTF8_ENCODING));
+      }
+    }
+  }
   
-  
+  public void testRowsBatchUpdate() {
+    ArrayList<BatchUpdate> rowsUpdate = new ArrayList<BatchUpdate>();
+    for(int i = 0; i < NB_BATCH_ROWS; i++) {
+      BatchUpdate batchUpdate = new BatchUpdate("row"+i);
+      batchUpdate.put(CONTENTS, value);
+      rowsUpdate.add(batchUpdate);
+    }
+    try {
+      table.commit(rowsUpdate);  
+    
+      byte [][] columns = { CONTENTS };
+      Scanner scanner = table.getScanner(columns, HConstants.EMPTY_START_ROW);
+      int nbRows = 0;
+      for(@SuppressWarnings("unused") RowResult row : scanner)
+        nbRows++;
+      assertEquals(NB_BATCH_ROWS, nbRows);
+    } catch (IOException e) {
+      fail("This is unexpected : " + e);
+    }
+  }
 }



Mime
View raw message