hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jmhs...@apache.org
Subject svn commit: r1519077 [5/6] - in /hbase/trunk: hbase-client/src/main/java/org/apache/hadoop/hbase/ hbase-client/src/main/java/org/apache/hadoop/hbase/client/ hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/ hbase-client/src/main/ja...
Date Fri, 30 Aug 2013 20:31:50 GMT
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java Fri Aug 30 20:31:47 2013
@@ -19,6 +19,11 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
@@ -27,17 +32,16 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.ZooKeeperConnectionException;
-import org.apache.hadoop.hbase.ipc.RpcClient;
-import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
@@ -45,14 +49,13 @@ import org.apache.hadoop.hbase.client.Re
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.ScannerCallable;
+import org.apache.hadoop.hbase.ipc.RpcClient;
+import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.util.Bytes;
-
+import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import static org.junit.Assert.*;
-import org.apache.hadoop.hbase.MediumTests;
-import org.apache.log4j.Level;
 import org.junit.experimental.categories.Category;
 
 /**
@@ -90,7 +93,7 @@ public class TestFilterWithScanLimits {
       // row2 => <f1:c5, 2_c5>
 
       for (Result result : scanner) {
-        for (KeyValue kv : result.list()) {
+        for (Cell kv : result.list()) {
           kv_number++;
           LOG.debug(kv_number + ". kv: " + kv);
         }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java Fri Aug 30 20:31:47 2013
@@ -26,6 +26,8 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -87,10 +89,10 @@ public class TestFilterWrapper {
       // row2 (c1-c4) and row3(c1-c4) are returned
       for (Result result : scanner) {
         row_number++;
-        for (KeyValue kv : result.list()) {
+        for (Cell kv : result.list()) {
           LOG.debug(kv_number + ". kv: " + kv);
           kv_number++;
-          assertEquals("Returned row is not correct", new String(kv.getRow()),
+          assertEquals("Returned row is not correct", new String(CellUtil.getRowArray(kv)),
               "row" + ( row_number + 1 ));
         }
       }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java Fri Aug 30 20:31:47 2013
@@ -15,26 +15,36 @@
  */
 package org.apache.hadoop.hbase.filter;
 
-import com.google.common.collect.Lists;
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueTestUtil;
 import org.apache.hadoop.hbase.MediumTests;
-import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.buffer.ChannelBuffers;
-import org.junit.*;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.junit.Assert.assertEquals;
+import com.google.common.collect.Lists;
 
 /**
  */
@@ -149,13 +159,13 @@ public class TestFuzzyRowAndColumnRangeF
     scan.setFilter(filterList);
 
     ResultScanner scanner = hTable.getScanner(scan);
-    List<KeyValue> results = new ArrayList<KeyValue>();
+    List<Cell> results = new ArrayList<Cell>();
     Result result;
     long timeBeforeScan = System.currentTimeMillis();
     while ((result = scanner.next()) != null) {
-      for (KeyValue kv : result.list()) {
-        LOG.info("Got rk: " + Bytes.toStringBinary(kv.getRow()) + " cq: "
-                + Bytes.toStringBinary(kv.getQualifier()));
+      for (Cell kv : result.list()) {
+        LOG.info("Got rk: " + Bytes.toStringBinary(CellUtil.getRowArray(kv)) + " cq: "
+                + Bytes.toStringBinary(CellUtil.getQualifierArray(kv)));
         results.add(kv);
       }
     }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java Fri Aug 30 20:31:47 2013
@@ -59,14 +59,14 @@ public class TestMultipleColumnPrefixFil
     List<String> columns = generateRandomWords(10000, "column");
     long maxTimestamp = 2;
 
-    List<KeyValue> kvList = new ArrayList<KeyValue>();
+    List<Cell> kvList = new ArrayList<Cell>();
 
-    Map<String, List<KeyValue>> prefixMap = new HashMap<String,
-        List<KeyValue>>();
+    Map<String, List<Cell>> prefixMap = new HashMap<String,
+        List<Cell>>();
 
-    prefixMap.put("p", new ArrayList<KeyValue>());
-    prefixMap.put("q", new ArrayList<KeyValue>());
-    prefixMap.put("s", new ArrayList<KeyValue>());
+    prefixMap.put("p", new ArrayList<Cell>());
+    prefixMap.put("q", new ArrayList<Cell>());
+    prefixMap.put("s", new ArrayList<Cell>());
 
     String valueString = "ValueString";
 
@@ -98,7 +98,7 @@ public class TestMultipleColumnPrefixFil
     
     filter = new MultipleColumnPrefixFilter(filter_prefix);
     scan.setFilter(filter);
-    List<KeyValue> results = new ArrayList<KeyValue>();  
+    List<Cell> results = new ArrayList<Cell>();  
     InternalScanner scanner = region.getScanner(scan);
     while(scanner.next(results));
     assertEquals(prefixMap.get("p").size() + prefixMap.get("q").size(), results.size());
@@ -125,14 +125,14 @@ public class TestMultipleColumnPrefixFil
     List<String> columns = generateRandomWords(10000, "column");
     long maxTimestamp = 3;
 
-    List<KeyValue> kvList = new ArrayList<KeyValue>();
+    List<Cell> kvList = new ArrayList<Cell>();
 
-    Map<String, List<KeyValue>> prefixMap = new HashMap<String,
-        List<KeyValue>>();
+    Map<String, List<Cell>> prefixMap = new HashMap<String,
+        List<Cell>>();
 
-    prefixMap.put("p", new ArrayList<KeyValue>());
-    prefixMap.put("q", new ArrayList<KeyValue>());
-    prefixMap.put("s", new ArrayList<KeyValue>());
+    prefixMap.put("p", new ArrayList<Cell>());
+    prefixMap.put("q", new ArrayList<Cell>());
+    prefixMap.put("s", new ArrayList<Cell>());
 
     String valueString = "ValueString";
 
@@ -142,7 +142,7 @@ public class TestMultipleColumnPrefixFil
       for (String column: columns) {
         for (long timestamp = 1; timestamp <= maxTimestamp; timestamp++) {
           double rand = Math.random();
-          KeyValue kv;
+          Cell kv;
           if (rand < 0.5) 
             kv = KeyValueTestUtil.create(row, family1, column, timestamp,
                 valueString);
@@ -170,7 +170,7 @@ public class TestMultipleColumnPrefixFil
     
     filter = new MultipleColumnPrefixFilter(filter_prefix);
     scan.setFilter(filter);
-    List<KeyValue> results = new ArrayList<KeyValue>();  
+    List<Cell> results = new ArrayList<Cell>();  
     InternalScanner scanner = region.getScanner(scan);
     while(scanner.next(results));
     assertEquals(prefixMap.get("p").size() + prefixMap.get("q").size(), results.size());
@@ -214,7 +214,7 @@ public class TestMultipleColumnPrefixFil
  
     multiplePrefixFilter = new MultipleColumnPrefixFilter(filter_prefix);
     scan1.setFilter(multiplePrefixFilter);
-    List<KeyValue> results1 = new ArrayList<KeyValue>();  
+    List<Cell> results1 = new ArrayList<Cell>();  
     InternalScanner scanner1 = region.getScanner(scan1);
     while(scanner1.next(results1));
     
@@ -224,7 +224,7 @@ public class TestMultipleColumnPrefixFil
     singlePrefixFilter = new ColumnPrefixFilter(Bytes.toBytes("p"));
  
     scan2.setFilter(singlePrefixFilter);
-    List<KeyValue> results2 = new ArrayList<KeyValue>();  
+    List<Cell> results2 = new ArrayList<Cell>();  
     InternalScanner scanner2 = region.getScanner(scan1);
     while(scanner2.next(results2));
     

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java Fri Aug 30 20:31:47 2013
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.filter;
 
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
@@ -55,14 +56,14 @@ public class TestSingleColumnValueExclud
         CompareOp.EQUAL, VAL_1);
 
     // A 'match' situation
-    List<KeyValue> kvs = new ArrayList<KeyValue>();
+    List<Cell> kvs = new ArrayList<Cell>();
     KeyValue kv = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER_2, VAL_1);
 
     kvs.add (new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER_2, VAL_1));
     kvs.add (new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_1));
     kvs.add (new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER_2, VAL_1));
 
-    filter.filterRow(kvs);
+    filter.filterRowCells(kvs);
 
     assertEquals("resultSize", kvs.size(), 2);
     assertTrue("leftKV1", KeyValue.COMPARATOR.compare(kvs.get(0), kv) == 0);

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java Fri Aug 30 20:31:47 2013
@@ -16,7 +16,7 @@
  */
 package org.apache.hadoop.hbase.io.encoding;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -28,32 +28,29 @@ import java.util.Random;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.LargeTests;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.ipc.RpcClient;
-import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.apache.commons.logging.impl.Log4JLogger;
 
 /**
  * Tests changing data block encoding settings of a column family.
@@ -159,9 +156,8 @@ public class TestChangingEncoding {
       Get get = new Get(getRowKey(batchId, i));
       Result result = table.get(get);
       for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
-        KeyValue kv = result.getColumnLatest(CF_BYTES, getQualifier(j));
-        assertEquals(Bytes.toStringBinary(getValue(batchId, i, j)),
-            Bytes.toStringBinary(kv.getValue()));
+        Cell kv = result.getColumnLatest(CF_BYTES, getQualifier(j));
+        assertTrue(CellUtil.matchingValue(kv, getValue(batchId, i, j)));
       }
     }
     table.close();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java Fri Aug 30 20:31:47 2013
@@ -27,13 +27,13 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -117,7 +117,7 @@ public class TestScannerSelectionUsingKe
     LruBlockCache cache = (LruBlockCache) cacheConf.getBlockCache();
     cache.clearCache();
     InternalScanner scanner = region.getScanner(scan);
-    List<KeyValue> results = new ArrayList<KeyValue>();
+    List<Cell> results = new ArrayList<Cell>();
     while (scanner.next(results)) {
     }
     scanner.close();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java Fri Aug 30 20:31:47 2013
@@ -27,6 +27,7 @@ import java.util.Set;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -136,7 +137,7 @@ public class TestScannerSelectionUsingTT
     LruBlockCache cache = (LruBlockCache) cacheConf.getBlockCache();
     cache.clearCache();
     InternalScanner scanner = region.getScanner(scan);
-    List<KeyValue> results = new ArrayList<KeyValue>();
+    List<Cell> results = new ArrayList<Cell>();
     final int expectedKVsPerRow = numFreshFiles * NUM_COLS_PER_ROW;
     int numReturnedRows = 0;
     LOG.info("Scanning the entire table");

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java Fri Aug 30 20:31:47 2013
@@ -213,12 +213,12 @@ public class TestTableMapReduce {
         byte[] firstValue = null;
         byte[] secondValue = null;
         int count = 0;
-         for(KeyValue kv : r.list()) {
+         for(Cell kv : r.list()) {
           if (count == 0) {
-            firstValue = kv.getValue();
+            firstValue = CellUtil.getValueArray(kv);
           }
           if (count == 1) {
-            secondValue = kv.getValue();
+            secondValue = CellUtil.getValueArray(kv);
           }
           count++;
           if (count == 2) {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java Fri Aug 30 20:31:47 2013
@@ -17,6 +17,18 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.LargeTests;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
@@ -32,12 +44,6 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.apache.hadoop.conf.Configuration;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.PrintStream;
-
-import static org.junit.Assert.*;
 
 /**
  * Basic test for the CopyTable M/R tool
@@ -100,7 +106,7 @@ public class TestCopyTable {
       Get g = new Get(Bytes.toBytes("row" + i));
       Result r = t2.get(g);
       assertEquals(1, r.size());
-      assertTrue(Bytes.equals(COLUMN1, r.raw()[0].getQualifier()));
+      assertTrue(CellUtil.matchingQualifier(r.raw()[0], COLUMN1));
     }
     
     t1.close();
@@ -144,8 +150,8 @@ public class TestCopyTable {
     Get g = new Get(ROW1);
     Result r = t2.get(g);
     assertEquals(1, r.size());
-    assertTrue(Bytes.equals(COLUMN1, r.raw()[0].getQualifier()));
-    
+    assertTrue(CellUtil.matchingQualifier(r.raw()[0], COLUMN1));
+
     g = new Get(ROW0);
     r = t2.get(g);
     assertEquals(0, r.size());

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestGroupingTableMapper.java Fri Aug 30 20:31:47 2013
@@ -19,6 +19,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.client.Result;
@@ -49,7 +50,7 @@ public class TestGroupingTableMapper {
     Mapper<ImmutableBytesWritable, Result, ImmutableBytesWritable, Result>.Context context =
         mock(Mapper.Context.class);
     context.write(any(ImmutableBytesWritable.class), any(Result.class));
-    List<KeyValue> keyValue = new ArrayList<KeyValue>();
+    List<Cell> keyValue = new ArrayList<Cell>();
     byte[] row = {};
     keyValue.add(new KeyValue(row, Bytes.toBytes("family2"), Bytes.toBytes("clm"), Bytes
         .toBytes("value1")));

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java Fri Aug 30 20:31:47 2013
@@ -30,7 +30,6 @@ import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Set;
 import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.Callable;
@@ -43,8 +42,9 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -54,6 +54,7 @@ import org.apache.hadoop.hbase.HadoopShi
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.LargeTests;
 import org.apache.hadoop.hbase.PerformanceEvaluation;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
@@ -427,10 +428,10 @@ public class TestHFileOutputFormat  {
       ResultScanner results = table.getScanner(scan);
       for (Result res : results) {
         assertEquals(FAMILIES.length, res.raw().length);
-        KeyValue first = res.raw()[0];
-        for (KeyValue kv : res.raw()) {
-          assertTrue(KeyValue.COMPARATOR.matchingRows(first, kv));
-          assertTrue(Bytes.equals(first.getValue(), kv.getValue()));
+        Cell first = res.raw()[0];
+        for (Cell kv : res.raw()) {
+          assertTrue(CellUtil.matchingRow(first, kv));
+          assertTrue(Bytes.equals(CellUtil.getValueArray(first), CellUtil.getValueArray(kv)));
         }
       }
       results.close();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java Fri Aug 30 20:31:47 2013
@@ -31,6 +31,8 @@ import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -284,11 +286,11 @@ public class TestImportExport {
     s.setRaw(true);
     ResultScanner scanner = t.getScanner(s);
     Result r = scanner.next();
-    KeyValue[] res = r.raw();
-    assertTrue(res[0].isDeleteFamily());
+    Cell[] res = r.raw();
+    assertTrue(CellUtil.isDeleteFamily(res[0]));
     assertEquals(now+4, res[1].getTimestamp());
     assertEquals(now+3, res[2].getTimestamp());
-    assertTrue(res[3].isDelete());
+    assertTrue(CellUtil.isDelete(res[3]));
     assertEquals(now+2, res[4].getTimestamp());
     assertEquals(now+1, res[5].getTimestamp());
     assertEquals(now, res[6].getTimestamp());

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java Fri Aug 30 20:31:47 2013
@@ -38,6 +38,8 @@ import org.apache.hadoop.fs.FSDataOutput
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
@@ -314,13 +316,11 @@ public class TestImportTsv implements Co
         ResultScanner resScanner = table.getScanner(scan);
         for (Result res : resScanner) {
           assertTrue(res.size() == 2);
-          List<KeyValue> kvs = res.list();
-          assertArrayEquals(kvs.get(0).getRow(), Bytes.toBytes("KEY"));
-          assertArrayEquals(kvs.get(1).getRow(), Bytes.toBytes("KEY"));
-          assertArrayEquals(kvs.get(0).getValue(),
-            Bytes.toBytes("VALUE" + valueMultiplier));
-          assertArrayEquals(kvs.get(1).getValue(),
-            Bytes.toBytes("VALUE" + 2 * valueMultiplier));
+          List<Cell> kvs = res.list();
+          assertTrue(CellUtil.matchingRow(kvs.get(0), Bytes.toBytes("KEY")));
+          assertTrue(CellUtil.matchingRow(kvs.get(1), Bytes.toBytes("KEY")));
+          assertTrue(CellUtil.matchingValue(kvs.get(0), Bytes.toBytes("VALUE" + valueMultiplier)));
+          assertTrue(CellUtil.matchingValue(kvs.get(1), Bytes.toBytes("VALUE" + 2 * valueMultiplier)));
           // Only one result set is expected, so let it loop.
         }
         verified = true;

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java Fri Aug 30 20:31:47 2013
@@ -213,11 +213,11 @@ public class TestMultithreadedTableMappe
         byte[] firstValue = null;
         byte[] secondValue = null;
         int count = 0;
-        for(KeyValue kv : r.list()) {
+        for(Cell kv : r.list()) {
           if (count == 0) {
-            firstValue = kv.getValue();
+            firstValue = CellUtil.getValueArray(kv);
           }else if (count == 1) {
-            secondValue = kv.getValue();
+            secondValue = CellUtil.getValueArray(kv);
           }else if (count == 2) {
             break;
           }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java Fri Aug 30 20:31:47 2013
@@ -18,6 +18,10 @@
  */
 package org.apache.hadoop.hbase.mapreduce;
 
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import java.io.File;
 import java.io.IOException;
 import java.util.Iterator;
@@ -29,9 +33,10 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.LargeTests;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
@@ -47,10 +52,6 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import static org.junit.Assert.fail;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertFalse;
-
 /**
  * Test Map/Reduce job over HBase tables. The map/reduce process we're testing
  * on our tables is simple - take every row in the table, reverse the value of
@@ -225,12 +226,12 @@ public class TestTableMapReduce {
         byte[] firstValue = null;
         byte[] secondValue = null;
         int count = 0;
-        for(KeyValue kv : r.list()) {
+        for(Cell kv : r.list()) {
           if (count == 0) {
-            firstValue = kv.getValue();
+            firstValue = CellUtil.getValueArray(kv);
           }
           if (count == 1) {
-            secondValue = kv.getValue();
+            secondValue = CellUtil.getValueArray(kv);
           }
           count++;
           if (count == 2) {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java Fri Aug 30 20:31:47 2013
@@ -110,7 +110,7 @@ public class TestTimeRangeMapRed {
         Context context)
     throws IOException {
       List<Long> tsList = new ArrayList<Long>();
-      for (KeyValue kv : result.list()) {
+      for (Cell kv : result.list()) {
         tsList.add(kv.getTimestamp());
       }
 
@@ -196,12 +196,12 @@ public class TestTimeRangeMapRed {
     scan.setMaxVersions(1);
     ResultScanner scanner = table.getScanner(scan);
     for (Result r: scanner) {
-      for (KeyValue kv : r.list()) {
-        log.debug(Bytes.toString(r.getRow()) + "\t" + Bytes.toString(kv.getFamily())
-            + "\t" + Bytes.toString(kv.getQualifier())
-            + "\t" + kv.getTimestamp() + "\t" + Bytes.toBoolean(kv.getValue()));
+      for (Cell kv : r.list()) {
+        log.debug(Bytes.toString(r.getRow()) + "\t" + Bytes.toString(CellUtil.getFamilyArray(kv))
+            + "\t" + Bytes.toString(CellUtil.getQualifierArray(kv))
+            + "\t" + kv.getTimestamp() + "\t" + Bytes.toBoolean(CellUtil.getValueArray(kv)));
         org.junit.Assert.assertEquals(TIMESTAMP.get(kv.getTimestamp()),
-          (Boolean)Bytes.toBoolean(kv.getValue()));
+          (Boolean)Bytes.toBoolean(CellUtil.getValueArray(kv)));
       }
     }
     scanner.close();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java Fri Aug 30 20:31:47 2013
@@ -28,6 +28,7 @@ import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
@@ -123,7 +124,7 @@ public class TestWALPlayer {
     Get g = new Get(ROW);
     Result r = t2.get(g);
     assertEquals(1, r.size());
-    assertTrue(Bytes.equals(COLUMN2, r.raw()[0].getQualifier()));
+    assertTrue(CellUtil.matchingQualifier(r.raw()[0], COLUMN2));
   }
 
   /**

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java Fri Aug 30 20:31:47 2013
@@ -36,8 +36,10 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
@@ -227,8 +229,8 @@ public class DataBlockEncodingTool {
     KeyValue currentKv;
 
     scanner.seek(KeyValue.LOWESTKEY);
-    List<Iterator<KeyValue>> codecIterators =
-        new ArrayList<Iterator<KeyValue>>();
+    List<Iterator<Cell>> codecIterators =
+        new ArrayList<Iterator<Cell>>();
     for(EncodedDataBlock codec : codecs) {
       codecIterators.add(codec.getIterator(HFileBlock.headerSize(minorVersion)));
     }
@@ -237,8 +239,9 @@ public class DataBlockEncodingTool {
     while ((currentKv = scanner.next()) != null && j < kvLimit) {
       // Iterates through key/value pairs
       ++j;
-      for (Iterator<KeyValue> it : codecIterators) {
-        KeyValue codecKv = it.next();
+      for (Iterator<Cell> it : codecIterators) {
+        Cell c = it.next();
+        KeyValue codecKv = KeyValueUtil.ensureKeyValue(c);
         if (codecKv == null || 0 != Bytes.compareTo(
             codecKv.getBuffer(), codecKv.getOffset(), codecKv.getLength(),
             currentKv.getBuffer(), currentKv.getOffset(),
@@ -320,7 +323,7 @@ public class DataBlockEncodingTool {
     for (int itTime = 0; itTime < benchmarkNTimes; ++itTime) {
       totalSize = 0;
 
-      Iterator<KeyValue> it;
+      Iterator<Cell> it;
 
       it = codec.getIterator(HFileBlock.headerSize(minorVersion));
 
@@ -328,7 +331,7 @@ public class DataBlockEncodingTool {
       // (expect first time)
       final long startTime = System.nanoTime();
       while (it.hasNext()) {
-        totalSize += it.next().getLength();
+        totalSize += KeyValueUtil.ensureKeyValue(it.next()).getLength();
       }
       final long finishTime = System.nanoTime();
       if (itTime >= benchmarkNOmit) {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java Fri Aug 30 20:31:47 2013
@@ -46,11 +46,13 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.compress.Compression;
@@ -401,11 +403,12 @@ public class HFileReadWriteTest {
       scanner = new StoreScanner(store, store.getScanInfo(), scan, scanners,
           ScanType.COMPACT_DROP_DELETES, Long.MIN_VALUE, Long.MIN_VALUE);
 
-      ArrayList<KeyValue> kvs = new ArrayList<KeyValue>();
+      ArrayList<Cell> kvs = new ArrayList<Cell>();
 
       while (scanner.next(kvs) || kvs.size() != 0) {
         numKV.addAndGet(kvs.size());
-        for (KeyValue kv : kvs) {
+        for (Cell c : kvs) {
+          KeyValue kv = KeyValueUtil.ensureKeyValue(c);
           totalBytes.addAndGet(kv.getLength());
           writer.append(kv);
         }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java Fri Aug 30 20:31:47 2013
@@ -36,13 +36,14 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.MultithreadedTestUtil;
 import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext;
@@ -173,8 +174,8 @@ public class TestAtomicOperation {
     Result result = region.get(get);
     assertEquals(1, result.size());
 
-    KeyValue kv = result.raw()[0];
-    long r = Bytes.toLong(kv.getValue());
+    Cell kv = result.raw()[0];
+    long r = Bytes.toLong(CellUtil.getValueArray(kv));
     assertEquals(amount, r);
   }
 
@@ -449,7 +450,7 @@ public class TestAtomicOperation {
               // check: should always see exactly one column
               Scan s = new Scan(row);
               RegionScanner rs = region.getScanner(s);
-              List<KeyValue> r = new ArrayList<KeyValue>();
+              List<Cell> r = new ArrayList<Cell>();
               while(rs.next(r));
               rs.close();
               if (r.size() != 1) {
@@ -542,10 +543,10 @@ public class TestAtomicOperation {
     ctx.stop();
     Scan s = new Scan();
     RegionScanner scanner = region.getScanner(s);
-    List<KeyValue> results = new ArrayList<KeyValue>();
+    List<Cell> results = new ArrayList<Cell>();
     scanner.next(results, 2);
-    for (KeyValue keyValue : results) {
-      assertEquals("50",Bytes.toString(keyValue.getValue()));
+    for (Cell keyValue : results) {
+      assertEquals("50",Bytes.toString(CellUtil.getValueArray(keyValue)));
     }
 
   }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java Fri Aug 30 20:31:47 2013
@@ -27,20 +27,21 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
@@ -138,16 +139,16 @@ public class TestBlocksRead extends HBas
     region.put(put);
   }
 
-  private KeyValue[] getData(String family, String row, List<String> columns,
+  private Cell[] getData(String family, String row, List<String> columns,
       int expBlocks) throws IOException {
     return getData(family, row, columns, expBlocks, expBlocks, expBlocks);
   }
 
-  private KeyValue[] getData(String family, String row, List<String> columns,
+  private Cell[] getData(String family, String row, List<String> columns,
       int expBlocksRowCol, int expBlocksRow, int expBlocksNone)
       throws IOException {
     int[] expBlocks = new int[] { expBlocksRowCol, expBlocksRow, expBlocksNone };
-    KeyValue[] kvs = null;
+    Cell[] kvs = null;
 
     for (int i = 0; i < BLOOM_TYPE.length; i++) {
       BloomType bloomType = BLOOM_TYPE[i];
@@ -171,13 +172,13 @@ public class TestBlocksRead extends HBas
     return kvs;
   }
 
-  private KeyValue[] getData(String family, String row, String column,
+  private Cell[] getData(String family, String row, String column,
       int expBlocks) throws IOException {
     return getData(family, row, Arrays.asList(column), expBlocks, expBlocks,
         expBlocks);
   }
 
-  private KeyValue[] getData(String family, String row, String column,
+  private Cell[] getData(String family, String row, String column,
       int expBlocksRowCol, int expBlocksRow, int expBlocksNone)
       throws IOException {
     return getData(family, row, Arrays.asList(column), expBlocksRowCol,
@@ -193,14 +194,12 @@ public class TestBlocksRead extends HBas
     region.delete(del);
   }
 
-  private static void verifyData(KeyValue kv, String expectedRow,
+  private static void verifyData(Cell kv, String expectedRow,
       String expectedCol, long expectedVersion) {
-    assertEquals("RowCheck", expectedRow, Bytes.toString(kv.getRow()));
-    assertEquals("ColumnCheck", expectedCol, Bytes.toString(kv.getQualifier()));
+    assertTrue("RowCheck", CellUtil.matchingRow(kv,  Bytes.toBytes(expectedRow)));
+    assertTrue("ColumnCheck", CellUtil.matchingQualifier(kv, Bytes.toBytes(expectedCol)));
     assertEquals("TSCheck", expectedVersion, kv.getTimestamp());
-    assertEquals("ValueCheck",
-        Bytes.toString(genValue(expectedRow, expectedCol, expectedVersion)),
-        Bytes.toString(kv.getValue()));
+    assertTrue("ValueCheck", CellUtil.matchingValue(kv, genValue(expectedRow, expectedCol, expectedVersion)));
   }
 
   private static long getBlkAccessCount(byte[] cf) {
@@ -220,7 +219,7 @@ public class TestBlocksRead extends HBas
   public void testBlocksRead() throws Exception {
     byte[] TABLE = Bytes.toBytes("testBlocksRead");
     String FAMILY = "cf1";
-    KeyValue kvs[];
+    Cell kvs[];
     HBaseConfiguration conf = getConf();
     this.region = initHRegion(TABLE, getName(), conf, FAMILY);
 
@@ -277,7 +276,7 @@ public class TestBlocksRead extends HBas
   public void testLazySeekBlocksRead() throws Exception {
     byte[] TABLE = Bytes.toBytes("testLazySeekBlocksRead");
     String FAMILY = "cf1";
-    KeyValue kvs[];
+    Cell kvs[];
     HBaseConfiguration conf = getConf();
     this.region = initHRegion(TABLE, getName(), conf, FAMILY);
 
@@ -400,7 +399,7 @@ public class TestBlocksRead extends HBas
       Scan scan = new Scan();
       scan.setCacheBlocks(false);
       RegionScanner rs = region.getScanner(scan);
-      List<KeyValue> result = new ArrayList<KeyValue>(2);
+      List<Cell> result = new ArrayList<Cell>(2);
       rs.next(result);
       assertEquals(2 * BLOOM_TYPE.length, result.size());
       rs.close();
@@ -413,7 +412,7 @@ public class TestBlocksRead extends HBas
       blocksStart = blocksEnd;
       scan.setCacheBlocks(true);
       rs = region.getScanner(scan);
-      result = new ArrayList<KeyValue>(2);
+      result = new ArrayList<Cell>(2);
       rs.next(result);
       assertEquals(2 * BLOOM_TYPE.length, result.size());
       rs.close();
@@ -430,7 +429,7 @@ public class TestBlocksRead extends HBas
   public void testLazySeekBlocksReadWithDelete() throws Exception {
     byte[] TABLE = Bytes.toBytes("testLazySeekBlocksReadWithDelete");
     String FAMILY = "cf1";
-    KeyValue kvs[];
+    Cell kvs[];
     HBaseConfiguration conf = getConf();
     this.region = initHRegion(TABLE, getName(), conf, FAMILY);
     try {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java Fri Aug 30 20:31:47 2013
@@ -20,11 +20,13 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Scan;
@@ -76,14 +78,15 @@ public class TestBlocksScanned extends H
     scan.setMaxVersions(1);
 
     InternalScanner s = r.getScanner(scan);
-    List<KeyValue> results = new ArrayList<KeyValue>();
+    List<Cell> results = new ArrayList<Cell>();
     while (s.next(results));
     s.close();
 
     int expectResultSize = 'z' - 'a';
     Assert.assertEquals(expectResultSize, results.size());
 
-    int kvPerBlock = (int) Math.ceil(BLOCK_SIZE / (double) results.get(0).getLength());
+    int kvPerBlock = (int) Math.ceil(BLOCK_SIZE / 
+        (double) KeyValueUtil.ensureKeyValue(results.get(0)).getLength());
     Assert.assertEquals(2, kvPerBlock);
   }
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java Fri Aug 30 20:31:47 2013
@@ -149,7 +149,7 @@ public class TestColumnSeeking {
 
         }
         InternalScanner scanner = region.getScanner(scan);
-        List<KeyValue> results = new ArrayList<KeyValue>();
+        List<Cell> results = new ArrayList<Cell>();
         while (scanner.next(results))
           ;
         assertEquals(kvSet.size(), results.size());
@@ -261,7 +261,7 @@ public class TestColumnSeeking {
 
       }
       InternalScanner scanner = region.getScanner(scan);
-      List<KeyValue> results = new ArrayList<KeyValue>();
+      List<Cell> results = new ArrayList<Cell>();
       while (scanner.next(results))
         ;
       assertEquals(kvSet.size(), results.size());

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java Fri Aug 30 20:31:47 2013
@@ -39,6 +39,8 @@ import org.apache.hadoop.fs.FSDataOutput
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -140,9 +142,9 @@ public class TestCompaction extends HBas
     // Now delete everything.
     InternalScanner s = r.getScanner(new Scan());
     do {
-      List<KeyValue> results = new ArrayList<KeyValue>();
+      List<Cell> results = new ArrayList<Cell>();
       boolean result = s.next(results);
-      r.delete(new Delete(results.get(0).getRow()));
+      r.delete(new Delete(CellUtil.getRowArray(results.get(0))));
       if (!result) break;
     } while(true);
     s.close();
@@ -153,7 +155,7 @@ public class TestCompaction extends HBas
     s = r.getScanner(new Scan());
     int counter = 0;
     do {
-      List<KeyValue> results = new ArrayList<KeyValue>();
+      List<Cell> results = new ArrayList<Cell>();
       boolean result = s.next(results);
       if (!result) break;
       counter++;

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java Fri Aug 30 20:31:47 2013
@@ -26,19 +26,21 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.catalog.MetaEditor;
 import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.experimental.categories.Category;
 
@@ -88,7 +90,7 @@ public class TestGetClosestAtOrBefore ex
     }
     InternalScanner s = mr.getScanner(new Scan());
     try {
-      List<KeyValue> keys = new ArrayList<KeyValue>();
+      List<Cell> keys = new ArrayList<Cell>();
       while(s.next(keys)) {
         LOG.info(keys);
         keys.clear();
@@ -112,9 +114,9 @@ public class TestGetClosestAtOrBefore ex
     Scan scan = new Scan(firstRowInC);
     s = mr.getScanner(scan);
     try {
-      List<KeyValue> keys = new ArrayList<KeyValue>();
+      List<Cell> keys = new ArrayList<Cell>();
       while (s.next(keys)) {
-        mr.delete(new Delete(keys.get(0).getRow()));
+        mr.delete(new Delete(CellUtil.getRowArray(keys.get(0))));
         keys.clear();
       }
     } finally {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java?rev=1519077&r1=1519076&r2=1519077&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java Fri Aug 30 20:31:47 2013
@@ -49,14 +49,13 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -67,6 +66,8 @@ import org.apache.hadoop.hbase.MiniHBase
 import org.apache.hadoop.hbase.MultithreadedTestUtil;
 import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread;
 import org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
+import org.apache.hadoop.hbase.NotServingRegionException;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
@@ -193,7 +194,7 @@ public class TestHRegion extends HBaseTe
     // open the second scanner
     RegionScanner scanner2 = region.getScanner(scan);
 
-    List<KeyValue> results = new ArrayList<KeyValue>();
+    List<Cell> results = new ArrayList<Cell>();
 
     System.out.println("Smallest read point:" + region.getSmallestReadPoint());
 
@@ -245,10 +246,10 @@ public class TestHRegion extends HBaseTe
     region.compactStores(true);
 
     scanner1.reseek(Bytes.toBytes("r2"));
-    List<KeyValue> results = new ArrayList<KeyValue>();
+    List<Cell> results = new ArrayList<Cell>();
     scanner1.next(results);
-    KeyValue keyValue = results.get(0);
-    Assert.assertTrue(Bytes.compareTo(keyValue.getRow(), Bytes.toBytes("r2")) == 0);
+    Cell keyValue = results.get(0);
+    Assert.assertTrue(Bytes.compareTo(CellUtil.getRowArray(keyValue), Bytes.toBytes("r2")) == 0);
     scanner1.close();
   }
 
@@ -293,9 +294,9 @@ public class TestHRegion extends HBaseTe
       Get get = new Get(row);
       Result result = region.get(get);
       for (long i = minSeqId; i <= maxSeqId; i += 10) {
-        List<KeyValue> kvs = result.getColumn(family, Bytes.toBytes(i));
+        List<Cell> kvs = result.getColumn(family, Bytes.toBytes(i));
         assertEquals(1, kvs.size());
-        assertEquals(Bytes.toBytes(i), kvs.get(0).getValue());
+        assertEquals(Bytes.toBytes(i), CellUtil.getValueArray(kvs.get(0)));
       }
     } finally {
       HRegion.closeHRegion(this.region);
@@ -346,12 +347,12 @@ public class TestHRegion extends HBaseTe
       Get get = new Get(row);
       Result result = region.get(get);
       for (long i = minSeqId; i <= maxSeqId; i += 10) {
-        List<KeyValue> kvs = result.getColumn(family, Bytes.toBytes(i));
+        List<Cell> kvs = result.getColumn(family, Bytes.toBytes(i));
         if (i < recoverSeqId) {
           assertEquals(0, kvs.size());
         } else {
           assertEquals(1, kvs.size());
-          assertEquals(Bytes.toBytes(i), kvs.get(0).getValue());
+          assertEquals(Bytes.toBytes(i), CellUtil.getValueArray(kvs.get(0)));
         }
       }
     } finally {
@@ -670,14 +671,14 @@ public class TestHRegion extends HBaseTe
     InternalScanner scanner = buildScanner(keyPrefix, value, r);
     int count = 0;
     boolean more = false;
-    List<KeyValue> results = new ArrayList<KeyValue>();
+    List<Cell> results = new ArrayList<Cell>();
     do {
       more = scanner.next(results);
       if (results != null && !results.isEmpty())
         count++;
       else
         break;
-      Delete delete = new Delete(results.get(0).getRow());
+      Delete delete = new Delete(CellUtil.getRowArray(results.get(0)));
       delete.deleteColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"));
       r.delete(delete);
       results.clear();
@@ -688,14 +689,14 @@ public class TestHRegion extends HBaseTe
   private int getNumberOfRows(String keyPrefix, String value, HRegion r) throws Exception {
     InternalScanner resultScanner = buildScanner(keyPrefix, value, r);
     int numberOfResults = 0;
-    List<KeyValue> results = new ArrayList<KeyValue>();
+    List<Cell> results = new ArrayList<Cell>();
     boolean more = false;
     do {
       more = resultScanner.next(results);
       if (results != null && !results.isEmpty()) numberOfResults++;
       else break;
-      for (KeyValue kv: results) {
-        System.out.println("kv=" + kv.toString() + ", " + Bytes.toString(kv.getValue()));
+      for (Cell kv: results) {
+        System.out.println("kv=" + kv.toString() + ", " + Bytes.toString(CellUtil.getValueArray(kv)));
       }
       results.clear();
     } while(more);
@@ -1065,9 +1066,9 @@ public class TestHRegion extends HBaseTe
 
       Get get = new Get(row1);
       get.addColumn(fam2, qf1);
-      KeyValue [] actual = region.get(get).raw();
+      Cell [] actual = region.get(get).raw();
 
-      KeyValue [] expected = {kv};
+      Cell [] expected = {kv};
 
       assertEquals(expected.length, actual.length);
       for(int i=0; i<actual.length; i++) {
@@ -1385,7 +1386,7 @@ public class TestHRegion extends HBaseTe
       Get get = new Get(row).addColumn(fam, qual);
       Result result = region.get(get);
       assertEquals(1, result.size());
-      KeyValue kv = result.raw()[0];
+      Cell kv = result.raw()[0];
       LOG.info("Got: " + kv);
       assertTrue("LATEST_TIMESTAMP was not replaced with real timestamp",
           kv.getTimestamp() != HConstants.LATEST_TIMESTAMP);
@@ -1479,13 +1480,13 @@ public class TestHRegion extends HBaseTe
       Scan scan = new Scan();
       scan.addFamily(fam1).addFamily(fam2);
       InternalScanner s = region.getScanner(scan);
-      List<KeyValue> results = new ArrayList<KeyValue>();
+      List<Cell> results = new ArrayList<Cell>();
       s.next(results);
-      assertTrue(Bytes.equals(rowA, results.get(0).getRow()));
+      assertTrue(CellUtil.matchingRow(results.get(0), rowA));
 
       results.clear();
       s.next(results);
-      assertTrue(Bytes.equals(rowB, results.get(0).getRow()));
+      assertTrue(CellUtil.matchingRow(results.get(0), rowB));
     } finally {
       HRegion.closeHRegion(this.region);
       this.region = null;
@@ -1536,15 +1537,15 @@ public class TestHRegion extends HBaseTe
       scan.addColumn(fam1, qual1);
       InternalScanner s = region.getScanner(scan);
 
-      List<KeyValue> results = new ArrayList<KeyValue>();
+      List<Cell> results = new ArrayList<Cell>();
       assertEquals(false, s.next(results));
       assertEquals(1, results.size());
-      KeyValue kv = results.get(0);
+      Cell kv = results.get(0);
 
-      assertByteEquals(value2, kv.getValue());
-      assertByteEquals(fam1, kv.getFamily());
-      assertByteEquals(qual1, kv.getQualifier());
-      assertByteEquals(row, kv.getRow());
+      assertByteEquals(value2, CellUtil.getValueArray(kv));
+      assertByteEquals(fam1, CellUtil.getFamilyArray(kv));
+      assertByteEquals(qual1, CellUtil.getQualifierArray(kv));
+      assertByteEquals(row, CellUtil.getRowArray(kv));
     } finally {
       HRegion.closeHRegion(this.region);
       this.region = null;
@@ -1655,13 +1656,9 @@ public class TestHRegion extends HBaseTe
       Result res = region.get(get);
       assertEquals(expected.length, res.size());
       for(int i=0; i<res.size(); i++){
-        assertEquals(0,
-            Bytes.compareTo(expected[i].getRow(), res.raw()[i].getRow()));
-        assertEquals(0,
-            Bytes.compareTo(expected[i].getFamily(), res.raw()[i].getFamily()));
-        assertEquals(0,
-            Bytes.compareTo(
-                expected[i].getQualifier(), res.raw()[i].getQualifier()));
+        assertTrue(CellUtil.matchingRow(expected[i], res.raw()[i]));
+        assertTrue(CellUtil.matchingFamily(expected[i], res.raw()[i]));
+        assertTrue(CellUtil.matchingQualifier(expected[i], res.raw()[i]));
       }
 
       // Test using a filter on a Get
@@ -1950,25 +1947,25 @@ public class TestHRegion extends HBaseTe
       scan.addFamily(fam4);
       InternalScanner is = region.getScanner(scan);
 
-      List<KeyValue> res = null;
+      List<Cell> res = null;
 
       //Result 1
-      List<KeyValue> expected1 = new ArrayList<KeyValue>();
+      List<Cell> expected1 = new ArrayList<Cell>();
       expected1.add(new KeyValue(row1, fam2, null, ts, KeyValue.Type.Put, null));
       expected1.add(new KeyValue(row1, fam4, null, ts, KeyValue.Type.Put, null));
 
-      res = new ArrayList<KeyValue>();
+      res = new ArrayList<Cell>();
       is.next(res);
       for (int i = 0; i < res.size(); i++) {
         assertTrue(CellComparator.equalsIgnoreMvccVersion(expected1.get(i), res.get(i)));
       }
 
       //Result 2
-      List<KeyValue> expected2 = new ArrayList<KeyValue>();
+      List<Cell> expected2 = new ArrayList<Cell>();
       expected2.add(new KeyValue(row2, fam2, null, ts, KeyValue.Type.Put, null));
       expected2.add(new KeyValue(row2, fam4, null, ts, KeyValue.Type.Put, null));
 
-      res = new ArrayList<KeyValue>();
+      res = new ArrayList<Cell>();
       is.next(res);
       for(int i=0; i<res.size(); i++) {
         assertTrue(CellComparator.equalsIgnoreMvccVersion(expected2.get(i), res.get(i)));
@@ -2016,14 +2013,14 @@ public class TestHRegion extends HBaseTe
       region.put(put);
 
       //Expected
-      List<KeyValue> expected = new ArrayList<KeyValue>();
+      List<Cell> expected = new ArrayList<Cell>();
       expected.add(kv13);
       expected.add(kv12);
 
       Scan scan = new Scan(row1);
       scan.addColumn(fam1, qf1);
       scan.setMaxVersions(MAX_VERSIONS);
-      List<KeyValue> actual = new ArrayList<KeyValue>();
+      List<Cell> actual = new ArrayList<Cell>();
       InternalScanner scanner = region.getScanner(scan);
 
       boolean hasNext = scanner.next(actual);
@@ -2077,7 +2074,7 @@ public class TestHRegion extends HBaseTe
       region.flushcache();
 
       //Expected
-      List<KeyValue> expected = new ArrayList<KeyValue>();
+      List<Cell> expected = new ArrayList<Cell>();
       expected.add(kv13);
       expected.add(kv12);
       expected.add(kv23);
@@ -2087,7 +2084,7 @@ public class TestHRegion extends HBaseTe
       scan.addColumn(fam1, qf1);
       scan.addColumn(fam1, qf2);
       scan.setMaxVersions(MAX_VERSIONS);
-      List<KeyValue> actual = new ArrayList<KeyValue>();
+      List<Cell> actual = new ArrayList<Cell>();
       InternalScanner scanner = region.getScanner(scan);
 
       boolean hasNext = scanner.next(actual);
@@ -2157,7 +2154,7 @@ public class TestHRegion extends HBaseTe
       region.put(put);
 
       //Expected
-      List<KeyValue> expected = new ArrayList<KeyValue>();
+      List<Cell> expected = new ArrayList<Cell>();
       expected.add(kv14);
       expected.add(kv13);
       expected.add(kv12);
@@ -2170,7 +2167,7 @@ public class TestHRegion extends HBaseTe
       scan.addColumn(fam1, qf2);
       int versions = 3;
       scan.setMaxVersions(versions);
-      List<KeyValue> actual = new ArrayList<KeyValue>();
+      List<Cell> actual = new ArrayList<Cell>();
       InternalScanner scanner = region.getScanner(scan);
 
       boolean hasNext = scanner.next(actual);
@@ -2223,7 +2220,7 @@ public class TestHRegion extends HBaseTe
       region.put(put);
 
       //Expected
-      List<KeyValue> expected = new ArrayList<KeyValue>();
+      List<Cell> expected = new ArrayList<Cell>();
       expected.add(kv13);
       expected.add(kv12);
       expected.add(kv23);
@@ -2232,7 +2229,7 @@ public class TestHRegion extends HBaseTe
       Scan scan = new Scan(row1);
       scan.addFamily(fam1);
       scan.setMaxVersions(MAX_VERSIONS);
-      List<KeyValue> actual = new ArrayList<KeyValue>();
+      List<Cell> actual = new ArrayList<Cell>();
       InternalScanner scanner = region.getScanner(scan);
 
       boolean hasNext = scanner.next(actual);
@@ -2285,7 +2282,7 @@ public class TestHRegion extends HBaseTe
       region.flushcache();
 
       //Expected
-      List<KeyValue> expected = new ArrayList<KeyValue>();
+      List<Cell> expected = new ArrayList<Cell>();
       expected.add(kv13);
       expected.add(kv12);
       expected.add(kv23);
@@ -2294,7 +2291,7 @@ public class TestHRegion extends HBaseTe
       Scan scan = new Scan(row1);
       scan.addFamily(fam1);
       scan.setMaxVersions(MAX_VERSIONS);
-      List<KeyValue> actual = new ArrayList<KeyValue>();
+      List<Cell> actual = new ArrayList<Cell>();
       InternalScanner scanner = region.getScanner(scan);
 
       boolean hasNext = scanner.next(actual);
@@ -2350,7 +2347,7 @@ public class TestHRegion extends HBaseTe
       scan.addColumn(family, col1);
       InternalScanner s = region.getScanner(scan);
 
-      List<KeyValue> results = new ArrayList<KeyValue>();
+      List<Cell> results = new ArrayList<Cell>();
       assertEquals(false, s.next(results));
       assertEquals(0, results.size());
     } finally {
@@ -2369,8 +2366,8 @@ public class TestHRegion extends HBaseTe
     Result result = region.get(get);
     assertEquals(1, result.size());
 
-    KeyValue kv = result.raw()[0];
-    long r = Bytes.toLong(kv.getValue());
+    Cell kv = result.raw()[0];
+    long r = Bytes.toLong(CellUtil.getValueArray(kv));
     assertEquals(amount, r);
   }
 
@@ -2384,8 +2381,8 @@ public class TestHRegion extends HBaseTe
     Result result = region.get(get);
     assertEquals(1, result.size());
 
-    KeyValue kv = result.raw()[0];
-    int r = Bytes.toInt(kv.getValue());
+    Cell kv = result.raw()[0];
+    int r = Bytes.toInt(CellUtil.getValueArray(kv));
     assertEquals(amount, r);
   }
 
@@ -2453,7 +2450,7 @@ public class TestHRegion extends HBaseTe
       Scan scan = new Scan(row1);
       int versions = 3;
       scan.setMaxVersions(versions);
-      List<KeyValue> actual = new ArrayList<KeyValue>();
+      List<Cell> actual = new ArrayList<Cell>();
       InternalScanner scanner = region.getScanner(scan);
 
       boolean hasNext = scanner.next(actual);
@@ -2518,16 +2515,16 @@ public class TestHRegion extends HBaseTe
       scan.setLoadColumnFamiliesOnDemand(true);
       InternalScanner s = region.getScanner(scan);
 
-      List<KeyValue> results = new ArrayList<KeyValue>();
+      List<Cell> results = new ArrayList<Cell>();
       assertTrue(s.next(results));
       assertEquals(results.size(), 1);
       results.clear();
 
       assertTrue(s.next(results));
       assertEquals(results.size(), 3);
-      assertTrue("orderCheck", results.get(0).matchingFamily(cf_alpha));
-      assertTrue("orderCheck", results.get(1).matchingFamily(cf_essential));
-      assertTrue("orderCheck", results.get(2).matchingFamily(cf_joined));
+      assertTrue("orderCheck", CellUtil.matchingFamily(results.get(0), cf_alpha));
+      assertTrue("orderCheck", CellUtil.matchingFamily(results.get(1), cf_essential));
+      assertTrue("orderCheck", CellUtil.matchingFamily(results.get(2), cf_joined));
       results.clear();
 
       assertFalse(s.next(results));
@@ -2607,7 +2604,7 @@ public class TestHRegion extends HBaseTe
       // r8: first:a
       // r9: first:a
 
-      List<KeyValue> results = new ArrayList<KeyValue>();
+      List<Cell> results = new ArrayList<Cell>();
       int index = 0;
       while (true) {
         boolean more = s.next(results, 3);
@@ -2790,7 +2787,7 @@ public class TestHRegion extends HBaseTe
           CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes(5L))));
 
       int expectedCount = 0;
-      List<KeyValue> res = new ArrayList<KeyValue>();
+      List<Cell> res = new ArrayList<Cell>();
 
       boolean toggle=true;
       for (long i = 0; i < numRows; i++) {
@@ -2924,7 +2921,7 @@ public class TestHRegion extends HBaseTe
       //      new BinaryComparator(Bytes.toBytes("row0"))));
 
       int expectedCount = numFamilies * numQualifiers;
-      List<KeyValue> res = new ArrayList<KeyValue>();
+      List<Cell> res = new ArrayList<Cell>();
 
       long prevTimestamp = 0L;
       for (int i = 0; i < testCount; i++) {
@@ -3120,27 +3117,27 @@ public class TestHRegion extends HBaseTe
           // TODO this was removed, now what dangit?!
           // search looking for the qualifier in question?
           long timestamp = 0;
-          for (KeyValue kv : result.raw()) {
-            if (Bytes.equals(kv.getFamily(), families[0])
-                && Bytes.equals(kv.getQualifier(), qualifiers[0])) {
+          for (Cell kv : result.raw()) {
+            if (CellUtil.matchingFamily(kv, families[0])
+                && CellUtil.matchingQualifier(kv, qualifiers[0])) {
               timestamp = kv.getTimestamp();
             }
           }
           assertTrue(timestamp >= prevTimestamp);
           prevTimestamp = timestamp;
-          KeyValue previousKV = null;
+          Cell previousKV = null;
 
-          for (KeyValue kv : result.raw()) {
-            byte[] thisValue = kv.getValue();
+          for (Cell kv : result.raw()) {
+            byte[] thisValue = CellUtil.getValueArray(kv);
             if (previousKV != null) {
-              if (Bytes.compareTo(previousKV.getValue(), thisValue) != 0) {
+              if (Bytes.compareTo(CellUtil.getValueArray(previousKV), thisValue) != 0) {
                 LOG.warn("These two KV should have the same value." +
                     " Previous KV:" +
                     previousKV + "(memStoreTS:" + previousKV.getMvccVersion() + ")" +
                     ", New KV: " +
                     kv + "(memStoreTS:" + kv.getMvccVersion() + ")"
                     );
-                assertEquals(0, Bytes.compareTo(previousKV.getValue(), thisValue));
+                assertEquals(0, Bytes.compareTo(CellUtil.getValueArray(previousKV), thisValue));
               }
             }
             previousKV = kv;
@@ -3219,7 +3216,7 @@ public class TestHRegion extends HBaseTe
                   new BinaryComparator(Bytes.toBytes(3L)))
               )));
       InternalScanner scanner = region.getScanner(idxScan);
-      List<KeyValue> res = new ArrayList<KeyValue>();
+      List<Cell> res = new ArrayList<Cell>();
 
       //long start = System.nanoTime();
       while (scanner.next(res)) ;
@@ -3327,7 +3324,7 @@ public class TestHRegion extends HBaseTe
       //Get rows
       Get get = new Get(row);
       get.setMaxVersions();
-      KeyValue[] kvs = region.get(get).raw();
+      Cell[] kvs = region.get(get).raw();
 
       //Check if rows are correct
       assertEquals(4, kvs.length);
@@ -3378,7 +3375,7 @@ public class TestHRegion extends HBaseTe
       Get get = new Get(row);
       get.addColumn(familyName, col);
 
-      KeyValue[] keyValues = region.get(get).raw();
+      Cell[] keyValues = region.get(get).raw();
       assertTrue(keyValues.length == 0);
     } finally {
       HRegion.closeHRegion(this.region);
@@ -3612,13 +3609,13 @@ public class TestHRegion extends HBaseTe
     get.addColumn(Incrementer.family, Incrementer.qualifier);
     get.setMaxVersions(1);
     Result res = this.region.get(get);
-    List<KeyValue> kvs = res.getColumn(Incrementer.family,
+    List<Cell> kvs = res.getColumn(Incrementer.family,
         Incrementer.qualifier);
 
     //we just got the latest version
     assertEquals(kvs.size(), 1);
-    KeyValue kv = kvs.get(0);
-    assertEquals(expected, Bytes.toLong(kv.getBuffer(), kv.getValueOffset()));
+    Cell kv = kvs.get(0);
+    assertEquals(expected, Bytes.toLong(kv.getValueArray(), kv.getValueOffset()));
     this.region = null;
   }
 
@@ -3706,14 +3703,14 @@ public class TestHRegion extends HBaseTe
     get.addColumn(Appender.family, Appender.qualifier);
     get.setMaxVersions(1);
     Result res = this.region.get(get);
-    List<KeyValue> kvs = res.getColumn(Appender.family,
+    List<Cell> kvs = res.getColumn(Appender.family,
         Appender.qualifier);
 
     //we just got the latest version
     assertEquals(kvs.size(), 1);
-    KeyValue kv = kvs.get(0);
+    Cell kv = kvs.get(0);
     byte[] appendResult = new byte[kv.getValueLength()];
-    System.arraycopy(kv.getBuffer(), kv.getValueOffset(), appendResult, 0, kv.getValueLength());
+    System.arraycopy(kv.getValueArray(), kv.getValueOffset(), appendResult, 0, kv.getValueLength());
     assertEquals(expected, appendResult);
     this.region = null;
   }
@@ -3732,7 +3729,7 @@ public class TestHRegion extends HBaseTe
     this.region = initHRegion(tableName, method, conf, family);
     Put put = null;
     Get get = null;
-    List<KeyValue> kvs = null;
+    List<Cell> kvs = null;
     Result res = null;
 
     put = new Put(row);
@@ -3745,7 +3742,7 @@ public class TestHRegion extends HBaseTe
     res = this.region.get(get);
     kvs = res.getColumn(family, qualifier);
     assertEquals(1, kvs.size());
-    assertEquals(Bytes.toBytes("value0"), kvs.get(0).getValue());
+    assertEquals(Bytes.toBytes("value0"), CellUtil.getValueArray(kvs.get(0)));
 
     region.flushcache();
     get = new Get(row);
@@ -3754,7 +3751,7 @@ public class TestHRegion extends HBaseTe
     res = this.region.get(get);
     kvs = res.getColumn(family, qualifier);
     assertEquals(1, kvs.size());
-    assertEquals(Bytes.toBytes("value0"), kvs.get(0).getValue());
+    assertEquals(Bytes.toBytes("value0"), CellUtil.getValueArray(kvs.get(0)));
 
     put = new Put(row);
     value = Bytes.toBytes("value1");
@@ -3766,7 +3763,7 @@ public class TestHRegion extends HBaseTe
     res = this.region.get(get);
     kvs = res.getColumn(family, qualifier);
     assertEquals(1, kvs.size());
-    assertEquals(Bytes.toBytes("value1"), kvs.get(0).getValue());
+    assertEquals(Bytes.toBytes("value1"), CellUtil.getValueArray(kvs.get(0)));
 
     region.flushcache();
     get = new Get(row);
@@ -3775,7 +3772,7 @@ public class TestHRegion extends HBaseTe
     res = this.region.get(get);
     kvs = res.getColumn(family, qualifier);
     assertEquals(1, kvs.size());
-    assertEquals(Bytes.toBytes("value1"), kvs.get(0).getValue());
+    assertEquals(Bytes.toBytes("value1"), CellUtil.getValueArray(kvs.get(0)));
   }
 
   @Test
@@ -3902,12 +3899,12 @@ public class TestHRegion extends HBaseTe
         get.addColumn(family, qf);
       }
       Result result = newReg.get(get);
-      KeyValue [] raw = result.raw();
+      Cell [] raw = result.raw();
       assertEquals(families.length, result.size());
       for(int j=0; j<families.length; j++) {
-        assertEquals(0, Bytes.compareTo(row, raw[j].getRow()));
-        assertEquals(0, Bytes.compareTo(families[j], raw[j].getFamily()));
-        assertEquals(0, Bytes.compareTo(qf, raw[j].getQualifier()));
+        assertTrue(CellUtil.matchingRow(raw[j], row));
+        assertTrue(CellUtil.matchingFamily(raw[j], families[j]));
+        assertTrue(CellUtil.matchingQualifier(raw[j], qf));
       }
     }
   }
@@ -3916,9 +3913,9 @@ public class TestHRegion extends HBaseTe
   throws IOException {
     // Now I have k, get values out and assert they are as expected.
     Get get = new Get(k).addFamily(family).setMaxVersions();
-    KeyValue [] results = r.get(get).raw();
+    Cell [] results = r.get(get).raw();
     for (int j = 0; j < results.length; j++) {
-      byte [] tmp = results[j].getValue();
+      byte [] tmp = CellUtil.getValueArray(results[j]);
       // Row should be equal to value every time.
       assertTrue(Bytes.equals(k, tmp));
     }
@@ -3939,11 +3936,11 @@ public class TestHRegion extends HBaseTe
     for (int i = 0; i < families.length; i++) scan.addFamily(families[i]);
     InternalScanner s = r.getScanner(scan);
     try {
-      List<KeyValue> curVals = new ArrayList<KeyValue>();
+      List<Cell> curVals = new ArrayList<Cell>();
       boolean first = true;
       OUTER_LOOP: while(s.next(curVals)) {
-        for (KeyValue kv: curVals) {
-          byte [] val = kv.getValue();
+        for (Cell kv: curVals) {
+          byte [] val = CellUtil.getValueArray(kv);
           byte [] curval = val;
           if (first) {
             first = false;
@@ -4065,22 +4062,22 @@ public class TestHRegion extends HBaseTe
   }
 
   /**
-   * Assert that the passed in KeyValue has expected contents for the
+   * Assert that the passed in Cell has expected contents for the
    * specified row, column & timestamp.
    */
-  private void checkOneCell(KeyValue kv, byte[] cf,
+  private void checkOneCell(Cell kv, byte[] cf,
                              int rowIdx, int colIdx, long ts) {
     String ctx = "rowIdx=" + rowIdx + "; colIdx=" + colIdx + "; ts=" + ts;
     assertEquals("Row mismatch which checking: " + ctx,
-                 "row:"+ rowIdx, Bytes.toString(kv.getRow()));
+                 "row:"+ rowIdx, Bytes.toString(CellUtil.getRowArray(kv)));
     assertEquals("ColumnFamily mismatch while checking: " + ctx,
-                 Bytes.toString(cf), Bytes.toString(kv.getFamily()));
+                 Bytes.toString(cf), Bytes.toString(CellUtil.getFamilyArray(kv)));
     assertEquals("Column qualifier mismatch while checking: " + ctx,
-                 "column:" + colIdx, Bytes.toString(kv.getQualifier()));
+                 "column:" + colIdx, Bytes.toString(CellUtil.getQualifierArray(kv)));
     assertEquals("Timestamp mismatch while checking: " + ctx,
                  ts, kv.getTimestamp());
     assertEquals("Value mismatch while checking: " + ctx,
-                 "value-version-" + ts, Bytes.toString(kv.getValue()));
+                 "value-version-" + ts, Bytes.toString(CellUtil.getValueArray(kv)));
   }
 }
 



Mime
View raw message