hbase-commits mailing list archives

From anoopsamj...@apache.org
Subject git commit: HBASE-11805 KeyValue to Cell Convert in WALEdit APIs.
Date Sun, 07 Sep 2014 10:20:49 GMT
Repository: hbase
Updated Branches:
  refs/heads/branch-1 5360fa252 -> ecb015d9a


HBASE-11805 KeyValue to Cell Convert in WALEdit APIs.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ecb015d9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ecb015d9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ecb015d9

Branch: refs/heads/branch-1
Commit: ecb015d9a3c7d0aa0320670674cc1e55f390373f
Parents: 5360fa2
Author: anoopsjohn <anoopsamjohn@gmail.com>
Authored: Sun Sep 7 15:50:24 2014 +0530
Committer: anoopsjohn <anoopsamjohn@gmail.com>
Committed: Sun Sep 7 15:50:24 2014 +0530

----------------------------------------------------------------------
 .../java/org/apache/hadoop/hbase/CellUtil.java  | 34 ++++++++++-
 .../hadoop/hbase/mapreduce/WALPlayer.java       | 29 +++++-----
 .../hbase/protobuf/ReplicationProtbufUtil.java  | 26 ++++-----
 .../hadoop/hbase/regionserver/HRegion.java      | 28 ++++-----
 .../regionserver/MultiRowMutationProcessor.java |  7 +--
 .../hadoop/hbase/regionserver/wal/FSHLog.java   |  4 +-
 .../regionserver/wal/HLogPrettyPrinter.java     | 38 ++++++++++--
 .../hbase/regionserver/wal/HLogSplitter.java    | 33 +++++------
 .../regionserver/wal/ProtobufLogWriter.java     |  6 +-
 .../hadoop/hbase/regionserver/wal/WALEdit.java  | 50 ++++++++--------
 .../regionserver/wal/WALEditsReplaySink.java    |  8 +--
 .../hbase/replication/ScopeWALEntryFilter.java  | 18 +++---
 .../replication/TableCfWALEntryFilter.java      | 17 +++---
 .../replication/regionserver/Replication.java   |  8 +--
 .../regionserver/ReplicationSource.java         | 10 ++--
 .../coprocessor/SampleRegionWALObserver.java    | 21 +++----
 .../hbase/coprocessor/TestWALObserver.java      | 61 ++++++++++++--------
 .../hbase/mapreduce/TestHLogRecordReader.java   |  7 ++-
 .../hadoop/hbase/mapreduce/TestWALPlayer.java   | 15 ++---
 .../master/TestDistributedLogSplitting.java     |  2 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  | 22 +++----
 .../wal/HLogPerformanceEvaluation.java          | 13 ++---
 .../hadoop/hbase/regionserver/wal/TestHLog.java | 18 +++---
 .../hbase/regionserver/wal/TestHLogSplit.java   | 19 +++---
 .../hbase/regionserver/wal/TestLogRolling.java  |  8 +--
 .../hbase/regionserver/wal/TestSecureHLog.java  | 13 +++--
 .../hbase/regionserver/wal/TestWALReplay.java   |  4 +-
 .../replication/TestReplicationEndpoint.java    | 20 +++----
 .../TestReplicationWALEntryFilters.java         | 11 ++--
 29 files changed, 313 insertions(+), 237 deletions(-)
----------------------------------------------------------------------
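
The change is mechanical for most callers: WALEdit now hands out List<Cell> via getCells() instead of List<KeyValue> via getKeyValues(), and sizing goes through new CellUtil estimation helpers. A minimal caller-side sketch of the migration (hypothetical class name, not part of this commit; it mirrors the FSHLog.postAppend change below):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.KeyValueUtil;
    import org.apache.hadoop.hbase.regionserver.wal.WALEdit;

    public class WALEditMigrationSketch {
      // Sum the serialized size of an edit, as postAppend now does.
      static long totalLength(WALEdit edit) {
        long len = 0;
        for (Cell cell : edit.getCells()) {          // was: edit.getKeyValues()
          len += CellUtil.estimatedLengthOf(cell);   // was: kv.getLength()
        }
        return len;
      }

      // Bridge for code paths that still require a KeyValue (as WALPlayer does below);
      // ensureKeyValue converts (possibly copies) when the cell is not already a KeyValue.
      static KeyValue bridge(Cell cell) {
        return KeyValueUtil.ensureKeyValue(cell);
      }
    }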


http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index fd7d252..26caa17 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -28,6 +28,7 @@ import java.util.NavigableMap;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.KeyValue.Type;
+import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -465,8 +466,23 @@ public final class CellUtil {
       // Serialization is probably preceded by a length (it is in the KeyValueCodec at least).
       Bytes.SIZEOF_INT;
   }
-  
-  
+
+  /**
+   * This is an estimate of the heap space occupied by a cell. When the cell is of type
+   * {@link HeapSize} we call {@link HeapSize#heapSize()} so the cell can give a correct value. In
+   * other cases we just consider the bytes occupied by the cell components, i.e. row, CF, qualifier,
+   * timestamp, type, value and tags.
+   * @param cell
+   * @return estimate of the heap space
+   */
+  public static long estimatedHeapSizeOf(final Cell cell) {
+    if (cell instanceof HeapSize) {
+      return ((HeapSize) cell).heapSize();
+    }
+    return cell.getRowLength() + cell.getFamilyLength() + cell.getQualifierLength()
+        + cell.getValueLength() + cell.getTagsLength() + KeyValue.TIMESTAMP_TYPE_SIZE;
+  }
+
   /********************* tags *************************************/
   /**
    * Util method to iterate through the tags
@@ -530,4 +546,18 @@ public final class CellUtil {
           + SettableSequenceId.class.getName()));
     }
   }
+
+  /**
+   * Estimation of total number of bytes used by the cell to store its key, value and tags. When the
+   * cell is a {@link KeyValue} we include the extra infrastructure size used by it.
+   * @param cell
+   * @return estimated length
+   */
+  public static int estimatedLengthOf(final Cell cell) {
+    if (cell instanceof KeyValue) {
+      return ((KeyValue)cell).getLength();
+    }
+    return cell.getRowLength() + cell.getFamilyLength() + cell.getQualifierLength()
+        + cell.getValueLength() + cell.getTagsLength() + KeyValue.TIMESTAMP_TYPE_SIZE;
+  }
 }
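
A short usage sketch for the two new helpers (hypothetical snippet, not part of this commit): KeyValue implements HeapSize and carries its exact serialized length, so both calls take the precise branch here, while arbitrary Cell implementations fall back to summing component lengths.

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CellSizeSketch {
      public static void main(String[] args) {
        Cell cell = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"),
            Bytes.toBytes("q"), Bytes.toBytes("value"));
        // Delegates to KeyValue.heapSize() because KeyValue implements HeapSize.
        long heap = CellUtil.estimatedHeapSizeOf(cell);
        // Returns KeyValue.getLength(), the exact serialized length.
        int length = CellUtil.estimatedLengthOf(cell);
        System.out.println("heapSize=" + heap + ", length=" + length);
      }
    }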

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
index 456d04f..8db21e7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
@@ -28,9 +28,11 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.HTable;
@@ -82,7 +84,8 @@ public class WALPlayer extends Configured implements Tool {
       try {
         // skip all other tables
         if (Bytes.equals(table, key.getTablename().getName())) {
-          for (KeyValue kv : value.getKeyValues()) {
+          for (Cell cell : value.getCells()) {
+            KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
             if (WALEdit.isMetaEditFamily(kv.getFamily())) continue;
             context.write(new ImmutableBytesWritable(kv.getRow()), kv);
           }
@@ -125,33 +128,33 @@ public class WALPlayer extends Configured implements Tool {
           ImmutableBytesWritable tableOut = new ImmutableBytesWritable(targetTable.getName());
           Put put = null;
           Delete del = null;
-          KeyValue lastKV = null;
-          for (KeyValue kv : value.getKeyValues()) {
+          Cell lastCell = null;
+          for (Cell cell : value.getCells()) {
             // filtering HLog meta entries
-            if (WALEdit.isMetaEditFamily(kv.getFamily())) continue;
+            if (WALEdit.isMetaEditFamily(cell.getFamily())) continue;
 
             // A WALEdit may contain multiple operations (HBASE-3584) and/or
             // multiple rows (HBASE-5229).
             // Aggregate as much as possible into a single Put/Delete
             // operation before writing to the context.
-            if (lastKV == null || lastKV.getType() != kv.getType()
-                || !CellUtil.matchingRow(lastKV, kv)) {
+            if (lastCell == null || lastCell.getTypeByte() != cell.getTypeByte()
+                || !CellUtil.matchingRow(lastCell, cell)) {
               // row or type changed, write out aggregate KVs.
               if (put != null) context.write(tableOut, put);
               if (del != null) context.write(tableOut, del);
 
-              if (kv.isDelete()) {
-                del = new Delete(kv.getRow());
+              if (CellUtil.isDelete(cell)) {
+                del = new Delete(cell.getRow());
               } else {
-                put = new Put(kv.getRow());
+                put = new Put(cell.getRow());
               }
             }
-            if (kv.isDelete()) {
-              del.addDeleteMarker(kv);
+            if (CellUtil.isDelete(cell)) {
+              del.addDeleteMarker(cell);
             } else {
-              put.add(kv);
+              put.add(cell);
             }
-            lastKV = kv;
+            lastCell = cell;
           }
           // write residual KVs
           if (put != null) context.write(tableOut, put);
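
The aggregation rule above can be stated compactly: a new Put/Delete starts whenever the row or the type byte changes between consecutive cells. A condensed sketch of just that boundary test (hypothetical helper, not part of this commit):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;

    public class MutationBoundarySketch {
      // True when the mapper must flush the current Put/Delete and start a new one.
      static boolean startNewMutation(Cell lastCell, Cell cell) {
        return lastCell == null
            || lastCell.getTypeByte() != cell.getTypeByte()
            || !CellUtil.matchingRow(lastCell, cell);
      }
    }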

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
index fdf4bb3..4d48d32 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
@@ -28,12 +28,11 @@ import java.util.Map;
 import java.util.NavigableMap;
 import java.util.UUID;
 
-import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.SizedCellScanner;
 import org.apache.hadoop.hbase.ipc.PayloadCarryingRpcController;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
@@ -43,6 +42,7 @@ import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Pair;
 
 import com.google.protobuf.ServiceException;
@@ -77,8 +77,8 @@ public class ReplicationProtbufUtil {
    */
   public static Pair<AdminProtos.ReplicateWALEntryRequest, CellScanner>
       buildReplicateWALEntryRequest(final HLog.Entry[] entries) {
-    // Accumulate all the KVs seen in here.
-    List<List<? extends Cell>> allkvs = new ArrayList<List<? extends Cell>>(entries.length);
+    // Accumulate all the Cells seen in here.
+    List<List<? extends Cell>> allCells = new ArrayList<List<? extends Cell>>(entries.length);
     int size = 0;
     WALProtos.FamilyScope.Builder scopeBuilder = WALProtos.FamilyScope.newBuilder();
     AdminProtos.WALEntry.Builder entryBuilder = AdminProtos.WALEntry.newBuilder();
@@ -120,19 +120,19 @@ public class ReplicationProtbufUtil {
           keyBuilder.addScopes(scopeBuilder.build());
         }
       }
-      List<KeyValue> kvs = edit.getKeyValues();
-      // Add up the size.  It is used later serializing out the kvs.
-      for (KeyValue kv: kvs) {
-        size += kv.getLength();
+      List<Cell> cells = edit.getCells();
+      // Add up the size.  It is used later serializing out the cells.
+      for (Cell cell: cells) {
+        size += CellUtil.estimatedLengthOf(cell);
       }
-      // Collect up the kvs
-      allkvs.add(kvs);
-      // Write out how many kvs associated with this entry.
-      entryBuilder.setAssociatedCellCount(kvs.size());
+      // Collect up the cells
+      allCells.add(cells);
+      // Write out how many cells associated with this entry.
+      entryBuilder.setAssociatedCellCount(cells.size());
       builder.addEntry(entryBuilder.build());
     }
     return new Pair<AdminProtos.ReplicateWALEntryRequest, CellScanner>(builder.build(),
-      getCellScanner(allkvs, size));
+      getCellScanner(allCells, size));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 333c718..89cc5c9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -2720,8 +2720,8 @@ public class HRegion implements HeapSize { // , Writable{
         // Add WAL edits by CP
         WALEdit fromCP = batchOp.walEditsFromCoprocessors[i];
         if (fromCP != null) {
-          for (KeyValue kv : fromCP.getKeyValues()) {
-            walEdit.add(kv);
+          for (Cell cell : fromCP.getCells()) {
+            walEdit.add(cell);
           }
         }
         addFamilyMapToWALEdit(familyMaps[i], walEdit);
@@ -3189,7 +3189,7 @@ public class HRegion implements HeapSize { // , Writable{
       WALEdit walEdit) {
     for (List<Cell> edits : familyMap.values()) {
       for (Cell cell : edits) {
-        walEdit.add(KeyValueUtil.ensureKeyValue(cell));
+        walEdit.add(cell);
       }
     }
   }
@@ -3426,14 +3426,14 @@ public class HRegion implements HeapSize { // , Writable{
           currentReplaySeqId = (key.getOrigLogSeqNum() > 0) ?
             key.getOrigLogSeqNum() : currentEditSeqId;
           boolean flush = false;
-          for (KeyValue kv: val.getKeyValues()) {
+          for (Cell cell: val.getCells()) {
             // Check this edit is for me. Also, guard against writing the special
             // METACOLUMN info such as HBASE::CACHEFLUSH entries
-            if (CellUtil.matchingFamily(kv, WALEdit.METAFAMILY) ||
+            if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY) ||
                 !Bytes.equals(key.getEncodedRegionName(),
                   this.getRegionInfo().getEncodedNameAsBytes())) {
               //this is a special edit, we should handle it
-              CompactionDescriptor compaction = WALEdit.getCompaction(kv);
+              CompactionDescriptor compaction = WALEdit.getCompaction(cell);
               if (compaction != null) {
                 //replay the compaction
                 completeCompactionMarker(compaction);
@@ -3443,13 +3443,13 @@ public class HRegion implements HeapSize { // , Writable{
               continue;
             }
             // Figure which store the edit is meant for.
-            if (store == null || !CellUtil.matchingFamily(kv, store.getFamily().getName())) {
-              store = getStore(kv);
+            if (store == null || !CellUtil.matchingFamily(cell, store.getFamily().getName())) {
+              store = getStore(cell);
             }
             if (store == null) {
               // This should never happen.  Perhaps schema was changed between
               // crash and redeploy?
-              LOG.warn("No family for " + kv);
+              LOG.warn("No family for " + cell);
               skippedEdits++;
               continue;
             }
@@ -3459,11 +3459,11 @@ public class HRegion implements HeapSize { // , Writable{
               skippedEdits++;
               continue;
             }
-            kv.setSequenceId(currentReplaySeqId);
+            CellUtil.setSequenceId(cell, currentReplaySeqId);
             // Once we are over the limit, restoreEdit will keep returning true to
             // flush -- but don't flush until we've played all the kvs that make up
             // the WALEdit.
-            flush = restoreEdit(store, kv);
+            flush = restoreEdit(store, cell);
             editsCount++;
           }
           if (flush) internalFlushcache(null, currentEditSeqId, status);
@@ -3531,11 +3531,11 @@ public class HRegion implements HeapSize { // , Writable{
   /**
    * Used by tests
    * @param s Store to add edit too.
-   * @param kv KeyValue to add.
+   * @param cell Cell to add.
    * @return True if we should flush.
    */
-  protected boolean restoreEdit(final Store s, final KeyValue kv) {
-    long kvSize = s.add(kv).getFirst();
+  protected boolean restoreEdit(final Store s, final Cell cell) {
+    long kvSize = s.add(cell).getFirst();
     if (this.rsAccounting != null) {
       rsAccounting.addAndGetRegionReplayEditsSize(this.getRegionName(), kvSize);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiRowMutationProcessor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiRowMutationProcessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiRowMutationProcessor.java
index 06c3a2b..00e7eeb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiRowMutationProcessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiRowMutationProcessor.java
@@ -96,8 +96,7 @@ MultiRowMutationProcessorResponse> {
       for (List<Cell> cells : m.getFamilyCellMap().values()) {
         boolean writeToWAL = m.getDurability() != Durability.SKIP_WAL;
         for (Cell cell : cells) {
-          KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
-          if (writeToWAL) walEdit.add(kv);
+          if (writeToWAL) walEdit.add(cell);
         }
       }
     }
@@ -146,8 +145,8 @@ MultiRowMutationProcessorResponse> {
         // itself. No need to apply again to region
         if (walEditsFromCP[i] != null) {
           // Add the WALEdit created by CP hook
-          for (KeyValue walKv : walEditsFromCP[i].getKeyValues()) {
-            walEdit.add(walKv);
+          for (Cell walCell : walEditsFromCP[i].getCells()) {
+            walEdit.add(walCell);
           }
         }
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
index 680253d..a20077c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
@@ -57,11 +57,13 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.Syncable;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
@@ -1455,7 +1457,7 @@ class FSHLog implements HLog, Syncable {
   public long postAppend(final Entry e, final long elapsedTime) {
     long len = 0;
     if (this.metrics == null) return len;
-    for (KeyValue kv : e.getEdit().getKeyValues()) len += kv.getLength();
+    for (Cell cell : e.getEdit().getCells()) len += CellUtil.estimatedLengthOf(cell);
     metrics.finishAppend(elapsedTime, len);
     return len;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogPrettyPrinter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogPrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogPrettyPrinter.java
index 0aa9b2f..4786aa1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogPrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogPrettyPrinter.java
@@ -23,6 +23,7 @@ import java.io.PrintStream;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
@@ -37,8 +38,10 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.regionserver.wal.HLog.Reader;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
@@ -256,12 +259,10 @@ public class HLogPrettyPrinter {
           continue;
         // initialize list into which we will store atomic actions
         List<Map> actions = new ArrayList<Map>();
-        for (KeyValue kv : edit.getKeyValues()) {
+        for (Cell cell : edit.getCells()) {
           // add atomic operation to txn
-          Map<String, Object> op = 
-            new HashMap<String, Object>(kv.toStringMap());
-          if (outputValues)
-            op.put("value", Bytes.toStringBinary(kv.getValue()));
+          Map<String, Object> op = new HashMap<String, Object>(toStringMap(cell));
+          if (outputValues) op.put("value", Bytes.toStringBinary(cell.getValue()));
           // check row output filter
           if (row == null || ((String) op.get("row")).equals(row))
             actions.add(op);
@@ -306,6 +307,31 @@ public class HLogPrettyPrinter {
     }
   }
 
+  private static Map<String, Object> toStringMap(Cell cell) {
+    Map<String, Object> stringMap = new HashMap<String, Object>();
+    stringMap.put("row",
+        Bytes.toStringBinary(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
+    stringMap.put("family", Bytes.toStringBinary(cell.getFamilyArray(), cell.getFamilyOffset(),
+                cell.getFamilyLength()));
+    stringMap.put("qualifier",
+        Bytes.toStringBinary(cell.getQualifierArray(), cell.getQualifierOffset(),
+            cell.getQualifierLength()));
+    stringMap.put("timestamp", cell.getTimestamp());
+    stringMap.put("vlen", cell.getValueLength());
+    if (cell.getTagsLength() > 0) {
+      List<String> tagsString = new ArrayList<String>();
+      Iterator<Tag> tagsIterator = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
+          cell.getTagsLength());
+      while (tagsIterator.hasNext()) {
+        Tag tag = tagsIterator.next();
+        tagsString.add((tag.getType()) + ":"
+            + Bytes.toStringBinary(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()));
+      }
+      stringMap.put("tag", tagsString);
+    }
+    return stringMap;
+  }
+
   public static void main(String[] args) throws IOException {
     run(args);
   }
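
The new private toStringMap replaces KeyValue.toStringMap so the printer works against any Cell. A hedged standalone sketch of the same tag iteration (hypothetical class; the Tag-array KeyValue constructor is assumed from the 0.98+ API):

    import java.util.Iterator;

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.Tag;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TagDumpSketch {
      public static void main(String[] args) {
        Cell cell = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"),
            System.currentTimeMillis(), Bytes.toBytes("v"),
            new Tag[] { new Tag((byte) 1, Bytes.toBytes("acl")) });
        // Same iteration the pretty printer uses for the "tag" entry.
        Iterator<Tag> it = CellUtil.tagsIterator(cell.getTagsArray(), cell.getTagsOffset(),
            cell.getTagsLength());
        while (it.hasNext()) {
          Tag tag = it.next();
          System.out.println(tag.getType() + ":"
              + Bytes.toStringBinary(tag.getBuffer(), tag.getTagOffset(), tag.getTagLength()));
        }
      }
    }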

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
index 9304ac8..453d060 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
@@ -56,14 +56,13 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.CoordinatedStateManager;
 import org.apache.hadoop.hbase.CoordinatedStateException;
+import org.apache.hadoop.hbase.CoordinatedStateManager;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
@@ -1451,16 +1450,16 @@ public class HLogSplitter {
         boolean needSkip = false;
         HRegionLocation loc = null;
         String locKey = null;
-        List<KeyValue> kvs = edit.getKeyValues();
-        List<KeyValue> skippedKVs = new ArrayList<KeyValue>();
+        List<Cell> cells = edit.getCells();
+        List<Cell> skippedCells = new ArrayList<Cell>();
         HConnection hconn = this.getConnectionByTableName(table);
 
-        for (KeyValue kv : kvs) {
-          byte[] row = kv.getRow();
-          byte[] family = kv.getFamily();
+        for (Cell cell : cells) {
+          byte[] row = cell.getRow();
+          byte[] family = cell.getFamily();
           boolean isCompactionEntry = false;
-          if (CellUtil.matchingFamily(kv, WALEdit.METAFAMILY)) {
-            CompactionDescriptor compaction = WALEdit.getCompaction(kv);
+          if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) {
+            CompactionDescriptor compaction = WALEdit.getCompaction(cell);
             if (compaction != null && compaction.hasRegionName()) {
               try {
                 byte[][] regionName = HRegionInfo.parseRegionName(compaction.getRegionName()
@@ -1470,11 +1469,11 @@ public class HLogSplitter {
                 isCompactionEntry = true;
               } catch (Exception ex) {
                 LOG.warn("Unexpected exception received, ignoring " + ex);
-                skippedKVs.add(kv);
+                skippedCells.add(cell);
                 continue;
               }
             } else {
-              skippedKVs.add(kv);
+              skippedCells.add(cell);
               continue;
             }
           }
@@ -1521,7 +1520,7 @@ public class HLogSplitter {
               Long maxStoreSeqId = maxStoreSequenceIds.get(family);
               if (maxStoreSeqId == null || maxStoreSeqId >= entry.getKey().getLogSeqNum()) {
                 // skip current kv if column family doesn't exist anymore or already flushed
-                skippedKVs.add(kv);
+                skippedCells.add(cell);
                 continue;
               }
             }
@@ -1531,8 +1530,8 @@ public class HLogSplitter {
         // skip the edit
         if (loc == null || needSkip) continue;
 
-        if (!skippedKVs.isEmpty()) {
-          kvs.removeAll(skippedKVs);
+        if (!skippedCells.isEmpty()) {
+          cells.removeAll(skippedCells);
         }
 
         synchronized (serverToBufferQueueMap) {
@@ -1967,7 +1966,7 @@ public class HLogSplitter {
         throw new ArrayIndexOutOfBoundsException("Expected=" + count + ", index=" + i);
       }
       Cell cell = cells.current();
-      if (val != null) val.add(KeyValueUtil.ensureKeyValue(cell));
+      if (val != null) val.add(cell);
 
       boolean isNewRowOrType =
           previousCell == null || previousCell.getTypeByte() != cell.getTypeByte()
@@ -1989,13 +1988,13 @@ public class HLogSplitter {
         }
       }
       if (CellUtil.isDelete(cell)) {
-        ((Delete) m).addDeleteMarker(KeyValueUtil.ensureKeyValue(cell));
+        ((Delete) m).addDeleteMarker(cell);
       } else {
         Cell tmpNewCell = cell;
         if (addLogReplayTag) {
           tmpNewCell = tagReplayLogSequenceNumber(replaySeqId, cell);
         }
-        ((Put) m).add(KeyValueUtil.ensureKeyValue(tmpNewCell));
+        ((Put) m).add(tmpNewCell);
       }
       previousCell = cell;
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
index 8e3ee5d..7502d8c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
@@ -28,8 +28,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.codec.Codec;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer;
@@ -114,9 +114,9 @@ public class ProtobufLogWriter extends WriterBase {
     entry.setCompressionContext(compressionContext);
     entry.getKey().getBuilder(compressor).
       setFollowingKvCount(entry.getEdit().size()).build().writeDelimitedTo(output);
-    for (KeyValue kv : entry.getEdit().getKeyValues()) {
+    for (Cell cell : entry.getEdit().getCells()) {
       // cellEncoder must assume little about the stream, since we write PB and cells in turn.
-      cellEncoder.write(kv);
+      cellEncoder.write(cell);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
index 9ba3353..f44eb37 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.codec.Codec;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
@@ -78,7 +79,8 @@ import org.apache.hadoop.io.Writable;
  * is an old style KeyValue or the new style WALEdit.
  *
  */
-@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.REPLICATION)
+@InterfaceAudience.LimitedPrivate({ HBaseInterfaceAudience.REPLICATION,
+    HBaseInterfaceAudience.COPROC })
 public class WALEdit implements Writable, HeapSize {
   public static final Log LOG = LogFactory.getLog(WALEdit.class);
 
@@ -92,7 +94,7 @@ public class WALEdit implements Writable, HeapSize {
   private final int VERSION_2 = -1;
   private final boolean isReplay;
 
-  private final ArrayList<KeyValue> kvs = new ArrayList<KeyValue>(1);
+  private final ArrayList<Cell> cells = new ArrayList<Cell>(1);
 
   public static final WALEdit EMPTY_WALEDIT = new WALEdit();
 
@@ -134,21 +136,21 @@ public class WALEdit implements Writable, HeapSize {
     this.compressionContext = compressionContext;
   }
 
-  public WALEdit add(KeyValue kv) {
-    this.kvs.add(kv);
+  public WALEdit add(Cell cell) {
+    this.cells.add(cell);
     return this;
   }
 
   public boolean isEmpty() {
-    return kvs.isEmpty();
+    return cells.isEmpty();
   }
 
   public int size() {
-    return kvs.size();
+    return cells.size();
   }
 
-  public ArrayList<KeyValue> getKeyValues() {
-    return kvs;
+  public ArrayList<Cell> getCells() {
+    return cells;
   }
 
   public NavigableMap<byte[], Integer> getAndRemoveScopes() {
@@ -159,7 +161,7 @@ public class WALEdit implements Writable, HeapSize {
 
   @Override
   public void readFields(DataInput in) throws IOException {
-    kvs.clear();
+    cells.clear();
     if (scopes != null) {
       scopes.clear();
     }
@@ -197,9 +199,11 @@ public class WALEdit implements Writable, HeapSize {
   public void write(DataOutput out) throws IOException {
     LOG.warn("WALEdit is being serialized to writable - only expected in test code");
     out.writeInt(VERSION_2);
-    out.writeInt(kvs.size());
+    out.writeInt(cells.size());
     // We interleave the two lists for code simplicity
-    for (KeyValue kv : kvs) {
+    for (Cell cell : cells) {
+      // This is not used in any of the core code flows so it is just fine to convert to KV
+      KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
       if (compressionContext != null) {
         KeyValueCompression.writeKV(out, kv, compressionContext);
       } else{
@@ -224,23 +228,19 @@ public class WALEdit implements Writable, HeapSize {
    * @return Number of KVs read.
    */
   public int readFromCells(Codec.Decoder cellDecoder, int expectedCount) throws IOException {
-    kvs.clear();
-    kvs.ensureCapacity(expectedCount);
-    while (kvs.size() < expectedCount && cellDecoder.advance()) {
-      Cell cell = cellDecoder.current();
-      if (!(cell instanceof KeyValue)) {
-        throw new IOException("WAL edit only supports KVs as cells");
-      }
-      kvs.add((KeyValue)cell);
+    cells.clear();
+    cells.ensureCapacity(expectedCount);
+    while (cells.size() < expectedCount && cellDecoder.advance()) {
+      cells.add(cellDecoder.current());
     }
-    return kvs.size();
+    return cells.size();
   }
 
   @Override
   public long heapSize() {
     long ret = ClassSize.ARRAYLIST;
-    for (KeyValue kv : kvs) {
-      ret += kv.heapSize();
+    for (Cell cell : cells) {
+      ret += CellUtil.estimatedHeapSizeOf(cell);
     }
     if (scopes != null) {
       ret += ClassSize.TREEMAP;
@@ -254,9 +254,9 @@ public class WALEdit implements Writable, HeapSize {
   public String toString() {
     StringBuilder sb = new StringBuilder();
 
-    sb.append("[#edits: " + kvs.size() + " = <");
-    for (KeyValue kv : kvs) {
-      sb.append(kv.toString());
+    sb.append("[#edits: " + cells.size() + " = <");
+    for (Cell cell : cells) {
+      sb.append(cell);
       sb.append("; ");
     }
     if (scopes != null) {
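
Taken together, the reworked WALEdit surface is add(Cell), getCells(), size(), and heapSize(), with readFromCells now accepting whatever Cell the decoder yields instead of rejecting non-KeyValue cells. A minimal round-trip sketch (hypothetical class name, not part of this commit):

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
    import org.apache.hadoop.hbase.util.Bytes;

    public class WALEditCellSketch {
      public static void main(String[] args) {
        WALEdit edit = new WALEdit();
        // add(Cell) replaces add(KeyValue); a KeyValue is still a Cell, so old callers compile.
        edit.add(new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"),
            Bytes.toBytes("q"), Bytes.toBytes("v")));
        for (Cell cell : edit.getCells()) {  // replaces getKeyValues()
          System.out.println(cell);
        }
        System.out.println("size=" + edit.size() + " heapSize=" + edit.heapSize());
      }
    }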

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java
index 58bbe83..30cbcae 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java
@@ -29,11 +29,11 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.RegionServerCallable;
@@ -230,10 +230,10 @@ public class WALEditsReplaySink {
       boolean skip = false;
       for (HLog.Entry entry : this.entries) {
         WALEdit edit = entry.getEdit();
-        List<KeyValue> kvs = edit.getKeyValues();
-        for (KeyValue kv : kvs) {
+        List<Cell> cells = edit.getCells();
+        for (Cell cell : cells) {
           // filtering HLog meta entries
-          setLocation(conn.locateRegion(tableName, kv.getRow()));
+          setLocation(conn.locateRegion(tableName, cell.getRow()));
           skip = true;
           break;
         }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java
index 82d976d..3c44bfc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java
@@ -22,8 +22,8 @@ import java.util.ArrayList;
 import java.util.NavigableMap;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.regionserver.wal.HLog.Entry;
 
 /**
@@ -38,19 +38,19 @@ public class ScopeWALEntryFilter implements WALEntryFilter {
     if (scopes == null || scopes.isEmpty()) {
       return null;
     }
-    ArrayList<KeyValue> kvs = entry.getEdit().getKeyValues();
-    int size = kvs.size();
+    ArrayList<Cell> cells = entry.getEdit().getCells();
+    int size = cells.size();
     for (int i = size - 1; i >= 0; i--) {
-      KeyValue kv = kvs.get(i);
+      Cell cell = cells.get(i);
       // The scope will be null or empty if
       // there's nothing to replicate in that WALEdit
-      if (!scopes.containsKey(kv.getFamily())
-          || scopes.get(kv.getFamily()) == HConstants.REPLICATION_SCOPE_LOCAL) {
-        kvs.remove(i);
+      if (!scopes.containsKey(cell.getFamily())
+          || scopes.get(cell.getFamily()) == HConstants.REPLICATION_SCOPE_LOCAL) {
+        cells.remove(i);
       }
     }
-    if (kvs.size() < size / 2) {
-      kvs.trimToSize();
+    if (cells.size() < size / 2) {
+      cells.trimToSize();
     }
     return entry;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java
index b7dfc4e..44e3c1c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/TableCfWALEntryFilter.java
@@ -21,9 +21,10 @@ package org.apache.hadoop.hbase.replication;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.regionserver.wal.HLog.Entry;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -39,7 +40,7 @@ public class TableCfWALEntryFilter implements WALEntryFilter {
   @Override
   public Entry filter(Entry entry) {
     String tabName = entry.getKey().getTablename().getNameAsString();
-    ArrayList<KeyValue> kvs = entry.getEdit().getKeyValues();
+    ArrayList<Cell> cells = entry.getEdit().getCells();
     Map<String, List<String>> tableCFs = null;
 
     try {
@@ -48,7 +49,7 @@ public class TableCfWALEntryFilter implements WALEntryFilter {
       LOG.error("should not happen: can't get tableCFs for peer " + peer.getId() +
           ", degenerate as if it's not configured by keeping tableCFs==null");
     }
-    int size = kvs.size();
+    int size = cells.size();
 
     // return null(prevent replicating) if logKey's table isn't in this peer's
     // replicable table list (empty tableCFs means all table are replicable)
@@ -57,16 +58,16 @@ public class TableCfWALEntryFilter implements WALEntryFilter {
     } else {
       List<String> cfs = (tableCFs == null) ? null : tableCFs.get(tabName);
       for (int i = size - 1; i >= 0; i--) {
-        KeyValue kv = kvs.get(i);
+        Cell cell = cells.get(i);
         // ignore(remove) kv if its cf isn't in the replicable cf list
         // (empty cfs means all cfs of this table are replicable)
-        if ((cfs != null && !cfs.contains(Bytes.toString(kv.getFamily())))) {
-          kvs.remove(i);
+        if ((cfs != null && !cfs.contains(Bytes.toString(cell.getFamily())))) {
+          cells.remove(i);
         }
       }
     }
-    if (kvs.size() < size/2) {
-      kvs.trimToSize();
+    if (cells.size() < size/2) {
+      cells.trimToSize();
     }
     return entry;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
index f06ddef..180ac44 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
@@ -37,12 +37,12 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.Server;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
 import org.apache.hadoop.hbase.regionserver.ReplicationSinkService;
@@ -245,10 +245,10 @@ public class Replication implements WALActionsListener,
     NavigableMap<byte[], Integer> scopes =
         new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
     byte[] family;
-    for (KeyValue kv : logEdit.getKeyValues()) {
-      family = kv.getFamily();
+    for (Cell cell : logEdit.getCells()) {
+      family = cell.getFamily();
       // This is expected and the KV should not be replicated
-      if (CellUtil.matchingFamily(kv, WALEdit.METAFAMILY)) continue;
+      if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) continue;
       // Unexpected, has a tendency to happen in unit tests
       assert htd.getFamily(family) != null;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
index 6451f91..fa5e8fe 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
@@ -36,10 +36,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
@@ -634,16 +634,16 @@ public class ReplicationSource extends Thread
 
   /**
    * Count the number of different row keys in the given edit because of
-   * mini-batching. We assume that there's at least one KV in the WALEdit.
+   * mini-batching. We assume that there's at least one Cell in the WALEdit.
    * @param edit edit to count row keys from
    * @return number of different row keys
    */
   private int countDistinctRowKeys(WALEdit edit) {
-    List<KeyValue> kvs = edit.getKeyValues();
+    List<Cell> cells = edit.getCells();
     int distinctRowKeys = 1;
-    KeyValue lastKV = kvs.get(0);
+    Cell lastCell = cells.get(0);
     for (int i = 0; i < edit.size(); i++) {
-      if (!CellUtil.matchingRow(kvs.get(i), lastKV)) {
+      if (!CellUtil.matchingRow(cells.get(i), lastCell)) {
         distinctRowKeys++;
       }
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
index c659fe6..599accf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
@@ -26,6 +26,7 @@ import java.util.Arrays;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
@@ -93,28 +94,28 @@ implements WALObserver {
     preWALWriteCalled = true;
     // here we're going to remove one keyvalue from the WALEdit, and add
     // another one to it.
-    List<KeyValue> kvs = logEdit.getKeyValues();
-    KeyValue deletedKV = null;
-    for (KeyValue kv : kvs) {
+    List<Cell> cells = logEdit.getCells();
+    Cell deletedCell = null;
+    for (Cell cell : cells) {
       // assume only one kv from the WALEdit matches.
-      byte[] family = kv.getFamily();
-      byte[] qulifier = kv.getQualifier();
+      byte[] family = cell.getFamily();
+      byte[] qulifier = cell.getQualifier();
 
       if (Arrays.equals(family, ignoredFamily) &&
           Arrays.equals(qulifier, ignoredQualifier)) {
         LOG.debug("Found the KeyValue from WALEdit which should be ignored.");
-        deletedKV = kv;
+        deletedCell = cell;
       }
       if (Arrays.equals(family, changedFamily) &&
           Arrays.equals(qulifier, changedQualifier)) {
         LOG.debug("Found the KeyValue from WALEdit which should be changed.");
-        kv.getValueArray()[kv.getValueOffset()] += 1;
+        cell.getValueArray()[cell.getValueOffset()] += 1;
       }
     }
-    kvs.add(new KeyValue(row, addedFamily, addedQualifier));
-    if (deletedKV != null) {
+    cells.add(new KeyValue(row, addedFamily, addedQualifier));
+    if (deletedCell != null) {
       LOG.debug("About to delete a KeyValue from WALEdit.");
-      kvs.remove(deletedKV);
+      cells.remove(deletedCell);
     }
     return bypass;
   }
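
With WALEdit now LimitedPrivate for coprocessors as well (see the WALEdit.java hunk above), third-party observers can work the same way this sample does. A minimal hypothetical observer, assuming the branch-1 signatures in this snapshot (BaseWALObserver, HLogKey):

    import java.io.IOException;

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.HRegionInfo;
    import org.apache.hadoop.hbase.coprocessor.BaseWALObserver;
    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.coprocessor.WALCoprocessorEnvironment;
    import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
    import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
    import org.apache.hadoop.hbase.util.Bytes;

    public class AuditFamilyWALObserver extends BaseWALObserver {
      private static final byte[] AUDIT = Bytes.toBytes("audit");

      @Override
      public boolean preWALWrite(ObserverContext<WALCoprocessorEnvironment> ctx,
          HRegionInfo info, HLogKey logKey, WALEdit logEdit) throws IOException {
        // Inspect the edit through the Cell-based API; cells could be mutated here too.
        for (Cell cell : logEdit.getCells()) {
          if (CellUtil.matchingFamily(cell, AUDIT)) {
            // e.g. count, tag, or drop audit-family cells
          }
        }
        return false; // do not bypass the default WAL write
      }
    }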

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
index 5706cb3..3ce3307 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
@@ -19,14 +19,36 @@
 
 package org.apache.hadoop.hbase.coprocessor;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicLong;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
 import org.apache.hadoop.hbase.regionserver.wal.HLogSplitter;
@@ -34,9 +56,9 @@ import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.EnvironmentEdge;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.FSUtils;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -44,15 +66,6 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import java.io.IOException;
-import java.security.PrivilegedExceptionAction;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicLong;
-
-import static org.junit.Assert.*;
-
 /**
  * Tests invocation of the
  * {@link org.apache.hadoop.hbase.coprocessor.MasterObserver} interface hooks at
@@ -167,17 +180,17 @@ public class TestWALObserver {
     boolean foundFamily2 = false;
     boolean modifiedFamily1 = false;
 
-    List<KeyValue> kvs = edit.getKeyValues();
+    List<Cell> cells = edit.getCells();
 
-    for (KeyValue kv : kvs) {
-      if (Arrays.equals(kv.getFamily(), TEST_FAMILY[0])) {
+    for (Cell cell : cells) {
+      if (Arrays.equals(cell.getFamily(), TEST_FAMILY[0])) {
         foundFamily0 = true;
       }
-      if (Arrays.equals(kv.getFamily(), TEST_FAMILY[2])) {
+      if (Arrays.equals(cell.getFamily(), TEST_FAMILY[2])) {
         foundFamily2 = true;
       }
-      if (Arrays.equals(kv.getFamily(), TEST_FAMILY[1])) {
-        if (!Arrays.equals(kv.getValue(), TEST_VALUE[1])) {
+      if (Arrays.equals(cell.getFamily(), TEST_FAMILY[1])) {
+        if (!Arrays.equals(cell.getValue(), TEST_VALUE[1])) {
           modifiedFamily1 = true;
         }
       }
@@ -194,15 +207,15 @@ public class TestWALObserver {
     foundFamily0 = false;
     foundFamily2 = false;
     modifiedFamily1 = false;
-    for (KeyValue kv : kvs) {
-      if (Arrays.equals(kv.getFamily(), TEST_FAMILY[0])) {
+    for (Cell cell : cells) {
+      if (Arrays.equals(cell.getFamily(), TEST_FAMILY[0])) {
         foundFamily0 = true;
       }
-      if (Arrays.equals(kv.getFamily(), TEST_FAMILY[2])) {
+      if (Arrays.equals(cell.getFamily(), TEST_FAMILY[2])) {
         foundFamily2 = true;
       }
-      if (Arrays.equals(kv.getFamily(), TEST_FAMILY[1])) {
-        if (!Arrays.equals(kv.getValue(), TEST_VALUE[1])) {
+      if (Arrays.equals(cell.getFamily(), TEST_FAMILY[1])) {
+        if (!Arrays.equals(cell.getValue(), TEST_VALUE[1])) {
           modifiedFamily1 = true;
         }
       }
@@ -350,7 +363,7 @@ public class TestWALObserver {
     for (List<Cell> edits : familyMap.values()) {
       for (Cell cell : edits) {
         // KeyValue v1 expectation. Cast for now until we go all Cell all the time. TODO.
-        walEdit.add((KeyValue)cell);
+        walEdit.add(cell);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java
index 401eee8..22acfa9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -234,10 +235,10 @@ public class TestHLogRecordReader {
 
     for (byte[] column : columns) {
       assertTrue(reader.nextKeyValue());
-      KeyValue kv = reader.getCurrentValue().getKeyValues().get(0);
-      if (!Bytes.equals(column, kv.getQualifier())) {
+      Cell cell = reader.getCurrentValue().getCells().get(0);
+      if (!Bytes.equals(column, cell.getQualifier())) {
         assertTrue("expected [" + Bytes.toString(column) + "], actual ["
-            + Bytes.toString(kv.getQualifier()) + "]", false);
+            + Bytes.toString(cell.getQualifier()) + "]", false);
       }
     }
     assertFalse(reader.nextKeyValue());

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
index 309af73..89a46d8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
@@ -20,21 +20,25 @@ package org.apache.hadoop.hbase.mapreduce;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
 import java.util.ArrayList;
-import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.LargeTests;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTable;
@@ -56,9 +60,6 @@ import org.junit.experimental.categories.Category;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.*;
-
 /**
  * Basic test for the WALPlayer M/R tool
  */
@@ -143,12 +144,12 @@ public class TestWALPlayer {
     when(context.getConfiguration()).thenReturn(configuration);
 
     WALEdit value = mock(WALEdit.class);
-    ArrayList<KeyValue> values = new ArrayList<KeyValue>();
+    ArrayList<Cell> values = new ArrayList<Cell>();
     KeyValue kv1 = mock(KeyValue.class);
     when(kv1.getFamily()).thenReturn(Bytes.toBytes("family"));
     when(kv1.getRow()).thenReturn(Bytes.toBytes("row"));
     values.add(kv1);
-    when(value.getKeyValues()).thenReturn(values);
+    when(value.getCells()).thenReturn(values);
     mapper.setup(context);
 
     doAnswer(new Answer<Void>() {

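Note the mock setup above: getCells() is stubbed with an ArrayList<Cell>, and a plain KeyValue still serves as the list element since KeyValue implements Cell. Condensed into a standalone sketch (names illustrative):

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.ArrayList;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;

public class MockWALEditSketch {
  // Builds a mocked WALEdit whose getCells() returns one canned cell.
  static WALEdit mockedEdit() {
    WALEdit edit = mock(WALEdit.class);
    ArrayList<Cell> cells = new ArrayList<Cell>();
    cells.add(new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"),
        Bytes.toBytes("qualifier"), Bytes.toBytes("value")));
    when(edit.getCells()).thenReturn(cells);
    return edit;
  }
}
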
http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
index f5dbaf0..390cd47 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
@@ -1512,7 +1512,7 @@ public class TestDistributedLogSplitting {
     HLog.Reader in = HLogFactory.createReader(fs, log, conf);
     HLog.Entry e;
     while ((e = in.next()) != null) {
-      if (!WALEdit.isMetaEditFamily(e.getEdit().getKeyValues().get(0))) {
+      if (!WALEdit.isMetaEditFamily(e.getEdit().getCells().get(0))) {
         count++;
       }
     }

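WALEdit.isMetaEditFamily() likewise takes a Cell now, so scanning a log for data edits needs no conversion either. The loop above as a self-contained helper, with the reader closed in a finally block (method name hypothetical):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;

public class LogScanSketch {
  // Counts data edits in a log, skipping meta markers (flush/compaction
  // descriptors), keyed off the first cell of each WALEdit.
  static int countDataEdits(FileSystem fs, Path log, Configuration conf)
      throws IOException {
    int count = 0;
    HLog.Reader in = HLogFactory.createReader(fs, log, conf);
    try {
      HLog.Entry e;
      while ((e = in.next()) != null) {
        if (!WALEdit.isMetaEditFamily(e.getEdit().getCells().get(0))) {
          count++;
        }
      }
    } finally {
      in.close();
    }
    return count;
  }
}
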
http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 19c8ff8..d3d97ca 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -846,7 +846,7 @@ public class TestHRegion {
         if (entry == null) {
           break;
         }
-        Cell cell = entry.getEdit().getKeyValues().get(0);
+        Cell cell = entry.getEdit().getCells().get(0);
         if (WALEdit.isMetaEditFamily(cell)) {
           FlushDescriptor flushDesc = WALEdit.getFlushDescriptor(cell);
           assertNotNull(flushDesc);
@@ -912,14 +912,14 @@ public class TestHRegion {
     }
     @Override
     public boolean matches(Object edit) {
-      List<KeyValue> kvs = ((WALEdit)edit).getKeyValues();
-      if (kvs.isEmpty()) {
+      List<Cell> cells = ((WALEdit)edit).getCells();
+      if (cells.isEmpty()) {
         return false;
       }
-      if (WALEdit.isMetaEditFamily(kvs.get(0))) {
+      if (WALEdit.isMetaEditFamily(cells.get(0))) {
         FlushDescriptor desc = null;
         try {
-          desc = WALEdit.getFlushDescriptor(kvs.get(0));
+          desc = WALEdit.getFlushDescriptor(cells.get(0));
         } catch (IOException e) {
           LOG.warn(e);
           return false;
@@ -5527,9 +5527,9 @@ public class TestHRegion {
 
       WALEdit edit = editCaptor.getValue();
       assertNotNull(edit);
-      assertNotNull(edit.getKeyValues());
-      assertEquals(1, edit.getKeyValues().size());
-      RegionEventDescriptor desc = WALEdit.getRegionEventDescriptor(edit.getKeyValues().get(0));
+      assertNotNull(edit.getCells());
+      assertEquals(1, edit.getCells().size());
+      RegionEventDescriptor desc = WALEdit.getRegionEventDescriptor(edit.getCells().get(0));
       assertNotNull(desc);
 
       LOG.info("RegionEventDescriptor from WAL: " + desc);
@@ -5591,9 +5591,9 @@ public class TestHRegion {
 
     WALEdit edit = editCaptor.getAllValues().get(1);
     assertNotNull(edit);
-    assertNotNull(edit.getKeyValues());
-    assertEquals(1, edit.getKeyValues().size());
-    RegionEventDescriptor desc = WALEdit.getRegionEventDescriptor(edit.getKeyValues().get(0));
+    assertNotNull(edit.getCells());
+    assertEquals(1, edit.getCells().size());
+    RegionEventDescriptor desc = WALEdit.getRegionEventDescriptor(edit.getCells().get(0));
     assertNotNull(desc);
 
     LOG.info("RegionEventDescriptor from WAL: " + desc);

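The TestHRegion changes also exercise the Cell-typed descriptor accessors. A small sketch pairing the meta-edit check with descriptor extraction, assuming the WALProtos.FlushDescriptor type these tests import:

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FlushDescriptor;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;

public class DescriptorSketch {
  // Returns the flush descriptor of a meta edit, or null for data edits.
  static FlushDescriptor flushDescriptorOrNull(Cell cell) throws IOException {
    return WALEdit.isMetaEditFamily(cell)
        ? WALEdit.getFlushDescriptor(cell) : null;
  }
}
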
http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java
index 44c2d9e..90b6f8d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java
@@ -42,8 +42,6 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
@@ -54,15 +52,15 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.htrace.Sampler;
+import org.htrace.Trace;
+import org.htrace.TraceScope;
+import org.htrace.impl.ProbabilitySampler;
 
 import com.yammer.metrics.core.Histogram;
 import com.yammer.metrics.core.Meter;
 import com.yammer.metrics.core.MetricsRegistry;
 import com.yammer.metrics.reporting.ConsoleReporter;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
-import org.htrace.impl.ProbabilitySampler;
 
 /**
  * This class runs performance benchmarks for {@link HLog}.
@@ -534,8 +532,7 @@ public final class HLogPerformanceEvaluation extends Configured implements Tool
       WALEdit walEdit) {
     for (List<Cell> edits : familyMap.values()) {
       for (Cell cell : edits) {
-        KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
-        walEdit.add(kv);
+        walEdit.add(cell);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
index c7c363d..a2ea5f0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
@@ -47,6 +47,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -56,7 +57,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.LargeTests;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
@@ -216,7 +216,7 @@ public class TestHLog  {
       for (Put p: puts) {
         CellScanner cs = p.cellScanner();
         while (cs.advance()) {
-          edits.add(KeyValueUtil.ensureKeyValue(cs.current()));
+          edits.add(cs.current());
         }
       }
       // Add any old cluster id.
@@ -567,7 +567,7 @@ public class TestHLog  {
     while (reader.next(entry) != null) {
       count++;
       assertTrue("Should be one KeyValue per WALEdit",
-                  entry.getEdit().getKeyValues().size() == 1);
+                  entry.getEdit().getCells().size() == 1);
     }
     assertEquals(total, count);
     reader.close();
@@ -623,9 +623,9 @@ public class TestHLog  {
         WALEdit val = entry.getEdit();
         assertTrue(Bytes.equals(info.getEncodedNameAsBytes(), key.getEncodedRegionName()));
         assertTrue(tableName.equals(key.getTablename()));
-        KeyValue kv = val.getKeyValues().get(0);
-        assertTrue(Bytes.equals(row, kv.getRow()));
-        assertEquals((byte)(i + '0'), kv.getValue()[0]);
+        Cell cell = val.getCells().get(0);
+        assertTrue(Bytes.equals(row, cell.getRow()));
+        assertEquals((byte)(i + '0'), cell.getValue()[0]);
         System.out.println(key + " " + val);
       }
     } finally {
@@ -675,7 +675,7 @@ public class TestHLog  {
       HLog.Entry entry = reader.next();
       assertEquals(COL_COUNT, entry.getEdit().size());
       int idx = 0;
-      for (KeyValue val : entry.getEdit().getKeyValues()) {
+      for (Cell val : entry.getEdit().getCells()) {
         assertTrue(Bytes.equals(hri.getEncodedNameAsBytes(),
           entry.getKey().getEncodedRegionName()));
         assertTrue(tableName.equals(entry.getKey().getTablename()));
@@ -917,7 +917,7 @@ public class TestHLog  {
         assertArrayEquals(hri.getEncodedNameAsBytes(), entry.getKey().getEncodedRegionName());
         assertEquals(tableName, entry.getKey().getTablename());
         int idx = 0;
-        for (KeyValue val : entry.getEdit().getKeyValues()) {
+        for (Cell val : entry.getEdit().getCells()) {
           assertTrue(Bytes.equals(row, val.getRow()));
           String value = i + "" + idx;
           assertArrayEquals(Bytes.toBytes(value), val.getValue());
@@ -1007,7 +1007,7 @@ public class TestHLog  {
         assertArrayEquals(hri.getEncodedNameAsBytes(), entry.getKey().getEncodedRegionName());
         assertEquals(tableName, entry.getKey().getTablename());
         int idx = 0;
-        for (KeyValue val : entry.getEdit().getKeyValues()) {
+        for (Cell val : entry.getEdit().getCells()) {
           assertTrue(Bytes.equals(row, val.getRow()));
           String value = i + "" + idx;
           assertArrayEquals(Bytes.toBytes(value), val.getValue());

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
index cffc9cd..8a17eb4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
@@ -42,11 +42,6 @@ import java.util.concurrent.atomic.AtomicLong;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.log4j.Level;
-import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
-import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -54,6 +49,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -62,6 +58,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.LargeTests;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.wal.HLog.Entry;
@@ -74,8 +71,12 @@ import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.server.datanode.DataNode;
+import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
+import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -1021,12 +1022,12 @@ public class TestHLogSplit {
             }
             HLog.Entry entry = (Entry) invocation.getArguments()[0];
             WALEdit edit = entry.getEdit();
-            List<KeyValue> keyValues = edit.getKeyValues();
-            assertEquals(1, keyValues.size());
-            KeyValue kv = keyValues.get(0);
+            List<Cell> cells = edit.getCells();
+            assertEquals(1, cells.size());
+            Cell cell = cells.get(0);
 
             // Check that the edits come in the right order.
-            assertEquals(expectedIndex, Bytes.toInt(kv.getRow()));
+            assertEquals(expectedIndex, Bytes.toInt(cell.getRow()));
             expectedIndex++;
             return null;
           }

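The Answer above pulls the first Cell out of each appended entry to verify split ordering. Isolated into a sketch, assuming HLog.Writer.append(Entry) is the stubbed call (class and method names illustrative):

import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.util.Bytes;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

public class OrderedAppendSketch {
  // Stubs a writer so each appended entry must carry the next expected
  // row index, mirroring the ordering check in the hunk above.
  static HLog.Writer orderCheckingWriter(final AtomicInteger expected)
      throws IOException {
    HLog.Writer writer = mock(HLog.Writer.class);
    doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation) {
        HLog.Entry entry = (HLog.Entry) invocation.getArguments()[0];
        Cell cell = entry.getEdit().getCells().get(0);
        assertEquals(expected.getAndIncrement(), Bytes.toInt(cell.getRow()));
        return null;
      }
    }).when(writer).append(any(HLog.Entry.class));
    return writer;
  }
}
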
http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
index ddac2b7..2a6f4f5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
@@ -38,12 +38,12 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.LargeTests;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
@@ -539,9 +539,9 @@ public class TestLogRolling  {
               TEST_UTIL.getConfiguration());
           HLog.Entry entry;
           while ((entry = reader.next()) != null) {
-            LOG.debug("#"+entry.getKey().getLogSeqNum()+": "+entry.getEdit().getKeyValues());
-            for (KeyValue kv : entry.getEdit().getKeyValues()) {
-              loggedRows.add(Bytes.toStringBinary(kv.getRow()));
+            LOG.debug("#"+entry.getKey().getLogSeqNum()+": "+entry.getEdit().getCells());
+            for (Cell cell : entry.getEdit().getCells()) {
+              loggedRows.add(Bytes.toStringBinary(cell.getRow()));
             }
           }
         } catch (EOFException e) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureHLog.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureHLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureHLog.java
index 6b4304d..607149c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureHLog.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestSecureHLog.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -110,14 +111,14 @@ public class TestSecureHLog {
     HLog.Entry entry = new HLog.Entry();
     while (reader.next(entry) != null) {
       count++;
-      List<KeyValue> kvs = entry.getEdit().getKeyValues();
-      assertTrue("Should be one KV per WALEdit", kvs.size() == 1);
-      for (KeyValue kv: kvs) {
-        byte[] thisRow = kv.getRow();
+      List<Cell> cells = entry.getEdit().getCells();
+      assertTrue("Should be one KV per WALEdit", cells.size() == 1);
+      for (Cell cell: cells) {
+        byte[] thisRow = cell.getRow();
         assertTrue("Incorrect row", Bytes.equals(thisRow, row));
-        byte[] thisFamily = kv.getFamily();
+        byte[] thisFamily = cell.getFamily();
         assertTrue("Incorrect family", Bytes.equals(thisFamily, family));
-        byte[] thisValue = kv.getValue();
+        byte[] thisValue = cell.getValue();
         assertTrue("Incorrect value", Bytes.equals(thisValue, value));
       }
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
index 653dab6..5b13931 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
@@ -530,8 +530,8 @@ public class TestWALReplay {
         final AtomicInteger countOfRestoredEdits = new AtomicInteger(0);
         HRegion region3 = new HRegion(basedir, wal3, newFS, newConf, hri, htd, null) {
           @Override
-          protected boolean restoreEdit(Store s, KeyValue kv) {
-            boolean b = super.restoreEdit(s, kv);
+          protected boolean restoreEdit(Store s, Cell cell) {
+            boolean b = super.restoreEdit(s, cell);
             countOfRestoredEdits.incrementAndGet();
             return b;
           }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
index 38aaa7a..c545dda 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationEndpoint.java
@@ -26,7 +26,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.client.HTable;
@@ -173,10 +173,10 @@ public class TestReplicationEndpoint extends TestReplicationBase {
       return; // first call
     }
     Assert.assertEquals(1, ReplicationEndpointForTest.lastEntries.size());
-    List<KeyValue> kvs = ReplicationEndpointForTest.lastEntries.get(0).getEdit().getKeyValues();
-    Assert.assertEquals(1, kvs.size());
-    Assert.assertTrue(Bytes.equals(kvs.get(0).getRowArray(), kvs.get(0).getRowOffset(),
-      kvs.get(0).getRowLength(), row, 0, row.length));
+    List<Cell> cells = ReplicationEndpointForTest.lastEntries.get(0).getEdit().getCells();
+    Assert.assertEquals(1, cells.size());
+    Assert.assertTrue(Bytes.equals(cells.get(0).getRowArray(), cells.get(0).getRowOffset(),
+      cells.get(0).getRowLength(), row, 0, row.length));
   }
 
   public static class ReplicationEndpointForTest extends BaseReplicationEndpoint {
@@ -255,13 +255,13 @@ public class TestReplicationEndpoint extends TestReplicationBase {
       return new ChainWALEntryFilter(super.getWALEntryfilter(), new WALEntryFilter() {
         @Override
         public Entry filter(Entry entry) {
-          ArrayList<KeyValue> kvs = entry.getEdit().getKeyValues();
-          int size = kvs.size();
+          ArrayList<Cell> cells = entry.getEdit().getCells();
+          int size = cells.size();
           for (int i = size-1; i >= 0; i--) {
-            KeyValue kv = kvs.get(i);
-            if (!Bytes.equals(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(),
+            Cell cell = cells.get(i);
+            if (!Bytes.equals(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
               row, 0, row.length)) {
-              kvs.remove(i);
+              cells.remove(i);
             }
           }
           return entry;

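The custom filter above trims a WALEdit in place through the list returned by getCells(); iterating backwards keeps the remaining indices valid as elements are removed. As a standalone WALEntryFilter (class name hypothetical):

import java.util.ArrayList;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.regionserver.wal.HLog.Entry;
import org.apache.hadoop.hbase.replication.WALEntryFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class RowWALEntryFilter implements WALEntryFilter {
  private final byte[] row;

  public RowWALEntryFilter(byte[] row) {
    this.row = row;
  }

  @Override
  public Entry filter(Entry entry) {
    ArrayList<Cell> cells = entry.getEdit().getCells();
    // Walk backwards so remove(i) never shifts an index we still need.
    for (int i = cells.size() - 1; i >= 0; i--) {
      Cell c = cells.get(i);
      if (!Bytes.equals(c.getRowArray(), c.getRowOffset(), c.getRowLength(),
          row, 0, row.length)) {
        cells.remove(i);
      }
    }
    return entry;
  }
}
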
http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb015d9/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java
index d4b412e..41a4c14 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java
@@ -23,6 +23,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -265,11 +266,11 @@ public class TestReplicationWALEntryFilters {
     if (e1.getEdit() == null) {
       return;
     }
-    List<KeyValue> kvs1 = e1.getEdit().getKeyValues();
-    List<KeyValue> kvs2 = e2.getEdit().getKeyValues();
-    Assert.assertEquals(kvs1.size(), kvs2.size());
-    for (int i = 0; i < kvs1.size(); i++) {
-      KeyValue.COMPARATOR.compare(kvs1.get(i), kvs2.get(i));
+    List<Cell> cells1 = e1.getEdit().getCells();
+    List<Cell> cells2 = e2.getEdit().getCells();
+    Assert.assertEquals(cells1.size(), cells2.size());
+    for (int i = 0; i < cells1.size(); i++) {
+      Assert.assertEquals(0, KeyValue.COMPARATOR.compare(cells1.get(i), cells2.get(i)));
     }
   }
 

