hbase-commits mailing list archives

From ndimi...@apache.org
Subject [2/2] git commit: HBASE-11118 non environment variable solution for "IllegalAccessError: class com.google.protobuf.ZeroCopyLiteralByteString cannot access its superclass com.google.protobuf.LiteralByteString"
Date Fri, 11 Jul 2014 23:56:30 GMT
HBASE-11118 non environment variable solution for "IllegalAccessError: class com.google.protobuf.ZeroCopyLiteralByteString cannot access its superclass com.google.protobuf.LiteralByteString"


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8884ad04
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8884ad04
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8884ad04

Branch: refs/heads/0.98
Commit: 8884ad0444cada23dfbd4dbde9db9e2a20ae262a
Parents: 3574502
Author: Nick Dimiduk <ndimiduk@apache.org>
Authored: Fri Jul 11 16:51:47 2014 -0700
Committer: Nick Dimiduk <ndimiduk@apache.org>
Committed: Fri Jul 11 16:51:47 2014 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hbase/ClusterStatus.java  |  4 +-
 .../apache/hadoop/hbase/HColumnDescriptor.java  |  8 +-
 .../org/apache/hadoop/hbase/HRegionInfo.java    |  6 +-
 .../apache/hadoop/hbase/HTableDescriptor.java   |  6 +-
 .../apache/hadoop/hbase/RegionTransition.java   |  6 +-
 .../BigDecimalColumnInterpreter.java            |  4 +-
 .../coprocessor/SecureBulkLoadClient.java       |  8 +-
 .../hbase/filter/ByteArrayComparable.java       |  4 +-
 .../hbase/filter/ColumnPaginationFilter.java    |  4 +-
 .../hadoop/hbase/filter/ColumnPrefixFilter.java |  4 +-
 .../hadoop/hbase/filter/ColumnRangeFilter.java  |  6 +-
 .../hbase/filter/DependentColumnFilter.java     |  6 +-
 .../FirstKeyValueMatchingQualifiersFilter.java  |  4 +-
 .../hadoop/hbase/filter/FuzzyRowFilter.java     |  6 +-
 .../hbase/filter/InclusiveStopFilter.java       |  5 +-
 .../filter/MultipleColumnPrefixFilter.java      |  4 +-
 .../hadoop/hbase/filter/PrefixFilter.java       |  4 +-
 .../hbase/filter/SingleColumnValueFilter.java   |  6 +-
 .../hbase/ipc/MasterCoprocessorRpcChannel.java  |  4 +-
 .../hbase/ipc/RegionCoprocessorRpcChannel.java  |  4 +-
 .../hadoop/hbase/protobuf/ProtobufUtil.java     | 77 ++++++++++----------
 .../hadoop/hbase/protobuf/RequestConverter.java | 46 ++++++------
 .../hadoop/hbase/security/EncryptionUtil.java   |  8 +-
 .../security/access/AccessControlClient.java    | 10 +--
 .../security/visibility/VisibilityClient.java   | 10 +--
 .../apache/hadoop/hbase/zookeeper/ZKUtil.java   |  4 +-
 .../hbase/client/TestClientNoCluster.java       | 16 ++--
 hbase-protocol/pom.xml                          |  4 +
 .../protobuf/HBaseZeroCopyByteString.java       |  3 +-
 .../apache/hadoop/hbase/util/ByteStringer.java  | 67 +++++++++++++++++
 .../apache/hadoop/hbase/codec/MessageCodec.java | 10 +--
 .../org/apache/hadoop/hbase/io/Reference.java   |  4 +-
 .../hadoop/hbase/io/hfile/FixedFileTrailer.java |  4 +-
 .../org/apache/hadoop/hbase/io/hfile/HFile.java |  6 +-
 .../hadoop/hbase/io/hfile/HFileBlock.java       |  4 +-
 .../hbase/protobuf/ReplicationProtbufUtil.java  |  8 +-
 .../hbase/regionserver/HRegionServer.java       |  7 +-
 .../hadoop/hbase/regionserver/wal/HLogKey.java  |  8 +-
 .../wal/SecureProtobufLogWriter.java            |  4 +-
 .../hadoop/hbase/rest/model/CellModel.java      |  6 +-
 .../hadoop/hbase/rest/model/CellSetModel.java   |  8 +-
 .../hadoop/hbase/rest/model/ScannerModel.java   |  8 +-
 .../rest/model/StorageClusterStatusModel.java   |  4 +-
 .../hadoop/hbase/rest/model/TableInfoModel.java |  6 +-
 .../visibility/VisibilityController.java        |  4 +-
 .../security/visibility/VisibilityUtils.java    |  6 +-
 .../TestBatchCoprocessorEndpoint.java           | 15 ++--
 .../coprocessor/TestCoprocessorEndpoint.java    |  6 +-
 .../coprocessor/TestRowProcessorEndpoint.java   | 14 ++--
 .../hadoop/hbase/protobuf/TestProtobufUtil.java | 10 +--
 .../hbase/regionserver/TestPriorityRpc.java     |  6 +-
 .../regionserver/TestReplicationSink.java       |  6 +-
 52 files changed, 284 insertions(+), 218 deletions(-)
----------------------------------------------------------------------
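The new org.apache.hadoop.hbase.util.ByteStringer added by this commit (see the
hbase-protocol diffstat entry above) is what every call site now goes through
instead of referencing HBaseZeroCopyByteString directly. Roughly, it probes at
class-load time whether HBaseZeroCopyByteString can be used from the current
classloader and, if not, falls back to the copying ByteString.copyFrom() path.
A minimal sketch of that pattern follows; the field name and exact structure
are illustrative, not a verbatim copy of the committed file:

  import com.google.protobuf.ByteString;
  import com.google.protobuf.HBaseZeroCopyByteString;

  public final class ByteStringer {
    // Decided once at class-load time: can we touch HBaseZeroCopyByteString
    // without tripping the IllegalAccessError named in the commit subject?
    private static boolean USE_ZEROCOPY = true;

    static {
      try {
        HBaseZeroCopyByteString.wrap(new byte[0]);
      } catch (IllegalAccessError iae) {
        // The protobuf classes were loaded by a different classloader, so the
        // package-private superclass is not accessible; copy bytes instead.
        USE_ZEROCOPY = false;
      }
    }

    private ByteStringer() {}

    /** Wrap a byte array, zero-copy when possible. */
    public static ByteString wrap(final byte[] array) {
      return USE_ZEROCOPY ? HBaseZeroCopyByteString.wrap(array)
                          : ByteString.copyFrom(array);
    }

    /** Wrap a slice of a byte array, zero-copy when possible. */
    public static ByteString wrap(final byte[] array, int offset, int length) {
      return USE_ZEROCOPY ? HBaseZeroCopyByteString.wrap(array, offset, length)
                          : ByteString.copyFrom(array, offset, length);
    }
  }

Call sites change mechanically from HBaseZeroCopyByteString.wrap(...) to
ByteStringer.wrap(...), as the hunks below show.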


http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
index 53704ea..236304c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
@@ -26,7 +26,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.master.RegionState;
@@ -334,7 +334,7 @@ public class ClusterStatus extends VersionedWritable {
         ClusterStatusProtos.RegionState rs = rit.getValue().convert();
         RegionSpecifier.Builder spec =
             RegionSpecifier.newBuilder().setType(RegionSpecifierType.REGION_NAME);
-        spec.setValue(HBaseZeroCopyByteString.wrap(Bytes.toBytes(rit.getKey())));
+        spec.setValue(ByteStringer.wrap(Bytes.toBytes(rit.getKey())));
 
         RegionInTransition pbRIT =
             RegionInTransition.newBuilder().setSpec(spec.build()).setRegionState(rs).build();

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 2a5a70c..e7cdddb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -45,7 +45,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
 
 import com.google.common.base.Preconditions;
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import com.google.protobuf.InvalidProtocolBufferException;
 
 /**
@@ -1272,11 +1272,11 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
    */
   public ColumnFamilySchema convert() {
     ColumnFamilySchema.Builder builder = ColumnFamilySchema.newBuilder();
-    builder.setName(HBaseZeroCopyByteString.wrap(getName()));
+    builder.setName(ByteStringer.wrap(getName()));
     for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e: this.values.entrySet()) {
       BytesBytesPair.Builder aBuilder = BytesBytesPair.newBuilder();
-      aBuilder.setFirst(HBaseZeroCopyByteString.wrap(e.getKey().get()));
-      aBuilder.setSecond(HBaseZeroCopyByteString.wrap(e.getValue().get()));
+      aBuilder.setFirst(ByteStringer.wrap(e.getKey().get()));
+      aBuilder.setSecond(ByteStringer.wrap(e.getValue().get()));
       builder.addAttributes(aBuilder.build());
     }
     for (Map.Entry<String, String> e : this.configuration.entrySet()) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
index afd1d04..502688c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
@@ -29,7 +29,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -842,10 +842,10 @@ public class HRegionInfo implements Comparable<HRegionInfo> {
     builder.setTableName(ProtobufUtil.toProtoTableName(info.getTable()));
     builder.setRegionId(info.getRegionId());
     if (info.getStartKey() != null) {
-      builder.setStartKey(HBaseZeroCopyByteString.wrap(info.getStartKey()));
+      builder.setStartKey(ByteStringer.wrap(info.getStartKey()));
     }
     if (info.getEndKey() != null) {
-      builder.setEndKey(HBaseZeroCopyByteString.wrap(info.getEndKey()));
+      builder.setEndKey(ByteStringer.wrap(info.getEndKey()));
     }
     builder.setOffline(info.isOffline());
     builder.setSplit(info.isSplit());

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index d8dad03..702618c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -34,7 +34,7 @@ import java.util.TreeMap;
 import java.util.TreeSet;
 import java.util.regex.Matcher;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -1432,8 +1432,8 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
     builder.setTableName(ProtobufUtil.toProtoTableName(getTableName()));
     for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e: this.values.entrySet()) {
       BytesBytesPair.Builder aBuilder = BytesBytesPair.newBuilder();
-      aBuilder.setFirst(HBaseZeroCopyByteString.wrap(e.getKey().get()));
-      aBuilder.setSecond(HBaseZeroCopyByteString.wrap(e.getValue().get()));
+      aBuilder.setFirst(ByteStringer.wrap(e.getKey().get()));
+      aBuilder.setSecond(ByteStringer.wrap(e.getValue().get()));
       builder.addAttributes(aBuilder.build());
     }
     for (HColumnDescriptor hcd: getColumnFamilies()) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java
index 0632825..7e7f5c1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTransition.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import com.google.protobuf.InvalidProtocolBufferException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -104,10 +104,10 @@ public class RegionTransition {
       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder().
         setHostName(sn.getHostname()).setPort(sn.getPort()).setStartCode(sn.getStartcode()).build();
     ZooKeeperProtos.RegionTransition.Builder builder = ZooKeeperProtos.RegionTransition.newBuilder().
-      setEventTypeCode(type.getCode()).setRegionName(HBaseZeroCopyByteString.wrap(regionName)).
+      setEventTypeCode(type.getCode()).setRegionName(ByteStringer.wrap(regionName)).
         setServerName(pbsn);
     builder.setCreateTime(System.currentTimeMillis());
-    if (payload != null) builder.setPayload(HBaseZeroCopyByteString.wrap(payload));
+    if (payload != null) builder.setPayload(ByteStringer.wrap(payload));
     return new RegionTransition(builder.build());
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
index cc1fcfd..f52b739 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
@@ -22,7 +22,7 @@ import java.io.IOException;
 import java.math.BigDecimal;
 import java.math.RoundingMode;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
@@ -121,7 +121,7 @@ public class BigDecimalColumnInterpreter extends ColumnInterpreter<BigDecimal, B
 
   private BigDecimalMsg getProtoForType(BigDecimal t) {
     BigDecimalMsg.Builder builder = BigDecimalMsg.newBuilder();
-    return builder.setBigdecimalMsg(HBaseZeroCopyByteString.wrap(Bytes.toBytes(t))).build();
+    return builder.setBigdecimalMsg(ByteStringer.wrap(Bytes.toBytes(t))).build();
   }
   
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java
index dc94895..48986b1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/SecureBulkLoadClient.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.client.coprocessor;
 import static org.apache.hadoop.hbase.HConstants.EMPTY_START_ROW;
 import static org.apache.hadoop.hbase.HConstants.LAST_ROW;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.Path;
@@ -136,8 +136,8 @@ public class SecureBulkLoadClient {
       if(userToken != null) {
         protoDT =
             SecureBulkLoadProtos.DelegationToken.newBuilder()
-              .setIdentifier(HBaseZeroCopyByteString.wrap(userToken.getIdentifier()))
-              .setPassword(HBaseZeroCopyByteString.wrap(userToken.getPassword()))
+              .setIdentifier(ByteStringer.wrap(userToken.getIdentifier()))
+              .setPassword(ByteStringer.wrap(userToken.getPassword()))
               .setKind(userToken.getKind().toString())
               .setService(userToken.getService().toString()).build();
       }
@@ -146,7 +146,7 @@ public class SecureBulkLoadClient {
           new ArrayList<ClientProtos.BulkLoadHFileRequest.FamilyPath>();
       for(Pair<byte[], String> el: familyPaths) {
         protoFamilyPaths.add(ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder()
-          .setFamily(HBaseZeroCopyByteString.wrap(el.getFirst()))
+          .setFamily(ByteStringer.wrap(el.getFirst()))
           .setPath(el.getSecond()).build());
       }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java
index 73d735a..fc4e234 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java
@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hbase.filter;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -53,7 +53,7 @@ public abstract class ByteArrayComparable implements Comparable<byte[]> {
   ComparatorProtos.ByteArrayComparable convert() {
     ComparatorProtos.ByteArrayComparable.Builder builder =
       ComparatorProtos.ByteArrayComparable.newBuilder();
-    if (value != null) builder.setValue(HBaseZeroCopyByteString.wrap(value));
+    if (value != null) builder.setValue(ByteStringer.wrap(value));
     return builder.build();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
index 8094395..487d512 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.filter;
 
 import java.util.ArrayList;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
@@ -175,7 +175,7 @@ public class ColumnPaginationFilter extends FilterBase
       builder.setOffset(this.offset);
     }
     if (this.columnOffset != null) {
-      builder.setColumnOffset(HBaseZeroCopyByteString.wrap(this.columnOffset));
+      builder.setColumnOffset(ByteStringer.wrap(this.columnOffset));
     }
     return builder.build().toByteArray();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
index a6873d9..012a96d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.filter;
 
 import java.util.ArrayList;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
@@ -95,7 +95,7 @@ public class ColumnPrefixFilter extends FilterBase {
   public byte [] toByteArray() {
     FilterProtos.ColumnPrefixFilter.Builder builder =
       FilterProtos.ColumnPrefixFilter.newBuilder();
-    if (this.prefix != null) builder.setPrefix(HBaseZeroCopyByteString.wrap(this.prefix));
+    if (this.prefix != null) builder.setPrefix(ByteStringer.wrap(this.prefix));
     return builder.build().toByteArray();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
index 85c96f0..408d665 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
 import static org.apache.hadoop.hbase.util.Bytes.len;
 
 import com.google.common.base.Preconditions;
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import com.google.protobuf.InvalidProtocolBufferException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -173,9 +173,9 @@ public class ColumnRangeFilter extends FilterBase {
   public byte [] toByteArray() {
     FilterProtos.ColumnRangeFilter.Builder builder =
       FilterProtos.ColumnRangeFilter.newBuilder();
-    if (this.minColumn != null) builder.setMinColumn(HBaseZeroCopyByteString.wrap(this.minColumn));
+    if (this.minColumn != null) builder.setMinColumn(ByteStringer.wrap(this.minColumn));
     builder.setMinColumnInclusive(this.minColumnInclusive);
-    if (this.maxColumn != null) builder.setMaxColumn(HBaseZeroCopyByteString.wrap(this.maxColumn));
+    if (this.maxColumn != null) builder.setMaxColumn(ByteStringer.wrap(this.maxColumn));
     builder.setMaxColumnInclusive(this.maxColumnInclusive);
     return builder.build().toByteArray();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
index 32a3dcd..aa0586d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
@@ -25,7 +25,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Set;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
@@ -225,10 +225,10 @@ public class DependentColumnFilter extends CompareFilter {
       FilterProtos.DependentColumnFilter.newBuilder();
     builder.setCompareFilter(super.convert());
     if (this.columnFamily != null) {
-      builder.setColumnFamily(HBaseZeroCopyByteString.wrap(this.columnFamily));
+      builder.setColumnFamily(ByteStringer.wrap(this.columnFamily));
     }
     if (this.columnQualifier != null) {
-      builder.setColumnQualifier(HBaseZeroCopyByteString.wrap(this.columnQualifier));
+      builder.setColumnQualifier(ByteStringer.wrap(this.columnQualifier));
     }
     builder.setDropDependentColumn(this.dropDependentColumn);
     return builder.build().toByteArray();

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
index edee29a..9bad808 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hbase.filter;
 
 import com.google.protobuf.ByteString;
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import com.google.protobuf.InvalidProtocolBufferException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -89,7 +89,7 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter {
     FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder builder =
       FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder();
     for (byte[] qualifier : qualifiers) {
-      if (qualifier != null) builder.addQualifiers(HBaseZeroCopyByteString.wrap(qualifier));
+      if (qualifier != null) builder.addQualifiers(ByteStringer.wrap(qualifier));
     }
     return builder.build().toByteArray();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
index ee3ece2..d565f31 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.filter;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import com.google.protobuf.InvalidProtocolBufferException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -147,8 +147,8 @@ public class FuzzyRowFilter extends FilterBase {
       FilterProtos.FuzzyRowFilter.newBuilder();
     for (Pair<byte[], byte[]> fuzzyData : fuzzyKeysData) {
       BytesBytesPair.Builder bbpBuilder = BytesBytesPair.newBuilder();
-      bbpBuilder.setFirst(HBaseZeroCopyByteString.wrap(fuzzyData.getFirst()));
-      bbpBuilder.setSecond(HBaseZeroCopyByteString.wrap(fuzzyData.getSecond()));
+      bbpBuilder.setFirst(ByteStringer.wrap(fuzzyData.getFirst()));
+      bbpBuilder.setSecond(ByteStringer.wrap(fuzzyData.getSecond()));
       builder.addFuzzyKeysData(bbpBuilder);
     }
     return builder.build().toByteArray();

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
index 2c03067..c96ca11 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
@@ -21,12 +21,11 @@ package org.apache.hadoop.hbase.filter;
 
 import java.util.ArrayList;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
 import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -94,7 +93,7 @@ public class InclusiveStopFilter extends FilterBase {
   public byte [] toByteArray() {
     FilterProtos.InclusiveStopFilter.Builder builder =
       FilterProtos.InclusiveStopFilter.newBuilder();
-    if (this.stopRowKey != null) builder.setStopRowKey(HBaseZeroCopyByteString.wrap(this.stopRowKey));
+    if (this.stopRowKey != null) builder.setStopRowKey(ByteStringer.wrap(this.stopRowKey));
     return builder.build().toByteArray();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
index 88ea67b..e80b094 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hbase.filter;
 
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import com.google.protobuf.InvalidProtocolBufferException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -114,7 +114,7 @@ public class MultipleColumnPrefixFilter extends FilterBase {
     FilterProtos.MultipleColumnPrefixFilter.Builder builder =
       FilterProtos.MultipleColumnPrefixFilter.newBuilder();
     for (byte [] element : sortedPrefixes) {
-      if (element != null) builder.addSortedPrefixes(HBaseZeroCopyByteString.wrap(element));
+      if (element != null) builder.addSortedPrefixes(ByteStringer.wrap(element));
     }
     return builder.build().toByteArray();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
index 29bdcde..c0c8396 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
@@ -20,7 +20,7 @@
 package org.apache.hadoop.hbase.filter;
 
 import com.google.common.base.Preconditions;
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import com.google.protobuf.InvalidProtocolBufferException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -98,7 +98,7 @@ public class PrefixFilter extends FilterBase {
   public byte [] toByteArray() {
     FilterProtos.PrefixFilter.Builder builder =
       FilterProtos.PrefixFilter.newBuilder();
-    if (this.prefix != null) builder.setPrefix(HBaseZeroCopyByteString.wrap(this.prefix));
+    if (this.prefix != null) builder.setPrefix(ByteStringer.wrap(this.prefix));
     return builder.build().toByteArray();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
index 771efeb..21a2207 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -307,10 +307,10 @@ public class SingleColumnValueFilter extends FilterBase {
     FilterProtos.SingleColumnValueFilter.Builder builder =
       FilterProtos.SingleColumnValueFilter.newBuilder();
     if (this.columnFamily != null) {
-      builder.setColumnFamily(HBaseZeroCopyByteString.wrap(this.columnFamily));
+      builder.setColumnFamily(ByteStringer.wrap(this.columnFamily));
     }
     if (this.columnQualifier != null) {
-      builder.setColumnQualifier(HBaseZeroCopyByteString.wrap(this.columnQualifier));
+      builder.setColumnQualifier(ByteStringer.wrap(this.columnQualifier));
     }
     HBaseProtos.CompareType compareOp = CompareType.valueOf(this.compareOp.name());
     builder.setCompareOp(compareOp);

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
index 97fdb6f..c60b6b8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.ipc;
 
 import java.io.IOException;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -61,7 +61,7 @@ public class MasterCoprocessorRpcChannel extends CoprocessorRpcChannel{
 
     final ClientProtos.CoprocessorServiceCall call =
         ClientProtos.CoprocessorServiceCall.newBuilder()
-            .setRow(HBaseZeroCopyByteString.wrap(HConstants.EMPTY_BYTE_ARRAY))
+            .setRow(ByteStringer.wrap(HConstants.EMPTY_BYTE_ARRAY))
             .setServiceName(method.getService().getFullName())
             .setMethodName(method.getName())
             .setRequest(request.toByteString()).build();

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
index de1259d..818268b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.ipc;
 
 import java.io.IOException;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -80,7 +80,7 @@ public class RegionCoprocessorRpcChannel extends CoprocessorRpcChannel{
 
     final ClientProtos.CoprocessorServiceCall call =
         ClientProtos.CoprocessorServiceCall.newBuilder()
-            .setRow(HBaseZeroCopyByteString.wrap(row))
+            .setRow(ByteStringer.wrap(row))
             .setServiceName(method.getService().getFullName())
             .setMethodName(method.getName())
             .setRequest(request.toByteString()).build();

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 606a585..fe7708b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -124,6 +124,7 @@ import org.apache.hadoop.hbase.security.access.UserPermission;
 import org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier;
 import org.apache.hadoop.hbase.security.visibility.Authorizations;
 import org.apache.hadoop.hbase.security.visibility.CellVisibility;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.DynamicClassLoader;
 import org.apache.hadoop.hbase.util.ExceptionUtil;
@@ -825,17 +826,17 @@ public final class ProtobufUtil {
       NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder();
       for (Map.Entry<String, byte[]> attribute: attributes.entrySet()) {
         attributeBuilder.setName(attribute.getKey());
-        attributeBuilder.setValue(HBaseZeroCopyByteString.wrap(attribute.getValue()));
+        attributeBuilder.setValue(ByteStringer.wrap(attribute.getValue()));
         scanBuilder.addAttribute(attributeBuilder.build());
       }
     }
     byte[] startRow = scan.getStartRow();
     if (startRow != null && startRow.length > 0) {
-      scanBuilder.setStartRow(HBaseZeroCopyByteString.wrap(startRow));
+      scanBuilder.setStartRow(ByteStringer.wrap(startRow));
     }
     byte[] stopRow = scan.getStopRow();
     if (stopRow != null && stopRow.length > 0) {
-      scanBuilder.setStopRow(HBaseZeroCopyByteString.wrap(stopRow));
+      scanBuilder.setStopRow(ByteStringer.wrap(stopRow));
     }
     if (scan.hasFilter()) {
       scanBuilder.setFilter(ProtobufUtil.toFilter(scan.getFilter()));
@@ -844,12 +845,12 @@ public final class ProtobufUtil {
       Column.Builder columnBuilder = Column.newBuilder();
       for (Map.Entry<byte[],NavigableSet<byte []>>
           family: scan.getFamilyMap().entrySet()) {
-        columnBuilder.setFamily(HBaseZeroCopyByteString.wrap(family.getKey()));
+        columnBuilder.setFamily(ByteStringer.wrap(family.getKey()));
         NavigableSet<byte []> qualifiers = family.getValue();
         columnBuilder.clearQualifier();
         if (qualifiers != null && qualifiers.size() > 0) {
           for (byte [] qualifier: qualifiers) {
-            columnBuilder.addQualifier(HBaseZeroCopyByteString.wrap(qualifier));
+            columnBuilder.addQualifier(ByteStringer.wrap(qualifier));
           }
         }
         scanBuilder.addColumn(columnBuilder.build());
@@ -957,7 +958,7 @@ public final class ProtobufUtil {
       final Get get) throws IOException {
     ClientProtos.Get.Builder builder =
       ClientProtos.Get.newBuilder();
-    builder.setRow(HBaseZeroCopyByteString.wrap(get.getRow()));
+    builder.setRow(ByteStringer.wrap(get.getRow()));
     builder.setCacheBlocks(get.getCacheBlocks());
     builder.setMaxVersions(get.getMaxVersions());
     if (get.getFilter() != null) {
@@ -976,7 +977,7 @@ public final class ProtobufUtil {
       NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder();
       for (Map.Entry<String, byte[]> attribute: attributes.entrySet()) {
         attributeBuilder.setName(attribute.getKey());
-        attributeBuilder.setValue(HBaseZeroCopyByteString.wrap(attribute.getValue()));
+        attributeBuilder.setValue(ByteStringer.wrap(attribute.getValue()));
         builder.addAttribute(attributeBuilder.build());
       }
     }
@@ -985,11 +986,11 @@ public final class ProtobufUtil {
       Map<byte[], NavigableSet<byte[]>> families = get.getFamilyMap();
       for (Map.Entry<byte[], NavigableSet<byte[]>> family: families.entrySet()) {
         NavigableSet<byte[]> qualifiers = family.getValue();
-        columnBuilder.setFamily(HBaseZeroCopyByteString.wrap(family.getKey()));
+        columnBuilder.setFamily(ByteStringer.wrap(family.getKey()));
         columnBuilder.clearQualifier();
         if (qualifiers != null && qualifiers.size() > 0) {
           for (byte[] qualifier: qualifiers) {
-            columnBuilder.addQualifier(HBaseZeroCopyByteString.wrap(qualifier));
+            columnBuilder.addQualifier(ByteStringer.wrap(qualifier));
           }
         }
         builder.addColumn(columnBuilder.build());
@@ -1018,7 +1019,7 @@ public final class ProtobufUtil {
    */
   public static MutationProto toMutation(
     final Increment increment, final MutationProto.Builder builder, long nonce) {
-    builder.setRow(HBaseZeroCopyByteString.wrap(increment.getRow()));
+    builder.setRow(ByteStringer.wrap(increment.getRow()));
     builder.setMutateType(MutationType.INCREMENT);
     builder.setDurability(toDurability(increment.getDurability()));
     if (nonce != HConstants.NO_NONCE) {
@@ -1035,18 +1036,18 @@ public final class ProtobufUtil {
     ColumnValue.Builder columnBuilder = ColumnValue.newBuilder();
     QualifierValue.Builder valueBuilder = QualifierValue.newBuilder();
     for (Map.Entry<byte[], List<Cell>> family: increment.getFamilyCellMap().entrySet()) {
-      columnBuilder.setFamily(HBaseZeroCopyByteString.wrap(family.getKey()));
+      columnBuilder.setFamily(ByteStringer.wrap(family.getKey()));
       columnBuilder.clearQualifierValue();
       List<Cell> values = family.getValue();
       if (values != null && values.size() > 0) {
         for (Cell cell: values) {
           KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
-          valueBuilder.setQualifier(HBaseZeroCopyByteString.wrap(
+          valueBuilder.setQualifier(ByteStringer.wrap(
               kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
-          valueBuilder.setValue(HBaseZeroCopyByteString.wrap(
+          valueBuilder.setValue(ByteStringer.wrap(
               kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
           if (kv.getTagsLength() > 0) {
-            valueBuilder.setTags(HBaseZeroCopyByteString.wrap(kv.getTagsArray(),
+            valueBuilder.setTags(ByteStringer.wrap(kv.getTagsArray(),
                 kv.getTagsOffset(), kv.getTagsLength()));
           }
           columnBuilder.addQualifierValue(valueBuilder.build());
@@ -1059,7 +1060,7 @@ public final class ProtobufUtil {
       NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder();
       for (Map.Entry<String, byte[]> attribute : attributes.entrySet()) {
         attributeBuilder.setName(attribute.getKey());
-        attributeBuilder.setValue(HBaseZeroCopyByteString.wrap(attribute.getValue()));
+        attributeBuilder.setValue(ByteStringer.wrap(attribute.getValue()));
         builder.addAttribute(attributeBuilder.build());
       }
     }
@@ -1100,16 +1101,16 @@ public final class ProtobufUtil {
     QualifierValue.Builder valueBuilder = QualifierValue.newBuilder();
     for (Map.Entry<byte[],List<Cell>> family: mutation.getFamilyCellMap().entrySet()) {
       columnBuilder.clear();
-      columnBuilder.setFamily(HBaseZeroCopyByteString.wrap(family.getKey()));
+      columnBuilder.setFamily(ByteStringer.wrap(family.getKey()));
       for (Cell cell: family.getValue()) {
         KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
-        valueBuilder.setQualifier(HBaseZeroCopyByteString.wrap(
+        valueBuilder.setQualifier(ByteStringer.wrap(
             kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength()));
-        valueBuilder.setValue(HBaseZeroCopyByteString.wrap(
+        valueBuilder.setValue(ByteStringer.wrap(
             kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()));
         valueBuilder.setTimestamp(kv.getTimestamp());
         if(cell.getTagsLength() > 0) {
-          valueBuilder.setTags(HBaseZeroCopyByteString.wrap(kv.getTagsArray(), kv.getTagsOffset(),
+          valueBuilder.setTags(ByteStringer.wrap(kv.getTagsArray(), kv.getTagsOffset(),
               kv.getTagsLength()));
         }
         if (type == MutationType.DELETE) {
@@ -1170,7 +1171,7 @@ public final class ProtobufUtil {
    */
   private static MutationProto.Builder getMutationBuilderAndSetCommonFields(final MutationType type,
       final Mutation mutation, MutationProto.Builder builder) {
-    builder.setRow(HBaseZeroCopyByteString.wrap(mutation.getRow()));
+    builder.setRow(ByteStringer.wrap(mutation.getRow()));
     builder.setMutateType(type);
     builder.setDurability(toDurability(mutation.getDurability()));
     builder.setTimestamp(mutation.getTimeStamp());
@@ -1179,7 +1180,7 @@ public final class ProtobufUtil {
       NameBytesPair.Builder attributeBuilder = NameBytesPair.newBuilder();
       for (Map.Entry<String, byte[]> attribute: attributes.entrySet()) {
         attributeBuilder.setName(attribute.getKey());
-        attributeBuilder.setValue(HBaseZeroCopyByteString.wrap(attribute.getValue()));
+        attributeBuilder.setValue(ByteStringer.wrap(attribute.getValue()));
         builder.addAttribute(attributeBuilder.build());
       }
     }
@@ -1310,7 +1311,7 @@ public final class ProtobufUtil {
   public static ComparatorProtos.Comparator toComparator(ByteArrayComparable comparator) {
     ComparatorProtos.Comparator.Builder builder = ComparatorProtos.Comparator.newBuilder();
     builder.setName(comparator.getClass().getName());
-    builder.setSerializedComparator(HBaseZeroCopyByteString.wrap(comparator.toByteArray()));
+    builder.setSerializedComparator(ByteStringer.wrap(comparator.toByteArray()));
     return builder.build();
   }
 
@@ -1372,7 +1373,7 @@ public final class ProtobufUtil {
   public static FilterProtos.Filter toFilter(Filter filter) throws IOException {
     FilterProtos.Filter.Builder builder = FilterProtos.Filter.newBuilder();
     builder.setName(filter.getClass().getName());
-    builder.setSerializedFilter(HBaseZeroCopyByteString.wrap(filter.toByteArray()));
+    builder.setSerializedFilter(ByteStringer.wrap(filter.toByteArray()));
     return builder.build();
   }
 
@@ -1915,10 +1916,10 @@ public final class ProtobufUtil {
             AccessControlProtos.TablePermission.newBuilder();
         builder.setTableName(ProtobufUtil.toProtoTableName(tablePerm.getTableName()));
         if (tablePerm.hasFamily()) {
-          builder.setFamily(HBaseZeroCopyByteString.wrap(tablePerm.getFamily()));
+          builder.setFamily(ByteStringer.wrap(tablePerm.getFamily()));
         }
         if (tablePerm.hasQualifier()) {
-          builder.setQualifier(HBaseZeroCopyByteString.wrap(tablePerm.getQualifier()));
+          builder.setQualifier(ByteStringer.wrap(tablePerm.getQualifier()));
         }
         Permission.Action actions[] = perm.getActions();
         if (actions != null) {
@@ -2014,7 +2015,7 @@ public final class ProtobufUtil {
    */
   public static AccessControlProtos.UserPermission toUserPermission(UserPermission perm) {
     return AccessControlProtos.UserPermission.newBuilder()
-        .setUser(HBaseZeroCopyByteString.wrap(perm.getUser()))
+        .setUser(ByteStringer.wrap(perm.getUser()))
         .setPermission(toPermission(perm))
         .build();
   }
@@ -2270,7 +2271,7 @@ public final class ProtobufUtil {
     AccessControlProtos.GetUserPermissionsRequest.Builder builder =
       AccessControlProtos.GetUserPermissionsRequest.newBuilder();
     if (namespace != null) {
-      builder.setNamespaceName(HBaseZeroCopyByteString.wrap(namespace));
+      builder.setNamespaceName(ByteStringer.wrap(namespace));
     }
     builder.setType(AccessControlProtos.Permission.Type.Namespace);
     AccessControlProtos.GetUserPermissionsRequest request = builder.build();
@@ -2314,8 +2315,8 @@ public final class ProtobufUtil {
    */
   public static AuthenticationProtos.Token toToken(Token<AuthenticationTokenIdentifier> token) {
     AuthenticationProtos.Token.Builder builder = AuthenticationProtos.Token.newBuilder();
-    builder.setIdentifier(HBaseZeroCopyByteString.wrap(token.getIdentifier()));
-    builder.setPassword(HBaseZeroCopyByteString.wrap(token.getPassword()));
+    builder.setIdentifier(ByteStringer.wrap(token.getIdentifier()));
+    builder.setPassword(ByteStringer.wrap(token.getPassword()));
     if (token.getService() != null) {
       builder.setService(ByteString.copyFromUtf8(token.getService().toString()));
     }
@@ -2412,15 +2413,15 @@ public final class ProtobufUtil {
     // Doing this is going to kill us if we do it for all data passed.
     // St.Ack 20121205
     CellProtos.Cell.Builder kvbuilder = CellProtos.Cell.newBuilder();
-    kvbuilder.setRow(HBaseZeroCopyByteString.wrap(kv.getRowArray(), kv.getRowOffset(),
+    kvbuilder.setRow(ByteStringer.wrap(kv.getRowArray(), kv.getRowOffset(),
         kv.getRowLength()));
-    kvbuilder.setFamily(HBaseZeroCopyByteString.wrap(kv.getFamilyArray(),
+    kvbuilder.setFamily(ByteStringer.wrap(kv.getFamilyArray(),
         kv.getFamilyOffset(), kv.getFamilyLength()));
-    kvbuilder.setQualifier(HBaseZeroCopyByteString.wrap(kv.getQualifierArray(),
+    kvbuilder.setQualifier(ByteStringer.wrap(kv.getQualifierArray(),
         kv.getQualifierOffset(), kv.getQualifierLength()));
     kvbuilder.setCellType(CellProtos.CellType.valueOf(kv.getTypeByte()));
     kvbuilder.setTimestamp(kv.getTimestamp());
-    kvbuilder.setValue(HBaseZeroCopyByteString.wrap(kv.getValueArray(), kv.getValueOffset(),
+    kvbuilder.setValue(ByteStringer.wrap(kv.getValueArray(), kv.getValueOffset(),
         kv.getValueLength()));
     return kvbuilder.build();
   }
@@ -2499,9 +2500,9 @@ public final class ProtobufUtil {
     // input / output paths are relative to the store dir
     // store dir is relative to region dir
     CompactionDescriptor.Builder builder = CompactionDescriptor.newBuilder()
-        .setTableName(HBaseZeroCopyByteString.wrap(info.getTableName()))
-        .setEncodedRegionName(HBaseZeroCopyByteString.wrap(info.getEncodedNameAsBytes()))
-        .setFamilyName(HBaseZeroCopyByteString.wrap(family))
+        .setTableName(ByteStringer.wrap(info.getTableName()))
+        .setEncodedRegionName(ByteStringer.wrap(info.getEncodedNameAsBytes()))
+        .setFamilyName(ByteStringer.wrap(family))
         .setStoreHomeDir(storeDir.getName()); //make relative
     for (Path inputPath : inputPaths) {
       builder.addCompactionInput(inputPath.getName()); //relative path
@@ -2579,8 +2580,8 @@ public final class ProtobufUtil {
 
   public static HBaseProtos.TableName toProtoTableName(TableName tableName) {
     return HBaseProtos.TableName.newBuilder()
-        .setNamespace(HBaseZeroCopyByteString.wrap(tableName.getNamespace()))
-        .setQualifier(HBaseZeroCopyByteString.wrap(tableName.getQualifier())).build();
+        .setNamespace(ByteStringer.wrap(tableName.getNamespace()))
+        .setQualifier(ByteStringer.wrap(tableName.getQualifier())).build();
   }
 
   public static TableName[] getTableNameArray(List<HBaseProtos.TableName> tableNamesList) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
index 1a6b42d..544af85 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.protobuf;
 import java.io.IOException;
 import java.util.List;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 
 import org.apache.commons.configuration.Configuration;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -137,10 +137,10 @@ public final class RequestConverter {
     builder.setRegion(region);
 
     Column.Builder columnBuilder = Column.newBuilder();
-    columnBuilder.setFamily(HBaseZeroCopyByteString.wrap(family));
+    columnBuilder.setFamily(ByteStringer.wrap(family));
     ClientProtos.Get.Builder getBuilder =
       ClientProtos.Get.newBuilder();
-    getBuilder.setRow(HBaseZeroCopyByteString.wrap(row));
+    getBuilder.setRow(ByteStringer.wrap(row));
     getBuilder.addColumn(columnBuilder.build());
     getBuilder.setClosestRowBefore(true);
     builder.setGet(getBuilder.build());
@@ -185,14 +185,14 @@ public final class RequestConverter {
     builder.setRegion(region);
 
     MutationProto.Builder mutateBuilder = MutationProto.newBuilder();
-    mutateBuilder.setRow(HBaseZeroCopyByteString.wrap(row));
+    mutateBuilder.setRow(ByteStringer.wrap(row));
     mutateBuilder.setMutateType(MutationType.INCREMENT);
     mutateBuilder.setDurability(ProtobufUtil.toDurability(durability));
     ColumnValue.Builder columnBuilder = ColumnValue.newBuilder();
-    columnBuilder.setFamily(HBaseZeroCopyByteString.wrap(family));
+    columnBuilder.setFamily(ByteStringer.wrap(family));
     QualifierValue.Builder valueBuilder = QualifierValue.newBuilder();
-    valueBuilder.setValue(HBaseZeroCopyByteString.wrap(Bytes.toBytes(amount)));
-    valueBuilder.setQualifier(HBaseZeroCopyByteString.wrap(qualifier));
+    valueBuilder.setValue(ByteStringer.wrap(Bytes.toBytes(amount)));
+    valueBuilder.setQualifier(ByteStringer.wrap(qualifier));
     columnBuilder.addQualifierValue(valueBuilder.build());
     mutateBuilder.addColumnValue(columnBuilder.build());
     if (nonce != HConstants.NO_NONCE) {
@@ -495,7 +495,7 @@ public final class RequestConverter {
     builder.setRegion(region);
     FamilyPath.Builder familyPathBuilder = FamilyPath.newBuilder();
     for (Pair<byte[], String> familyPath: familyPaths) {
-      familyPathBuilder.setFamily(HBaseZeroCopyByteString.wrap(familyPath.getFirst()));
+      familyPathBuilder.setFamily(ByteStringer.wrap(familyPath.getFirst()));
       familyPathBuilder.setPath(familyPath.getSecond());
       builder.addFamilyPath(familyPathBuilder.build());
     }
@@ -540,7 +540,7 @@ public final class RequestConverter {
         RegionCoprocessorServiceExec exec = (RegionCoprocessorServiceExec) row;
         regionActionBuilder.addAction(actionBuilder.setServiceCall(
             ClientProtos.CoprocessorServiceCall.newBuilder()
-              .setRow(HBaseZeroCopyByteString.wrap(exec.getRow()))
+              .setRow(ByteStringer.wrap(exec.getRow()))
               .setServiceName(exec.getMethod().getService().getFullName())
               .setMethodName(exec.getMethod().getName())
               .setRequest(exec.getRequest().toByteString())));
@@ -671,7 +671,7 @@ public final class RequestConverter {
    RegionSpecifier region = buildRegionSpecifier(
      RegionSpecifierType.REGION_NAME, regionName);
    builder.setRegion(region);
-   builder.addFamily(HBaseZeroCopyByteString.wrap(family));
+   builder.addFamily(ByteStringer.wrap(family));
    return builder.build();
  }
 
@@ -834,7 +834,7 @@ public final class RequestConverter {
      RegionSpecifierType.REGION_NAME, regionName);
    builder.setRegion(region);
    if (splitPoint != null) {
-     builder.setSplitPoint(HBaseZeroCopyByteString.wrap(splitPoint));
+     builder.setSplitPoint(ByteStringer.wrap(splitPoint));
    }
    return builder.build();
  }
@@ -874,7 +874,7 @@ public final class RequestConverter {
    builder.setRegion(region);
    builder.setMajor(major);
    if (family != null) {
-     builder.setFamily(HBaseZeroCopyByteString.wrap(family));
+     builder.setFamily(ByteStringer.wrap(family));
    }
    return builder.build();
  }
@@ -933,7 +933,7 @@ public final class RequestConverter {
   public static RegionSpecifier buildRegionSpecifier(
       final RegionSpecifierType type, final byte[] value) {
     RegionSpecifier.Builder regionBuilder = RegionSpecifier.newBuilder();
-    regionBuilder.setValue(HBaseZeroCopyByteString.wrap(value));
+    regionBuilder.setValue(ByteStringer.wrap(value));
     regionBuilder.setType(type);
     return regionBuilder.build();
   }
@@ -954,9 +954,9 @@ public final class RequestConverter {
       final ByteArrayComparable comparator,
       final CompareType compareType) throws IOException {
     Condition.Builder builder = Condition.newBuilder();
-    builder.setRow(HBaseZeroCopyByteString.wrap(row));
-    builder.setFamily(HBaseZeroCopyByteString.wrap(family));
-    builder.setQualifier(HBaseZeroCopyByteString.wrap(qualifier));
+    builder.setRow(ByteStringer.wrap(row));
+    builder.setFamily(ByteStringer.wrap(family));
+    builder.setQualifier(ByteStringer.wrap(qualifier));
     builder.setComparator(ProtobufUtil.toComparator(comparator));
     builder.setCompareType(compareType);
     return builder.build();
@@ -988,7 +988,7 @@ public final class RequestConverter {
       final TableName tableName, final byte [] columnName) {
     DeleteColumnRequest.Builder builder = DeleteColumnRequest.newBuilder();
     builder.setTableName(ProtobufUtil.toProtoTableName((tableName)));
-    builder.setColumnName(HBaseZeroCopyByteString.wrap(columnName));
+    builder.setColumnName(ByteStringer.wrap(columnName));
     return builder.build();
   }
 
@@ -1128,7 +1128,7 @@ public final class RequestConverter {
     builder.setTableSchema(hTableDesc.convert());
     if (splitKeys != null) {
       for (byte [] splitKey : splitKeys) {
-        builder.addSplitKeys(HBaseZeroCopyByteString.wrap(splitKey));
+        builder.addSplitKeys(ByteStringer.wrap(splitKey));
       }
     }
     return builder.build();
@@ -1281,7 +1281,7 @@ public final class RequestConverter {
   public static GetLastFlushedSequenceIdRequest buildGetLastFlushedSequenceIdRequest(
       byte[] regionName) {
     return GetLastFlushedSequenceIdRequest.newBuilder().setRegionName(
-        HBaseZeroCopyByteString.wrap(regionName)).build();
+        ByteStringer.wrap(regionName)).build();
   }
 
   /**
@@ -1336,10 +1336,10 @@ public final class RequestConverter {
     permissionBuilder.setTableName(ProtobufUtil.toProtoTableName(tableName));
 
     if (family != null) {
-      permissionBuilder.setFamily(HBaseZeroCopyByteString.wrap(family));
+      permissionBuilder.setFamily(ByteStringer.wrap(family));
     }
     if (qualifier != null) {
-      permissionBuilder.setQualifier(HBaseZeroCopyByteString.wrap(qualifier));
+      permissionBuilder.setQualifier(ByteStringer.wrap(qualifier));
     }
     ret.setType(AccessControlProtos.Permission.Type.Table)
        .setTablePermission(permissionBuilder);
@@ -1432,10 +1432,10 @@ public final class RequestConverter {
       permissionBuilder.setTableName(ProtobufUtil.toProtoTableName(tableName));
     }
     if (family != null) {
-      permissionBuilder.setFamily(HBaseZeroCopyByteString.wrap(family));
+      permissionBuilder.setFamily(ByteStringer.wrap(family));
     }
     if (qualifier != null) {
-      permissionBuilder.setQualifier(HBaseZeroCopyByteString.wrap(qualifier));
+      permissionBuilder.setQualifier(ByteStringer.wrap(qualifier));
     }
     ret.setType(AccessControlProtos.Permission.Type.Table)
        .setTablePermission(permissionBuilder);

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
index 2853aaa..0936ee1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/EncryptionUtil.java
@@ -26,7 +26,7 @@ import java.security.SecureRandom;
 
 import javax.crypto.spec.SecretKeySpec;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -83,15 +83,15 @@ public class EncryptionUtil {
     if (cipher.getIvLength() > 0) {
       iv = new byte[cipher.getIvLength()];
       RNG.nextBytes(iv);
-      builder.setIv(HBaseZeroCopyByteString.wrap(iv));
+      builder.setIv(ByteStringer.wrap(iv));
     }
     byte[] keyBytes = key.getEncoded();
     builder.setLength(keyBytes.length);
-    builder.setHash(HBaseZeroCopyByteString.wrap(Encryption.hash128(keyBytes)));
+    builder.setHash(ByteStringer.wrap(Encryption.hash128(keyBytes)));
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     Encryption.encryptWithSubjectKey(out, new ByteArrayInputStream(keyBytes), subject,
       conf, cipher, iv);
-    builder.setData(HBaseZeroCopyByteString.wrap(out.toByteArray()));
+    builder.setData(ByteStringer.wrap(out.toByteArray()));
     // Build and return the protobuf message
     out.reset();
     builder.build().writeDelimitedTo(out);

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
index e8675a2..faa03e3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
@@ -23,7 +23,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.regex.Pattern;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -93,10 +93,10 @@ public class AccessControlClient {
           permissionBuilder.setTableName(ProtobufUtil.toProtoTableName(tableName));
 
           if (family != null) {
-            permissionBuilder.setFamily(HBaseZeroCopyByteString.wrap(family));
+            permissionBuilder.setFamily(ByteStringer.wrap(family));
           }
           if (qual != null) {
-            permissionBuilder.setQualifier(HBaseZeroCopyByteString.wrap(qual));
+            permissionBuilder.setQualifier(ByteStringer.wrap(qual));
           }
           ret.setType(AccessControlProtos.Permission.Type.Table).setTablePermission(
               permissionBuilder);
@@ -157,10 +157,10 @@ public class AccessControlClient {
             permissionBuilder.setTableName(ProtobufUtil.toProtoTableName(tableName));
           }
           if (family != null) {
-            permissionBuilder.setFamily(HBaseZeroCopyByteString.wrap(family));
+            permissionBuilder.setFamily(ByteStringer.wrap(family));
           }
           if (qualifier != null) {
-            permissionBuilder.setQualifier(HBaseZeroCopyByteString.wrap(qualifier));
+            permissionBuilder.setQualifier(ByteStringer.wrap(qualifier));
           }
           ret.setType(AccessControlProtos.Permission.Type.Table).setTablePermission(
               permissionBuilder);

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
index bf1b01f..8a17994 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
@@ -22,7 +22,7 @@ import static org.apache.hadoop.hbase.security.visibility.VisibilityConstants.LA
 import java.io.IOException;
 import java.util.Map;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -86,7 +86,7 @@ public class VisibilityClient {
           for (String label : labels) {
             if (label.length() > 0) {
               VisibilityLabel.Builder newBuilder = VisibilityLabel.newBuilder();
-              newBuilder.setLabel(HBaseZeroCopyByteString.wrap(Bytes.toBytes(label)));
+              newBuilder.setLabel(ByteStringer.wrap(Bytes.toBytes(label)));
               builder.addVisLabel(newBuilder.build());
             }
           }
@@ -137,7 +137,7 @@ public class VisibilityClient {
 
         public GetAuthsResponse call(VisibilityLabelsService service) throws IOException {
           GetAuthsRequest.Builder getAuthReqBuilder = GetAuthsRequest.newBuilder();
-          getAuthReqBuilder.setUser(HBaseZeroCopyByteString.wrap(Bytes.toBytes(user)));
+          getAuthReqBuilder.setUser(ByteStringer.wrap(Bytes.toBytes(user)));
           service.getAuths(controller, getAuthReqBuilder.build(), rpcCallback);
           return rpcCallback.get();
         }
@@ -179,10 +179,10 @@ public class VisibilityClient {
 
         public VisibilityLabelsResponse call(VisibilityLabelsService service) throws IOException {
           SetAuthsRequest.Builder setAuthReqBuilder = SetAuthsRequest.newBuilder();
-          setAuthReqBuilder.setUser(HBaseZeroCopyByteString.wrap(Bytes.toBytes(user)));
+          setAuthReqBuilder.setUser(ByteStringer.wrap(Bytes.toBytes(user)));
           for (String auth : auths) {
             if (auth.length() > 0) {
-              setAuthReqBuilder.addAuth(HBaseZeroCopyByteString.wrap(Bytes.toBytes(auth)));
+              setAuthReqBuilder.addAuth(ByteStringer.wrap(Bytes.toBytes(auth)));
             }
           }
           if (setOrClear) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
index 1c503e0..2a8b51a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
@@ -35,7 +35,7 @@ import java.util.Properties;
 import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -1949,7 +1949,7 @@ public class ZKUtil {
       for (Map.Entry<byte[], Long> e : storeSequenceIds.entrySet()){
         byte[] columnFamilyName = e.getKey();
         Long curSeqId = e.getValue();
-        storeSequenceIdBuilder.setFamilyName(HBaseZeroCopyByteString.wrap(columnFamilyName));
+        storeSequenceIdBuilder.setFamilyName(ByteStringer.wrap(columnFamilyName));
         storeSequenceIdBuilder.setSequenceId(curSeqId);
         regionSequenceIdsBuilder.addStoreSequenceId(storeSequenceIdBuilder.build());
         storeSequenceIdBuilder.clear();

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
index 7ee4476..fa0819d 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java
@@ -33,7 +33,7 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.commons.lang.NotImplementedException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -510,7 +510,7 @@ public class TestClientNoCluster extends Configured implements Tool {
       if (max <= 0) break;
       if (++count > max) break;
       HRegionInfo hri = e.getValue().getFirst();
-      ByteString row = HBaseZeroCopyByteString.wrap(hri.getRegionName());
+      ByteString row = ByteStringer.wrap(hri.getRegionName());
       resultBuilder.clear();
       resultBuilder.addCell(getRegionInfo(row, hri));
       resultBuilder.addCell(getServer(row, e.getValue().getSecond()));
@@ -565,11 +565,11 @@ public class TestClientNoCluster extends Configured implements Tool {
   }
 
   private final static ByteString CATALOG_FAMILY_BYTESTRING =
-      HBaseZeroCopyByteString.wrap(HConstants.CATALOG_FAMILY);
+      ByteStringer.wrap(HConstants.CATALOG_FAMILY);
   private final static ByteString REGIONINFO_QUALIFIER_BYTESTRING =
-      HBaseZeroCopyByteString.wrap(HConstants.REGIONINFO_QUALIFIER);
+      ByteStringer.wrap(HConstants.REGIONINFO_QUALIFIER);
   private final static ByteString SERVER_QUALIFIER_BYTESTRING =
-      HBaseZeroCopyByteString.wrap(HConstants.SERVER_QUALIFIER);
+      ByteStringer.wrap(HConstants.SERVER_QUALIFIER);
 
   static CellProtos.Cell.Builder getBaseCellBuilder(final ByteString row) {
     CellProtos.Cell.Builder cellBuilder = CellProtos.Cell.newBuilder();
@@ -582,7 +582,7 @@ public class TestClientNoCluster extends Configured implements Tool {
   static CellProtos.Cell getRegionInfo(final ByteString row, final HRegionInfo hri) {
     CellProtos.Cell.Builder cellBuilder = getBaseCellBuilder(row);
     cellBuilder.setQualifier(REGIONINFO_QUALIFIER_BYTESTRING);
-    cellBuilder.setValue(HBaseZeroCopyByteString.wrap(hri.toByteArray()));
+    cellBuilder.setValue(ByteStringer.wrap(hri.toByteArray()));
     return cellBuilder.build();
   }
 
@@ -595,9 +595,9 @@ public class TestClientNoCluster extends Configured implements Tool {
 
   static CellProtos.Cell getStartCode(final ByteString row) {
     CellProtos.Cell.Builder cellBuilder = getBaseCellBuilder(row);
-    cellBuilder.setQualifier(HBaseZeroCopyByteString.wrap(HConstants.STARTCODE_QUALIFIER));
+    cellBuilder.setQualifier(ByteStringer.wrap(HConstants.STARTCODE_QUALIFIER));
     // TODO:
-    cellBuilder.setValue(HBaseZeroCopyByteString.wrap(Bytes.toBytes(META_SERVERNAME.getStartcode())));
+    cellBuilder.setValue(ByteStringer.wrap(Bytes.toBytes(META_SERVERNAME.getStartcode())));
     return cellBuilder.build();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-protocol/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml
index fc3a833..ec8d825 100644
--- a/hbase-protocol/pom.xml
+++ b/hbase-protocol/pom.xml
@@ -108,6 +108,10 @@
         <groupId>com.google.protobuf</groupId>
         <artifactId>protobuf-java</artifactId>
       </dependency>
+      <dependency>
+        <groupId>commons-logging</groupId>
+        <artifactId>commons-logging</artifactId>
+      </dependency>
     </dependencies>
 
     <profiles>

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-protocol/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java b/hbase-protocol/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java
index 68c4f6f..f7e9c7f 100644
--- a/hbase-protocol/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java
+++ b/hbase-protocol/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java
@@ -64,5 +64,4 @@ public final class HBaseZeroCopyByteString extends LiteralByteString {
     throw new UnsupportedOperationException("Need a LiteralByteString, got a "
                                             + buf.getClass().getName());
   }
-
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java
new file mode 100644
index 0000000..ab354bc
--- /dev/null
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import com.google.protobuf.ByteString;
+import com.google.protobuf.HBaseZeroCopyByteString;
+
+/**
+ * Hack to work around the HBASE-10304 issue that keeps bubbling up in a mapreduce context.
+ */
+public class ByteStringer {
+  private static final Log LOG = LogFactory.getLog(ByteStringer.class);
+
+  /**
+   * Flag set at class loading time.
+   */
+  private static boolean USE_ZEROCOPYBYTESTRING = true;
+
+  // Can we classload HBaseZeroCopyByteString without an IllegalAccessError?
+  // If so, use it when passing ByteStrings to pb; otherwise fall back to the plain ByteString,
+  // which is more costly because it makes a copy of the passed-in array.
+  static {
+    try {
+      HBaseZeroCopyByteString.wrap(new byte [0]);
+    } catch (IllegalAccessError iae) {
+      USE_ZEROCOPYBYTESTRING = false;
+      LOG.debug("Failed to classload HBaseZeroCopyByteString: " + iae.toString());
+    }
+  }
+
+  private ByteStringer() {
+    super();
+  }
+
+  /**
+   * Wraps a byte array in a {@link ByteString} without copying it.
+   */
+  public static ByteString wrap(final byte[] array) {
+    return USE_ZEROCOPYBYTESTRING? HBaseZeroCopyByteString.wrap(array): ByteString.copyFrom(array);
+  }
+
+  /**
+   * Wraps a subset of a byte array in a {@link ByteString} without copying it.
+   */
+  public static ByteString wrap(final byte[] array, int offset, int length) {
+    return USE_ZEROCOPYBYTESTRING? HBaseZeroCopyByteString.wrap(array, offset, length):
+      ByteString.copyFrom(array, offset, length);
+  }
+}
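
Not part of the commit, but for readers skimming the call-site churn above, here is a minimal usage sketch of the new helper. Only ByteStringer.wrap and com.google.protobuf.ByteString come from the patch; the class name ByteStringerUsageSketch and the sample bytes are made-up stand-ins.

import com.google.protobuf.ByteString;

import org.apache.hadoop.hbase.util.ByteStringer;

public class ByteStringerUsageSketch {
  public static void main(String[] args) {
    byte[] family = "f".getBytes();

    // Old call sites referenced the protobuf-package subclass directly:
    //   builder.setFamily(HBaseZeroCopyByteString.wrap(family));
    // which fails with IllegalAccessError when protobuf-java is loaded by a different
    // classloader, the situation described in the commit message.

    // New call sites go through ByteStringer, which probes the zero-copy class once in a
    // static initializer and falls back to a copying ByteString if the probe fails:
    ByteString wrapped = ByteStringer.wrap(family);
    ByteString sliced = ByteStringer.wrap(family, 0, family.length);

    // Either way the caller just sees a plain ByteString to hand to the protobuf builders.
    System.out.println("wrapped=" + wrapped.size() + " sliced=" + sliced.size());
  }
}

The fallback costs an extra copy, but it only kicks in when the zero-copy subclass cannot be class-loaded, which is exactly the mapreduce-classpath case this change targets.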

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java
index db6bea8..00b713c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/codec/MessageCodec.java
@@ -21,7 +21,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
@@ -44,15 +44,15 @@ public class MessageCodec implements Codec {
       CellProtos.Cell.Builder builder = CellProtos.Cell.newBuilder();
       // This copies bytes from Cell to ByteString.  I don't see anyway around the copy.
       // ByteString is final.
-      builder.setRow(HBaseZeroCopyByteString.wrap(cell.getRowArray(), cell.getRowOffset(),
+      builder.setRow(ByteStringer.wrap(cell.getRowArray(), cell.getRowOffset(),
           cell.getRowLength()));
-      builder.setFamily(HBaseZeroCopyByteString.wrap(cell.getFamilyArray(), cell.getFamilyOffset(),
+      builder.setFamily(ByteStringer.wrap(cell.getFamilyArray(), cell.getFamilyOffset(),
           cell.getFamilyLength()));
-      builder.setQualifier(HBaseZeroCopyByteString.wrap(cell.getQualifierArray(),
+      builder.setQualifier(ByteStringer.wrap(cell.getQualifierArray(),
           cell.getQualifierOffset(), cell.getQualifierLength()));
       builder.setTimestamp(cell.getTimestamp());
       builder.setCellType(CellProtos.CellType.valueOf(cell.getTypeByte()));
-      builder.setValue(HBaseZeroCopyByteString.wrap(cell.getValueArray(), cell.getValueOffset(),
+      builder.setValue(ByteStringer.wrap(cell.getValueArray(), cell.getValueOffset(),
           cell.getValueLength()));
       CellProtos.Cell pbcell = builder.build();
       pbcell.writeDelimitedTo(this.out);

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
index 7ed9f6b..de05575 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
@@ -24,7 +24,7 @@ import java.io.DataInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -193,7 +193,7 @@ public class Reference {
     FSProtos.Reference.Builder builder = FSProtos.Reference.newBuilder();
     builder.setRange(isTopFileRegion(getFileRegion())?
       FSProtos.Reference.Range.TOP: FSProtos.Reference.Range.BOTTOM);
-    builder.setSplitkey(HBaseZeroCopyByteString.wrap(getSplitKey()));
+    builder.setSplitkey(ByteStringer.wrap(getSplitKey()));
     return builder.build();
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
index 8428493..1b065ec 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
@@ -26,7 +26,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.hbase.KeyValue;
@@ -205,7 +205,7 @@ public class FixedFileTrailer {
       .setComparatorClassName(comparatorClassName)
       .setCompressionCodec(compressionCodec.ordinal());
     if (encryptionKey != null) {
-      builder.setEncryptionKey(HBaseZeroCopyByteString.wrap(encryptionKey));
+      builder.setEncryptionKey(ByteStringer.wrap(encryptionKey));
     }
     // We need this extra copy unfortunately to determine the final size of the
     // delimited output, see use of baos.size() below.

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
index 71f380a..4d1c2fd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
@@ -40,7 +40,7 @@ import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -739,8 +739,8 @@ public class HFile {
       HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();
       for (Map.Entry<byte [], byte[]> e: this.map.entrySet()) {
         HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();
-        bbpBuilder.setFirst(HBaseZeroCopyByteString.wrap(e.getKey()));
-        bbpBuilder.setSecond(HBaseZeroCopyByteString.wrap(e.getValue()));
+        bbpBuilder.setFirst(ByteStringer.wrap(e.getKey()));
+        bbpBuilder.setSecond(ByteStringer.wrap(e.getValue()));
         builder.addMapEntry(bbpBuilder.build());
       }
       out.write(ProtobufUtil.PB_MAGIC);

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index 89702c5..c43b036 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -1220,7 +1220,6 @@ public class HFileBlock implements Cacheable {
       } else {
         // Positional read. Better for random reads; or when the streamLock is already locked.
         int extraSize = peekIntoNextBlock ? hdrSize : 0;
-
         int ret = istream.read(fileOffset, dest, destOffset, size + extraSize);
         if (ret < size) {
           throw new IOException("Positional read of " + size + " bytes " +
@@ -1234,8 +1233,7 @@ public class HFileBlock implements Cacheable {
       }
 
       assert peekIntoNextBlock;
-      return Bytes.toInt(dest, destOffset + size + BlockType.MAGIC_LENGTH) +
-          hdrSize;
+      return Bytes.toInt(dest, destOffset + size + BlockType.MAGIC_LENGTH) + hdrSize;
     }
 
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
index d156b71..1ce10ca 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
@@ -28,7 +28,7 @@ import java.util.Map;
 import java.util.NavigableMap;
 import java.util.UUID;
 
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
@@ -91,8 +91,8 @@ public class ReplicationProtbufUtil {
       WALProtos.WALKey.Builder keyBuilder = entryBuilder.getKeyBuilder();
       HLogKey key = entry.getKey();
       keyBuilder.setEncodedRegionName(
-        HBaseZeroCopyByteString.wrap(key.getEncodedRegionName()));
-      keyBuilder.setTableName(HBaseZeroCopyByteString.wrap(key.getTablename().getName()));
+        ByteStringer.wrap(key.getEncodedRegionName()));
+      keyBuilder.setTableName(ByteStringer.wrap(key.getTablename().getName()));
       keyBuilder.setLogSequenceNumber(key.getLogSeqNum());
       keyBuilder.setWriteTime(key.getWriteTime());
       if (key.getNonce() != HConstants.NO_NONCE) {
@@ -110,7 +110,7 @@ public class ReplicationProtbufUtil {
       NavigableMap<byte[], Integer> scopes = key.getScopes();
       if (scopes != null && !scopes.isEmpty()) {
         for (Map.Entry<byte[], Integer> scope: scopes.entrySet()) {
-          scopeBuilder.setFamily(HBaseZeroCopyByteString.wrap(scope.getKey()));
+          scopeBuilder.setFamily(ByteStringer.wrap(scope.getKey()));
           WALProtos.ScopeType scopeType =
               WALProtos.ScopeType.valueOf(scope.getValue().intValue());
           scopeBuilder.setScopeType(scopeType);

http://git-wip-us.apache.org/repos/asf/hbase/blob/8884ad04/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index aaea9f4..e574200 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -53,7 +53,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 import javax.management.ObjectName;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.HBaseZeroCopyByteString;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -200,7 +200,6 @@ import org.apache.hadoop.hbase.regionserver.handler.CloseMetaHandler;
 import org.apache.hadoop.hbase.regionserver.handler.CloseRegionHandler;
 import org.apache.hadoop.hbase.regionserver.handler.OpenMetaHandler;
 import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;
-import org.apache.hadoop.hbase.regionserver.snapshot.RegionServerSnapshotManager;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
@@ -1361,7 +1360,7 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterfa
       regionSpecifier = RegionSpecifier.newBuilder();
     }
     regionSpecifier.setType(RegionSpecifierType.REGION_NAME);
-    regionSpecifier.setValue(HBaseZeroCopyByteString.wrap(name));
+    regionSpecifier.setValue(ByteStringer.wrap(name));
     regionLoadBldr.setRegionSpecifier(regionSpecifier.build())
       .setStores(stores)
       .setStorefiles(storefiles)
@@ -4079,7 +4078,7 @@ public class HRegionServer implements ClientProtos.ClientService.BlockingInterfa
       RollWALWriterResponse.Builder builder = RollWALWriterResponse.newBuilder();
       if (regionsToFlush != null) {
         for (byte[] region: regionsToFlush) {
-          builder.addRegionToFlush(HBaseZeroCopyByteString.wrap(region));
+          builder.addRegionToFlush(ByteStringer.wrap(region));
         }
       }
       return builder.build();

