hbase-commits mailing list archives

From: st...@apache.org
Subject: svn commit: r1426729 [1/2] - in /hbase/trunk: hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ hbase-protocol/src/main/protobuf/ hbase-server/src/main/java/org/apache/hadoop/hbase/io/ hbase-server/src/main/java/org/apache/hadoop...
Date: Sat, 29 Dec 2012 09:27:24 GMT
Author: stack
Date: Sat Dec 29 09:27:24 2012
New Revision: 1426729

URL: http://svn.apache.org/viewvc?rev=1426729&view=rev
Log:
HBASE-7224 Remove references to Writable in the ipc package
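
In short, the ActionResult.value field now carries a protobuf Result message directly instead of a NameBytesPair wrapping an HbaseObjectWritable-serialized payload, removing the Writable round-trip from the multi RPC path. A minimal sketch of the new shape, using the generated ClientProtos API changed below (the helper name and wiring are illustrative, not part of the patch):

    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    // Illustrative only: the new ActionResult shape after HBASE-7224.
    static ClientProtos.ActionResult exampleActionResult() {
      // Server side: attach the pb Result directly to the ActionResult
      // (previously it was wrapped via ProtobufUtil.toParameter into a NameBytesPair).
      ClientProtos.Result pbResult = ClientProtos.Result.newBuilder().build();
      return ClientProtos.ActionResult.newBuilder()
          .setValue(pbResult)
          .build();
    }

On the client side, ActionResult.getValue() now yields a ClientProtos.Result that converts straight through ProtobufUtil.toResult, as the ResponseConverter hunk below shows.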

Added:
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CodeToClassAndBackFor96Migration.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java
Removed:
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/CodeToClassAndBack.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java
Modified:
    hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
    hbase/trunk/hbase-protocol/src/main/protobuf/Client.proto
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ClientCache.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/UnknownProtocolException.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/TestSerialization.java
    hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java

Modified: hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java (original)
+++ hbase/trunk/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java Sat Dec 29 09:27:24 2012
@@ -18533,10 +18533,10 @@ public final class ClientProtos {
   public interface ActionResultOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
     
-    // optional .NameBytesPair value = 1;
+    // optional .Result value = 1;
     boolean hasValue();
-    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue();
-    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder();
+    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getValue();
+    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getValueOrBuilder();
     
     // optional .NameBytesPair exception = 2;
     boolean hasException();
@@ -18572,16 +18572,16 @@ public final class ClientProtos {
     }
     
     private int bitField0_;
-    // optional .NameBytesPair value = 1;
+    // optional .Result value = 1;
     public static final int VALUE_FIELD_NUMBER = 1;
-    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_;
+    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value_;
     public boolean hasValue() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
-    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
+    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getValue() {
       return value_;
     }
-    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
+    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getValueOrBuilder() {
       return value_;
     }
     
@@ -18599,7 +18599,7 @@ public final class ClientProtos {
     }
     
     private void initFields() {
-      value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
+      value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
       exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
     }
     private byte memoizedIsInitialized = -1;
@@ -18818,7 +18818,7 @@ public final class ClientProtos {
       public Builder clear() {
         super.clear();
         if (valueBuilder_ == null) {
-          value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
+          value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
         } else {
           valueBuilder_.clear();
         }
@@ -18949,7 +18949,7 @@ public final class ClientProtos {
               break;
             }
             case 10: {
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder();
+              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder();
               if (hasValue()) {
                 subBuilder.mergeFrom(getValue());
               }
@@ -18972,21 +18972,21 @@ public final class ClientProtos {
       
       private int bitField0_;
       
-      // optional .NameBytesPair value = 1;
-      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
+      // optional .Result value = 1;
+      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
       private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_;
+          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> valueBuilder_;
       public boolean hasValue() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
+      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getValue() {
         if (valueBuilder_ == null) {
           return value_;
         } else {
           return valueBuilder_.getMessage();
         }
       }
-      public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
+      public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
         if (valueBuilder_ == null) {
           if (value == null) {
             throw new NullPointerException();
@@ -19000,7 +19000,7 @@ public final class ClientProtos {
         return this;
       }
       public Builder setValue(
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
+          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
         if (valueBuilder_ == null) {
           value_ = builderForValue.build();
           onChanged();
@@ -19010,12 +19010,12 @@ public final class ClientProtos {
         bitField0_ |= 0x00000001;
         return this;
       }
-      public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
+      public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
         if (valueBuilder_ == null) {
           if (((bitField0_ & 0x00000001) == 0x00000001) &&
-              value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
+              value_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
             value_ =
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial();
+              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(value_).mergeFrom(value).buildPartial();
           } else {
             value_ = value;
           }
@@ -19028,7 +19028,7 @@ public final class ClientProtos {
       }
       public Builder clearValue() {
         if (valueBuilder_ == null) {
-          value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
+          value_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
           onChanged();
         } else {
           valueBuilder_.clear();
@@ -19036,12 +19036,12 @@ public final class ClientProtos {
         bitField0_ = (bitField0_ & ~0x00000001);
         return this;
       }
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() {
+      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getValueBuilder() {
         bitField0_ |= 0x00000001;
         onChanged();
         return getValueFieldBuilder().getBuilder();
       }
-      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
+      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getValueOrBuilder() {
         if (valueBuilder_ != null) {
           return valueBuilder_.getMessageOrBuilder();
         } else {
@@ -19049,11 +19049,11 @@ public final class ClientProtos {
         }
       }
       private com.google.protobuf.SingleFieldBuilder<
-          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
+          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
           getValueFieldBuilder() {
         if (valueBuilder_ == null) {
           valueBuilder_ = new com.google.protobuf.SingleFieldBuilder<
-              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
+              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
                   value_,
                   getParentForChildren(),
                   isClean());
@@ -21479,25 +21479,25 @@ public final class ClientProtos {
       "oprocessorServiceResponse\022 \n\006region\030\001 \002(" +
       "\0132\020.RegionSpecifier\022\035\n\005value\030\002 \002(\0132\016.Nam" +
       "eBytesPair\"9\n\013MultiAction\022\027\n\006mutate\030\001 \001(" +
-      "\0132\007.Mutate\022\021\n\003get\030\002 \001(\0132\004.Get\"P\n\014ActionR" +
-      "esult\022\035\n\005value\030\001 \001(\0132\016.NameBytesPair\022!\n\t",
-      "exception\030\002 \001(\0132\016.NameBytesPair\"^\n\014Multi" +
-      "Request\022 \n\006region\030\001 \002(\0132\020.RegionSpecifie" +
-      "r\022\034\n\006action\030\002 \003(\0132\014.MultiAction\022\016\n\006atomi" +
-      "c\030\003 \001(\010\".\n\rMultiResponse\022\035\n\006result\030\001 \003(\013" +
-      "2\r.ActionResult2\223\003\n\rClientService\022 \n\003get" +
-      "\022\013.GetRequest\032\014.GetResponse\022)\n\006mutate\022\016." +
-      "MutateRequest\032\017.MutateResponse\022#\n\004scan\022\014" +
-      ".ScanRequest\032\r.ScanResponse\022,\n\007lockRow\022\017" +
-      ".LockRowRequest\032\020.LockRowResponse\0222\n\tunl" +
-      "ockRow\022\021.UnlockRowRequest\032\022.UnlockRowRes",
-      "ponse\022>\n\rbulkLoadHFile\022\025.BulkLoadHFileRe" +
-      "quest\032\026.BulkLoadHFileResponse\022F\n\013execSer" +
-      "vice\022\032.CoprocessorServiceRequest\032\033.Copro" +
-      "cessorServiceResponse\022&\n\005multi\022\r.MultiRe" +
-      "quest\032\016.MultiResponseBB\n*org.apache.hado" +
-      "op.hbase.protobuf.generatedB\014ClientProto" +
-      "sH\001\210\001\001\240\001\001"
+      "\0132\007.Mutate\022\021\n\003get\030\002 \001(\0132\004.Get\"I\n\014ActionR" +
+      "esult\022\026\n\005value\030\001 \001(\0132\007.Result\022!\n\texcepti",
+      "on\030\002 \001(\0132\016.NameBytesPair\"^\n\014MultiRequest" +
+      "\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\034\n\006ac" +
+      "tion\030\002 \003(\0132\014.MultiAction\022\016\n\006atomic\030\003 \001(\010" +
+      "\".\n\rMultiResponse\022\035\n\006result\030\001 \003(\0132\r.Acti" +
+      "onResult2\223\003\n\rClientService\022 \n\003get\022\013.GetR" +
+      "equest\032\014.GetResponse\022)\n\006mutate\022\016.MutateR" +
+      "equest\032\017.MutateResponse\022#\n\004scan\022\014.ScanRe" +
+      "quest\032\r.ScanResponse\022,\n\007lockRow\022\017.LockRo" +
+      "wRequest\032\020.LockRowResponse\0222\n\tunlockRow\022" +
+      "\021.UnlockRowRequest\032\022.UnlockRowResponse\022>",
+      "\n\rbulkLoadHFile\022\025.BulkLoadHFileRequest\032\026" +
+      ".BulkLoadHFileResponse\022F\n\013execService\022\032." +
+      "CoprocessorServiceRequest\032\033.CoprocessorS" +
+      "erviceResponse\022&\n\005multi\022\r.MultiRequest\032\016" +
+      ".MultiResponseBB\n*org.apache.hadoop.hbas" +
+      "e.protobuf.generatedB\014ClientProtosH\001\210\001\001\240" +
+      "\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {

Modified: hbase/trunk/hbase-protocol/src/main/protobuf/Client.proto
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-protocol/src/main/protobuf/Client.proto?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-protocol/src/main/protobuf/Client.proto (original)
+++ hbase/trunk/hbase-protocol/src/main/protobuf/Client.proto Sat Dec 29 09:27:24 2012
@@ -291,7 +291,7 @@ message MultiAction {
  * is returned as a stringified parameter.
  */
 message ActionResult {
-  optional NameBytesPair value = 1;
+  optional Result value = 1;
   optional NameBytesPair exception = 2;
 }
 

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java Sat Dec 29 09:27:24 2012
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.io;
 import java.io.BufferedInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
-import java.io.DataOutput;
 import java.io.IOException;
 import java.io.InputStream;
 
@@ -33,7 +32,6 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.FSProtos;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.Writable;
 
 import com.google.protobuf.ByteString;
 
@@ -56,7 +54,7 @@ import com.google.protobuf.ByteString;
  * references.  References are cleaned up by compactions.
  */
 @InterfaceAudience.Private
-public class Reference implements Writable {
+public class Reference {
   private byte [] splitkey;
   private Range region;
 
@@ -99,7 +97,6 @@ public class Reference implements Writab
 
   /**
    * Used by serializations.
-   * @deprecated Use the pb serializations instead.  Writables are going away.
    */
   @Deprecated
   // Make this private when it comes time to let go of this constructor.  Needed by pb serialization.
@@ -130,18 +127,14 @@ public class Reference implements Writab
     return "" + this.region;
   }
 
-  /**
-   * @deprecated Writables are going away. Use the pb serialization methods instead.
-   */
-  @Deprecated
-  public void write(DataOutput out) throws IOException {
-    // Write true if we're doing top of the file.
-    out.writeBoolean(isTopFileRegion(this.region));
-    Bytes.writeByteArray(out, this.splitkey);
+  public static boolean isTopFileRegion(final Range r) {
+    return r.equals(Range.top);
   }
 
   /**
    * @deprecated Writables are going away. Use the pb serialization methods instead.
+   * Remove in a release after 0.96 goes out.  This is here only to migrate
+   * old Reference files written with Writables before 0.96.
    */
   @Deprecated
   public void readFields(DataInput in) throws IOException {
@@ -151,10 +144,6 @@ public class Reference implements Writab
     this.splitkey = Bytes.readByteArray(in);
   }
 
-  public static boolean isTopFileRegion(final Range r) {
-    return r.equals(Range.top);
-  }
-
   public Path write(final FileSystem fs, final Path p)
   throws IOException {
     FSDataOutputStream out = fs.create(p, false);
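
The removed write(DataOutput) documents the legacy on-disk layout (a boolean top/bottom flag followed by the split key), which the retained readFields exists solely to parse when migrating pre-0.96 Reference files. A hedged sketch of such a migration read; the helper name and the FileSystem/Path wiring are illustrative, not from this patch:

    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.io.Reference;

    // Illustrative only: parse a pre-0.96, Writable-format Reference for migration.
    static Reference readLegacyReference(FileSystem fs, Path p) throws java.io.IOException {
      FSDataInputStream in = fs.open(p);    // FSDataInputStream implements DataInput
      try {
        Reference r = new Reference();      // deprecated no-arg constructor, kept for pb/migration use
        r.readFields(in);                   // deprecated Writable read: boolean isTop, then the split key
        return r;
      } finally {
        in.close();
      }
    }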

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java Sat Dec 29 09:27:24 2012
@@ -19,12 +19,8 @@
 
 package org.apache.hadoop.hbase.io;
 
-import java.io.DataInput;
-import java.io.DataOutput;
 import java.io.IOException;
 
-import org.apache.hadoop.io.Writable;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -39,7 +35,7 @@ import org.apache.hadoop.hbase.util.Byte
  */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
-public class TimeRange implements Writable {
+public class TimeRange {
   private long minStamp = 0L;
   private long maxStamp = Long.MAX_VALUE;
   private boolean allTime = false;
@@ -184,17 +180,4 @@ public class TimeRange implements Writab
     sb.append(this.minStamp);
     return sb.toString();
   }
-
-  //Writable
-  public void readFields(final DataInput in) throws IOException {
-    this.minStamp = in.readLong();
-    this.maxStamp = in.readLong();
-    this.allTime = in.readBoolean();
-  }
-
-  public void write(final DataOutput out) throws IOException {
-    out.writeLong(minStamp);
-    out.writeLong(maxStamp);
-    out.writeBoolean(this.allTime);
-  }
-}
+}
\ No newline at end of file

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java Sat Dec 29 09:27:24 2012
@@ -70,4 +70,4 @@ public class BlockingRpcCallback<R> impl
     }
     return result;
   }
-}
+}
\ No newline at end of file

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java Sat Dec 29 09:27:24 2012
@@ -25,11 +25,8 @@ import java.io.IOException;
  * but is only used for logging on the server side, etc.
  */
 public class CallerDisconnectedException extends IOException {
+  private static final long serialVersionUID = 1L;
   public CallerDisconnectedException(String msg) {
     super(msg);
   }
-
-  private static final long serialVersionUID = 1L;
-
-  
 }

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ClientCache.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ClientCache.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ClientCache.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ClientCache.java Sat Dec 29 09:27:24 2012
@@ -27,8 +27,6 @@ import javax.net.SocketFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.io.HbaseObjectWritable;
-import org.apache.hadoop.io.Writable;
 
 /**
  * Cache a client using its socket factory as the hash key.

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java Sat Dec 29 09:27:24 2012
@@ -70,4 +70,4 @@ public interface Delayable {
    * @throws IOException
    */
   public void endDelayThrowing(Throwable t) throws IOException;
-}
+}
\ No newline at end of file

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java Sat Dec 29 09:27:24 2012
@@ -1337,7 +1337,6 @@ public class HBaseClient {
    * @param exception the relevant exception
    * @return an exception to throw
    */
-  @SuppressWarnings({"ThrowableInstanceNeverThrown"})
   protected IOException wrapException(InetSocketAddress addr,
                                          IOException exception) {
     if (exception instanceof ConnectException) {

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java Sat Dec 29 09:27:24 2012
@@ -46,18 +46,17 @@ import java.nio.channels.WritableByteCha
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
-import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
@@ -68,28 +67,28 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
+import org.apache.hadoop.hbase.monitoring.TaskMonitor;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestHeader;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader;
-import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponseHeader.Status;
-import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
-import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation;
 import org.apache.hadoop.hbase.security.HBaseSaslRpcServer;
 import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.AuthMethod;
 import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslDigestCallbackHandler;
 import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslGssCallbackHandler;
 import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslStatus;
+import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.ByteBufferOutputStream;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.ipc.RPC.VersionMismatch;
+import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
@@ -97,20 +96,20 @@ import org.apache.hadoop.security.author
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
 import org.apache.hadoop.security.token.SecretManager;
-import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
+import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.StringUtils;
-
-import com.google.common.base.Function;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import com.google.protobuf.Message;
-
 import org.cliffc.high_scale_lib.Counter;
 import org.cloudera.htrace.Sampler;
 import org.cloudera.htrace.Span;
+import org.cloudera.htrace.Trace;
 import org.cloudera.htrace.TraceInfo;
 import org.cloudera.htrace.impl.NullSpan;
-import org.cloudera.htrace.Trace;
+
+import com.google.common.base.Function;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import com.google.protobuf.Message;
+// Uses Writables doing sasl
 
 /** A client for an IPC service.  IPC calls take a single Protobuf message as a
  * parameter, and return a single Protobuf message as their value.  A service runs on
@@ -169,10 +168,6 @@ public abstract class HBaseServer implem
     new ThreadLocal<RpcServer>();
   private volatile boolean started = false;
 
-  // For generated protocol classes which doesn't have VERSION field
-  private static final Map<Class<?>, Long>
-    PROTOCOL_VERSION = new HashMap<Class<?>, Long>();
-
   private static final Map<String, Class<? extends VersionedProtocol>>
       PROTOCOL_CACHE =
       new ConcurrentHashMap<String, Class<? extends VersionedProtocol>>();
@@ -263,8 +258,6 @@ public abstract class HBaseServer implem
 
   protected int highPriorityLevel;  // what level a high priority call is at
 
-  private volatile int responseQueueLen; // size of response queue for this server
-
   protected final List<Connection> connectionList =
     Collections.synchronizedList(new LinkedList<Connection>());
   //maintain a list
@@ -1000,7 +993,6 @@ public abstract class HBaseServer implem
             return true;
           }
           if (!call.response.hasRemaining()) {
-            responseQueueLen--;
             call.connection.decRpcCount();
             //noinspection RedundantIfStatement
             if (numElements == 1) {    // last call fully processes.
@@ -1070,7 +1062,6 @@ public abstract class HBaseServer implem
     void doRespond(Call call) throws IOException {
       // set the serve time when the response has to be sent later
       call.timestamp = System.currentTimeMillis();
-      responseQueueLen++;
 
       boolean doRegister = false;
       synchronized (call.connection.responseQueue) {

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java Sat Dec 29 09:27:24 2012
@@ -18,26 +18,20 @@
 
 package org.apache.hadoop.hbase.ipc;
 
-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcChannel;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import java.io.IOException;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.ResponseConverter;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
 
-import java.io.IOException;
-
-import static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
+import com.google.protobuf.ByteString;
+import com.google.protobuf.Descriptors;
+import com.google.protobuf.Message;
 
 /**
  * Provides clients with an RPC connection to call coprocessor endpoint {@link com.google.protobuf.Service}s

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServer.java Sat Dec 29 09:27:24 2012
@@ -19,14 +19,11 @@
 
 package org.apache.hadoop.hbase.ipc;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 
 @InterfaceAudience.Private
 public class MetricsHBaseServer {
-  private static Log LOG = LogFactory.getLog(MetricsHBaseServer.class);
   private MetricsHBaseServerSource source;
 
   public MetricsHBaseServer(String serverName, MetricsHBaseServerWrapper wrapper) {
@@ -69,4 +66,4 @@ public class MetricsHBaseServer {
   public MetricsHBaseServerSource getMetricsSource() {
     return source;
   }
-}
+}
\ No newline at end of file

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcClientEngine.java Sat Dec 29 09:27:24 2012
@@ -24,7 +24,15 @@ import com.google.protobuf.ServiceExcept
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.MasterAdminProtocol;
+import org.apache.hadoop.hbase.MasterMonitorProtocol;
+import org.apache.hadoop.hbase.RegionServerStatusProtocol;
+import org.apache.hadoop.hbase.client.AdminProtocol;
+import org.apache.hadoop.hbase.client.ClientProtocol;
+import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody;
+import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.RegionServerStatusService;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.ipc.RemoteException;
 
@@ -34,6 +42,7 @@ import java.lang.reflect.InvocationHandl
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.net.InetSocketAddress;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
@@ -76,6 +85,21 @@ public class ProtobufRpcClientEngine imp
     final private int rpcTimeout;
     private final long clientProtocolVersion;
 
+    // For generated protocol classes which don't have VERSION field,
+    // such as protobuf interfaces.
+    static final Map<Class<?>, Long>
+      PROTOCOL_VERSION = new HashMap<Class<?>, Long>();
+    static {
+      PROTOCOL_VERSION.put(ClientService.BlockingInterface.class,
+        Long.valueOf(ClientProtocol.VERSION));
+      PROTOCOL_VERSION.put(AdminService.BlockingInterface.class,
+        Long.valueOf(AdminProtocol.VERSION));
+      PROTOCOL_VERSION.put(RegionServerStatusService.BlockingInterface.class,
+        Long.valueOf(RegionServerStatusProtocol.VERSION));
+      PROTOCOL_VERSION.put(MasterMonitorProtocol.class,Long.valueOf(MasterMonitorProtocol.VERSION));
+      PROTOCOL_VERSION.put(MasterAdminProtocol.class,Long.valueOf(MasterAdminProtocol.VERSION));
+    }
+
     public Invoker(Class<? extends VersionedProtocol> protocol,
                    InetSocketAddress addr, User ticket, Configuration conf,
                    SocketFactory factory, int rpcTimeout) throws IOException {
@@ -84,7 +108,7 @@ public class ProtobufRpcClientEngine imp
       this.ticket = ticket;
       this.client = CLIENTS.getClient(conf, factory);
       this.rpcTimeout = rpcTimeout;
-      Long version = Invocation.PROTOCOL_VERSION.get(protocol);
+      Long version = PROTOCOL_VERSION.get(protocol);
       if (version != null) {
         this.clientProtocolVersion = version;
       } else {

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ProtobufRpcServerEngine.java Sat Dec 29 09:27:24 2012
@@ -36,10 +36,9 @@ import org.apache.hadoop.hbase.protobuf.
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequestBody;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.security.HBasePolicyProvider;
-import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager;
-import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
 import org.codehaus.jackson.map.ObjectMapper;
 
 import com.google.protobuf.Message;

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/UnknownProtocolException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/UnknownProtocolException.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/UnknownProtocolException.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/UnknownProtocolException.java Sat Dec 29 09:27:24 2012
@@ -24,6 +24,7 @@ import org.apache.hadoop.hbase.DoNotRetr
 /**
  * An error requesting an RPC protocol that the server is not serving.
  */
+@SuppressWarnings("serial")
 public class UnknownProtocolException extends DoNotRetryIOException {
   private Class<?> protocol;
 
@@ -44,4 +45,4 @@ public class UnknownProtocolException ex
   public Class getProtocol() {
     return protocol;
   }
-}
+}
\ No newline at end of file

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java Sat Dec 29 09:27:24 2012
@@ -19,12 +19,7 @@ package org.apache.hadoop.hbase.protobuf
 
 import static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME;
 
-import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.DataOutputStream;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.Method;
@@ -65,7 +60,6 @@ import org.apache.hadoop.hbase.client.Sc
 import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
 import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService;
@@ -1010,29 +1004,6 @@ public final class ProtobufUtil {
   }
 
   /**
-   * Convert a protocol buffer Parameter to a Java object
-   *
-   * @param parameter the protocol buffer Parameter to convert
-   * @return the converted Java object
-   * @throws IOException if failed to deserialize the parameter
-   */
-  public static Object toObject(
-      final NameBytesPair parameter) throws IOException {
-    if (parameter == null || !parameter.hasValue()) return null;
-    byte[] bytes = parameter.getValue().toByteArray();
-    ByteArrayInputStream bais = null;
-    try {
-      bais = new ByteArrayInputStream(bytes);
-      DataInput in = new DataInputStream(bais);
-      return HbaseObjectWritable.readObject(in, null);
-    } finally {
-      if (bais != null) {
-        bais.close();
-      }
-    }
-  }
-
-  /**
    * Convert a stringified protocol buffer exception Parameter to a Java Exception
    *
    * @param parameter the protocol buffer Parameter to convert
@@ -1056,57 +1027,6 @@ public final class ProtobufUtil {
     }
   }
 
-  /**
-   * Serialize a Java Object into a Parameter. The Java Object should be a
-   * Writable or protocol buffer Message
-   *
-   * @param value the Writable/Message object to be serialized
-   * @return the converted protocol buffer Parameter
-   * @throws IOException if failed to serialize the object
-   */
-  public static NameBytesPair toParameter(
-      final Object value) throws IOException {
-    Class<?> declaredClass = Object.class;
-    if (value != null) {
-      declaredClass = value.getClass();
-    }
-    return toParameter(declaredClass, value);
-  }
-
-  /**
-   * Serialize a Java Object into a Parameter. The Java Object should be a
-   * Writable or protocol buffer Message
-   *
-   * @param declaredClass the declared class of the parameter
-   * @param value the Writable/Message object to be serialized
-   * @return the converted protocol buffer Parameter
-   * @throws IOException if failed to serialize the object
-   */
-  public static NameBytesPair toParameter(
-      final Class<?> declaredClass, final Object value) throws IOException {
-    NameBytesPair.Builder builder = NameBytesPair.newBuilder();
-    builder.setName(declaredClass.getName());
-    if (value != null) {
-      ByteArrayOutputStream baos = null;
-      try {
-        baos = new ByteArrayOutputStream();
-        DataOutput out = new DataOutputStream(baos);
-        Class<?> clz = declaredClass;
-        if (HbaseObjectWritable.getClassCode(declaredClass) == null) {
-          clz = value.getClass();
-        }
-        HbaseObjectWritable.writeObject(out, value, clz, null);
-        builder.setValue(
-          ByteString.copyFrom(baos.toByteArray()));
-      } finally {
-        if (baos != null) {
-          baos.close();
-        }
-      }
-    }
-    return builder.build();
-  }
-
 // Start helpers for Client
 
   /**
@@ -1185,8 +1105,7 @@ public final class ProtobufUtil {
         if (actions.size() > rowMutations) {
           MultiRequest request =
             RequestConverter.buildMultiRequest(regionName, actions);
-          ClientProtos.MultiResponse
-            proto = client.multi(null, request);
+          ClientProtos.MultiResponse proto = client.multi(null, request);
           List<Object> results = ResponseConverter.getResults(proto);
           for (int i = 0, n = results.size(); i < n; i++) {
             int originalIndex = actions.get(i).getOriginalIndex();

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java Sat Dec 29 09:27:24 2012
@@ -91,7 +91,8 @@ public final class ResponseConverter {
       if (result.hasException()) {
         results.add(ProtobufUtil.toException(result.getException()));
       } else if (result.hasValue()) {
-        Object value = ProtobufUtil.toObject(result.getValue());
+        ClientProtos.Result r = result.getValue();
+        Object value = ProtobufUtil.toResult(r);
         if (value instanceof ClientProtos.Result) {
           results.add(ProtobufUtil.toResult((ClientProtos.Result)value));
         } else {
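
With ActionResult.value now a pb Result, each value converts straight through ProtobufUtil.toResult. A simplified, illustrative version of the loop above (the real getResults keeps its pre-existing instanceof structure, whose trailing branch is elided in this hunk; the method name here is hypothetical):

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

    // Illustrative simplification of ResponseConverter.getResults after this change.
    static List<Object> resultsOf(ClientProtos.MultiResponse proto) throws java.io.IOException {
      List<Object> results = new ArrayList<Object>();
      for (ClientProtos.ActionResult ar : proto.getResultList()) {
        if (ar.hasException()) {
          results.add(ProtobufUtil.toException(ar.getException()));  // stringified exception pair
        } else if (ar.hasValue()) {
          results.add(ProtobufUtil.toResult(ar.getValue()));          // pb Result -> client-side Result
        }
      }
      return results;
    }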

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java Sat Dec 29 09:27:24 2012
@@ -101,11 +101,9 @@ import org.apache.hadoop.hbase.filter.By
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
 import org.apache.hadoop.hbase.ipc.HBaseClientRPC;
 import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
 import org.apache.hadoop.hbase.ipc.HBaseServerRPC;
-import org.apache.hadoop.hbase.ipc.MetricsHBaseServer;
 import org.apache.hadoop.hbase.ipc.ProtocolSignature;
 import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
@@ -163,7 +161,6 @@ import org.apache.hadoop.hbase.protobuf.
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.UnlockRowResponse;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
@@ -3191,7 +3188,7 @@ public class  HRegionServer implements C
         for (ClientProtos.MultiAction actionUnion : request.getActionList()) {
           requestCount.increment();
           try {
-            Object result = null;
+            ClientProtos.Result result = null;
             if (actionUnion.hasGet()) {
               Get get = ProtobufUtil.toGet(actionUnion.getGet());
               Integer lock = getLockFromId(get.getLockId());
@@ -3242,8 +3239,7 @@ public class  HRegionServer implements C
               } else {
                 resultBuilder.clear();
               }
-              NameBytesPair value = ProtobufUtil.toParameter(result);
-              resultBuilder.setValue(value);
+              resultBuilder.setValue(result);
               builder.addResult(resultBuilder.build());
             }
           } catch (IOException ie) {
@@ -3757,10 +3753,8 @@ public class  HRegionServer implements C
     boolean batchContainsPuts = false, batchContainsDelete = false;
     try {
       ActionResult.Builder resultBuilder = ActionResult.newBuilder();
-      NameBytesPair value = ProtobufUtil.toParameter(ClientProtos.Result.newBuilder().build());
-      resultBuilder.setValue(value);
+      resultBuilder.setValue(ClientProtos.Result.newBuilder().build());
       ActionResult result = resultBuilder.build();
-
       int i = 0;
       for (Mutate m : mutates) {
         Mutation mutation = null;

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java Sat Dec 29 09:27:24 2012
@@ -148,6 +148,4 @@ public class TimeRangeTracker implements
   public String toString() {
     return "[" + minimumTimestamp + "," + maximumTimestamp + "]";
   }
-
 }
-

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java?rev=1426729&r1=1426728&r2=1426729&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java Sat Dec 29 09:27:24 2012
@@ -18,9 +18,17 @@
 
 package org.apache.hadoop.hbase.security.access;
 
-import com.google.protobuf.InvalidProtocolBufferException;
-import com.google.common.collect.ArrayListMultimap;
-import com.google.common.collect.ListMultimap;
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -37,26 +45,23 @@ import org.apache.hadoop.hbase.client.Pu
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.io.HbaseObjectWritable;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.QualifierFilter;
+import org.apache.hadoop.hbase.filter.RegexStringComparator;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.master.MasterServices;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.filter.RegexStringComparator;
-import org.apache.hadoop.hbase.filter.QualifierFilter;
-import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.io.Text;
 
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.util.*;
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ListMultimap;
+import com.google.protobuf.InvalidProtocolBufferException;
 
 /**
  * Maintains lists of permission grants to users and groups to allow for
@@ -510,7 +515,8 @@ public class AccessControlLists {
    *
    * Writes a set of permission [user: table permission]
    */
-  public static byte[] writePermissionsAsBytes(ListMultimap<String, TablePermission> perms, Configuration conf) {
+  public static byte[] writePermissionsAsBytes(ListMultimap<String, TablePermission> perms,
+      Configuration conf) {
     return ProtobufUtil.prependPBMagic(ProtobufUtil.toUserTablePermissions(perms).toByteArray());
   }
 
@@ -519,7 +525,8 @@ public class AccessControlLists {
    * from the input stream.
    */
   public static ListMultimap<String, TablePermission> readPermissions(byte[] data,
-      Configuration conf) throws DeserializationException {
+      Configuration conf)
+  throws DeserializationException {
     if (ProtobufUtil.isPBMagicPrefix(data)) {
       int pblen = ProtobufUtil.lengthOfPBMagic();
       try {
@@ -537,7 +544,8 @@ public class AccessControlLists {
         int length = in.readInt();
         for (int i=0; i<length; i++) {
           String user = Text.readString(in);
-          List<TablePermission> userPerms = (List)HbaseObjectWritable.readObject(in, conf);
+          List<TablePermission> userPerms =
+            (List)HbaseObjectWritableFor96Migration.readObject(in, conf);
           perms.putAll(user, userPerms);
         }
       } catch (IOException e) {

Added: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CodeToClassAndBackFor96Migration.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CodeToClassAndBackFor96Migration.java?rev=1426729&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CodeToClassAndBackFor96Migration.java (added)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CodeToClassAndBackFor96Migration.java Sat Dec 29 09:27:24 2012
@@ -0,0 +1,77 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.security.access;
+
+import java.util.*;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * A Static Interface.
+ * Instead of having this code in the the HbaseMapWritable code, where it
+ * blocks the possibility of altering the variables and changing their types,
+ * it is put here in this static interface where the static final Maps are
+ * loaded one time. Only byte[] is supported at this time.
+ * @deprecated In place until we move to 0.96, after which it can be removed
+ * along with {@link HbaseObjectWritableFor96Migration}; needed to read
+ * pre-0.96 TablePermissions.
+ */
+@Deprecated
+@InterfaceAudience.Private
+interface CodeToClassAndBackFor96Migration {
+  /**
+   * Static map that contains mapping from code to class
+   */
+  static final Map<Byte, Class<?>> CODE_TO_CLASS =
+    new HashMap<Byte, Class<?>>();
+
+  /**
+   * Static map that contains mapping from class to code
+   */
+  static final Map<Class<?>, Byte> CLASS_TO_CODE =
+    new HashMap<Class<?>, Byte>();
+
+  /**
+   * Class list for supported classes
+   */
+  Class<?>[] classList = {byte[].class};
+
+  /**
+   * The static loader that is used instead of a static initializer block in
+   * HbaseMapWritable.
+   */
+  InternalStaticLoader sl =
+    new InternalStaticLoader(classList, CODE_TO_CLASS, CLASS_TO_CODE);
+
+  /**
+   * Class that loads the static maps with their values.
+   */
+  class InternalStaticLoader{
+    InternalStaticLoader(Class<?>[] classList,
+        Map<Byte,Class<?>> CODE_TO_CLASS, Map<Class<?>, Byte> CLASS_TO_CODE){
+      byte code = 1;
+      for(int i=0; i<classList.length; i++){
+        CLASS_TO_CODE.put(classList[i], code);
+        CODE_TO_CLASS.put(code, classList[i]);
+        code++;
+      }
+    }
+  }
+}
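
The interface above exists only so the two lookup maps are populated exactly once:
interface fields are implicitly static and final, and the first use of
CODE_TO_CLASS triggers the interface's initialization, which runs the
InternalStaticLoader.  A hedged sketch of how a legacy reader in the same package
could consult the map -- the one-byte framing and the sketch class are assumptions
for illustration, not part of this patch:

    // Sketch only: resolve a class from a one-byte code, as a pre-0.96 reader would.
    class CodeLookupSketch implements CodeToClassAndBackFor96Migration {
      static Class<?> classForCode(java.io.DataInput in) throws java.io.IOException {
        byte code = in.readByte();                  // assumed one-byte framing
        Class<?> clazz = CODE_TO_CLASS.get(code);   // map filled by InternalStaticLoader
        if (clazz == null) {
          throw new java.io.IOException("Unknown class code " + code);
        }
        return clazz;
      }
    }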

Added: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java?rev=1426729&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java (added)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java Sat Dec 29 09:27:24 2012
@@ -0,0 +1,812 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.security.access;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.io.Serializable;
+import java.lang.reflect.Array;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableSet;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.hbase.ClusterStatus;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Action;
+import org.apache.hadoop.hbase.client.Append;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Increment;
+import org.apache.hadoop.hbase.client.MultiAction;
+import org.apache.hadoop.hbase.client.MultiResponse;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Row;
+import org.apache.hadoop.hbase.client.RowMutations;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.BinaryComparator;
+import org.apache.hadoop.hbase.filter.BitComparator;
+import org.apache.hadoop.hbase.filter.ByteArrayComparable;
+import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
+import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
+import org.apache.hadoop.hbase.filter.ColumnRangeFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.DependentColumnFilter;
+import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
+import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
+import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
+import org.apache.hadoop.hbase.filter.PageFilter;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.filter.QualifierFilter;
+import org.apache.hadoop.hbase.filter.RandomRowFilter;
+import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.hbase.filter.SkipFilter;
+import org.apache.hadoop.hbase.filter.ValueFilter;
+import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.io.DataOutputOutputStream;
+import org.apache.hadoop.hbase.io.WritableWithSize;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
+import org.apache.hadoop.hbase.regionserver.RegionOpeningState;
+import org.apache.hadoop.hbase.regionserver.wal.HLog;
+import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ProtoUtil;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.ObjectWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableFactories;
+import org.apache.hadoop.io.WritableUtils;
+
+import com.google.protobuf.Message;
+import com.google.protobuf.RpcController;
+
+/**
+ * <p>This is a customized version of the polymorphic hadoop
+ * {@link ObjectWritable}.  It removes UTF8 (HADOOP-414).
+ * Using {@link Text} instead of UTF-8 saves ~2% CPU between reading and writing
+ * objects running a short sequentialWrite Performance Evaluation test just in
+ * ObjectWritable alone; more when we're doing randomRead-ing.  Other
+ * optimizations include our passing codes for classes instead of the
+ * actual class names themselves.  This makes it so this class needs amendment
+ * if non-Writable classes are introduced -- if passed a Writable for which we
+ * have no code, we just do the old-school passing of the class name, etc. --
+ * but when codes are passed the savings are large, particularly when cell
+ * data is small (if under a couple of kilobytes, the encoding/decoding of class
+ * names and the reflection to instantiate classes was costing in excess of the cell
+ * handling).
+ * @deprecated This class is needed for migrating TablePermissions written with
+ * Writables; it reads old permissions written pre-0.96.  This
+ * class is to be removed after HBase 0.96 ships, since by then all permissions
+ * will have been migrated and rewritten with protobufs.
+ */
+@Deprecated
+@InterfaceAudience.Private
+class HbaseObjectWritableFor96Migration implements Writable, WritableWithSize, Configurable {
+  protected final static Log LOG = LogFactory.getLog(HbaseObjectWritableFor96Migration.class);
+
+  // Here we maintain two static maps of classes to code and vice versa.
+  // Add new classes+codes as wanted or figure out a way to auto-generate these
+  // maps.
+  static final Map<Integer, Class<?>> CODE_TO_CLASS =
+    new HashMap<Integer, Class<?>>();
+  static final Map<Class<?>, Integer> CLASS_TO_CODE =
+    new HashMap<Class<?>, Integer>();
+  // Special code that means 'not-encoded'; in this case we do old school
+  // sending of the class name using reflection, etc.
+  private static final byte NOT_ENCODED = 0;
+  // Generic array means that the array type is not one of the pre-defined arrays
+  // in the CLASS_TO_CODE map, but we still have to encode the array since its
+  // elements are serializable by this class.
+  private static final int GENERIC_ARRAY_CODE;
+  private static final int NEXT_CLASS_CODE;
+  static {
+    ////////////////////////////////////////////////////////////////////////////
+    // WARNING: Please do not insert, remove or swap any line in this static  //
+    // block.  Doing so would change or shift all the codes used to serialize //
+    // objects, which makes backwards compatibility very hard for clients.    //
+    // New codes should always be added at the end. Code removal is           //
+    // discouraged because code is a short now.                               //
+    ////////////////////////////////////////////////////////////////////////////
+
+    int code = NOT_ENCODED + 1;
+    // Primitive types.
+    addToMap(Boolean.TYPE, code++);
+    addToMap(Byte.TYPE, code++);
+    addToMap(Character.TYPE, code++);
+    addToMap(Short.TYPE, code++);
+    addToMap(Integer.TYPE, code++);
+    addToMap(Long.TYPE, code++);
+    addToMap(Float.TYPE, code++);
+    addToMap(Double.TYPE, code++);
+    addToMap(Void.TYPE, code++);
+
+    // Other java types
+    addToMap(String.class, code++);
+    addToMap(byte [].class, code++);
+    addToMap(byte [][].class, code++);
+
+    // Hadoop types
+    addToMap(Text.class, code++);
+    addToMap(Writable.class, code++);
+    addToMap(Writable [].class, code++);
+    code++; // Removed
+    addToMap(NullInstance.class, code++);
+
+    // Hbase types
+    addToMap(HColumnDescriptor.class, code++);
+    addToMap(HConstants.Modify.class, code++);
+
+    // We used to have a class named HMsg but it's been removed.  Rather than
+    // just axe its slot, use the following stand-in Integer class -- we just chose any
+    // class from java.lang -- so the codes that follow stay
+    // in the same relative place.
+    addToMap(Integer.class, code++);
+    addToMap(Integer[].class, code++);
+
+    //HRegion shouldn't be pushed across the wire.
+    code++; //addToMap(HRegion.class, code++);
+    code++; //addToMap(HRegion[].class, code++);
+
+    addToMap(HRegionInfo.class, code++);
+    addToMap(HRegionInfo[].class, code++);
+    code++; // Removed
+    code++; // Removed
+    addToMap(HTableDescriptor.class, code++);
+    addToMap(MapWritable.class, code++);
+
+    //
+    // HBASE-880
+    //
+    addToMap(ClusterStatus.class, code++);
+    addToMap(Delete.class, code++);
+    addToMap(Get.class, code++);
+    addToMap(KeyValue.class, code++);
+    addToMap(KeyValue[].class, code++);
+    addToMap(Put.class, code++);
+    addToMap(Put[].class, code++);
+    addToMap(Result.class, code++);
+    addToMap(Result[].class, code++);
+    addToMap(Scan.class, code++);
+
+    addToMap(WhileMatchFilter.class, code++);
+    addToMap(PrefixFilter.class, code++);
+    addToMap(PageFilter.class, code++);
+    addToMap(InclusiveStopFilter.class, code++);
+    addToMap(ColumnCountGetFilter.class, code++);
+    addToMap(SingleColumnValueFilter.class, code++);
+    addToMap(SingleColumnValueExcludeFilter.class, code++);
+    addToMap(BinaryComparator.class, code++);
+    addToMap(BitComparator.class, code++);
+    addToMap(CompareFilter.class, code++);
+    addToMap(RowFilter.class, code++);
+    addToMap(ValueFilter.class, code++);
+    addToMap(QualifierFilter.class, code++);
+    addToMap(SkipFilter.class, code++);
+    addToMap(ByteArrayComparable.class, code++);
+    addToMap(FirstKeyOnlyFilter.class, code++);
+    addToMap(DependentColumnFilter.class, code++);
+
+    addToMap(Delete [].class, code++);
+
+    addToMap(HLog.Entry.class, code++);
+    addToMap(HLog.Entry[].class, code++);
+    addToMap(HLogKey.class, code++);
+
+    addToMap(List.class, code++);
+
+    addToMap(NavigableSet.class, code++);
+    addToMap(ColumnPrefixFilter.class, code++);
+
+    // Multi
+    addToMap(Row.class, code++);
+    addToMap(Action.class, code++);
+    addToMap(MultiAction.class, code++);
+    addToMap(MultiResponse.class, code++);
+
+    // coprocessor execution
+    // Exec no longer exists --> addToMap(Exec.class, code++);
+    code++;
+    addToMap(Increment.class, code++);
+
+    addToMap(KeyOnlyFilter.class, code++);
+
+    // serializable
+    addToMap(Serializable.class, code++);
+
+    addToMap(RandomRowFilter.class, code++);
+
+    addToMap(CompareOp.class, code++);
+
+    addToMap(ColumnRangeFilter.class, code++);
+
+    // HServerLoad no longer exists; increase code so other classes stay the same.
+    code++;
+    //addToMap(HServerLoad.class, code++);
+
+    addToMap(RegionOpeningState.class, code++);
+
+    addToMap(HTableDescriptor[].class, code++);
+
+    addToMap(Append.class, code++);
+
+    addToMap(RowMutations.class, code++);
+
+    addToMap(Message.class, code++);
+
+    //java.lang.reflect.Array is a placeholder for arrays not defined above
+    GENERIC_ARRAY_CODE = code++;
+    addToMap(Array.class, GENERIC_ARRAY_CODE);
+
+    addToMap(RpcController.class, code++);
+
+    // make sure that this is the last statement in this static block
+    NEXT_CLASS_CODE = code;
+  }
+
+  private Class<?> declaredClass;
+  private Object instance;
+  private Configuration conf;
+
+  /** default constructor for writable */
+  HbaseObjectWritableFor96Migration() {
+    super();
+  }
+
+  /**
+   * @param instance
+   */
+  HbaseObjectWritableFor96Migration(Object instance) {
+    set(instance);
+  }
+
+  /**
+   * @param declaredClass
+   * @param instance
+   */
+  HbaseObjectWritableFor96Migration(Class<?> declaredClass, Object instance) {
+    this.declaredClass = declaredClass;
+    this.instance = instance;
+  }
+
+  /** @return the instance, or null if none. */
+  Object get() { return instance; }
+
+  /** @return the class this is meant to be. */
+  Class<?> getDeclaredClass() { return declaredClass; }
+
+  /**
+   * Reset the instance.
+   * @param instance
+   */
+  void set(Object instance) {
+    this.declaredClass = instance.getClass();
+    this.instance = instance;
+  }
+
+  /**
+   * @see java.lang.Object#toString()
+   */
+  @Override
+  public String toString() {
+    return "OW[class=" + declaredClass + ",value=" + instance + "]";
+  }
+
+
+  public void readFields(DataInput in) throws IOException {
+    readObject(in, this, this.conf);
+  }
+
+  public void write(DataOutput out) throws IOException {
+    writeObject(out, instance, declaredClass, conf);
+  }
+
+  public long getWritableSize() {
+    return getWritableSize(instance, declaredClass, conf);
+  }
+
+  private static class NullInstance extends Configured implements Writable {
+    Class<?> declaredClass;
+    /** default constructor for writable */
+    @SuppressWarnings("unused")
+    public NullInstance() { super(null); }
+
+    /**
+     * @param declaredClass
+     * @param conf
+     */
+    public NullInstance(Class<?> declaredClass, Configuration conf) {
+      super(conf);
+      this.declaredClass = declaredClass;
+    }
+
+    public void readFields(DataInput in) throws IOException {
+      this.declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
+    }
+
+    public void write(DataOutput out) throws IOException {
+      writeClassCode(out, this.declaredClass);
+    }
+  }
+
+  static Integer getClassCode(final Class<?> c)
+  throws IOException {
+    Integer code = CLASS_TO_CODE.get(c);
+    if (code == null ) {
+      if (List.class.isAssignableFrom(c)) {
+        code = CLASS_TO_CODE.get(List.class);
+      } else if (Writable.class.isAssignableFrom(c)) {
+        code = CLASS_TO_CODE.get(Writable.class);
+      } else if (c.isArray()) {
+        code = CLASS_TO_CODE.get(Array.class);
+      } else if (Message.class.isAssignableFrom(c)) {
+        code = CLASS_TO_CODE.get(Message.class);
+      } else if (Serializable.class.isAssignableFrom(c)){
+        code = CLASS_TO_CODE.get(Serializable.class);
+      } else if (Scan.class.isAssignableFrom(c)) {
+        code = CLASS_TO_CODE.get(Scan.class);
+      }
+    }
+    return code;
+  }
+
+  /**
+   * @return the next class code in the list.  Used in testing to verify that
+   * additional class codes are not added unexpectedly.
+   */
+  static int getNextClassCode(){
+    return NEXT_CLASS_CODE;
+  }
+
+  /**
+   * Write out the code for passed Class.
+   * @param out
+   * @param c
+   * @throws IOException
+   */
+  static void writeClassCode(final DataOutput out, final Class<?> c)
+      throws IOException {
+    Integer code = getClassCode(c);
+
+    if (code == null) {
+      LOG.error("Unsupported type " + c);
+      StackTraceElement[] els = new Exception().getStackTrace();
+      for(StackTraceElement elem : els) {
+        LOG.error(elem.getMethodName());
+      }
+      throw new UnsupportedOperationException("No code for unexpected " + c);
+    }
+    WritableUtils.writeVInt(out, code);
+  }
+
+  static long getWritableSize(Object instance, Class declaredClass,
+                                     Configuration conf) {
+    return 0L; // no hint is the default.
+  }
+  /**
+   * Write a {@link Writable}, {@link String}, primitive type, or an array of
+   * the preceding.
+   * @param out
+   * @param instance
+   * @param declaredClass
+   * @param conf
+   * @throws IOException
+   */
+  @SuppressWarnings("unchecked")
+  static void writeObject(DataOutput out, Object instance,
+                                 Class declaredClass,
+                                 Configuration conf)
+  throws IOException {
+
+    Object instanceObj = instance;
+    Class declClass = declaredClass;
+
+    if (instanceObj == null) {                       // null
+      instanceObj = new NullInstance(declClass, conf);
+      declClass = Writable.class;
+    }
+    writeClassCode(out, declClass);
+    if (declClass.isArray()) {                // array
+      // If bytearray, just dump it out -- avoid the recursion and
+      // byte-at-a-time we were previously doing.
+      if (declClass.equals(byte [].class)) {
+        Bytes.writeByteArray(out, (byte [])instanceObj);
+      } else {
+        //if it is a Generic array, write the element's type
+        if (getClassCode(declaredClass) == GENERIC_ARRAY_CODE) {
+          Class<?> componentType = declaredClass.getComponentType();
+          writeClass(out, componentType);
+        }
+
+        int length = Array.getLength(instanceObj);
+        out.writeInt(length);
+        for (int i = 0; i < length; i++) {
+          Object item = Array.get(instanceObj, i);
+          writeObject(out, item,
+                    item.getClass(), conf);
+        }
+      }
+    } else if (List.class.isAssignableFrom(declClass)) {
+      List list = (List)instanceObj;
+      int length = list.size();
+      out.writeInt(length);
+      for (int i = 0; i < length; i++) {
+        Object elem = list.get(i);
+        writeObject(out, elem,
+                  elem == null ? Writable.class : elem.getClass(), conf);
+      }
+    } else if (declClass == String.class) {   // String
+      Text.writeString(out, (String)instanceObj);
+    } else if (declClass.isPrimitive()) {     // primitive type
+      if (declClass == Boolean.TYPE) {        // boolean
+        out.writeBoolean(((Boolean)instanceObj).booleanValue());
+      } else if (declClass == Character.TYPE) { // char
+        out.writeChar(((Character)instanceObj).charValue());
+      } else if (declClass == Byte.TYPE) {    // byte
+        out.writeByte(((Byte)instanceObj).byteValue());
+      } else if (declClass == Short.TYPE) {   // short
+        out.writeShort(((Short)instanceObj).shortValue());
+      } else if (declClass == Integer.TYPE) { // int
+        out.writeInt(((Integer)instanceObj).intValue());
+      } else if (declClass == Long.TYPE) {    // long
+        out.writeLong(((Long)instanceObj).longValue());
+      } else if (declClass == Float.TYPE) {   // float
+        out.writeFloat(((Float)instanceObj).floatValue());
+      } else if (declClass == Double.TYPE) {  // double
+        out.writeDouble(((Double)instanceObj).doubleValue());
+      } else if (declClass == Void.TYPE) {    // void
+      } else {
+        throw new IllegalArgumentException("Not a primitive: "+declClass);
+      }
+    } else if (declClass.isEnum()) {         // enum
+      Text.writeString(out, ((Enum)instanceObj).name());
+    } else if (Message.class.isAssignableFrom(declaredClass)) {
+      Text.writeString(out, instanceObj.getClass().getName());
+      ((Message)instance).writeDelimitedTo(
+          DataOutputOutputStream.constructOutputStream(out));
+    } else if (Writable.class.isAssignableFrom(declClass)) { // Writable
+      Class <?> c = instanceObj.getClass();
+      Integer code = CLASS_TO_CODE.get(c);
+      if (code == null) {
+        out.writeByte(NOT_ENCODED);
+        Text.writeString(out, c.getName());
+      } else {
+        writeClassCode(out, c);
+      }
+      ((Writable)instanceObj).write(out);
+    } else if (Serializable.class.isAssignableFrom(declClass)) {
+      Class <?> c = instanceObj.getClass();
+      Integer code = CLASS_TO_CODE.get(c);
+      if (code == null) {
+        out.writeByte(NOT_ENCODED);
+        Text.writeString(out, c.getName());
+      } else {
+        writeClassCode(out, c);
+      }
+      ByteArrayOutputStream bos = null;
+      ObjectOutputStream oos = null;
+      try{
+        bos = new ByteArrayOutputStream();
+        oos = new ObjectOutputStream(bos);
+        oos.writeObject(instanceObj);
+        byte[] value = bos.toByteArray();
+        out.writeInt(value.length);
+        out.write(value);
+      } finally {
+        if(bos!=null) bos.close();
+        if(oos!=null) oos.close();
+      }
+    } else if (Scan.class.isAssignableFrom(declClass)) {
+      Scan scan = (Scan)instanceObj;
+      byte [] scanBytes = ProtobufUtil.toScan(scan).toByteArray();
+      out.writeInt(scanBytes.length);
+      out.write(scanBytes);
+    } else {
+      throw new IOException("Can't write: "+instanceObj+" as "+declClass);
+    }
+  }
+
+  /** Writes the encoded class code as defined in CLASS_TO_CODE, or
+   * the whole class name if not defined in the mapping.
+   */
+  static void writeClass(DataOutput out, Class<?> c) throws IOException {
+    Integer code = CLASS_TO_CODE.get(c);
+    if (code == null) {
+      WritableUtils.writeVInt(out, NOT_ENCODED);
+      Text.writeString(out, c.getName());
+    } else {
+      WritableUtils.writeVInt(out, code);
+    }
+  }
+
+  /** Reads and returns the class as written by {@link #writeClass(DataOutput, Class)} */
+  static Class<?> readClass(Configuration conf, DataInput in) throws IOException {
+    Class<?> instanceClass = null;
+    int b = (byte)WritableUtils.readVInt(in);
+    if (b == NOT_ENCODED) {
+      String className = Text.readString(in);
+      try {
+        instanceClass = getClassByName(conf, className);
+      } catch (ClassNotFoundException e) {
+        LOG.error("Can't find class " + className, e);
+        throw new IOException("Can't find class " + className, e);
+      }
+    } else {
+      instanceClass = CODE_TO_CLASS.get(b);
+    }
+    return instanceClass;
+  }
+
+  /**
+   * Read a {@link Writable}, {@link String}, primitive type, or an array of
+   * the preceding.
+   * @param in
+   * @param conf
+   * @return the object
+   * @throws IOException
+   */
+  static Object readObject(DataInput in, Configuration conf)
+    throws IOException {
+    return readObject(in, null, conf);
+  }
+
+  /**
+   * Read a {@link Writable}, {@link String}, primitive type, or an array of
+   * the preceding.
+   * @param in
+   * @param objectWritable
+   * @param conf
+   * @return the object
+   * @throws IOException
+   */
+  @SuppressWarnings("unchecked")
+  static Object readObject(DataInput in,
+      HbaseObjectWritableFor96Migration objectWritable, Configuration conf)
+  throws IOException {
+    Class<?> declaredClass = CODE_TO_CLASS.get(WritableUtils.readVInt(in));
+    Object instance;
+    if (declaredClass.isPrimitive()) {            // primitive types
+      if (declaredClass == Boolean.TYPE) {             // boolean
+        instance = Boolean.valueOf(in.readBoolean());
+      } else if (declaredClass == Character.TYPE) {    // char
+        instance = Character.valueOf(in.readChar());
+      } else if (declaredClass == Byte.TYPE) {         // byte
+        instance = Byte.valueOf(in.readByte());
+      } else if (declaredClass == Short.TYPE) {        // short
+        instance = Short.valueOf(in.readShort());
+      } else if (declaredClass == Integer.TYPE) {      // int
+        instance = Integer.valueOf(in.readInt());
+      } else if (declaredClass == Long.TYPE) {         // long
+        instance = Long.valueOf(in.readLong());
+      } else if (declaredClass == Float.TYPE) {        // float
+        instance = Float.valueOf(in.readFloat());
+      } else if (declaredClass == Double.TYPE) {       // double
+        instance = Double.valueOf(in.readDouble());
+      } else if (declaredClass == Void.TYPE) {         // void
+        instance = null;
+      } else {
+        throw new IllegalArgumentException("Not a primitive: "+declaredClass);
+      }
+    } else if (declaredClass.isArray()) {              // array
+      if (declaredClass.equals(byte [].class)) {
+        instance = Bytes.readByteArray(in);
+      } else {
+        int length = in.readInt();
+        instance = Array.newInstance(declaredClass.getComponentType(), length);
+        for (int i = 0; i < length; i++) {
+          Array.set(instance, i, readObject(in, conf));
+        }
+      }
+    } else if (declaredClass.equals(Array.class)) { //an array not declared in CLASS_TO_CODE
+      Class<?> componentType = readClass(conf, in);
+      int length = in.readInt();
+      instance = Array.newInstance(componentType, length);
+      for (int i = 0; i < length; i++) {
+        Array.set(instance, i, readObject(in, conf));
+      }
+    } else if (List.class.isAssignableFrom(declaredClass)) {            // List
+      int length = in.readInt();
+      instance = new ArrayList(length);
+      for (int i = 0; i < length; i++) {
+        ((ArrayList)instance).add(readObject(in, conf));
+      }
+    } else if (declaredClass == String.class) {        // String
+      instance = Text.readString(in);
+    } else if (declaredClass.isEnum()) {         // enum
+      instance = Enum.valueOf((Class<? extends Enum>) declaredClass,
+        Text.readString(in));
+    } else if (declaredClass == Message.class) {
+      String className = Text.readString(in);
+      try {
+        declaredClass = getClassByName(conf, className);
+        instance = tryInstantiateProtobuf(declaredClass, in);
+      } catch (ClassNotFoundException e) {
+        LOG.error("Can't find class " + className, e);
+        throw new IOException("Can't find class " + className, e);
+      }
+    } else if (Scan.class.isAssignableFrom(declaredClass)) {
+      int length = in.readInt();
+      byte [] scanBytes = new byte[length];
+      in.readFully(scanBytes);
+      ClientProtos.Scan.Builder scanProto = ClientProtos.Scan.newBuilder();
+      instance = ProtobufUtil.toScan(scanProto.mergeFrom(scanBytes).build());
+    } else {                                      // Writable or Serializable
+      Class instanceClass = null;
+      int b = (byte)WritableUtils.readVInt(in);
+      if (b == NOT_ENCODED) {
+        String className = Text.readString(in);
+        try {
+          instanceClass = getClassByName(conf, className);
+        } catch (ClassNotFoundException e) {
+          LOG.error("Can't find class " + className, e);
+          throw new IOException("Can't find class " + className, e);
+        }
+      } else {
+        instanceClass = CODE_TO_CLASS.get(b);
+      }
+      if(Writable.class.isAssignableFrom(instanceClass)){
+        Writable writable = WritableFactories.newInstance(instanceClass, conf);
+        try {
+          writable.readFields(in);
+        } catch (Exception e) {
+          LOG.error("Error in readFields", e);
+          throw new IOException("Error in readFields" , e);
+        }
+        instance = writable;
+        if (instanceClass == NullInstance.class) {  // null
+          declaredClass = ((NullInstance)instance).declaredClass;
+          instance = null;
+        }
+      } else {
+        int length = in.readInt();
+        byte[] objectBytes = new byte[length];
+        in.readFully(objectBytes);
+        ByteArrayInputStream bis = null;
+        ObjectInputStream ois = null;
+        try {
+          bis = new ByteArrayInputStream(objectBytes);
+          ois = new ObjectInputStream(bis);
+          instance = ois.readObject();
+        } catch (ClassNotFoundException e) {
+          LOG.error("Class not found when attempting to deserialize object", e);
+          throw new IOException("Class not found when attempting to " +
+              "deserialize object", e);
+        } finally {
+          if(bis!=null) bis.close();
+          if(ois!=null) ois.close();
+        }
+      }
+    }
+    if (objectWritable != null) {                 // store values
+      objectWritable.declaredClass = declaredClass;
+      objectWritable.instance = instance;
+    }
+    return instance;
+  }
+
+  /**
+   * Try to instantiate a protocol buffer of the given message class
+   * from the given input stream.
+   *
+   * @param protoClass the class of the generated protocol buffer
+   * @param dataIn the input stream to read from
+   * @return the instantiated Message instance
+   * @throws IOException if an IO problem occurs
+   */
+  static Message tryInstantiateProtobuf(
+      Class<?> protoClass,
+      DataInput dataIn) throws IOException {
+
+    try {
+      if (dataIn instanceof InputStream) {
+        // We can use the built-in parseDelimitedFrom and not have to re-copy
+        // the data
+        Method parseMethod = getStaticProtobufMethod(protoClass,
+            "parseDelimitedFrom", InputStream.class);
+        return (Message)parseMethod.invoke(null, (InputStream)dataIn);
+      } else {
+        // Have to read it into a buffer first, since protobuf doesn't deal
+        // with the DataInput interface directly.
+
+        // Read the size delimiter that writeDelimitedTo writes
+        int size = ProtoUtil.readRawVarint32(dataIn);
+        if (size < 0) {
+          throw new IOException("Invalid size: " + size);
+        }
+
+        byte[] data = new byte[size];
+        dataIn.readFully(data);
+        Method parseMethod = getStaticProtobufMethod(protoClass,
+            "parseFrom", byte[].class);
+        return (Message)parseMethod.invoke(null, data);
+      }
+    } catch (InvocationTargetException e) {
+
+      if (e.getCause() instanceof IOException) {
+        throw (IOException)e.getCause();
+      } else {
+        throw new IOException(e.getCause());
+      }
+    } catch (IllegalAccessException iae) {
+      throw new AssertionError("Could not access parse method in " +
+          protoClass);
+    }
+  }
+
+  static Method getStaticProtobufMethod(Class<?> declaredClass, String method,
+      Class<?> ... args) {
+
+    try {
+      return declaredClass.getMethod(method, args);
+    } catch (Exception e) {
+      // This is a bug in Hadoop - protobufs should all have this static method
+      throw new AssertionError("Protocol buffer class " + declaredClass +
+          " does not have an accessible parseFrom(InputStream) method!");
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  private static Class getClassByName(Configuration conf, String className)
+  throws ClassNotFoundException {
+    if(conf != null) {
+      return conf.getClassByName(className);
+    }
+    ClassLoader cl = Thread.currentThread().getContextClassLoader();
+    if(cl == null) {
+      cl = HbaseObjectWritableFor96Migration.class.getClassLoader();
+    }
+    return Class.forName(className, true, cl);
+  }
+
+  private static void addToMap(final Class<?> clazz, final int code) {
+    CLASS_TO_CODE.put(clazz, code);
+    CODE_TO_CLASS.put(code, clazz);
+  }
+
+  public void setConf(Configuration conf) {
+    this.conf = conf;
+  }
+
+  public Configuration getConf() {
+    return this.conf;
+  }
+}
\ No newline at end of file
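
Both serialization entry points in the class above, writeObject() and readObject(),
are static and package-private, so a round trip can be sketched from inside
org.apache.hadoop.hbase.security.access.  The stream wiring and the helper method
below are illustrative; only the two *Object() calls come from this patch:

    // Sketch only: round-trip a byte[] through the migration writable.
    // Assumes the usual java.io streams plus Configuration/HBaseConfiguration/Bytes.
    static byte[] roundTripSketch() throws IOException {
      Configuration conf = HBaseConfiguration.create();
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      HbaseObjectWritableFor96Migration.writeObject(
          new DataOutputStream(bos), Bytes.toBytes("legacy"), byte[].class, conf);
      DataInputStream in = new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
      return (byte[]) HbaseObjectWritableFor96Migration.readObject(in, conf);
    }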


