hadoop-common-commits mailing list archives

From szets...@apache.org
Subject svn commit: r1483137 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: ./ dev-support/ src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/ipc/ src/main/java/org/apache/hadoop/util/ src/main/proto/ src/test/core/
Date Thu, 16 May 2013 00:59:48 GMT
Author: szetszwo
Date: Thu May 16 00:59:47 2013
New Revision: 1483137

URL: http://svn.apache.org/r1483137
Log:
svn merge -c 1423189 from trunk for HADOOP-9140 Cleanup rpc PB protos.

Added:
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
      - copied unchanged from r1423189, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
      - copied unchanged from r1423189, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
Removed:
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/RpcPayloadHeader.proto
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/proto/hadoop_rpc.proto
Modified:
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/   (props changed)
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/core/   (props changed)

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common:r1423189

Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1483137&r1=1483136&r2=1483137&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt Thu May 16 00:59:47 2013
@@ -63,6 +63,8 @@ Release 2.0.5-beta - UNRELEASED
     HADOOP-9560. metrics2#JvmMetrics should have max memory size of JVM.
     (Tsuyoshi Ozawa via suresh)
 
+    HADOOP-9140 Cleanup rpc PB protos (sanjay Radia)
+
   OPTIMIZATIONS
 
     HADOOP-9150. Avoid unnecessary DNS resolution attempts for logical URIs

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1423189

Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1483137&r1=1483136&r2=1483137&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml Thu May 16 00:59:47 2013
@@ -272,7 +272,7 @@
     </Match>
     <Match>
       <!-- protobuf generated code -->
-      <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.HadoopRpcProtos.*"/>
+      <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.ProtobufRpcEngineProtos.*"/>
     </Match>
     <Match>
       <!-- protobuf generated code -->
@@ -284,7 +284,7 @@
     </Match>
         <Match>
       <!-- protobuf generated code -->
-      <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.RpcPayloadHeaderProtos.*"/>
+      <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.RpcHeaderProtos.*"/>
     </Match>
     <Match>
       <!-- protobuf generated code -->

Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml?rev=1483137&r1=1483136&r2=1483137&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/pom.xml Thu May 16 00:59:47 2013
@@ -317,9 +317,9 @@
                   <include>HAServiceProtocol.proto</include>
                   <include>IpcConnectionContext.proto</include>
                   <include>ProtocolInfo.proto</include>
-                  <include>RpcPayloadHeader.proto</include>
+                  <include>RpcHeader.proto</include>
                   <include>ZKFCProtocol.proto</include>
-                  <include>hadoop_rpc.proto</include>
+                  <include>ProtobufRpcEngine.proto</include>
                   <include>Security.proto</include>
                 </includes>
               </source>

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1423189

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1423189

Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1483137&r1=1483136&r2=1483137&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Thu May 16 00:59:47 2013
@@ -63,11 +63,10 @@ import org.apache.hadoop.io.WritableUtil
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
-import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
-import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto;
-import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto;
-import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto;
-import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
 import org.apache.hadoop.net.ConnectTimeoutException;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.KerberosInfo;
@@ -201,7 +200,7 @@ public class Client {
    */
   private class Call {
     final int id;               // call id
-    final Writable rpcRequest;  // the serialized rpc request - RpcPayload
+    final Writable rpcRequest;  // the serialized rpc request
     Writable rpcResponse;       // null if rpc has error
     IOException error;          // exception, null if success
     final RPC.RpcKind rpcKind;      // Rpc EngineKind
@@ -277,7 +276,7 @@ public class Client {
     private AtomicBoolean shouldCloseConnection = new AtomicBoolean();  // indicate if the connection is closed
     private IOException closeException; // close reason
     
-    private final Object sendParamsLock = new Object();
+    private final Object sendRpcRequestLock = new Object();
 
     public Connection(ConnectionId remoteId, int serviceClass) throws IOException {
       this.remoteId = remoteId;
@@ -776,7 +775,7 @@ public class Client {
           remoteId.getTicket(),
           authMethod).writeTo(buf);
       
-      // Write out the payload length
+      // Write out the packet length
       int bufLen = buf.getLength();
 
       out.writeInt(bufLen);
@@ -840,7 +839,7 @@ public class Client {
 
       try {
         while (waitForWork()) {//wait here for work - read or close connection
-          receiveResponse();
+          receiveRpcResponse();
         }
       } catch (Throwable t) {
         // This truly is unexpected, since we catch IOException in receiveResponse
@@ -857,11 +856,12 @@ public class Client {
             + connections.size());
     }
 
-    /** Initiates a call by sending the parameter to the remote server.
+    /** Initiates a rpc call by sending the rpc request to the remote server.
      * Note: this is not called from the Connection thread, but by other
      * threads.
+     * @param call - the rpc request
      */
-    public void sendParam(final Call call)
+    public void sendRpcRequest(final Call call)
         throws InterruptedException, IOException {
       if (shouldCloseConnection.get()) {
         return;
@@ -874,17 +874,17 @@ public class Client {
       //
       // Format of a call on the wire:
       // 0) Length of rest below (1 + 2)
-      // 1) PayloadHeader  - is serialized Delimited hence contains length
-      // 2) the Payload - the RpcRequest
+      // 1) RpcRequestHeader  - is serialized Delimited hence contains length
+      // 2) RpcRequest
       //
       // Items '1' and '2' are prepared here. 
       final DataOutputBuffer d = new DataOutputBuffer();
-      RpcPayloadHeaderProto header = ProtoUtil.makeRpcPayloadHeader(
-         call.rpcKind, RpcPayloadOperationProto.RPC_FINAL_PAYLOAD, call.id);
+      RpcRequestHeaderProto header = ProtoUtil.makeRpcRequestHeader(
+         call.rpcKind, OperationProto.RPC_FINAL_PACKET, call.id);
       header.writeDelimitedTo(d);
       call.rpcRequest.write(d);
 
-      synchronized (sendParamsLock) {
+      synchronized (sendRpcRequestLock) {
         Future<?> senderFuture = SEND_PARAMS_EXECUTOR.submit(new Runnable() {
           @Override
           public void run() {
@@ -900,7 +900,7 @@ public class Client {
                 byte[] data = d.getData();
                 int totalLength = d.getLength();
                 out.writeInt(totalLength); // Total Length
-                out.write(data, 0, totalLength);//PayloadHeader + RpcRequest
+                out.write(data, 0, totalLength);// RpcRequestHeader + RpcRequest
                 out.flush();
               }
             } catch (IOException e) {
@@ -935,7 +935,7 @@ public class Client {
     /* Receive a response.
      * Because only one receiver, so no synchronization on in.
      */
-    private void receiveResponse() {
+    private void receiveRpcResponse() {
       if (shouldCloseConnection.get()) {
         return;
       }
@@ -1236,12 +1236,12 @@ public class Client {
     Call call = new Call(rpcKind, rpcRequest);
     Connection connection = getConnection(remoteId, call, serviceClass);
     try {
-      connection.sendParam(call);                 // send the parameter
+      connection.sendRpcRequest(call);                 // send the rpc request
     } catch (RejectedExecutionException e) {
       throw new IOException("connection has been closed", e);
     } catch (InterruptedException e) {
       Thread.currentThread().interrupt();
-      LOG.warn("interrupted waiting to send params to server", e);
+      LOG.warn("interrupted waiting to send rpc request to server", e);
       throw new IOException(e);
     }
 

Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1483137&r1=1483136&r2=1483137&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java Thu May 16 00:59:47 2013
@@ -39,7 +39,7 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.ipc.Client.ConnectionId;
 import org.apache.hadoop.ipc.RPC.RpcInvoker;
-import org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto;
+import org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestProto;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -128,10 +128,10 @@ public class ProtobufRpcEngine implement
           .getProtocolVersion(protocol);
     }
 
-    private HadoopRpcRequestProto constructRpcRequest(Method method,
+    private RequestProto constructRpcRequest(Method method,
         Object[] params) throws ServiceException {
-      HadoopRpcRequestProto rpcRequest;
-      HadoopRpcRequestProto.Builder builder = HadoopRpcRequestProto
+      RequestProto rpcRequest;
+      RequestProto.Builder builder = RequestProto
           .newBuilder();
       builder.setMethodName(method.getName());
 
@@ -190,7 +190,7 @@ public class ProtobufRpcEngine implement
         startTime = Time.now();
       }
 
-      HadoopRpcRequestProto rpcRequest = constructRpcRequest(method, args);
+      RequestProto rpcRequest = constructRpcRequest(method, args);
       RpcResponseWritable val = null;
       
       if (LOG.isTraceEnabled()) {
@@ -271,13 +271,13 @@ public class ProtobufRpcEngine implement
    * Writable Wrapper for Protocol Buffer Requests
    */
   private static class RpcRequestWritable implements Writable {
-    HadoopRpcRequestProto message;
+    RequestProto message;
 
     @SuppressWarnings("unused")
     public RpcRequestWritable() {
     }
 
-    RpcRequestWritable(HadoopRpcRequestProto message) {
+    RpcRequestWritable(RequestProto message) {
       this.message = message;
     }
 
@@ -292,7 +292,7 @@ public class ProtobufRpcEngine implement
       int length = ProtoUtil.readRawVarint32(in);
       byte[] bytes = new byte[length];
       in.readFully(bytes);
-      message = HadoopRpcRequestProto.parseFrom(bytes);
+      message = RequestProto.parseFrom(bytes);
     }
     
     @Override
@@ -426,7 +426,7 @@ public class ProtobufRpcEngine implement
       public Writable call(RPC.Server server, String protocol,
           Writable writableRequest, long receiveTime) throws Exception {
         RpcRequestWritable request = (RpcRequestWritable) writableRequest;
-        HadoopRpcRequestProto rpcRequest = request.message;
+        RequestProto rpcRequest = request.message;
         String methodName = rpcRequest.getMethodName();
         String protoName = rpcRequest.getDeclaringClassProtocolName();
         long clientVersion = rpcRequest.getClientProtocolVersion();
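
For reference, the renamed request message is built the same way as before, just under the new generated class name. A minimal sketch, assuming the usual protobuf-generated setters that mirror the getters used above; the method and protocol names are hypothetical placeholders:

    import org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestProto;

    // Illustrative only; constructRpcRequest above fills these fields from the
    // invoked Method and the protocol metadata.
    public class RequestProtoSketch {
      static RequestProto buildRequest() {
        return RequestProto.newBuilder()
            .setMethodName("echo")                          // hypothetical method name
            .setDeclaringClassProtocolName("TestProtocol")  // hypothetical protocol name
            .setClientProtocolVersion(1L)                   // assumed generated setter
            .build();
      }
    }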

Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1483137&r1=1483136&r2=1483137&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Thu May 16 00:59:47 2013
@@ -80,7 +80,8 @@ import org.apache.hadoop.ipc.RPC.Version
 import org.apache.hadoop.ipc.metrics.RpcDetailedMetrics;
 import org.apache.hadoop.ipc.metrics.RpcMetrics;
 import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
-import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.*;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.*;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.SaslRpcServer;
@@ -158,7 +159,7 @@ public abstract class Server {
   public static final ByteBuffer HEADER = ByteBuffer.wrap("hrpc".getBytes());
   
   /**
-   * Serialization type for ConnectionContext and RpcPayloadHeader
+   * Serialization type for ConnectionContext and RpcRequestHeader
    */
   public enum IpcSerializationType {
     // Add new serialization type to the end without affecting the enum order
@@ -195,7 +196,7 @@ public abstract class Server {
   // 4 : Introduced SASL security layer
   // 5 : Introduced use of {@link ArrayPrimitiveWritable$Internal}
   //     in ObjectWritable to efficiently transmit arrays of primitives
-  // 6 : Made RPC payload header explicit
+  // 6 : Made RPC Request header explicit
   // 7 : Changed Ipc Connection Header to use Protocol buffers
   // 8 : SASL server always sends a final response
   public static final byte CURRENT_VERSION = 8;
@@ -1650,14 +1651,15 @@ public abstract class Server {
     private void processData(byte[] buf) throws  IOException, InterruptedException {
       DataInputStream dis =
         new DataInputStream(new ByteArrayInputStream(buf));
-      RpcPayloadHeaderProto header = RpcPayloadHeaderProto.parseDelimitedFrom(dis);
+      RpcRequestHeaderProto header = RpcRequestHeaderProto.parseDelimitedFrom(dis);
         
       if (LOG.isDebugEnabled())
         LOG.debug(" got #" + header.getCallId());
       if (!header.hasRpcOp()) {
-        throw new IOException(" IPC Server: No rpc op in rpcPayloadHeader");
+        throw new IOException(" IPC Server: No rpc op in rpcRequestHeader");
       }
-      if (header.getRpcOp() != RpcPayloadOperationProto.RPC_FINAL_PAYLOAD) {
+      if (header.getRpcOp() != 
+          RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET) {
         throw new IOException("IPC Server does not implement operation" + 
               header.getRpcOp());
       }
@@ -1665,7 +1667,7 @@ public abstract class Server {
       // (Note it would make more sense to have the handler deserialize but 
       // we continue with this original design.
       if (!header.hasRpcKind()) {
-        throw new IOException(" IPC Server: No rpc kind in rpcPayloadHeader");
+        throw new IOException(" IPC Server: No rpc kind in rpcRequestHeader");
       }
       Class<? extends Writable> rpcRequestClass = 
           getRpcRequestWrapper(header.getRpcKind());
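
The server side mirrors the client framing: processData above reads the delimited RpcRequestHeaderProto back off the packet and validates it before deserializing the rpc request. A minimal sketch of that validation with the renamed classes (the standalone class and method are illustrative, not part of this commit):

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;

    import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto;

    // Illustrative helper, not part of this commit.
    public class RpcHeaderParseSketch {
      static RpcRequestHeaderProto readHeader(byte[] buf) throws IOException {
        DataInputStream dis = new DataInputStream(new ByteArrayInputStream(buf));
        // writeDelimitedTo on the client means parseDelimitedFrom stops exactly
        // at the start of the rpc request body.
        RpcRequestHeaderProto header = RpcRequestHeaderProto.parseDelimitedFrom(dis);
        if (!header.hasRpcOp()) {
          throw new IOException("No rpc op in rpcRequestHeader");
        }
        if (header.getRpcOp() != RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET) {
          throw new IOException("IPC Server does not implement operation " + header.getRpcOp());
        }
        if (!header.hasRpcKind()) {
          throw new IOException("No rpc kind in rpcRequestHeader");
        }
        return header;
      }
    }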

Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java?rev=1483137&r1=1483136&r2=1483137&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java Thu May 16 00:59:47 2013
@@ -24,7 +24,7 @@ import java.io.IOException;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
 import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto;
-import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.*;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.*;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.UserGroupInformation;
 
@@ -157,9 +157,9 @@ public abstract class ProtoUtil {
     return null;
   }
  
-  public static RpcPayloadHeaderProto makeRpcPayloadHeader(RPC.RpcKind rpcKind,
-      RpcPayloadOperationProto operation, int callId) {
-    RpcPayloadHeaderProto.Builder result = RpcPayloadHeaderProto.newBuilder();
+  public static RpcRequestHeaderProto makeRpcRequestHeader(RPC.RpcKind rpcKind,
+      RpcRequestHeaderProto.OperationProto operation, int callId) {
+    RpcRequestHeaderProto.Builder result = RpcRequestHeaderProto.newBuilder();
     result.setRpcKind(convert(rpcKind)).setRpcOp(operation).setCallId(callId);
     return result.build();
   }

Propchange: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1423189


