hbase-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From st...@apache.org
Subject svn commit: r1478637 [8/9] - in /hbase/trunk: hbase-client/src/main/java/org/apache/hadoop/hbase/ hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/ hbase-client/src/main/java/org/apache/hadoop/hbase/client/ hbase-client/src/main/java/org/apac...
Date Fri, 03 May 2013 03:52:18 GMT
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java Fri May  3 03:52:15 2013
@@ -52,8 +52,8 @@ import org.apache.hadoop.hbase.exception
 import org.apache.hadoop.hbase.exceptions.TableNotFoundException;
 import org.apache.hadoop.hbase.exceptions.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.executor.EventHandler;
-import org.apache.hadoop.hbase.ipc.HBaseClient;
-import org.apache.hadoop.hbase.ipc.HBaseServer;
+import org.apache.hadoop.hbase.ipc.RpcClient;
+import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.master.AssignmentManager;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java Fri May  3 03:52:15 2013
@@ -29,8 +29,8 @@ import org.apache.hadoop.hbase.HBaseTest
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.MiniHBaseCluster.MiniHBaseClusterRegionServer;
-import org.apache.hadoop.hbase.ipc.HBaseClient;
-import org.apache.hadoop.hbase.ipc.HBaseServer;
+import org.apache.hadoop.hbase.ipc.RpcClient;
+import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -59,10 +59,12 @@ public class TestClientScannerRPCTimeout
 
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
-    ((Log4JLogger)HBaseServer.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)HBaseClient.LOG).getLogger().setLevel(Level.ALL);
+    ((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
+    ((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
     ((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
     Configuration conf = TEST_UTIL.getConfiguration();
+    // Report less often so it is easier to see the other rpcs in the logs
+    conf.setInt("hbase.regionserver.msginterval", 3 * 10000);
     conf.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, rpcTimeout);
     conf.setStrings(HConstants.REGION_SERVER_IMPL, RegionServerWithScanTimeout.class.getName());
     conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, CLIENT_RETRIES_NUMBER);
@@ -84,12 +86,14 @@ public class TestClientScannerRPCTimeout
     putToTable(ht, r1);
     putToTable(ht, r2);
     putToTable(ht, r3);
+    LOG.info("Wrote our three values");
     RegionServerWithScanTimeout.seqNoToSleepOn = 1;
     Scan scan = new Scan();
     scan.setCaching(1);
     ResultScanner scanner = ht.getScanner(scan);
     Result result = scanner.next();
     assertTrue("Expected row: row-1", Bytes.equals(r1, result.getRow()));
+    LOG.info("Got expected first row");
     long t1 = System.currentTimeMillis();
     result = scanner.next();
     assertTrue((System.currentTimeMillis() - t1) > rpcTimeout);
@@ -127,7 +131,8 @@ public class TestClientScannerRPCTimeout
     private static boolean sleepAlways = false;
     private static int tryNumber = 0;
 
-    public RegionServerWithScanTimeout(Configuration conf) throws IOException, InterruptedException {
+    public RegionServerWithScanTimeout(Configuration conf)
+    throws IOException, InterruptedException {
       super(conf);
     }
 
@@ -139,6 +144,7 @@ public class TestClientScannerRPCTimeout
         if (this.tableScannerId == request.getScannerId() && 
             (sleepAlways || (!slept && seqNoToSleepOn == request.getNextCallSeq()))) {
           try {
+            LOG.info("SLEEPING " + (rpcTimeout + 500));
             Thread.sleep(rpcTimeout + 500);
           } catch (InterruptedException e) {
           }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java Fri May  3 03:52:15 2013
@@ -19,7 +19,12 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.net.SocketTimeoutException;
+import java.util.Random;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -27,14 +32,22 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.exceptions.MasterNotRunningException;
 import org.apache.hadoop.hbase.MediumTests;
-import org.apache.hadoop.hbase.ipc.RandomTimeoutRpcEngine;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.exceptions.MasterNotRunningException;
+import org.apache.hadoop.hbase.ipc.RpcClient;
+import org.apache.hadoop.hbase.security.User;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import com.google.protobuf.BlockingRpcChannel;
+import com.google.protobuf.Descriptors.MethodDescriptor;
+import com.google.protobuf.Message;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
 @Category(MediumTests.class)
 public class TestClientTimeouts {
   final Log LOG = LogFactory.getLog(getClass());
@@ -46,7 +59,6 @@ public class TestClientTimeouts {
    */
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
-    Configuration conf = TEST_UTIL.getConfiguration();
     TEST_UTIL.startMiniCluster(SLAVES);
   }
 
@@ -68,22 +80,29 @@ public class TestClientTimeouts {
     long lastLimit = HConstants.DEFAULT_HBASE_CLIENT_PREFETCH_LIMIT;
     HConnection lastConnection = null;
     boolean lastFailed = false;
-    int initialInvocations = RandomTimeoutRpcEngine.getNumberOfInvocations();
-
-    RandomTimeoutRpcEngine engine = new RandomTimeoutRpcEngine(TEST_UTIL.getConfiguration());
+    int initialInvocations = RandomTimeoutBlockingRpcChannel.invokations.get();
+    RpcClient rpcClient = new RpcClient(TEST_UTIL.getConfiguration(), TEST_UTIL.getClusterKey()) {
+      // Return my own instance, one that does random timeouts
+      @Override
+      public BlockingRpcChannel createBlockingRpcChannel(ServerName sn,
+          User ticket, int rpcTimeout) {
+        return new RandomTimeoutBlockingRpcChannel(this, sn, ticket, rpcTimeout);
+      }
+    };
     try {
       for (int i = 0; i < 5 || (lastFailed && i < 100); ++i) {
         lastFailed = false;
         // Ensure the HBaseAdmin uses a new connection by changing Configuration.
         Configuration conf = HBaseConfiguration.create(TEST_UTIL.getConfiguration());
         conf.setLong(HConstants.HBASE_CLIENT_PREFETCH_LIMIT, ++lastLimit);
+        HBaseAdmin admin = null;
         try {
-          HBaseAdmin admin = new HBaseAdmin(conf);
+          admin = new HBaseAdmin(conf);
           HConnection connection = admin.getConnection();
           assertFalse(connection == lastConnection);
           lastConnection = connection;
-          // override the connection's rpc engine for timeout testing
-          ((HConnectionManager.HConnectionImplementation)connection).setRpcEngine(engine);
+          // Override the connection's rpc client for timeout testing
+          ((HConnectionManager.HConnectionImplementation)connection).setRpcClient(rpcClient);
           // run some admin commands
           HBaseAdmin.checkHBaseAvailable(conf);
           admin.setBalancerRunning(false, false);
@@ -91,13 +110,43 @@ public class TestClientTimeouts {
           // Since we are randomly throwing SocketTimeoutExceptions, it is possible to get
           // a MasterNotRunningException.  It's a bug if we get other exceptions.
           lastFailed = true;
+        } finally {
+          admin.close();
         }
       }
      // Ensure the RandomTimeoutBlockingRpcChannel is actually being used.
       assertFalse(lastFailed);
-      assertTrue(RandomTimeoutRpcEngine.getNumberOfInvocations() > initialInvocations);
+      assertTrue(RandomTimeoutBlockingRpcChannel.invokations.get() > initialInvocations);
     } finally {
-      engine.close();
+      rpcClient.stop();
+    }
+  }
+
+  /**
+   * Blocking rpc channel that goes via hbase rpc.
+   */
+  static class RandomTimeoutBlockingRpcChannel extends RpcClient.BlockingRpcChannelImplementation {
+    private static final Random RANDOM = new Random(System.currentTimeMillis());
+    public static final double CHANCE_OF_TIMEOUT = 0.3;
+    private static AtomicInteger invokations = new AtomicInteger();
+
+    RandomTimeoutBlockingRpcChannel(final RpcClient rpcClient, final ServerName sn,
+        final User ticket, final int rpcTimeout) {
+      super(rpcClient, sn, ticket, rpcTimeout);
+    }
+
+    @Override
+    public Message callBlockingMethod(MethodDescriptor md,
+        RpcController controller, Message param, Message returnType)
+        throws ServiceException {
+      invokations.getAndIncrement();
+      if (RANDOM.nextFloat() < CHANCE_OF_TIMEOUT) {
+        // throw a ServiceException, because that is the only exception type that
+        // {@link ProtobufRpcEngine} throws.  If this RpcEngine is used with a different
+        // "actual" type, this may not properly mimic the underlying RpcEngine.
+        throw new ServiceException(new SocketTimeoutException("fake timeout"));
+      }
+      return super.callBlockingMethod(md, controller, param, returnType);
     }
   }
-}
+}
\ No newline at end of file

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java Fri May  3 03:52:15 2013
@@ -78,8 +78,8 @@ import org.apache.hadoop.hbase.filter.Wh
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-import org.apache.hadoop.hbase.ipc.HBaseClient;
-import org.apache.hadoop.hbase.ipc.HBaseServer;
+import org.apache.hadoop.hbase.ipc.RpcClient;
+import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
@@ -124,8 +124,8 @@ public class TestFromClientSide {
    */
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
-    ((Log4JLogger)HBaseServer.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)HBaseClient.LOG).getLogger().setLevel(Level.ALL);
+    ((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
+    ((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
     ((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
     Configuration conf = TEST_UTIL.getConfiguration();
     conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java Fri May  3 03:52:15 2013
@@ -36,8 +36,8 @@ import org.apache.hadoop.hbase.HColumnDe
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.LargeTests;
-import org.apache.hadoop.hbase.client.AdminProtocol;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.junit.After;
@@ -116,7 +116,7 @@ public class TestFromClientSide3 {
     HConnection conn = HConnectionManager.getConnection(TEST_UTIL
         .getConfiguration());
     HRegionLocation loc = table.getRegionLocation(row, true);
-    AdminProtocol server = conn.getAdmin(loc.getServerName());
+    AdminProtos.AdminService.BlockingInterface server = conn.getAdmin(loc.getServerName());
     byte[] regName = loc.getRegionInfo().getRegionName();
 
     for (int i = 0; i < nFlushes; i++) {
@@ -163,7 +163,8 @@ public class TestFromClientSide3 {
     // Verify we have multiple store files.
     HRegionLocation loc = hTable.getRegionLocation(row, true);
     byte[] regionName = loc.getRegionInfo().getRegionName();
-    AdminProtocol server = connection.getAdmin(loc.getServerName());
+    AdminProtos.AdminService.BlockingInterface server =
+      connection.getAdmin(loc.getServerName());
     assertTrue(ProtobufUtil.getStoreFiles(
       server, regionName, FAMILY).size() > 1);
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java Fri May  3 03:52:15 2013
@@ -45,7 +45,6 @@ import org.apache.hadoop.hbase.MediumTes
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.client.HConnectionManager.HConnectionImplementation;
-import org.apache.hadoop.hbase.client.HConnectionManager.HConnectionKey;
 import org.apache.hadoop.hbase.exceptions.RegionServerStoppedException;
 import org.apache.hadoop.hbase.exceptions.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.master.ClusterStatusPublisher;
@@ -99,7 +98,7 @@ public class TestHCM {
   IllegalArgumentException, NoSuchFieldException,
   IllegalAccessException, InterruptedException, ZooKeeperConnectionException, IOException {
     HConnection last = null;
-    for (int i = 0; i <= (HConnectionManager.MAX_CACHED_HBASE_INSTANCES * 2); i++) {
+    for (int i = 0; i <= (HConnectionManager.MAX_CACHED_CONNECTION_INSTANCES * 2); i++) {
       // set random key to differentiate the connection from previous ones
       Configuration configuration = HBaseConfiguration.create();
       configuration.set("somekey", String.valueOf(_randy.nextInt()));
@@ -186,9 +185,9 @@ public class TestHCM {
     // Save off current HConnections
     Map<HConnectionKey, HConnectionImplementation> oldHBaseInstances =
         new HashMap<HConnectionKey, HConnectionImplementation>();
-    oldHBaseInstances.putAll(HConnectionManager.HBASE_INSTANCES);
+    oldHBaseInstances.putAll(HConnectionManager.CONNECTION_INSTANCES);
 
-    HConnectionManager.HBASE_INSTANCES.clear();
+    HConnectionManager.CONNECTION_INSTANCES.clear();
 
     try {
       HConnection connection = HConnectionManager.getConnection(TEST_UTIL.getConfiguration());
@@ -198,8 +197,8 @@ public class TestHCM {
         HConnectionManager.getConnection(TEST_UTIL.getConfiguration()));
     } finally {
       // Put original HConnections back
-      HConnectionManager.HBASE_INSTANCES.clear();
-      HConnectionManager.HBASE_INSTANCES.putAll(oldHBaseInstances);
+      HConnectionManager.CONNECTION_INSTANCES.clear();
+      HConnectionManager.CONNECTION_INSTANCES.putAll(oldHBaseInstances);
     }
   }
 

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java Fri May  3 03:52:15 2013
@@ -36,8 +36,8 @@ import org.apache.hadoop.hbase.HBaseTest
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.Waiter;
-import org.apache.hadoop.hbase.ipc.HBaseClient;
-import org.apache.hadoop.hbase.ipc.HBaseServer;
+import org.apache.hadoop.hbase.ipc.RpcClient;
+import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
@@ -53,8 +53,8 @@ import org.junit.experimental.categories
 public class TestMultiParallel {
   private static final Log LOG = LogFactory.getLog(TestMultiParallel.class);
   {
-    ((Log4JLogger)HBaseServer.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)HBaseClient.LOG).getLogger().setLevel(Level.ALL);
+    ((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
+    ((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
   }
   private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
   private static final byte[] VALUE = Bytes.toBytes("value");
@@ -68,8 +68,8 @@ public class TestMultiParallel {
   private static final int slaves = 2; // also used for testing HTable pool size
 
   @BeforeClass public static void beforeClass() throws Exception {
-    ((Log4JLogger)HBaseServer.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)HBaseClient.LOG).getLogger().setLevel(Level.ALL);
+    ((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
+    ((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
     ((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
     UTIL.startMiniCluster(slaves);
     HTable t = UTIL.createTable(Bytes.toBytes(TEST_TABLE), Bytes.toBytes(FAMILY));

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java Fri May  3 03:52:15 2013
@@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.HTableDes
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.exceptions.MasterNotRunningException;
 import org.apache.hadoop.hbase.exceptions.ZooKeeperConnectionException;
-import org.apache.hadoop.hbase.ipc.HBaseClient;
-import org.apache.hadoop.hbase.ipc.HBaseServer;
+import org.apache.hadoop.hbase.ipc.RpcClient;
+import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
@@ -177,8 +177,8 @@ public class TestFilterWithScanLimits {
 
   @BeforeClass
   public static void setUp() throws Exception {
-    ((Log4JLogger)HBaseServer.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)HBaseClient.LOG).getLogger().setLevel(Level.ALL);
+    ((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
+    ((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
     ((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
     TEST_UTIL.startMiniCluster(1);
     initialize(TEST_UTIL.getConfiguration());

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java Fri May  3 03:52:15 2013
@@ -40,32 +40,33 @@ import org.apache.hadoop.hbase.client.HT
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.ipc.RpcClient;
+import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
+import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.apache.commons.logging.impl.Log4JLogger;
 
 /**
  * Tests changing data block encoding settings of a column family.
  */
 @Category(LargeTests.class)
 public class TestChangingEncoding {
-
   private static final Log LOG = LogFactory.getLog(TestChangingEncoding.class);
-
   static final String CF = "EncodingTestCF";
   static final byte[] CF_BYTES = Bytes.toBytes(CF);
 
   private static final int NUM_ROWS_PER_BATCH = 100;
   private static final int NUM_COLS_PER_ROW = 20;
 
-  private static final HBaseTestingUtility TEST_UTIL =
-      new HBaseTestingUtility();
+  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   private static final Configuration conf = TEST_UTIL.getConfiguration();
 
   private static final int TIMEOUT_MS = 240000;
@@ -100,6 +101,8 @@ public class TestChangingEncoding {
   public static void setUpBeforeClass() throws Exception {
     // Use a small flush size to create more HFiles.
     conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 1024);
+    // ((Log4JLogger)RpcServerImplementation.LOG).getLogger().setLevel(Level.TRACE);
+    // ((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.TRACE);
     TEST_UTIL.startMiniCluster();
   }
 
@@ -190,6 +193,7 @@ public class TestChangingEncoding {
     prepareTest("ChangingEncoding");
     for (boolean encodeOnDisk : new boolean[]{false, true}) {
       for (DataBlockEncoding encoding : ENCODINGS_TO_ITERATE) {
+        LOG.info("encoding=" + encoding + ", encodeOnDisk=" + encodeOnDisk);
         setEncodingConf(encoding, encodeOnDisk);
         writeSomeNewData();
         verifyAllData();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java Fri May  3 03:52:15 2013
@@ -28,15 +28,18 @@ import java.net.InetSocketAddress;
 import java.util.ArrayList;
 import java.util.List;
 
+import com.google.common.collect.Lists;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.IpcProtocol;
 import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos;
 import org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg;
 import org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse;
+import org.apache.hadoop.hbase.security.User;
 import org.apache.log4j.AppenderSkeleton;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
@@ -44,6 +47,9 @@ import org.apache.log4j.spi.LoggingEvent
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import com.google.protobuf.BlockingRpcChannel;
+import com.google.protobuf.BlockingService;
+import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
 
 /**
@@ -55,7 +61,7 @@ import com.google.protobuf.ServiceExcept
 public class TestDelayedRpc {
   private static final Log LOG = LogFactory.getLog(TestDelayedRpc.class);
 
-  public static RpcServer rpcServer;
+  public static RpcServerInterface rpcServer;
 
   public static final int UNDELAYED = 0;
   public static final int DELAYED = 1;
@@ -73,23 +79,25 @@ public class TestDelayedRpc {
   private void testDelayedRpc(boolean delayReturnValue) throws Exception {
     Configuration conf = HBaseConfiguration.create();
     InetSocketAddress isa = new InetSocketAddress("localhost", 0);
-    TestRpcImpl instance = new TestRpcImpl(delayReturnValue);
-    rpcServer = HBaseServerRPC.getServer(instance.getClass(), instance,
-        new Class<?>[]{ TestRpcImpl.class },
-        isa.getHostName(), isa.getPort(), 1, 0, true, conf, 0);
+    TestDelayedImplementation instance = new TestDelayedImplementation(delayReturnValue);
+    BlockingService service =
+      TestDelayedRpcProtos.TestDelayedService.newReflectiveBlockingService(instance);
+    rpcServer = new RpcServer(null, "testDelayedRpc",
+        Lists.newArrayList(new RpcServer.BlockingServiceAndInterface(service, null)),
+        isa, 1, 0, conf, 0);
     rpcServer.start();
-
-    ProtobufRpcClientEngine clientEngine =
-        new ProtobufRpcClientEngine(conf, HConstants.CLUSTER_ID_DEFAULT);
+    RpcClient rpcClient = new RpcClient(conf, HConstants.DEFAULT_CLUSTER_ID.toString());
     try {
-      TestRpc client = clientEngine.getProxy(TestRpc.class,
-          rpcServer.getListenerAddress(), conf, 1000);
-
+      BlockingRpcChannel channel = rpcClient.createBlockingRpcChannel(
+          new ServerName(rpcServer.getListenerAddress().getHostName(),
+              rpcServer.getListenerAddress().getPort(), System.currentTimeMillis()),
+          User.getCurrent(), 1000);
+      TestDelayedRpcProtos.TestDelayedService.BlockingInterface stub =
+        TestDelayedRpcProtos.TestDelayedService.newBlockingStub(channel);
       List<Integer> results = new ArrayList<Integer>();
-
-      TestThread th1 = new TestThread(client, true, results);
-      TestThread th2 = new TestThread(client, false, results);
-      TestThread th3 = new TestThread(client, false, results);
+      TestThread th1 = new TestThread(stub, true, results);
+      TestThread th2 = new TestThread(stub, false, results);
+      TestThread th3 = new TestThread(stub, false, results);
       th1.start();
       Thread.sleep(100);
       th2.start();
@@ -104,7 +112,7 @@ public class TestDelayedRpc {
       assertEquals(UNDELAYED, results.get(1).intValue());
       assertEquals(results.get(2).intValue(), delayReturnValue ? DELAYED :  0xDEADBEEF);
     } finally {
-      clientEngine.close();
+      rpcClient.stop();
     }
   }
 
@@ -130,34 +138,41 @@ public class TestDelayedRpc {
     }
   }
 
+  /**
+   * Tests that we see a WARN message in the logs.
+   * @throws Exception
+   */
   @Test
   public void testTooManyDelayedRpcs() throws Exception {
     Configuration conf = HBaseConfiguration.create();
     final int MAX_DELAYED_RPC = 10;
     conf.setInt("hbase.ipc.warn.delayedrpc.number", MAX_DELAYED_RPC);
-
+    // Set up an appender to catch the "Too many delayed calls" that we expect.
     ListAppender listAppender = new ListAppender();
-    Logger log = Logger.getLogger("org.apache.hadoop.ipc.HBaseServer");
+    Logger log = Logger.getLogger("org.apache.hadoop.ipc.RpcServer");
     log.addAppender(listAppender);
     log.setLevel(Level.WARN);
 
+
     InetSocketAddress isa = new InetSocketAddress("localhost", 0);
-    TestRpcImpl instance = new TestRpcImpl(true);
-    rpcServer = HBaseServerRPC.getServer(instance.getClass(), instance,
-        new Class<?>[]{ TestRpcImpl.class },
-        isa.getHostName(), isa.getPort(), 1, 0, true, conf, 0);
+    TestDelayedImplementation instance = new TestDelayedImplementation(true);
+    BlockingService service =
+      TestDelayedRpcProtos.TestDelayedService.newReflectiveBlockingService(instance);
+    rpcServer = new RpcServer(null, "testTooManyDelayedRpcs",
+      Lists.newArrayList(new RpcServer.BlockingServiceAndInterface(service, null)),
+        isa, 1, 0, conf, 0);
     rpcServer.start();
-
-    ProtobufRpcClientEngine clientEngine =
-        new ProtobufRpcClientEngine(conf, HConstants.CLUSTER_ID_DEFAULT);
+    RpcClient rpcClient = new RpcClient(conf, HConstants.DEFAULT_CLUSTER_ID.toString());
     try {
-      TestRpc client = clientEngine.getProxy(TestRpc.class,
-          rpcServer.getListenerAddress(), conf, 1000);
-
+      BlockingRpcChannel channel = rpcClient.createBlockingRpcChannel(
+          new ServerName(rpcServer.getListenerAddress().getHostName(),
+              rpcServer.getListenerAddress().getPort(), System.currentTimeMillis()),
+          User.getCurrent(), 1000);
+      TestDelayedRpcProtos.TestDelayedService.BlockingInterface stub =
+        TestDelayedRpcProtos.TestDelayedService.newBlockingStub(channel);
       Thread threads[] = new Thread[MAX_DELAYED_RPC + 1];
-
       for (int i = 0; i < MAX_DELAYED_RPC; i++) {
-        threads[i] = new TestThread(client, true, null);
+        threads[i] = new TestThread(stub, true, null);
         threads[i].start();
       }
 
@@ -165,7 +180,7 @@ public class TestDelayedRpc {
       assertTrue(listAppender.getMessages().isEmpty());
 
       /* This should give a warning. */
-      threads[MAX_DELAYED_RPC] = new TestThread(client, true, null);
+      threads[MAX_DELAYED_RPC] = new TestThread(stub, true, null);
       threads[MAX_DELAYED_RPC].start();
 
       for (int i = 0; i < MAX_DELAYED_RPC; i++) {
@@ -173,20 +188,16 @@ public class TestDelayedRpc {
       }
 
       assertFalse(listAppender.getMessages().isEmpty());
-      assertTrue(listAppender.getMessages().get(0).startsWith(
-          "Too many delayed calls"));
+      assertTrue(listAppender.getMessages().get(0).startsWith("Too many delayed calls"));
 
       log.removeAppender(listAppender);
     } finally {
-      clientEngine.close();
+      rpcClient.stop();
     }
   }
 
-  public interface TestRpc extends IpcProtocol {
-    TestResponse test(final Object rpcController, TestArg delay) throws ServiceException;
-  }
-
-  private static class TestRpcImpl implements TestRpc {
+  static class TestDelayedImplementation
+  implements TestDelayedRpcProtos.TestDelayedService.BlockingInterface {
     /**
      * Should the return value of delayed call be set at the end of the delay
      * or at call return.
@@ -197,12 +208,12 @@ public class TestDelayedRpc {
      * @param delayReturnValue Should the response to the delayed call be set
      * at the start or the end of the delay.
      */
-    public TestRpcImpl(boolean delayReturnValue) {
+    public TestDelayedImplementation(boolean delayReturnValue) {
       this.delayReturnValue = delayReturnValue;
     }
 
     @Override
-    public TestResponse test(final Object rpcController, final TestArg testArg)
+    public TestResponse test(final RpcController rpcController, final TestArg testArg)
     throws ServiceException {
       boolean delay = testArg.getDelay();
       TestResponse.Builder responseBuilder = TestResponse.newBuilder();
@@ -210,7 +221,7 @@ public class TestDelayedRpc {
         responseBuilder.setResponse(UNDELAYED);
         return responseBuilder.build();
       }
-      final Delayable call = HBaseServer.getCurrentCall();
+      final Delayable call = RpcServer.getCurrentCall();
       call.startDelay(delayReturnValue);
       new Thread() {
         public void run() {
@@ -232,28 +243,30 @@ public class TestDelayedRpc {
   }
 
   private static class TestThread extends Thread {
-    private TestRpc server;
+    private TestDelayedRpcProtos.TestDelayedService.BlockingInterface stub;
     private boolean delay;
     private List<Integer> results;
 
-    public TestThread(TestRpc server, boolean delay, List<Integer> results) {
-      this.server = server;
+    public TestThread(TestDelayedRpcProtos.TestDelayedService.BlockingInterface stub,
+        boolean delay, List<Integer> results) {
+      this.stub = stub;
       this.delay = delay;
       this.results = results;
     }
 
     @Override
     public void run() {
+      Integer result;
       try {
-        Integer result = new Integer(server.test(null, TestArg.newBuilder().setDelay(delay).
+        result = new Integer(stub.test(null, TestArg.newBuilder().setDelay(delay).
           build()).getResponse());
-        if (results != null) {
-          synchronized (results) {
-            results.add(result);
-          }
+      } catch (ServiceException e) {
+        throw new RuntimeException(e);
+      }
+      if (results != null) {
+        synchronized (results) {
+          results.add(result);
         }
-      } catch (Exception e) {
-         fail("Unexpected exception: "+e.getMessage());
       }
     }
   }
@@ -262,22 +275,26 @@ public class TestDelayedRpc {
   public void testEndDelayThrowing() throws IOException {
     Configuration conf = HBaseConfiguration.create();
     InetSocketAddress isa = new InetSocketAddress("localhost", 0);
-    FaultyTestRpc instance = new FaultyTestRpc();
-    rpcServer = HBaseServerRPC.getServer(instance.getClass(), instance,
-        new Class<?>[]{ TestRpcImpl.class },
-        isa.getHostName(), isa.getPort(), 1, 0, true, conf, 0);
+    FaultyTestDelayedImplementation instance = new FaultyTestDelayedImplementation();
+    BlockingService service =
+      TestDelayedRpcProtos.TestDelayedService.newReflectiveBlockingService(instance);
+    rpcServer = new RpcServer(null, "testEndDelayThrowing",
+        Lists.newArrayList(new RpcServer.BlockingServiceAndInterface(service, null)),
+        isa, 1, 0, conf, 0);
     rpcServer.start();
-
-    ProtobufRpcClientEngine clientEngine =
-        new ProtobufRpcClientEngine(conf, HConstants.CLUSTER_ID_DEFAULT);
+    RpcClient rpcClient = new RpcClient(conf, HConstants.DEFAULT_CLUSTER_ID.toString());
     try {
-      TestRpc client = clientEngine.getProxy(TestRpc.class,
-          rpcServer.getListenerAddress(), conf, 1000);
+      BlockingRpcChannel channel = rpcClient.createBlockingRpcChannel(
+        new ServerName(rpcServer.getListenerAddress().getHostName(),
+          rpcServer.getListenerAddress().getPort(), System.currentTimeMillis()),
+        User.getCurrent(), 1000);
+      TestDelayedRpcProtos.TestDelayedService.BlockingInterface stub =
+        TestDelayedRpcProtos.TestDelayedService.newBlockingStub(channel);
 
       int result = 0xDEADBEEF;
 
       try {
-        result = client.test(null, TestArg.newBuilder().setDelay(false).build()).getResponse();
+        result = stub.test(null, TestArg.newBuilder().setDelay(false).build()).getResponse();
       } catch (Exception e) {
         fail("No exception should have been thrown.");
       }
@@ -285,30 +302,36 @@ public class TestDelayedRpc {
 
       boolean caughtException = false;
       try {
-        result = client.test(null, TestArg.newBuilder().setDelay(true).build()).getResponse();
+        result = stub.test(null, TestArg.newBuilder().setDelay(true).build()).getResponse();
       } catch(Exception e) {
         // Exception thrown by server is enclosed in a RemoteException.
-        if (e.getCause().getMessage().contains(
-            "java.lang.Exception: Something went wrong"))
+        if (e.getCause().getMessage().contains("java.lang.Exception: Something went wrong")) {
           caughtException = true;
-        LOG.warn(e);
+        }
+        LOG.warn("Caught exception, expected=" + caughtException);
       }
       assertTrue(caughtException);
     } finally {
-      clientEngine.close();
+      rpcClient.stop();
     }
   }
 
   /**
    * Delayed calls to this class throw an exception.
    */
-  private static class FaultyTestRpc implements TestRpc {
+  private static class FaultyTestDelayedImplementation extends TestDelayedImplementation {
+    public FaultyTestDelayedImplementation() {
+      super(false);
+    }
+
     @Override
-    public TestResponse test(Object rpcController, TestArg arg) {
-      if (!arg.getDelay())
-        return TestResponse.newBuilder().setResponse(UNDELAYED).build();
-      Delayable call = HBaseServer.getCurrentCall();
+    public TestResponse test(RpcController rpcController, TestArg arg)
+    throws ServiceException {
+      LOG.info("In faulty test, delay=" + arg.getDelay());
+      if (!arg.getDelay()) return TestResponse.newBuilder().setResponse(UNDELAYED).build();
+      Delayable call = RpcServer.getCurrentCall();
       call.startDelay(true);
+      LOG.info("In faulty test, delaying");
       try {
         call.endDelayThrowing(new Exception("Something went wrong"));
       } catch (IOException e) {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestHBaseClient.java Fri May  3 03:52:15 2013
@@ -37,7 +37,7 @@ public class TestHBaseClient {
   public void testFailedServer(){
     ManualEnvironmentEdge ee = new ManualEnvironmentEdge();
     EnvironmentEdgeManager.injectEdge(  ee );
-    HBaseClient.FailedServers fs = new HBaseClient.FailedServers(new Configuration());
+    RpcClient.FailedServers fs = new RpcClient.FailedServers(new Configuration());
 
     InetSocketAddress ia = InetSocketAddress.createUnresolved("bad", 12);
     InetSocketAddress ia2 = InetSocketAddress.createUnresolved("bad", 12);  // same server as ia
@@ -55,7 +55,7 @@ public class TestHBaseClient {
     Assert.assertTrue( fs.isFailedServer(ia) );
     Assert.assertTrue( fs.isFailedServer(ia2) );
 
-    ee.incValue( HBaseClient.FAILED_SERVER_EXPIRY_DEFAULT + 1 );
+    ee.incValue( RpcClient.FAILED_SERVER_EXPIRY_DEFAULT + 1 );
     Assert.assertFalse( fs.isFailedServer(ia) );
     Assert.assertFalse( fs.isFailedServer(ia2) );
 
@@ -68,7 +68,7 @@ public class TestHBaseClient {
     Assert.assertTrue( fs.isFailedServer(ia3) );
     Assert.assertTrue( fs.isFailedServer(ia4) );
 
-    ee.incValue( HBaseClient.FAILED_SERVER_EXPIRY_DEFAULT + 1 );
+    ee.incValue( RpcClient.FAILED_SERVER_EXPIRY_DEFAULT + 1 );
     Assert.assertFalse( fs.isFailedServer(ia) );
     Assert.assertFalse( fs.isFailedServer(ia2) );
     Assert.assertFalse( fs.isFailedServer(ia3) );

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java Fri May  3 03:52:15 2013
@@ -18,6 +18,7 @@
   */
 package org.apache.hadoop.hbase.ipc;
 
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 import static org.mockito.Matchers.anyInt;
@@ -25,7 +26,6 @@ import static org.mockito.Mockito.doThro
 import static org.mockito.Mockito.spy;
 
 import java.io.IOException;
-import java.lang.reflect.Method;
 import java.net.InetSocketAddress;
 import java.net.Socket;
 import java.util.ArrayList;
@@ -33,6 +33,7 @@ import java.util.List;
 
 import javax.net.SocketFactory;
 
+import com.google.common.collect.Lists;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -42,12 +43,16 @@ import org.apache.hadoop.hbase.CellScann
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.IpcProtocol;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RowMutations;
+import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto;
+import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto;
+import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto;
+import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto;
+import org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos;
 import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
 import org.apache.hadoop.hbase.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.security.User;
@@ -62,64 +67,115 @@ import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
+import com.google.protobuf.BlockingService;
+import com.google.protobuf.Descriptors.MethodDescriptor;
 import com.google.protobuf.Message;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
 
+/**
+ * Some basic ipc tests.
+ */
 @Category(SmallTests.class)
 public class TestIPC {
   public static final Log LOG = LogFactory.getLog(TestIPC.class);
   static byte [] CELL_BYTES =  Bytes.toBytes("xyz");
   static Cell CELL = new KeyValue(CELL_BYTES, CELL_BYTES, CELL_BYTES, CELL_BYTES);
+  // We are using the test TestRpcServiceProtos generated classes and Service because they are
+  // available and basic with methods like 'echo' and 'ping'.  Below we make a blocking service
+  // by passing in implementation of blocking interface.  We use this service in all tests that
+  // follow.
+  private static final BlockingService SERVICE =
+   TestRpcServiceProtos.TestProtobufRpcProto.newReflectiveBlockingService(
+     new TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface() {
 
-  private static class TestRpcServer extends HBaseServer {
-    TestRpcServer() throws IOException {
-      super("0.0.0.0", 0, 1, 1, HBaseConfiguration.create(), "TestRpcServer", 0);
+    @Override
+    public EmptyResponseProto ping(RpcController controller,
+        EmptyRequestProto request) throws ServiceException {
+      // TODO Auto-generated method stub
+      return null;
+    }
+
+    @Override
+    public EmptyResponseProto error(RpcController controller,
+        EmptyRequestProto request) throws ServiceException {
+      // TODO Auto-generated method stub
+      return null;
     }
 
     @Override
-    public Pair<Message, CellScanner> call(Class<? extends IpcProtocol> protocol, Method method,
-        Message param, final CellScanner cells, long receiveTime, MonitoredRPCHandler status)
-    throws IOException {
-      /*
-      List<Cell> cellsOut = new ArrayList<Cell>();
-      while (cells.advance()) {
-        Cell cell = cells.current();
-        Bytes.equals(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
-          CELL_BYTES, 0, CELL_BYTES.length);
-        cellsOut.add(cell);
+    public EchoResponseProto echo(RpcController controller, EchoRequestProto request)
+    throws ServiceException {
+      if (controller instanceof PayloadCarryingRpcController) {
+        PayloadCarryingRpcController pcrc = (PayloadCarryingRpcController)controller;
+        // If cells, scan them to check we are able to iterate what we were given and since this is
+        // an echo, just put them back on the controller creating a new block.  Tests our block
+        // building.
+        CellScanner cellScanner = pcrc.cellScanner();
+        List<Cell> list = new ArrayList<Cell>();
+        while(cellScanner.advance()) {
+          list.add(cellScanner.current());
+        }
+        cellScanner = CellUtil.createCellScanner(list);
+        ((PayloadCarryingRpcController)controller).setCellScanner(cellScanner);
       }
-      return new Pair<Message, CellScanner>(param, CellUtil.createCellScanner(cellsOut));
-      */
-      return new Pair<Message, CellScanner>(param, null);
+      return EchoResponseProto.newBuilder().setMessage(request.getMessage()).build();
     }
-  }
+  });
 
   /**
-   * A nothing protocol used in test below.
+   * Instance of server.  We actually don't do anything special in here so could just use
+   * RpcServer directly.
    */
-  interface NothingProtocol extends IpcProtocol {
-    void doNothing();
-  }
+  private static class TestRpcServer extends RpcServer {
+    TestRpcServer() throws IOException {
+      super(null, "testRpcServer",
+          Lists.newArrayList(new BlockingServiceAndInterface(SERVICE, null)),
+        new InetSocketAddress("0.0.0.0", 0), 1, 1,
+        HBaseConfiguration.create(), 0);
+    }
 
-  public static class DoNothing implements NothingProtocol {
-    public void doNothing() {}
+    @Override
+    public Pair<Message, CellScanner> call(BlockingService service,
+        MethodDescriptor md, Message param, CellScanner cellScanner,
+        long receiveTime, MonitoredRPCHandler status) throws IOException {
+      return super.call(service, md, param, cellScanner, receiveTime, status);
+    }
   }
 
+  /**
+   * It is hard to verify the compression is actually happening under the wraps.  Hope that if
+   * unsupported, we'll get an exception out of some time (meantime, have to trace it manually
+   * to confirm that compression is happening down in the client and server).
+   * @throws IOException
+   * @throws InterruptedException
+   * @throws SecurityException
+   * @throws NoSuchMethodException
+   */
   @Test
   public void testCompressCellBlock()
   throws IOException, InterruptedException, SecurityException, NoSuchMethodException {
+    // Currently, you set the compressor to use via the "hbase.client.rpc.compressor" config.
     Configuration conf = HBaseConfiguration.create();
     conf.set("hbase.client.rpc.compressor", GzipCodec.class.getCanonicalName());
     TestRpcServer rpcServer = new TestRpcServer();
-    HBaseClient client = new HBaseClient(conf, HConstants.CLUSTER_ID_DEFAULT);
+    RpcClient client = new RpcClient(conf, HConstants.CLUSTER_ID_DEFAULT);
     List<Cell> cells = new ArrayList<Cell>();
-    cells.add(CELL);
+    int count = 3;
+    for (int i = 0; i < count; i++) cells.add(CELL);
     try {
       rpcServer.start();
       InetSocketAddress address = rpcServer.getListenerAddress();
-      // Get any method name... just so it is not null
-      Method m = NothingProtocol.class.getMethod("doNothing");
-      client.call(m, null, CellUtil.createCellScanner(cells), address, NothingProtocol.class,
-        User.getCurrent(), 0);
+      MethodDescriptor md = SERVICE.getDescriptorForType().findMethodByName("echo");
+      EchoRequestProto param = EchoRequestProto.newBuilder().setMessage("hello").build();
+      Pair<Message, CellScanner> r = client.call(md, param, CellUtil.createCellScanner(cells),
+        md.getOutputType().toProto(), User.getCurrent(), address, 0);
+      int index = 0;
+      while (r.getSecond().advance()) {
+        assertTrue(CELL.equals(r.getSecond().current()));
+        index++;
+      }
+      assertEquals(count, index);
     } finally {
       client.stop();
       rpcServer.stop();
@@ -140,11 +196,13 @@ public class TestIPC {
     }).when(spyFactory).createSocket();
 
     TestRpcServer rpcServer = new TestRpcServer();
-    HBaseClient client = new HBaseClient(conf, HConstants.CLUSTER_ID_DEFAULT, spyFactory);
+    RpcClient client = new RpcClient(conf, HConstants.CLUSTER_ID_DEFAULT, spyFactory);
     try {
       rpcServer.start();
       InetSocketAddress address = rpcServer.getListenerAddress();
-      client.call(null, null, null, address, null, User.getCurrent(), 0);
+      MethodDescriptor md = SERVICE.getDescriptorForType().findMethodByName("echo");
+      EchoRequestProto param = EchoRequestProto.newBuilder().setMessage("hello").build();
+      client.call(md, param, null, null, User.getCurrent(), address, 0);
       fail("Expected an exception to have been thrown!");
     } catch (Exception e) {
       LOG.info("Caught expected exception: " + e.toString());
@@ -167,7 +225,7 @@ public class TestIPC {
     int cellcount = Integer.parseInt(args[1]);
     Configuration conf = HBaseConfiguration.create();
     TestRpcServer rpcServer = new TestRpcServer();
-    HBaseClient client = new HBaseClient(conf, HConstants.CLUSTER_ID_DEFAULT);
+    RpcClient client = new RpcClient(conf, HConstants.CLUSTER_ID_DEFAULT);
     KeyValue kv = KeyValueUtil.ensureKeyValue(CELL);
     Put p = new Put(kv.getRow());
     for (int i = 0; i < cellcount; i++) {
@@ -178,8 +236,6 @@ public class TestIPC {
     try {
       rpcServer.start();
       InetSocketAddress address = rpcServer.getListenerAddress();
-      // Get any method name... just so it is not null
-      Method m = NothingProtocol.class.getMethod("doNothing");
       long startTime = System.currentTimeMillis();
       User user = User.getCurrent();
       for (int i = 0; i < cycles; i++) {
@@ -194,7 +250,7 @@ public class TestIPC {
           //  "Thread dump " + Thread.currentThread().getName());
         }
         Pair<Message, CellScanner> response =
-          client.call(m, param, cellScanner, address, NothingProtocol.class, user, 0);
+          client.call(null, param, cellScanner, null, user, address, 0);
         /*
         int count = 0;
         while (p.getSecond().advance()) {
@@ -209,4 +265,4 @@ public class TestIPC {
       rpcServer.stop();
     }
   }
-}
+}
\ No newline at end of file

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java Fri May  3 03:52:15 2013
@@ -20,15 +20,19 @@ package org.apache.hadoop.hbase.ipc;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 
+import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.IpcProtocol;
 import org.apache.hadoop.hbase.MediumTests;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos;
+import org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos;
 import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto;
 import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto;
 import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto;
 import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto;
-import org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto;
+import org.apache.hadoop.hbase.security.User;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 import org.junit.Assert;
@@ -37,30 +41,29 @@ import org.junit.Before;
 import org.junit.After;
 import org.junit.experimental.categories.Category;
 
+import com.google.protobuf.BlockingRpcChannel;
+import com.google.protobuf.BlockingService;
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
 
 /**
  * Test for testing protocol buffer based RPC mechanism.
- * This test depends on test.proto definition of types in
- * hbase-server/src/test/protobuf/test.proto
- * and protobuf service definition from
- * hbase-server/src/test/protobuf/test_rpc_service.proto
+ * This test depends on test.proto definition of types in <code>src/test/protobuf/test.proto</code>
+ * and protobuf service definition from <code>src/test/protobuf/test_rpc_service.proto</code>
  */
 @Category(MediumTests.class)
 public class TestProtoBufRpc {
   public final static String ADDRESS = "0.0.0.0";
-  public final static int PORT = 0;
-  private static InetSocketAddress addr;
-  private static Configuration conf;
-  private static RpcServer server;
-
-  public interface TestRpcService extends TestProtobufRpcProto.BlockingInterface, IpcProtocol {
-    public long VERSION = 1;
-  }
-
-  public static class PBServerImpl implements TestRpcService {
-
+  public static int PORT = 0;
+  private InetSocketAddress isa;
+  private Configuration conf;
+  private RpcServerInterface server;
+
+  /**
+   * Implementation of the test service defined out in TestRpcServiceProtos
+   */
+  static class PBServerImpl
+  implements TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface {
     @Override
     public EmptyResponseProto ping(RpcController unused,
         EmptyRequestProto request) throws ServiceException {
@@ -83,19 +86,21 @@ public class TestProtoBufRpc {
 
   @Before
   public  void setUp() throws IOException { // Setup server for both protocols
-    conf = new Configuration();
+    this.conf = HBaseConfiguration.create();
     Logger log = Logger.getLogger("org.apache.hadoop.ipc.HBaseServer");
     log.setLevel(Level.DEBUG);
     log = Logger.getLogger("org.apache.hadoop.ipc.HBaseServer.trace");
     log.setLevel(Level.TRACE);
     // Create server side implementation
     PBServerImpl serverImpl = new PBServerImpl();
+    BlockingService service =
+      TestRpcServiceProtos.TestProtobufRpcProto.newReflectiveBlockingService(serverImpl);
     // Get RPC server for server side implementation
-    server = HBaseServerRPC.getServer(TestRpcService.class,serverImpl,
-        new Class[]{TestRpcService.class},
-        ADDRESS, PORT, 10, 10, true, conf, 0);
-    addr = server.getListenerAddress();
-    server.start();
+    this.server = new RpcServer(null, "testrpc",
+        Lists.newArrayList(new RpcServer.BlockingServiceAndInterface(service, null)),
+        new InetSocketAddress(ADDRESS, PORT), 10, 10, conf, 0);
+    this.isa = server.getListenerAddress();
+    this.server.start();
   }
 
   @After
@@ -105,27 +110,31 @@ public class TestProtoBufRpc {
 
   @Test
   public void testProtoBufRpc() throws Exception {
-    ProtobufRpcClientEngine clientEngine =
-        new ProtobufRpcClientEngine(conf, HConstants.CLUSTER_ID_DEFAULT);
+    RpcClient rpcClient = new RpcClient(conf, HConstants.CLUSTER_ID_DEFAULT);
     try {
-      TestRpcService client = clientEngine.getProxy(TestRpcService.class, addr, conf, 100000);
+      BlockingRpcChannel channel = rpcClient.createBlockingRpcChannel(
+        new ServerName(this.isa.getHostName(), this.isa.getPort(), System.currentTimeMillis()),
+        User.getCurrent(), 0);
+      TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface stub =
+        TestRpcServiceProtos.TestProtobufRpcProto.newBlockingStub(channel);
       // Test ping method
-      EmptyRequestProto emptyRequest = EmptyRequestProto.newBuilder().build();
-      client.ping(null, emptyRequest);
+      TestProtos.EmptyRequestProto emptyRequest =
+        TestProtos.EmptyRequestProto.newBuilder().build();
+      stub.ping(null, emptyRequest);
 
       // Test echo method
       EchoRequestProto echoRequest = EchoRequestProto.newBuilder().setMessage("hello").build();
-      EchoResponseProto echoResponse = client.echo(null, echoRequest);
+      EchoResponseProto echoResponse = stub.echo(null, echoRequest);
       Assert.assertEquals(echoResponse.getMessage(), "hello");
 
       // Test error method - error should be thrown as RemoteException
       try {
-        client.error(null, emptyRequest);
+        stub.error(null, emptyRequest);
         Assert.fail("Expected exception is not thrown");
       } catch (ServiceException e) {
       }
     } finally {
-      clientEngine.close();
+      rpcClient.stop();
     }
   }
-}
+}
\ No newline at end of file

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestDelayedRpcProtos.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestDelayedRpcProtos.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestDelayedRpcProtos.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/protobuf/generated/TestDelayedRpcProtos.java Fri May  3 03:52:15 2013
@@ -766,6 +766,227 @@ public final class TestDelayedRpcProtos 
     // @@protoc_insertion_point(class_scope:TestResponse)
   }
   
+  public static abstract class TestDelayedService
+      implements com.google.protobuf.Service {
+    protected TestDelayedService() {}
+    
+    public interface Interface {
+      public abstract void test(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse> done);
+      
+    }
+    
+    public static com.google.protobuf.Service newReflectiveService(
+        final Interface impl) {
+      return new TestDelayedService() {
+        @java.lang.Override
+        public  void test(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse> done) {
+          impl.test(controller, request, done);
+        }
+        
+      };
+    }
+    
+    public static com.google.protobuf.BlockingService
+        newReflectiveBlockingService(final BlockingInterface impl) {
+      return new com.google.protobuf.BlockingService() {
+        public final com.google.protobuf.Descriptors.ServiceDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+        
+        public final com.google.protobuf.Message callBlockingMethod(
+            com.google.protobuf.Descriptors.MethodDescriptor method,
+            com.google.protobuf.RpcController controller,
+            com.google.protobuf.Message request)
+            throws com.google.protobuf.ServiceException {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.callBlockingMethod() given method descriptor for " +
+              "wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return impl.test(controller, (org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg)request);
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+        
+        public final com.google.protobuf.Message
+            getRequestPrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getRequestPrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+        
+        public final com.google.protobuf.Message
+            getResponsePrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getResponsePrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+        
+      };
+    }
+    
+    public abstract void test(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse> done);
+    
+    public static final
+        com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.getDescriptor().getServices().get(0);
+    }
+    public final com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+    
+    public final void callMethod(
+        com.google.protobuf.Descriptors.MethodDescriptor method,
+        com.google.protobuf.RpcController controller,
+        com.google.protobuf.Message request,
+        com.google.protobuf.RpcCallback<
+          com.google.protobuf.Message> done) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.callMethod() given method descriptor for wrong " +
+          "service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          this.test(controller, (org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse>specializeCallback(
+              done));
+          return;
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+    
+    public final com.google.protobuf.Message
+        getRequestPrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getRequestPrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+    
+    public final com.google.protobuf.Message
+        getResponsePrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getResponsePrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+    
+    public static Stub newStub(
+        com.google.protobuf.RpcChannel channel) {
+      return new Stub(channel);
+    }
+    
+    public static final class Stub extends org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestDelayedService implements Interface {
+      private Stub(com.google.protobuf.RpcChannel channel) {
+        this.channel = channel;
+      }
+      
+      private final com.google.protobuf.RpcChannel channel;
+      
+      public com.google.protobuf.RpcChannel getChannel() {
+        return channel;
+      }
+      
+      public  void test(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.class,
+            org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.getDefaultInstance()));
+      }
+    }
+    
+    public static BlockingInterface newBlockingStub(
+        com.google.protobuf.BlockingRpcChannel channel) {
+      return new BlockingStub(channel);
+    }
+    
+    public interface BlockingInterface {
+      public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse test(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg request)
+          throws com.google.protobuf.ServiceException;
+    }
+    
+    private static final class BlockingStub implements BlockingInterface {
+      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+        this.channel = channel;
+      }
+      
+      private final com.google.protobuf.BlockingRpcChannel channel;
+      
+      public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse test(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.getDefaultInstance());
+      }
+      
+    }
+  }
+  
   private static com.google.protobuf.Descriptors.Descriptor
     internal_static_TestArg_descriptor;
   private static
@@ -787,9 +1008,10 @@ public final class TestDelayedRpcProtos 
     java.lang.String[] descriptorData = {
       "\n\026test_delayed_rpc.proto\"\030\n\007TestArg\022\r\n\005d" +
       "elay\030\001 \002(\010\" \n\014TestResponse\022\020\n\010response\030\001" +
-      " \002(\005BL\n.org.apache.hadoop.hbase.ipc.prot" +
-      "obuf.generatedB\024TestDelayedRpcProtos\210\001\001\240" +
-      "\001\001"
+      " \002(\00525\n\022TestDelayedService\022\037\n\004test\022\010.Tes" +
+      "tArg\032\r.TestResponseBL\n.org.apache.hadoop" +
+      ".hbase.ipc.protobuf.generatedB\024TestDelay" +
+      "edRpcProtos\210\001\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java Fri May  3 03:52:15 2013
@@ -37,20 +37,19 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.LargeTests;
 import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.exceptions.TableExistsException;
-import org.apache.hadoop.hbase.client.ClientProtocol;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.exceptions.TableExistsException;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.regionserver.TestHRegionServerBulkLoad;
@@ -199,12 +198,13 @@ public class TestLoadIncrementalHFilesSp
   /**
    * Checks that all columns have the expected value and that there is the
    * expected number of rows.
+   * @throws IOException 
    */
-  void assertExpectedTable(String table, int count, int value) {
+  void assertExpectedTable(String table, int count, int value) throws IOException {
+    HTable t = null;
     try {
       assertEquals(util.getHBaseAdmin().listTables(table).length, 1);
-
-      HTable t = new HTable(util.getConfiguration(), table);
+      t = new HTable(util.getConfiguration(), table);
       Scan s = new Scan();
       ResultScanner sr = t.getScanner(s);
       int i = 0;
@@ -219,6 +219,8 @@ public class TestLoadIncrementalHFilesSp
       assertEquals(count, i);
     } catch (IOException e) {
       fail("Failed due to exception");
+    } finally {
+      if (t != null) t.close();
     }
   }
 
@@ -277,7 +279,8 @@ public class TestLoadIncrementalHFilesSp
       thenReturn(loc);
     Mockito.when(c.locateRegion((byte[]) Mockito.any(), (byte[]) Mockito.any())).
       thenReturn(loc);
-    ClientProtocol hri = Mockito.mock(ClientProtocol.class);
+    ClientProtos.ClientService.BlockingInterface hri =
+      Mockito.mock(ClientProtos.ClientService.BlockingInterface.class);
     Mockito.when(hri.bulkLoadHFile((RpcController)Mockito.any(), (BulkLoadHFileRequest)Mockito.any())).
       thenThrow(new ServiceException(new IOException("injecting bulk load error")));
     Mockito.when(c.getClient(Mockito.any(ServerName.class))).

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java Fri May  3 03:52:15 2013
@@ -30,16 +30,15 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.catalog.CatalogTracker;
-import org.apache.hadoop.hbase.client.AdminProtocol;
-import org.apache.hadoop.hbase.client.ClientProtocol;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.exceptions.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.executor.ExecutorService;
-import org.apache.hadoop.hbase.ipc.RpcServer;
+import org.apache.hadoop.hbase.ipc.RpcServerInterface;
 import org.apache.hadoop.hbase.master.TableLockManager.NullTableLockManager;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest;
@@ -100,7 +99,9 @@ import com.google.protobuf.ServiceExcept
  * {@link #setGetResult(byte[], byte[], Result)} for how to fill the backing data
  * store that the get pulls from.
  */
-class MockRegionServer implements AdminProtocol, ClientProtocol, RegionServerServices {
+class MockRegionServer
+implements AdminProtos.AdminService.BlockingInterface,
+ClientProtos.ClientService.BlockingInterface, RegionServerServices {
   private final ServerName sn;
   private final ZooKeeperWatcher zkw;
   private final Configuration conf;
@@ -304,7 +305,7 @@ class MockRegionServer implements AdminP
   }
 
   @Override
-  public RpcServer getRpcServer() {
+  public RpcServerInterface getRpcServer() {
     // TODO Auto-generated method stub
     return null;
   }
@@ -323,7 +324,7 @@ class MockRegionServer implements AdminP
 
   @Override
   public GetResponse get(RpcController controller, GetRequest request)
-      throws ServiceException {
+  throws ServiceException {
     byte[] regionName = request.getRegion().getValue().toByteArray();
     Map<byte [], Result> m = this.gets.get(regionName);
     GetResponse.Builder builder = GetResponse.newBuilder();
@@ -336,7 +337,7 @@ class MockRegionServer implements AdminP
 
   @Override
   public MultiGetResponse multiGet(RpcController controller, MultiGetRequest requests)
-      throws ServiceException {
+  throws ServiceException {
     byte[] regionName = requests.getRegion().getValue().toByteArray();
     Map<byte [], Result> m = this.gets.get(regionName);
     MultiGetResponse.Builder builder = MultiGetResponse.newBuilder();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java Fri May  3 03:52:15 2013
@@ -30,24 +30,23 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.MediumTests;
-import org.apache.hadoop.hbase.exceptions.RegionException;
 import org.apache.hadoop.hbase.RegionTransition;
 import org.apache.hadoop.hbase.Server;
 import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.exceptions.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.catalog.CatalogTracker;
 import org.apache.hadoop.hbase.catalog.MetaMockingUtil;
-import org.apache.hadoop.hbase.client.ClientProtocol;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HConnectionTestingUtility;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.exceptions.RegionException;
+import org.apache.hadoop.hbase.exceptions.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.executor.EventType;
 import org.apache.hadoop.hbase.executor.ExecutorService;
 import org.apache.hadoop.hbase.executor.ExecutorType;
@@ -58,6 +57,7 @@ import org.apache.hadoop.hbase.master.ba
 import org.apache.hadoop.hbase.master.handler.EnableTableHandler;
 import org.apache.hadoop.hbase.master.handler.ServerShutdownHandler;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest;
@@ -583,7 +583,8 @@ public class TestAssignmentManager {
 
     // Need to set up a fake scan of meta for the servershutdown handler
     // Make an RS Interface implementation.  Make it so a scanner can go against it.
-    ClientProtocol implementation = Mockito.mock(ClientProtocol.class);
+    ClientProtos.ClientService.BlockingInterface implementation =
+      Mockito.mock(ClientProtos.ClientService.BlockingInterface.class);
     // Get a meta row result that has region up on SERVERNAME_A
 
     Result r;
@@ -1047,7 +1048,8 @@ public class TestAssignmentManager {
     // messing with. Needed when "new master" joins cluster. AM will try and
     // rebuild its list of user regions and it will also get the HRI that goes
     // with an encoded name by doing a Get on .META.
-    ClientProtocol ri = Mockito.mock(ClientProtocol.class);
+    ClientProtos.ClientService.BlockingInterface ri =
+      Mockito.mock(ClientProtos.ClientService.BlockingInterface.class);
     // Get a meta row result that has region up on SERVERNAME_A for REGIONINFO
     Result r = MetaMockingUtil.getMetaTableRowResult(REGIONINFO, SERVERNAME_A);
     ScanResponse.Builder builder = ScanResponse.newBuilder();

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java Fri May  3 03:52:15 2013
@@ -39,26 +39,25 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.exceptions.NotAllMetaRegionsOnlineException;
 import org.apache.hadoop.hbase.Server;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.TableDescriptors;
 import org.apache.hadoop.hbase.catalog.CatalogTracker;
 import org.apache.hadoop.hbase.catalog.MetaMockingUtil;
-import org.apache.hadoop.hbase.client.AdminProtocol;
-import org.apache.hadoop.hbase.client.ClientProtocol;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.client.HConnectionTestingUtility;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.exceptions.NotAllMetaRegionsOnlineException;
 import org.apache.hadoop.hbase.executor.ExecutorService;
 import org.apache.hadoop.hbase.io.Reference;
 import org.apache.hadoop.hbase.master.CatalogJanitor.SplitParentFirstComparator;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse;
 import org.apache.hadoop.hbase.regionserver.HStore;
@@ -91,7 +90,8 @@ public class TestCatalogJanitor {
     MockServer(final HBaseTestingUtility htu)
     throws NotAllMetaRegionsOnlineException, IOException, InterruptedException {
       this.c = htu.getConfiguration();
-      ClientProtocol ri = Mockito.mock(ClientProtocol.class);
+      ClientProtos.ClientService.BlockingInterface ri =
+        Mockito.mock(ClientProtos.ClientService.BlockingInterface.class);
       MutateResponse.Builder builder = MutateResponse.newBuilder();
       builder.setProcessed(true);
       try {
@@ -106,7 +106,7 @@ public class TestCatalogJanitor {
       // to make our test work.
       this.connection =
         HConnectionTestingUtility.getMockedConnectionAndDecorate(this.c,
-          Mockito.mock(AdminProtocol.class), ri,
+          Mockito.mock(AdminProtos.AdminService.BlockingInterface.class), ri,
           new ServerName("example.org,12345,6789"),
           HRegionInfo.FIRST_META_REGIONINFO);
       // Set hbase.rootdir into test dir.
@@ -114,7 +114,8 @@ public class TestCatalogJanitor {
       Path rootdir = FSUtils.getRootDir(this.c);
       FSUtils.setRootDir(this.c, rootdir);
       this.ct = Mockito.mock(CatalogTracker.class);
-      AdminProtocol hri = Mockito.mock(AdminProtocol.class);
+      AdminProtos.AdminService.BlockingInterface hri =
+        Mockito.mock(AdminProtos.AdminService.BlockingInterface.class);
       Mockito.when(this.ct.getConnection()).thenReturn(this.connection);
       Mockito.when(ct.waitForMetaServerConnection(Mockito.anyLong())).thenReturn(hri);
     }

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java Fri May  3 03:52:15 2013
@@ -22,19 +22,19 @@ package org.apache.hadoop.hbase.master;
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
-import java.net.InetSocketAddress;
 import java.net.SocketTimeoutException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.ipc.HBaseClientRPC;
-import org.apache.hadoop.hbase.MasterMonitorProtocol;
-import org.apache.hadoop.hbase.ipc.ProtobufRpcClientEngine;
+import org.apache.hadoop.hbase.ipc.RpcClient;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest;
+import org.apache.hadoop.hbase.security.User;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import com.google.protobuf.BlockingRpcChannel;
 import com.google.protobuf.ServiceException;
 
 @Category(MediumTests.class)
@@ -46,29 +46,31 @@ public class TestHMasterRPCException {
     TEST_UTIL.startMiniZKCluster();
     Configuration conf = TEST_UTIL.getConfiguration();
     conf.set(HConstants.MASTER_PORT, "0");
-
     HMaster hm = new HMaster(conf);
-
     ServerName sm = hm.getServerName();
-    InetSocketAddress isa = new InetSocketAddress(sm.getHostname(), sm.getPort());
-    ProtobufRpcClientEngine engine =
-        new ProtobufRpcClientEngine(conf, HConstants.CLUSTER_ID_DEFAULT);
+    RpcClient rpcClient = new RpcClient(conf, HConstants.CLUSTER_ID_DEFAULT);
     try {
       int i = 0;
       //retry the RPC a few times; we have seen SocketTimeoutExceptions if we
       //try to connect too soon. Retry on SocketTimeoutException.
       while (i < 20) {
         try {
-          MasterMonitorProtocol inf = engine.getProxy(
-              MasterMonitorProtocol.class, isa, conf, 100 * 10);
-          inf.isMasterRunning(null, IsMasterRunningRequest.getDefaultInstance());
+          BlockingRpcChannel channel =
+            rpcClient.createBlockingRpcChannel(sm, User.getCurrent(), 0);
+          MasterMonitorProtos.MasterMonitorService.BlockingInterface stub =
+            MasterMonitorProtos.MasterMonitorService.newBlockingStub(channel);
+          stub.isMasterRunning(null, IsMasterRunningRequest.getDefaultInstance());
           fail();
         } catch (ServiceException ex) {
           IOException ie = ProtobufUtil.getRemoteException(ex);
           if (!(ie instanceof SocketTimeoutException)) {
-            if(ie.getMessage().startsWith(
-                "org.apache.hadoop.hbase.exceptions.ServerNotRunningYetException: Server is not running yet")) {
+            if (ie.getMessage().startsWith("org.apache.hadoop.hbase.exceptions." +
+                "ServerNotRunningYetException: Server is not running yet")) {
+              // Done.  Got the exception we wanted.
+              System.out.println("Expected exception: " + ie.getMessage());
               return;
+            } else {
+              throw ex;
             }
           } else {
             System.err.println("Got SocketTimeoutException. Will retry. ");
@@ -81,7 +83,7 @@ public class TestHMasterRPCException {
       }
       fail();
     } finally {
-      engine.close();
+      rpcClient.stop();
     }
   }
 }
\ No newline at end of file

Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java?rev=1478637&r1=1478636&r2=1478637&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java Fri May  3 03:52:15 2013
@@ -132,6 +132,4 @@ public class TestRestartCluster {
       UTIL.waitTableAvailable(TABLE);
     }
   }
-
-}
-
+}
\ No newline at end of file



Mime
View raw message