hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From w...@apache.org
Subject [2/6] hadoop git commit: HDFS-11395. RequestHedgingProxyProvider#RequestHedgingInvocationHandler hides the Exception thrown from NameNode. Contributed by Nandakumar.
Date Wed, 05 Apr 2017 23:39:43 GMT
HDFS-11395. RequestHedgingProxyProvider#RequestHedgingInvocationHandler hides the Exception
thrown from NameNode. Contributed by Nandakumar.

(cherry picked from commit 55796a0946f80a35055701a34379e374399009c5)
(cherry picked from commit e03d8ff488c5818fbe167fa0143c252d7b6287f0)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e216c156
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e216c156
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e216c156

Branch: refs/heads/branch-2.8
Commit: e216c15625d59f691a877edd97d4766154870ad1
Parents: ab673aa
Author: Jing Zhao <jing9@apache.org>
Authored: Mon Mar 13 14:14:09 2017 -0700
Committer: Andrew Wang <wang@apache.org>
Committed: Wed Apr 5 16:29:04 2017 -0700

----------------------------------------------------------------------
 .../hadoop/io/retry/RetryInvocationHandler.java |  17 ++-
 .../ha/RequestHedgingProxyProvider.java         |  41 +++++--
 .../ha/TestRequestHedgingProxyProvider.java     | 108 ++++++++++++++++++-
 3 files changed, 151 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e216c156/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
index c657d20..6f90ac4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
@@ -238,12 +238,15 @@ public class RetryInvocationHandler<T> implements RpcInvocationHandler
{
     private final long delay;
     private final RetryAction action;
     private final long expectedFailoverCount;
+    private final Exception failException;
 
-    RetryInfo(long delay, RetryAction action, long expectedFailoverCount) {
+    RetryInfo(long delay, RetryAction action, long expectedFailoverCount,
+        Exception failException) {
       this.delay = delay;
       this.retryTime = Time.monotonicNow() + delay;
       this.action = action;
       this.expectedFailoverCount = expectedFailoverCount;
+      this.failException = failException;
     }
 
     boolean isFailover() {
@@ -256,11 +259,16 @@ public class RetryInvocationHandler<T> implements RpcInvocationHandler
{
           && action.action ==  RetryAction.RetryDecision.FAIL;
     }
 
+    Exception getFailException() {
+      return failException;
+    }
+
     static RetryInfo newRetryInfo(RetryPolicy policy, Exception e,
         Counters counters, boolean idempotentOrAtMostOnce,
         long expectedFailoverCount) throws Exception {
       RetryAction max = null;
       long maxRetryDelay = 0;
+      Exception ex = null;
 
       final Iterable<Exception> exceptions = e instanceof MultiException ?
           ((MultiException) e).getExceptions().values()
@@ -277,10 +285,13 @@ public class RetryInvocationHandler<T> implements RpcInvocationHandler
{
 
         if (max == null || max.action.compareTo(a.action) < 0) {
           max = a;
+          if (a.action == RetryAction.RetryDecision.FAIL) {
+            ex = exception;
+          }
         }
       }
 
-      return new RetryInfo(maxRetryDelay, max, expectedFailoverCount);
+      return new RetryInfo(maxRetryDelay, max, expectedFailoverCount, ex);
     }
   }
 
@@ -357,7 +368,7 @@ public class RetryInvocationHandler<T> implements RpcInvocationHandler
{
               + ". Not retrying because " + retryInfo.action.reason, e);
         }
       }
-      throw e;
+      throw retryInfo.getFailException();
     }
 
     log(method, retryInfo.isFailover(), counters.failovers, retryInfo.delay, e);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e216c156/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
index 945e92f..a765e95 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.server.namenode.ha;
 
-import java.io.IOException;
 import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
@@ -122,7 +121,7 @@ public class RequestHedgingProxyProvider<T> extends
           } catch (Exception ex) {
             ProxyInfo<T> tProxyInfo = proxyMap.get(callResultFuture);
             logProxyException(ex, tProxyInfo.proxyInfo);
-            badResults.put(tProxyInfo.proxyInfo, ex);
+            badResults.put(tProxyInfo.proxyInfo, unwrapException(ex));
             LOG.trace("Unsuccessful invocation on [{}]", tProxyInfo.proxyInfo);
             numAttempts--;
           }
@@ -207,16 +206,36 @@ public class RequestHedgingProxyProvider<T> extends
   * @return If the exception is caused by a standby namenode.
    */
   private boolean isStandbyException(Exception ex) {
-    Throwable cause = ex.getCause();
-    if (cause != null) {
-      Throwable cause2 = cause.getCause();
-      if (cause2 instanceof RemoteException) {
-        RemoteException remoteException = (RemoteException)cause2;
-        IOException unwrapRemoteException =
-            remoteException.unwrapRemoteException();
-        return unwrapRemoteException instanceof StandbyException;
-      }
+    Exception exception = unwrapException(ex);
+    if (exception instanceof RemoteException) {
+      return ((RemoteException) exception).unwrapRemoteException()
+          instanceof StandbyException;
     }
     return false;
   }
+
+  /**
+   * Unwraps the exception. <p>
+   * Example:
+   * <blockquote><pre>
+   * if ex is
+   * ExecutionException(InvocationTargetException(SomeException))
+   * returns SomeException
+   * </pre></blockquote>
+   *
+   * @return unwrapped exception
+   */
+  private Exception unwrapException(Exception ex) {
+    if (ex != null) {
+      Throwable cause = ex.getCause();
+      if (cause instanceof Exception) {
+        Throwable innerCause = cause.getCause();
+        if (innerCause instanceof Exception) {
+          return (Exception) innerCause;
+        }
+        return (Exception) cause;
+      }
+    }
+    return ex;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e216c156/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
index 2ccfd86..fec139d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
@@ -17,7 +17,10 @@
  */
 package org.apache.hadoop.hdfs.server.namenode.ha;
 
+import java.io.EOFException;
+import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.net.ConnectException;
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -30,6 +33,8 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider.ProxyFactory;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.io.retry.MultiException;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
@@ -38,6 +43,7 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.mockito.Matchers;
 import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
@@ -198,7 +204,7 @@ public class TestRequestHedgingProxyProvider {
     Assert.assertTrue(stats.length == 1);
     Assert.assertEquals(2, stats[0]);
 
-    // Counter shuodl update only once
+    // Counter should update only once
     Assert.assertEquals(5, counter.get());
 
     stats = provider.getProxy().proxy.getStats();
@@ -347,6 +353,106 @@ public class TestRequestHedgingProxyProvider {
     Assert.assertEquals(12, counter.get());
   }
 
+  @Test
+  public void testHedgingWhenFileNotFoundException() throws Exception {
+    NamenodeProtocols active = Mockito.mock(NamenodeProtocols.class);
+    Mockito
+        .when(active.getBlockLocations(Matchers.anyString(),
+            Matchers.anyLong(), Matchers.anyLong()))
+        .thenThrow(new RemoteException("java.io.FileNotFoundException",
+            "File does not exist!"));
+
+    NamenodeProtocols standby = Mockito.mock(NamenodeProtocols.class);
+    Mockito
+        .when(standby.getBlockLocations(Matchers.anyString(),
+            Matchers.anyLong(), Matchers.anyLong()))
+        .thenThrow(
+            new RemoteException("org.apache.hadoop.ipc.StandbyException",
+            "Standby NameNode"));
+
+    RequestHedgingProxyProvider<NamenodeProtocols> provider =
+        new RequestHedgingProxyProvider<>(conf, nnUri,
+            NamenodeProtocols.class, createFactory(active, standby));
+    try {
+      provider.getProxy().proxy.getBlockLocations("/tmp/test.file", 0L, 20L);
+      Assert.fail("Should fail since the active namenode throws"
+          + " FileNotFoundException!");
+    } catch (MultiException me) {
+      for (Exception ex : me.getExceptions().values()) {
+        Exception rEx = ((RemoteException) ex).unwrapRemoteException();
+        if (rEx instanceof StandbyException) {
+          continue;
+        }
+        Assert.assertTrue(rEx instanceof FileNotFoundException);
+      }
+    }
+    Mockito.verify(active).getBlockLocations(Matchers.anyString(),
+        Matchers.anyLong(), Matchers.anyLong());
+    Mockito.verify(standby).getBlockLocations(Matchers.anyString(),
+        Matchers.anyLong(), Matchers.anyLong());
+  }
+
+  @Test
+  public void testHedgingWhenConnectException() throws Exception {
+    NamenodeProtocols active = Mockito.mock(NamenodeProtocols.class);
+    Mockito.when(active.getStats()).thenThrow(new ConnectException());
+
+    NamenodeProtocols standby = Mockito.mock(NamenodeProtocols.class);
+    Mockito.when(standby.getStats())
+        .thenThrow(
+            new RemoteException("org.apache.hadoop.ipc.StandbyException",
+            "Standby NameNode"));
+
+    RequestHedgingProxyProvider<NamenodeProtocols> provider =
+        new RequestHedgingProxyProvider<>(conf, nnUri,
+            NamenodeProtocols.class, createFactory(active, standby));
+    try {
+      provider.getProxy().proxy.getStats();
+      Assert.fail("Should fail since the active namenode throws"
+          + " ConnectException!");
+    } catch (MultiException me) {
+      for (Exception ex : me.getExceptions().values()) {
+        if (ex instanceof RemoteException) {
+          Exception rEx = ((RemoteException) ex)
+              .unwrapRemoteException();
+          Assert.assertTrue("Unexpected RemoteException: " + rEx.getMessage(),
+              rEx instanceof StandbyException);
+        } else {
+          Assert.assertTrue(ex instanceof ConnectException);
+        }
+      }
+    }
+    Mockito.verify(active).getStats();
+    Mockito.verify(standby).getStats();
+  }
+
+  @Test
+  public void testHedgingWhenConnectAndEOFException() throws Exception {
+    NamenodeProtocols active = Mockito.mock(NamenodeProtocols.class);
+    Mockito.when(active.getStats()).thenThrow(new EOFException());
+
+    NamenodeProtocols standby = Mockito.mock(NamenodeProtocols.class);
+    Mockito.when(standby.getStats()).thenThrow(new ConnectException());
+
+    RequestHedgingProxyProvider<NamenodeProtocols> provider =
+        new RequestHedgingProxyProvider<>(conf, nnUri,
+            NamenodeProtocols.class, createFactory(active, standby));
+    try {
+      provider.getProxy().proxy.getStats();
+      Assert.fail("Should fail since both active and standby namenodes throw"
+          + " Exceptions!");
+    } catch (MultiException me) {
+      for (Exception ex : me.getExceptions().values()) {
+        if (!(ex instanceof ConnectException) &&
+            !(ex instanceof EOFException)) {
+          Assert.fail("Unexpected Exception " + ex.getMessage());
+        }
+      }
+    }
+    Mockito.verify(active).getStats();
+    Mockito.verify(standby).getStats();
+  }
+
   private ProxyFactory<NamenodeProtocols> createFactory(
       NamenodeProtocols... protos) {
     final Iterator<NamenodeProtocols> iterator =


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org


Mime
View raw message