hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From a..@apache.org
Subject [2/2] hadoop git commit: HDFS-12705. WebHdfsFileSystem exceptions should retain the caused by exception. Contributed by Hanisha Koneru.
Date Mon, 13 Nov 2017 21:56:17 GMT
HDFS-12705. WebHdfsFileSystem exceptions should retain the caused by exception. Contributed by Hanisha Koneru.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f671c22e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f671c22e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f671c22e

Branch: refs/heads/branch-2
Commit: f671c22e3e4558fdec7d79844e250891b3a765d1
Parents: 4e847d6
Author: Arpit Agarwal <arp@apache.org>
Authored: Mon Nov 13 11:30:39 2017 -0800
Committer: Arpit Agarwal <arp@apache.org>
Committed: Mon Nov 13 13:56:13 2017 -0800

----------------------------------------------------------------------
 .../hadoop/hdfs/web/WebHdfsFileSystem.java      |  1 +
 .../org/apache/hadoop/hdfs/web/TestWebHDFS.java | 59 ++++++++++++++++++++
 2 files changed, 60 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f671c22e/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 3f9839a..587e1fc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -788,6 +788,7 @@ public class WebHdfsFileSystem extends FileSystem
           try {
             IOException newIoe = ioe.getClass().getConstructor(String.class)
                 .newInstance(node + ": " + ioe.getMessage());
+            newIoe.initCause(ioe.getCause());
             newIoe.setStackTrace(ioe.getStackTrace());
             ioe = newIoe;
           } catch (NoSuchMethodException | SecurityException 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f671c22e/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
index 588fd0b..47a5584 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
@@ -1652,4 +1652,63 @@ public class TestWebHDFS {
       }
     }
   }
+
+  /**
+   * Tests that {@link WebHdfsFileSystem.AbstractRunner} propagates original
+   * exception's stacktrace and cause during runWithRetry attempts.
+   * @throws Exception
+   */
+  @Test
+  public void testExceptionPropogationInAbstractRunner() throws Exception{
+    final Configuration conf = WebHdfsTestUtil.createConf();
+    final Path dir = new Path("/testExceptionPropogationInAbstractRunner");
+
+    conf.setBoolean(HdfsClientConfigKeys.Retry.POLICY_ENABLED_KEY, true);
+
+    final short numDatanodes = 1;
+    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
+        .numDataNodes(numDatanodes)
+        .build();
+    try {
+      cluster.waitActive();
+      final FileSystem fs = WebHdfsTestUtil
+          .getWebHdfsFileSystem(conf, WebHdfsConstants.WEBHDFS_SCHEME);
+
+      //create a file
+      final long length = 1L << 20;
+      final Path file1 = new Path(dir, "testFile");
+
+      DFSTestUtil.createFile(fs, file1, length, numDatanodes, 20120406L);
+
+      //get file status and check that it was written properly.
+      final FileStatus s1 = fs.getFileStatus(file1);
+      assertEquals("Write failed for file " + file1, length, s1.getLen());
+
+      FSDataInputStream in = fs.open(file1);
+      in.read(); // Connection is made only when the first read() occurs.
+      final WebHdfsInputStream webIn =
+          (WebHdfsInputStream)(in.getWrappedStream());
+
+      final String msg = "Throwing dummy exception";
+      IOException ioe = new IOException(msg, new DummyThrowable());
+
+      WebHdfsFileSystem.ReadRunner readRunner = spy(webIn.getReadRunner());
+      doThrow(ioe).when(readRunner).getResponse(any(HttpURLConnection.class));
+
+      webIn.setReadRunner(readRunner);
+
+      try {
+        webIn.read();
+        fail("Read should have thrown IOException.");
+      } catch (IOException e) {
+        assertTrue(e.getMessage().contains(msg));
+        assertTrue(e.getCause() instanceof DummyThrowable);
+      }
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
+  final static class DummyThrowable extends Throwable {
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org


Mime
View raw message