From: arp@apache.org
To: common-commits@hadoop.apache.org
Date: Mon, 13 Nov 2017 21:56:16 -0000
Message-Id: <6be2c1b409fc41b7bca2e0e4ca196291@git.apache.org>
Subject: [1/2] hadoop git commit: HDFS-12705. WebHdfsFileSystem exceptions should retain the caused by exception. Contributed by Hanisha Koneru.

Repository: hadoop
Updated Branches:
  refs/heads/branch-2 4e847d63a -> f671c22e3
  refs/heads/trunk    040a38dc4 -> 4908a8970


HDFS-12705. WebHdfsFileSystem exceptions should retain the caused by exception. Contributed by Hanisha Koneru.
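For context, the patch touches the retry path where WebHdfsFileSystem reflectively re-creates an IOException so that the failing datanode address can be prefixed to the message; before this change the re-created exception kept the stack trace but silently dropped the original cause. The standalone sketch below (hypothetical class and method names, not part of the patch) illustrates the re-wrap pattern with the added initCause() call:

    import java.io.IOException;

    // Minimal, self-contained sketch of the re-wrap pattern. The class and
    // method names here are made up; WebHdfsFileSystem does the equivalent
    // inside its retry loop.
    public class RewrapSketch {

      // Re-create the same exception type with the node address prepended,
      // keeping both the original stack trace and the original cause.
      static IOException rewrap(String node, IOException ioe) {
        try {
          IOException newIoe = ioe.getClass().getConstructor(String.class)
              .newInstance(node + ": " + ioe.getMessage());
          newIoe.initCause(ioe.getCause());        // the line HDFS-12705 adds
          newIoe.setStackTrace(ioe.getStackTrace());
          return newIoe;
        } catch (ReflectiveOperationException | SecurityException e) {
          return ioe;                              // fall back to the original
        }
      }

      public static void main(String[] args) {
        IOException original =
            new IOException("connection reset", new RuntimeException("root cause"));
        IOException wrapped = rewrap("127.0.0.1:50010", original);
        System.out.println(wrapped.getMessage()); // 127.0.0.1:50010: connection reset
        System.out.println(wrapped.getCause());   // java.lang.RuntimeException: root cause
      }
    }

Calling initCause() on the freshly constructed exception, rather than switching to a (String, Throwable) constructor, keeps the reflective single-argument construction working for IOException subclasses that only expose a message constructor.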
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4908a897
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4908a897
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4908a897

Branch: refs/heads/trunk
Commit: 4908a8970eaf500642a9d8427e322032c1ec047a
Parents: 040a38d
Author: Arpit Agarwal
Authored: Mon Nov 13 11:30:39 2017 -0800
Committer: Arpit Agarwal
Committed: Mon Nov 13 11:30:39 2017 -0800

----------------------------------------------------------------------
 .../hadoop/hdfs/web/WebHdfsFileSystem.java      |  1 +
 .../org/apache/hadoop/hdfs/web/TestWebHDFS.java | 59 ++++++++++++++++++++
 2 files changed, 60 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4908a897/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 34f5d6e..c1aef49 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -780,6 +780,7 @@ public class WebHdfsFileSystem extends FileSystem
         try {
           IOException newIoe = ioe.getClass().getConstructor(String.class)
               .newInstance(node + ": " + ioe.getMessage());
+          newIoe.initCause(ioe.getCause());
           newIoe.setStackTrace(ioe.getStackTrace());
           ioe = newIoe;
         } catch (NoSuchMethodException | SecurityException

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4908a897/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
index 3ee8ad0..500ec0a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
@@ -1452,4 +1452,63 @@ public class TestWebHDFS {
       }
     }
   }
+
+  /**
+   * Tests that {@link WebHdfsFileSystem.AbstractRunner} propagates original
+   * exception's stacktrace and cause during runWithRetry attempts.
+   * @throws Exception
+   */
+  @Test
+  public void testExceptionPropogationInAbstractRunner() throws Exception{
+    final Configuration conf = WebHdfsTestUtil.createConf();
+    final Path dir = new Path("/testExceptionPropogationInAbstractRunner");
+
+    conf.setBoolean(HdfsClientConfigKeys.Retry.POLICY_ENABLED_KEY, true);
+
+    final short numDatanodes = 1;
+    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
+        .numDataNodes(numDatanodes)
+        .build();
+    try {
+      cluster.waitActive();
+      final FileSystem fs = WebHdfsTestUtil
+          .getWebHdfsFileSystem(conf, WebHdfsConstants.WEBHDFS_SCHEME);
+
+      //create a file
+      final long length = 1L << 20;
+      final Path file1 = new Path(dir, "testFile");
+
+      DFSTestUtil.createFile(fs, file1, length, numDatanodes, 20120406L);
+
+      //get file status and check that it was written properly.
+      final FileStatus s1 = fs.getFileStatus(file1);
+      assertEquals("Write failed for file " + file1, length, s1.getLen());
+
+      FSDataInputStream in = fs.open(file1);
+      in.read(); // Connection is made only when the first read() occurs.
+      final WebHdfsInputStream webIn =
+          (WebHdfsInputStream)(in.getWrappedStream());
+
+      final String msg = "Throwing dummy exception";
+      IOException ioe = new IOException(msg, new DummyThrowable());
+
+      WebHdfsFileSystem.ReadRunner readRunner = spy(webIn.getReadRunner());
+      doThrow(ioe).when(readRunner).getResponse(any(HttpURLConnection.class));
+
+      webIn.setReadRunner(readRunner);
+
+      try {
+        webIn.read();
+        fail("Read should have thrown IOException.");
+      } catch (IOException e) {
+        assertTrue(e.getMessage().contains(msg));
+        assertTrue(e.getCause() instanceof DummyThrowable);
+      }
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
+  final static class DummyThrowable extends Throwable {
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org