From: epayne@apache.org
To: common-commits@hadoop.apache.org
Message-Id: <8eca86aac7354dc2a4d65ad711db38e1@git.apache.org>
Subject: hadoop git commit: HDFS-9905. WebHdfsFileSystem#runWithRetry should display original stack trace on error. (Wei-Chiu Chuang via iwasakims)
Date: Thu, 9 Jun 2016 20:56:29 +0000 (UTC)

Repository: hadoop
Updated Branches:
  refs/heads/branch-2.7 577466c1f -> 5a3fed060


HDFS-9905. WebHdfsFileSystem#runWithRetry should display original stack trace on error.
(Wei-Chiu Chuang via iwasakims)

cherry-picked from 6fcde2e38da04cae3aad6b13cf442af211f71506


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5a3fed06
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5a3fed06
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5a3fed06

Branch: refs/heads/branch-2.7
Commit: 5a3fed060264596ecdce294b33770faa4d4b51b0
Parents: 577466c
Author: Masatake Iwasaki
Authored: Sat Apr 23 23:37:56 2016 +0900
Committer: Eric Payne
Committed: Thu Jun 9 20:48:58 2016 +0000

----------------------------------------------------------------------
 .../hadoop/hdfs/web/WebHdfsFileSystem.java      |  6 +++--
 .../hadoop/hdfs/web/TestWebHdfsTimeouts.java    | 25 +++++++++++++-------
 2 files changed, 20 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5a3fed06/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index a72fa27..ccfed0b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -641,8 +641,10 @@ public class WebHdfsFileSystem extends FileSystem
           node = url.getAuthority();
         }
         try {
-          ioe = ioe.getClass().getConstructor(String.class)
-              .newInstance(node + ": " + ioe.getMessage());
+          IOException newIoe = ioe.getClass().getConstructor(String.class)
+              .newInstance(node + ": " + ioe.getMessage());
+          newIoe.setStackTrace(ioe.getStackTrace());
+          ioe = newIoe;
         } catch (NoSuchMethodException | SecurityException
             | InstantiationException | IllegalAccessException
             | IllegalArgumentException | InvocationTargetException e) {
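
For context, the change above rebuilds the caught IOException with the node's
authority prepended to the message; before this patch the rebuilt exception
carried a fresh stack trace, hiding where the original failure happened. A
minimal, self-contained sketch of the same pattern follows (plain JDK only;
the names RewrapExample and prependNode are illustrative and not part of the
Hadoop source):

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.SocketTimeoutException;

public class RewrapExample {

  // Rebuild an IOException of the same concrete type with the node name
  // prepended to the message, copying the original stack trace so the real
  // failure site still shows up when the exception is logged.
  static IOException prependNode(String node, IOException ioe) {
    try {
      IOException newIoe = ioe.getClass().getConstructor(String.class)
          .newInstance(node + ": " + ioe.getMessage());
      newIoe.setStackTrace(ioe.getStackTrace()); // the line this patch adds
      return newIoe;
    } catch (NoSuchMethodException | SecurityException | InstantiationException
        | IllegalAccessException | IllegalArgumentException
        | InvocationTargetException e) {
      // No suitable (String) constructor, or reflection failed: keep the original.
      return ioe;
    }
  }

  public static void main(String[] args) {
    IOException original = new SocketTimeoutException("connect timed out");
    IOException rewrapped = prependNode("nn.example.com:50070", original);
    // The message now carries the node, while the stack trace still points
    // at the frame where the original exception was created.
    rewrapped.printStackTrace();
  }
}
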
http://git-wip-us.apache.org/repos/asf/hadoop/blob/5a3fed06/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java
index 5419093..ee97d73 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java
@@ -18,7 +18,6 @@ package org.apache.hadoop.hdfs.web;
 
-import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 
 import java.io.BufferedReader;
@@ -43,6 +42,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -115,7 +115,8 @@ public class TestWebHdfsTimeouts {
       fs.listFiles(new Path("/"), false);
       fail("expected timeout");
     } catch (SocketTimeoutException e) {
-      assertEquals(fs.getUri().getAuthority() + ": connect timed out", e.getMessage());
+      GenericTestUtils.assertExceptionContains(fs.getUri().getAuthority() +
+          ": connect timed out",e);
     }
   }
@@ -128,7 +129,8 @@ public class TestWebHdfsTimeouts {
       fs.listFiles(new Path("/"), false);
       fail("expected timeout");
     } catch (SocketTimeoutException e) {
-      assertEquals(fs.getUri().getAuthority() + ": Read timed out", e.getMessage());
+      GenericTestUtils.assertExceptionContains(fs.getUri().getAuthority() +
+          ": Read timed out", e);
     }
   }
@@ -143,7 +145,8 @@ public class TestWebHdfsTimeouts {
       fs.getDelegationToken("renewer");
       fail("expected timeout");
     } catch (SocketTimeoutException e) {
-      assertEquals(fs.getUri().getAuthority() + ": connect timed out", e.getMessage());
+      GenericTestUtils.assertExceptionContains(fs.getUri().getAuthority() +
+          ": connect timed out", e);
     }
   }
@@ -157,7 +160,8 @@ public class TestWebHdfsTimeouts {
       fs.getDelegationToken("renewer");
       fail("expected timeout");
     } catch (SocketTimeoutException e) {
-      assertEquals(fs.getUri().getAuthority() + ": Read timed out", e.getMessage());
+      GenericTestUtils.assertExceptionContains(
+          fs.getUri().getAuthority() + ": Read timed out", e);
     }
   }
@@ -172,7 +176,8 @@ public class TestWebHdfsTimeouts {
       fs.getFileChecksum(new Path("/file"));
       fail("expected timeout");
     } catch (SocketTimeoutException e) {
-      assertEquals(fs.getUri().getAuthority() + ": connect timed out", e.getMessage());
+      GenericTestUtils.assertExceptionContains(
+          fs.getUri().getAuthority() + ": connect timed out", e);
     }
   }
@@ -187,7 +192,8 @@ public class TestWebHdfsTimeouts {
       fs.getFileChecksum(new Path("/file"));
       fail("expected timeout");
     } catch (SocketTimeoutException e) {
-      assertEquals(fs.getUri().getAuthority() + ": Read timed out", e.getMessage());
+      GenericTestUtils.assertExceptionContains(
+          fs.getUri().getAuthority() + ": Read timed out", e);
     }
   }
@@ -203,7 +209,8 @@ public class TestWebHdfsTimeouts {
       os = fs.create(new Path("/file"));
       fail("expected timeout");
     } catch (SocketTimeoutException e) {
-      assertEquals(fs.getUri().getAuthority() + ": connect timed out", e.getMessage());
+      GenericTestUtils.assertExceptionContains(
+          fs.getUri().getAuthority() + ": connect timed out", e);
     } finally {
       IOUtils.cleanup(LOG, os);
     }
@@ -223,7 +230,7 @@ public class TestWebHdfsTimeouts {
       os = null;
       fail("expected timeout");
     } catch (SocketTimeoutException e) {
-      assertEquals("Read timed out", e.getMessage());
+      GenericTestUtils.assertExceptionContains("Read timed out", e);
     } finally {
       IOUtils.cleanup(LOG, os);
     }
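
The test updates above swap exact assertEquals checks on e.getMessage() for
GenericTestUtils.assertExceptionContains, so each assertion only requires the
expected fragment to appear somewhere in the exception text and the failure
report keeps the full exception. A rough, stand-alone sketch of that style of
check (AssertContainsSketch and its helper are hypothetical stand-ins written
for illustration, not Hadoop's GenericTestUtils):

import java.net.SocketTimeoutException;

public class AssertContainsSketch {

  // Hypothetical stand-in for a containment-style assertion: pass if the
  // exception message contains the expected fragment, otherwise fail with
  // the full exception attached so its stack trace is preserved.
  static void assertExceptionContains(String expected, Throwable t) {
    String msg = t.getMessage();
    if (msg == null || !msg.contains(expected)) {
      throw new AssertionError(
          "Expected to find \"" + expected + "\" in: " + t, t);
    }
  }

  public static void main(String[] args) {
    Throwable e =
        new SocketTimeoutException("nn.example.com:50070: connect timed out");
    // A substring match tolerates the node prefix added by WebHdfsFileSystem.
    assertExceptionContains("connect timed out", e);
    System.out.println("check passed");
  }
}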