From: xgong@apache.org
To: common-commits@hadoop.apache.org
Date: Mon, 09 Jan 2017 21:30:01 -0000
Message-Id: <4291005de6ee4ed4b92984f374aedc97@git.apache.org>
Subject: [06/21] hadoop git commit: MAPREDUCE-6715. Fix Several Unsafe Practices (Contributed by Yufei Gu via Daniel Templeton)

MAPREDUCE-6715. Fix Several Unsafe Practices (Contributed by Yufei Gu via Daniel Templeton)

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0b8a7c18
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0b8a7c18
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0b8a7c18

Branch: refs/heads/YARN-5734
Commit: 0b8a7c18ddbe73b356b3c9baf4460659ccaee095
Parents: 5d18294
Author: Daniel Templeton
Authored: Thu Jan 5 17:55:05 2017 -0800
Committer: Daniel Templeton
Committed: Thu Jan 5 17:56:08 2017 -0800

----------------------------------------------------------------------
 .../org/apache/hadoop/mapred/CleanupQueue.java   |  7 +++-
 .../java/org/apache/hadoop/mapred/MapTask.java   | 10 ++++-
 .../mapreduce/lib/output/TextOutputFormat.java   | 15 ++++----
 .../task/reduce/ShuffleSchedulerImpl.java        | 40 +++++++++++---------
 .../hadoop/examples/dancing/Pentomino.java       |  5 +++
 .../hadoop/examples/terasort/TeraScheduler.java  | 16 ++++----
 6 files changed, 57 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0b8a7c18/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/CleanupQueue.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/CleanupQueue.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/CleanupQueue.java
index 456ed7c..2282b54 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/CleanupQueue.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/CleanupQueue.java
@@ -136,7 +136,12 @@ class CleanupQueue {
           LOG.debug("DELETED " + context.fullPath);
         }
       } catch (InterruptedException t) {
-        LOG.warn("Interrupted deletion of " + context.fullPath);
+        if (context == null) {
+          LOG.warn("Interrupted deletion of an invalid path: Path deletion "
+              + "context is null.");
+        } else {
+          LOG.warn("Interrupted deletion of " + context.fullPath);
+        }
         return;
       } catch (Exception e) {
         LOG.warn("Error deleting path " + context.fullPath + ": " + e);
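(The CleanupQueue hunk above guards a possibly-null deletion context before its fullPath field is read while composing the warning. The standalone sketch below shows the same guard pattern; the class and field names are illustrative stand-ins, not Hadoop's actual types.)

// Illustrative sketch, not Hadoop code: guard a possibly-null object before
// reading one of its fields while composing a log message in a catch block.
import java.util.logging.Logger;

public class NullSafeLogSketch {
  private static final Logger LOG = Logger.getLogger(NullSafeLogSketch.class.getName());

  // Stand-in for CleanupQueue's deletion context; only the field access
  // matters for the pattern.
  static class DeletionContext {
    final String fullPath;
    DeletionContext(String fullPath) { this.fullPath = fullPath; }
  }

  static void onInterruptedDeletion(DeletionContext context) {
    if (context == null) {
      // Without this branch, context.fullPath below would itself throw a
      // NullPointerException from inside the interrupt handler.
      LOG.warning("Interrupted deletion of an invalid path: deletion context is null.");
    } else {
      LOG.warning("Interrupted deletion of " + context.fullPath);
    }
  }

  public static void main(String[] args) {
    onInterruptedDeletion(null);
    onInterruptedDeletion(new DeletionContext("/tmp/job_0001/output"));
  }
}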
" + + "Error in last collector was:" + lastException.toString(), + lastException); + } else { + throw new IOException("Initialization of all the collectors failed."); + } } @SuppressWarnings("unchecked") http://git-wip-us.apache.org/repos/asf/hadoop/blob/0b8a7c18/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java index 1c8ea72..2e49f68 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java @@ -113,19 +113,18 @@ public class TextOutputFormat extends FileOutputFormat { if (isCompressed) { Class codecClass = getOutputCompressorClass(job, GzipCodec.class); - codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf); + codec = ReflectionUtils.newInstance(codecClass, conf); extension = codec.getDefaultExtension(); } Path file = getDefaultWorkFile(job, extension); FileSystem fs = file.getFileSystem(conf); - if (!isCompressed) { - FSDataOutputStream fileOut = fs.create(file, false); - return new LineRecordWriter(fileOut, keyValueSeparator); + FSDataOutputStream fileOut = fs.create(file, false); + if (isCompressed) { + return new LineRecordWriter<>( + new DataOutputStream(codec.createOutputStream(fileOut)), + keyValueSeparator); } else { - FSDataOutputStream fileOut = fs.create(file, false); - return new LineRecordWriter(new DataOutputStream - (codec.createOutputStream(fileOut)), - keyValueSeparator); + return new LineRecordWriter<>(fileOut, keyValueSeparator); } } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/0b8a7c18/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java ---------------------------------------------------------------------- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java index a819771..2b6dc57 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java @@ -433,25 +433,29 @@ public class ShuffleSchedulerImpl implements ShuffleScheduler { public synchronized MapHost getHost() throws InterruptedException { - while(pendingHosts.isEmpty()) { - wait(); - } + while(pendingHosts.isEmpty()) { + wait(); + } - MapHost host = null; - Iterator iter = pendingHosts.iterator(); - int numToPick = random.nextInt(pendingHosts.size()); - for (int i=0; i <= numToPick; ++i) { - host 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0b8a7c18/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java
index 1c8ea72..2e49f68 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/TextOutputFormat.java
@@ -113,19 +113,18 @@ public class TextOutputFormat<K, V> extends FileOutputFormat<K, V> {
     if (isCompressed) {
       Class<? extends CompressionCodec> codecClass =
           getOutputCompressorClass(job, GzipCodec.class);
-      codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
+      codec = ReflectionUtils.newInstance(codecClass, conf);
       extension = codec.getDefaultExtension();
     }
     Path file = getDefaultWorkFile(job, extension);
     FileSystem fs = file.getFileSystem(conf);
-    if (!isCompressed) {
-      FSDataOutputStream fileOut = fs.create(file, false);
-      return new LineRecordWriter<K, V>(fileOut, keyValueSeparator);
+    FSDataOutputStream fileOut = fs.create(file, false);
+    if (isCompressed) {
+      return new LineRecordWriter<>(
+          new DataOutputStream(codec.createOutputStream(fileOut)),
+          keyValueSeparator);
     } else {
-      FSDataOutputStream fileOut = fs.create(file, false);
-      return new LineRecordWriter<K, V>(new DataOutputStream
-          (codec.createOutputStream(fileOut)),
-          keyValueSeparator);
+      return new LineRecordWriter<>(fileOut, keyValueSeparator);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0b8a7c18/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java
index a819771..2b6dc57 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java
@@ -433,25 +433,29 @@ public class ShuffleSchedulerImpl<K,V> implements ShuffleScheduler<K,V> {

   public synchronized MapHost getHost() throws InterruptedException {
-      while(pendingHosts.isEmpty()) {
-        wait();
-      }
+    while(pendingHosts.isEmpty()) {
+      wait();
+    }

-      MapHost host = null;
-      Iterator<MapHost> iter = pendingHosts.iterator();
-      int numToPick = random.nextInt(pendingHosts.size());
-      for (int i=0; i <= numToPick; ++i) {
-        host = iter.next();
-      }
+    Iterator<MapHost> iter = pendingHosts.iterator();
+    // Safe to take one because we know pendingHosts isn't empty
+    MapHost host = iter.next();
+    int numToPick = random.nextInt(pendingHosts.size());
+    for (int i = 0; i < numToPick; ++i) {
+      host = iter.next();
+    }

-      pendingHosts.remove(host);
-      host.markBusy();
+    pendingHosts.remove(host);
+    host.markBusy();

-      LOG.debug("Assigning " + host + " with " + host.getNumKnownMapOutputs() +
-         " to " + Thread.currentThread().getName());
-      SHUFFLE_START.set(Time.monotonicNow());
+    if (LOG.isDebugEnabled()) {
+      LOG.debug(
+          "Assigning " + host + " with " + host.getNumKnownMapOutputs() + " to "
+              + Thread.currentThread().getName());
+    }
+    SHUFFLE_START.set(Time.monotonicNow());

-      return host;
+    return host;
   }

   public synchronized List<InputAttemptIdentifier> getMapsForHost(MapHost host) {
@@ -477,8 +481,10 @@ public class ShuffleSchedulerImpl<K,V> implements ShuffleScheduler<K,V> {
         host.addKnownMap(id);
       }
     }
-    LOG.debug("assigned " + includedMaps + " of " + totalSize + " to " +
-        host + " to " + Thread.currentThread().getName());
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("assigned " + includedMaps + " of " + totalSize + " to " + host
+          + " to " + Thread.currentThread().getName());
+    }
     return result;
   }
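(Both ShuffleSchedulerImpl hunks above wrap their LOG.debug calls in an isDebugEnabled() check so the message string is only concatenated when debug logging is actually on. A minimal sketch of that guard follows, using java.util.logging for self-containment; the real code uses Hadoop's commons-logging Log, so the logger API differs.)

// Illustrative only: guard an expensive log-message construction behind a
// level check so the concatenation is skipped when the level is disabled.
import java.util.logging.Level;
import java.util.logging.Logger;

public class GuardedDebugLogSketch {
  private static final Logger LOG = Logger.getLogger(GuardedDebugLogSketch.class.getName());

  static void assignHost(String host, int knownOutputs) {
    // Without the guard, the string below is built on every call, even when
    // FINE (the rough analogue of debug) is not being logged.
    if (LOG.isLoggable(Level.FINE)) {
      LOG.fine("Assigning " + host + " with " + knownOutputs + " to "
          + Thread.currentThread().getName());
    }
  }

  public static void main(String[] args) {
    assignHost("node-17", 42);
  }
}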

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0b8a7c18/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java
index 5e636b9..2485728 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java
@@ -153,6 +153,11 @@ public class Pentomino {
         break;
       }
     }
+
+    if (piece == null) {
+      continue;
+    }
+
     // for each point where the piece was placed, mark it with the piece name
     for(ColumnName item: row) {
       if (item instanceof Point) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0b8a7c18/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java
index 30b50d8..3e12a3d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java
@@ -73,14 +73,14 @@ class TeraScheduler {

   List<String> readFile(String filename) throws IOException {
     List<String> result = new ArrayList<String>(10000);
-    BufferedReader in = new BufferedReader(
-        new InputStreamReader(new FileInputStream(filename), Charsets.UTF_8));
-    String line = in.readLine();
-    while (line != null) {
-      result.add(line);
-      line = in.readLine();
-    }
-    in.close();
+    try (BufferedReader in = new BufferedReader(
+        new InputStreamReader(new FileInputStream(filename), Charsets.UTF_8))) {
+      String line = in.readLine();
+      while (line != null) {
+        result.add(line);
+        line = in.readLine();
+      }
+    }
     return result;
   }

---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org
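(Postscript on the TeraScheduler hunk above: the reader moves into a try-with-resources block so the stream is closed even when readLine() throws, rather than relying on an in.close() call that the old code skipped on error paths. A small self-contained sketch of the same idiom follows; it uses StandardCharsets in place of the Guava Charsets constant in the real code, and the input path is hypothetical.)

// Illustrative sketch of the try-with-resources idiom used in the
// TeraScheduler change: the reader is closed automatically, even if an
// IOException is thrown while reading.
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

public class ReadLinesSketch {
  static List<String> readFile(String filename) throws IOException {
    List<String> result = new ArrayList<>();
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(new FileInputStream(filename), StandardCharsets.UTF_8))) {
      String line = in.readLine();
      while (line != null) {
        result.add(line);
        line = in.readLine();
      }
    }
    return result;
  }

  public static void main(String[] args) throws IOException {
    // Hypothetical input file, assumed to exist; used only to exercise the method.
    System.out.println(readFile("hosts.txt").size());
  }
}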