From: zjshen@apache.org
To: common-commits@hadoop.apache.org
Reply-To: common-dev@hadoop.apache.org
Mailing-List: contact common-commits-help@hadoop.apache.org; run by ezmlm
Date: Mon, 06 Apr 2015 19:22:49 -0000
X-Mailer: ASF-Git Admin Mailer
Subject: [19/50] [abbrv] hadoop git commit: HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than DFSOutputStream#writeChunk (cmccabe)

HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than DFSOutputStream#writeChunk (cmccabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f4afce08
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f4afce08
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f4afce08

Branch: refs/heads/YARN-2928
Commit: f4afce0801bc488e301aee820a8091252bbc2748
Parents: f066e73
Author: Colin Patrick Mccabe
Authored: Wed Apr 1 13:55:40 2015 -0700
Committer: Zhijie Shen
Committed: Mon Apr 6 12:08:12 2015 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/fs/FSOutputSummer.java    | 20 ++++++++++++++++----
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |  3 +++
 .../org/apache/hadoop/hdfs/DFSOutputStream.java | 15 ++++-----------
 .../org/apache/hadoop/tracing/TestTracing.java  |  4 ++--
 4 files changed, 25 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4afce08/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
index 13a5e26..d2998b6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
@@ -21,6 +21,8 @@ package org.apache.hadoop.fs;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.util.DataChecksum;
+import org.apache.htrace.NullScope;
+import org.apache.htrace.TraceScope;
 
 import java.io.IOException;
 import java.io.OutputStream;
@@ -194,16 +196,26 @@ abstract public class FSOutputSummer extends OutputStream {
     return sum.getChecksumSize();
   }
 
+  protected TraceScope createWriteTraceScope() {
+    return NullScope.INSTANCE;
+  }
+
   /** Generate checksums for the given data chunks and output chunks & checksums
    * to the underlying output stream.
    */
   private void writeChecksumChunks(byte b[], int off, int len)
   throws IOException {
     sum.calculateChunkedSums(b, off, len, checksum, 0);
-    for (int i = 0; i < len; i += sum.getBytesPerChecksum()) {
-      int chunkLen = Math.min(sum.getBytesPerChecksum(), len - i);
-      int ckOffset = i / sum.getBytesPerChecksum() * getChecksumSize();
-      writeChunk(b, off + i, chunkLen, checksum, ckOffset, getChecksumSize());
+    TraceScope scope = createWriteTraceScope();
+    try {
+      for (int i = 0; i < len; i += sum.getBytesPerChecksum()) {
+        int chunkLen = Math.min(sum.getBytesPerChecksum(), len - i);
+        int ckOffset = i / sum.getBytesPerChecksum() * getChecksumSize();
+        writeChunk(b, off + i, chunkLen, checksum, ckOffset,
+            getChecksumSize());
+      }
+    } finally {
+      scope.close();
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4afce08/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 435fdd7..b5591e0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -378,6 +378,9 @@ Release 2.8.0 - UNRELEASED
 
   OPTIMIZATIONS
 
+    HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than
+    DFSOutputStream#writeChunk (cmccabe)
+
   BUG FIXES
 
     HDFS-7501. TransactionsSinceLastCheckpoint can be negative on SBNs.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4afce08/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java
index 933d8e6..c88639d 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java
@@ -372,21 +372,14 @@ public class DFSOutputStream extends FSOutputSummer
     }
   }
 
+  protected TraceScope createWriteTraceScope() {
+    return dfsClient.getPathTraceScope("DFSOutputStream#write", src);
+  }
+
   // @see FSOutputSummer#writeChunk()
   @Override
   protected synchronized void writeChunk(byte[] b, int offset, int len,
       byte[] checksum, int ckoff, int cklen) throws IOException {
-    TraceScope scope =
-        dfsClient.getPathTraceScope("DFSOutputStream#writeChunk", src);
-    try {
-      writeChunkImpl(b, offset, len, checksum, ckoff, cklen);
-    } finally {
-      scope.close();
-    }
-  }
-
-  private synchronized void writeChunkImpl(byte[] b, int offset, int len,
-      byte[] checksum, int ckoff, int cklen) throws IOException {
     dfsClient.checkOpen();
     checkClosed();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4afce08/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
index 3720abe..01361b5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
@@ -89,7 +89,7 @@ public class TestTracing {
       "org.apache.hadoop.hdfs.protocol.ClientProtocol.complete",
       "ClientNamenodeProtocol#complete",
       "newStreamForCreate",
-      "DFSOutputStream#writeChunk",
+      "DFSOutputStream#write",
       "DFSOutputStream#close",
       "dataStreamer",
       "OpWriteBlockProto",
@@ -117,7 +117,7 @@ public class TestTracing {
       "org.apache.hadoop.hdfs.protocol.ClientProtocol.complete",
       "ClientNamenodeProtocol#complete",
       "newStreamForCreate",
-      "DFSOutputStream#writeChunk",
+      "DFSOutputStream#write",
       "DFSOutputStream#close",
     };
     for (String desc : spansInTopTrace) {
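----------------------------------------------------------------------

For context, and not part of the patch above: the change is a template-method hook. FSOutputSummer#writeChecksumChunks now opens a single trace scope per call (one span around the whole checksum loop instead of one per chunk), and a subclass decides what that scope is by overriding createWriteTraceScope(); the default is the no-op NullScope.INSTANCE, and DFSOutputStream returns dfsClient.getPathTraceScope("DFSOutputStream#write", src). The sketch below illustrates the same pattern outside Hadoop using the HTrace 3 API the patch imports (NullScope, TraceScope, Trace.startSpan, Sampler.ALWAYS). The class and method names other than the HTrace ones are hypothetical stand-ins, not Hadoop code.

import org.apache.htrace.NullScope;
import org.apache.htrace.Sampler;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;

// Illustration only: the hook pattern applied by HDFS-8026.
public class TraceHookExample {

  // Hypothetical stand-in for FSOutputSummer.
  abstract static class ChunkedWriter {
    // Default hook: no tracing, mirroring FSOutputSummer's NullScope.INSTANCE.
    protected TraceScope createWriteTraceScope() {
      return NullScope.INSTANCE;
    }

    // One scope around the whole chunk loop rather than one per chunk,
    // which is the point of the change.
    final void writeChunks(byte[] data, int chunkSize) {
      TraceScope scope = createWriteTraceScope();
      try {
        for (int off = 0; off < data.length; off += chunkSize) {
          int len = Math.min(chunkSize, data.length - off);
          writeChunk(data, off, len);
        }
      } finally {
        scope.close(); // ends the span, if one was started
      }
    }

    protected abstract void writeChunk(byte[] data, int off, int len);
  }

  // Hypothetical stand-in for DFSOutputStream, which instead returns
  // dfsClient.getPathTraceScope("DFSOutputStream#write", src).
  static class TracedWriter extends ChunkedWriter {
    @Override
    protected TraceScope createWriteTraceScope() {
      return Trace.startSpan("TracedWriter#write", Sampler.ALWAYS);
    }

    @Override
    protected void writeChunk(byte[] data, int off, int len) {
      System.out.println("wrote chunk: off=" + off + " len=" + len);
    }
  }

  public static void main(String[] args) {
    // Opens a single "TracedWriter#write" span covering both chunks.
    new TracedWriter().writeChunks(new byte[1024], 512);
  }
}

Note that spans started this way are only reported if an HTrace SpanReceiver has been configured; without one, the scope still opens and closes but nothing is emitted, which is also why the default NullScope costs nothing for non-HDFS subclasses.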