From: aajisaka@apache.org
To: common-commits@hadoop.apache.org
Reply-To: common-dev@hadoop.apache.org
Subject: hadoop git commit: HDFS-9768. Reuse ObjectMapper instance in HDFS to improve the performance. Contributed by Lin Yiqun.
Date: Fri, 12 Feb 2016 17:05:01 +0000 (UTC)

Repository: hadoop
Updated Branches:
  refs/heads/branch-2 2ea1913ca -> a714d04f5


HDFS-9768. Reuse ObjectMapper instance in HDFS to improve the performance. Contributed by Lin Yiqun.

(cherry picked from commit e6a7044b8530afded8f8e86ff309dd0e4d39238a)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a714d04f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a714d04f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a714d04f

Branch: refs/heads/branch-2
Commit: a714d04f5d8eaba34a24ee9454ceaec68ab00a15
Parents: 2ea1913
Author: Akira Ajisaka <aajisaka@apache.org>
Authored: Sat Feb 13 01:57:24 2016 +0900
Committer: Akira Ajisaka <aajisaka@apache.org>
Committed: Sat Feb 13 02:04:01 2016 +0900

----------------------------------------------------------------------
 .../ConfRefreshTokenBasedAccessTokenProvider.java  |  8 ++++----
 .../oauth2/CredentialBasedAccessTokenProvider.java |  8 +++++---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt        |  3 +++
 .../datanode/fsdataset/impl/FsVolumeImpl.java      | 16 ++++++++++------
 .../hadoop/hdfs/server/namenode/FSNamesystem.java  |  5 ++---
 .../tools/offlineImageViewer/FSImageLoader.java    | 10 +++-------
 .../java/org/apache/hadoop/hdfs/web/JsonUtil.java  |  5 +++++
 7 files changed, 32 insertions(+), 23 deletions(-)
----------------------------------------------------------------------
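Every hunk below applies the same fix: instead of constructing a fresh ObjectMapper on each call, the class keeps one pre-built ObjectReader or ObjectWriter in a static field. In the Jackson 1.x API used here (org.codehaus.jackson), ObjectReader and ObjectWriter are immutable and therefore safe to share, while every new ObjectMapper must rebuild its internal serializer and deserializer state from scratch. A minimal sketch of the before/after shape, using only calls that appear in this patch (the class and method names are illustrative, not from the patch):

import java.io.IOException;
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;

public class ReusePattern {

  // Before: a new ObjectMapper per call; each construction redoes
  // the mapper's internal setup.
  static Map<?, ?> parsePerCall(String json) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    return mapper.reader(Map.class).readValue(json);
  }

  // After: one immutable ObjectReader, built once and shared.
  private static final ObjectReader READER =
      new ObjectMapper().reader(Map.class);

  static Map<?, ?> parseShared(String json) throws IOException {
    return READER.readValue(json);
  }
}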
http://git-wip-us.apache.org/repos/asf/hadoop/blob/a714d04f/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
index 773eeae..2d62d10 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hdfs.web.URLConnectionFactory;
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import java.io.IOException;
 import java.util.Map;
@@ -54,6 +55,8 @@ import static org.apache.hadoop.hdfs.web.oauth2.Utils.notNull;
 @InterfaceStability.Evolving
 public class ConfRefreshTokenBasedAccessTokenProvider
     extends AccessTokenProvider {
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);
 
   public static final String OAUTH_REFRESH_TOKEN_KEY
       = "dfs.webhdfs.oauth2.refresh.token";
@@ -126,10 +129,7 @@ public class ConfRefreshTokenBasedAccessTokenProvider
             + responseBody.code() + ", text = " + responseBody.toString());
       }
 
-      ObjectMapper mapper = new ObjectMapper();
-      Map<?, ?> response = mapper.reader(Map.class)
-          .readValue(responseBody.body().string());
-
+      Map<?, ?> response = READER.readValue(responseBody.body().string());
 
       String newExpiresIn = response.get(EXPIRES_IN).toString();
       accessTokenTimer.setExpiresIn(newExpiresIn);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a714d04f/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
index 15cda88..0d9006e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hdfs.web.URLConnectionFactory;
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
 
 import java.io.IOException;
 import java.util.Map;
@@ -54,6 +55,9 @@ import static org.apache.hadoop.hdfs.web.oauth2.Utils.notNull;
 @InterfaceStability.Evolving
 public abstract class CredentialBasedAccessTokenProvider
     extends AccessTokenProvider {
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(Map.class);
+
   public static final String OAUTH_CREDENTIAL_KEY
       = "dfs.webhdfs.oauth2.credential";
 
@@ -119,9 +123,7 @@ public abstract class CredentialBasedAccessTokenProvider
             + responseBody.code() + ", text = " + responseBody.toString());
       }
 
-      ObjectMapper mapper = new ObjectMapper();
-      Map<?, ?> response = mapper.reader(Map.class)
-          .readValue(responseBody.body().string());
+      Map<?, ?> response = READER.readValue(responseBody.body().string());
 
       String newExpiresIn = response.get(EXPIRES_IN).toString();
       timer.setExpiresIn(newExpiresIn);
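Both providers hit the shared READER from whichever thread happens to refresh a token, which is exactly the situation the immutability of ObjectReader is meant for: once built, it carries no mutable state, so concurrent readValue() calls need no synchronization. A self-contained sketch (hypothetical; not part of the patch) exercising one shared reader from several threads:

import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;

public class SharedReaderDemo {
  // One reader for the whole process; safe to share across threads.
  private static final ObjectReader READER =
      new ObjectMapper().reader(Map.class);

  public static void main(String[] args) throws InterruptedException {
    ExecutorService pool = Executors.newFixedThreadPool(4);
    for (int i = 0; i < 16; i++) {
      final String json = "{\"expires_in\":\"" + (60 + i) + "\"}";
      pool.submit(new Runnable() {
        @Override
        public void run() {
          try {
            // No locking needed: ObjectReader is immutable.
            Map<?, ?> response = READER.readValue(json);
            System.out.println(response.get("expires_in"));
          } catch (IOException e) {
            e.printStackTrace();
          }
        }
      });
    }
    pool.shutdown();
    pool.awaitTermination(10, TimeUnit.SECONDS);
  }
}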
http://git-wip-us.apache.org/repos/asf/hadoop/blob/a714d04f/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 5bc4edd..d8b17c6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1053,6 +1053,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-9686. Remove useless boxing/unboxing code.
     (Kousuke Saruta via aajisaka)
 
+    HDFS-9768. Reuse ObjectMapper instance in HDFS to improve the performance.
+    (Lin Yiqun via aajisaka)
+
   BUG FIXES
 
     HDFS-8091: ACLStatus and XAttributes should be presented to

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a714d04f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
index 775d1f0..f3dd60b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
@@ -62,6 +62,8 @@ import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;
 import org.codehaus.jackson.annotate.JsonProperty;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.ObjectReader;
+import org.codehaus.jackson.map.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -80,6 +82,10 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
 public class FsVolumeImpl implements FsVolumeSpi {
   public static final Logger LOG =
       LoggerFactory.getLogger(FsVolumeImpl.class);
+  private static final ObjectWriter WRITER =
+      new ObjectMapper().writerWithDefaultPrettyPrinter();
+  private static final ObjectReader READER =
+      new ObjectMapper().reader(BlockIteratorState.class);
 
   private final FsDatasetImpl dataset;
   private final String storageID;
@@ -708,10 +714,9 @@ public class FsVolumeImpl implements FsVolumeSpi {
     public void save() throws IOException {
       state.lastSavedMs = Time.now();
       boolean success = false;
-      ObjectMapper mapper = new ObjectMapper();
       try (BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
           new FileOutputStream(getTempSaveFile(), false), "UTF-8"))) {
-        mapper.writerWithDefaultPrettyPrinter().writeValue(writer, state);
+        WRITER.writeValue(writer, state);
         success = true;
       } finally {
         if (!success) {
@@ -725,17 +730,16 @@
           StandardCopyOption.ATOMIC_MOVE);
       if (LOG.isTraceEnabled()) {
         LOG.trace("save({}, {}): saved {}", storageID, bpid,
-            mapper.writerWithDefaultPrettyPrinter().writeValueAsString(state));
+            WRITER.writeValueAsString(state));
       }
     }
 
     public void load() throws IOException {
-      ObjectMapper mapper = new ObjectMapper();
       File file = getSaveFile();
-      this.state = mapper.reader(BlockIteratorState.class).readValue(file);
+      this.state = READER.readValue(file);
       LOG.trace("load({}, {}): loaded iterator {} from {}: {}",
           storageID, bpid, name, file.getAbsoluteFile(),
-          mapper.writerWithDefaultPrettyPrinter().writeValueAsString(state));
+          WRITER.writeValueAsString(state));
     }
 
     File getSaveFile() {
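FsVolumeImpl persists its block-iterator state with the same pattern: a shared pretty-printing ObjectWriter for save() and a shared ObjectReader bound to BlockIteratorState for load(). Note that the two temporary ObjectMapper instances are discarded as soon as the immutable reader and writer have been extracted from them. A stripped-down round trip in the same style (the State bean here is hypothetical, standing in for BlockIteratorState):

import java.io.File;
import java.io.IOException;

import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.codehaus.jackson.map.ObjectWriter;

public class StateRoundTrip {
  // Hypothetical stand-in for BlockIteratorState: a plain bean with a
  // default constructor, as Jackson 1.x data binding expects.
  public static class State {
    @JsonProperty
    public long lastSavedMs;
    @JsonProperty
    public String name;
  }

  private static final ObjectWriter WRITER =
      new ObjectMapper().writerWithDefaultPrettyPrinter();
  private static final ObjectReader READER =
      new ObjectMapper().reader(State.class);

  public static void main(String[] args) throws IOException {
    State state = new State();
    state.lastSavedMs = System.currentTimeMillis();
    state.name = "scanner";

    File file = File.createTempFile("state", ".json");
    WRITER.writeValue(file, state);          // save, pretty-printed
    State loaded = READER.readValue(file);   // load it back
    System.out.println(WRITER.writeValueAsString(loaded));
  }
}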
http://git-wip-us.apache.org/repos/asf/hadoop/blob/a714d04f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 6674ec3..9366bdf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -250,6 +250,7 @@ import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
 import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
+import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.CallerContext;
@@ -277,7 +278,6 @@ import org.apache.hadoop.util.VersionInfo;
 import org.apache.log4j.Appender;
 import org.apache.log4j.AsyncAppender;
 import org.apache.log4j.Logger;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.mortbay.util.ajax.JSON;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -4518,9 +4518,8 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
     Map<String, Object> topMap = new TreeMap<String, Object>();
     topMap.put("windows", topWindows);
     topMap.put("timestamp", DFSUtil.dateToIso8601String(now));
-    ObjectMapper mapper = new ObjectMapper();
     try {
-      return mapper.writeValueAsString(topMap);
+      return JsonUtil.toJsonString(topMap);
     } catch (IOException e) {
       LOG.warn("Failed to fetch TopUser metrics", e);
     }
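FSNamesystem takes a different route: rather than keeping its own reader or writer, it delegates to JsonUtil.toJsonString(Object), the overload this patch adds to JsonUtil (last diff below), which serializes through the single MAPPER instance JsonUtil already holds. A sketch of that utility shape (the class here is illustrative, mirroring JsonUtil rather than reproducing it):

import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;

import org.codehaus.jackson.map.ObjectMapper;

// Illustrative utility in the shape of JsonUtil: one process-wide
// ObjectMapper behind a small static helper. A shared ObjectMapper is
// safe once its configuration is no longer being changed.
public final class JsonHelper {
  private static final ObjectMapper MAPPER = new ObjectMapper();

  private JsonHelper() {
  }

  public static String toJsonString(Object obj) throws IOException {
    return MAPPER.writeValueAsString(obj);
  }

  // Usage mirroring the FSNamesystem call site above.
  public static void main(String[] args) throws IOException {
    Map<String, Object> topMap = new TreeMap<String, Object>();
    topMap.put("windows", "...");
    topMap.put("timestamp", "2016-02-13T01:57:24+0900");
    System.out.println(toJsonString(topMap));
  }
}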
http://git-wip-us.apache.org/repos/asf/hadoop/blob/a714d04f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
index 172f599..21e9d2e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageLoader.java
@@ -54,7 +54,6 @@ import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.apache.hadoop.hdfs.web.resources.XAttrEncodingParam;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.LimitInputStream;
-import org.codehaus.jackson.map.ObjectMapper;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -263,10 +262,9 @@ class FSImageLoader {
    * @throws IOException if failed to serialize fileStatus to JSON.
    */
   String getFileStatus(String path) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
     FsImageProto.INodeSection.INode inode = fromINodeId(lookup(path));
     return "{\"FileStatus\":\n"
-        + mapper.writeValueAsString(getFileStatus(inode, false)) + "\n}\n";
+        + JsonUtil.toJsonString(getFileStatus(inode, false)) + "\n}\n";
   }
 
   /**
@@ -277,7 +275,6 @@
    */
   String listStatus(String path) throws IOException {
     StringBuilder sb = new StringBuilder();
-    ObjectMapper mapper = new ObjectMapper();
     List<Map<String, Object>> fileStatusList = getFileStatusList(path);
     sb.append("{\"FileStatuses\":{\"FileStatus\":[\n");
     int i = 0;
@@ -285,7 +282,7 @@
       if (i++ != 0) {
         sb.append(',');
       }
-      sb.append(mapper.writeValueAsString(fileStatusMap));
+      sb.append(JsonUtil.toJsonString(fileStatusMap));
     }
     sb.append("\n]}}\n");
     return sb.toString();
@@ -318,9 +315,8 @@
    * @throws IOException if failed to serialize ContentSummary to JSON.
    */
   String getContentSummary(String path) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
     return "{\"ContentSummary\":\n"
-        + mapper.writeValueAsString(getContentSummaryMap(path)) + "\n}\n";
+        + JsonUtil.toJsonString(getContentSummaryMap(path)) + "\n}\n";
   }
 
   private Map<String, Object> getContentSummaryMap(String path)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a714d04f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
index d7b5c31..40f6785 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
@@ -388,4 +388,9 @@ public class JsonUtil {
     finalMap.put("XAttrNames", ret);
     return MAPPER.writeValueAsString(finalMap);
   }
+
+  public static String toJsonString(Object obj) throws IOException {
+    return MAPPER.writeValueAsString(obj);
+  }
+
 }
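One caveat worth keeping in mind when adopting this pattern, and a plausible reason the patch favors ObjectReader/ObjectWriter fields over caching raw ObjectMapper instances: a shared ObjectMapper is only thread-safe if nobody reconfigures it after first use, whereas readers and writers are immutable snapshots that cannot be reconfigured at all. A hedged sketch of the safe configure-then-publish order (the feature flag chosen is illustrative, not from the patch):

import java.io.IOException;

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;
import org.codehaus.jackson.map.SerializationConfig;

public class ConfigureOncePattern {
  // All configuration happens inside the static initializer; only the
  // immutable ObjectWriter escapes, so no caller can observe a
  // half-configured mapper.
  private static final ObjectWriter WRITER;
  static {
    ObjectMapper mapper = new ObjectMapper();
    // Illustrative setting; pretty-printing mirrors FsVolumeImpl above.
    mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
    WRITER = mapper.writer();
  }

  public static String dump(Object obj) throws IOException {
    return WRITER.writeValueAsString(obj);
  }
}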