From: brahma@apache.org
To: common-commits@hadoop.apache.org
Message-Id: <977f0654d3694a48b6ff5f049cb2582f@git.apache.org>
Subject: hadoop git commit: HDFS-14064. WEBHDFS: Support Enable/Disable EC Policy. Contributed by Ayush Saxena.
Date: Wed, 21 Nov 2018 18:42:44 +0000 (UTC)

Repository: hadoop
Updated Branches:
  refs/heads/branch-3.2 f6227367f -> 7e10dd03b


HDFS-14064. WEBHDFS: Support Enable/Disable EC Policy. Contributed by Ayush Saxena.

(cherry picked from commit 15d8f592b51d08a658e88c6a7a7596e0edf5793b)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7e10dd03
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7e10dd03
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7e10dd03

Branch: refs/heads/branch-3.2
Commit: 7e10dd03b5e1a3f22b55a3b970239611e6f078c0
Parents: f622736
Author: Brahma Reddy Battula
Authored: Thu Nov 22 00:06:22 2018 +0530
Committer: Brahma Reddy Battula
Committed: Thu Nov 22 00:07:38 2018 +0530

----------------------------------------------------------------------
 .../hadoop/hdfs/web/WebHdfsFileSystem.java      | 10 +++++
 .../hdfs/web/resources/ECPolicyParam.java       | 42 ++++++++++++++++++
 .../hadoop/hdfs/web/resources/PutOpParam.java   |  3 ++
 .../federation/router/RouterWebHdfsMethods.java |  8 +++-
 .../web/resources/NamenodeWebHdfsMethods.java   | 25 ++++++++---
 .../hadoop-hdfs/src/site/markdown/WebHDFS.md    | 45 +++++++++++++++++++
 .../org/apache/hadoop/hdfs/web/TestWebHDFS.java | 46 ++++++++++++++++++++
 .../hadoop/hdfs/web/resources/TestParam.java    |  8 ++++
 8 files changed, 180 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e10dd03/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index c139cb0..51d4442 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -1310,6 +1310,16 @@ public class WebHdfsFileSystem extends FileSystem
     new FsPathRunner(op, p).run();
   }
 
+  public void enableECPolicy(String policyName) throws IOException {
+    final HttpOpParam.Op op = PutOpParam.Op.ENABLEECPOLICY;
+    new FsPathRunner(op, null, new ECPolicyParam(policyName)).run();
+  }
+
+  public void disableECPolicy(String policyName) throws IOException {
+    final HttpOpParam.Op op = PutOpParam.Op.DISABLEECPOLICY;
+    new FsPathRunner(op, null, new ECPolicyParam(policyName)).run();
+  }
+
   @Override
   public Path createSnapshot(final Path path, final String snapshotName)
       throws IOException {
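
A minimal, illustrative sketch of driving the two new client methods end to end; the NameNode address (webhdfs://nn-host:9870) and the RS-6-3-1024k policy name are assumptions for the example, not values taken from this patch:

    // Sketch only: toggle an erasure coding policy through the new
    // WebHdfsFileSystem#enableECPolicy / #disableECPolicy methods.
    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;

    public class ToggleECPolicy {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder NameNode HTTP address; adjust for a real cluster.
        try (WebHdfsFileSystem webHdfs = (WebHdfsFileSystem) FileSystem.get(
            URI.create("webhdfs://nn-host:9870"), conf)) {
          webHdfs.disableECPolicy("RS-6-3-1024k");
          webHdfs.enableECPolicy("RS-6-3-1024k");
        }
      }
    }
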

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e10dd03/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/ECPolicyParam.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/ECPolicyParam.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/ECPolicyParam.java
new file mode 100644
index 0000000..35502e3
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/ECPolicyParam.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+/** policy parameter. */
+public class ECPolicyParam extends StringParam {
+  /** Parameter name. */
+  public static final String NAME = "ecpolicy";
+  /** Default parameter value. */
+  public static final String DEFAULT = "";
+
+  private static final Domain DOMAIN = new Domain(NAME, null);
+
+  /**
+   * Constructor.
+   *
+   * @param str a string representation of the parameter value.
+   */
+  public ECPolicyParam(final String str) {
+    super(DOMAIN, str == null || str.equals(DEFAULT) ?
+        null : str);
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}
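
The empty-to-null normalization in the constructor above, as a short sketch (run with -ea; it mirrors the TestParam case added at the end of this patch):

    // Sketch only: an absent or empty ecpolicy value is stored as null, which
    // the validateOpParams(op, ecpolicy) call in NamenodeWebHdfsMethods below
    // can then flag as a missing required parameter.
    ECPolicyParam empty = new ECPolicyParam(ECPolicyParam.DEFAULT);
    assert empty.getValue() == null;

    ECPolicyParam named = new ECPolicyParam("RS-6-3-1024k");
    assert "RS-6-3-1024k".equals(named.getValue());
    assert "ecpolicy".equals(named.getName());
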
null : str); + } + + @Override + public String getName() { + return NAME; + } +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e10dd03/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java index 558bb53..7bbd361 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java @@ -46,6 +46,9 @@ public class PutOpParam extends HttpOpParam { SETXATTR(false, HttpURLConnection.HTTP_OK), REMOVEXATTR(false, HttpURLConnection.HTTP_OK), + ENABLEECPOLICY(false, HttpURLConnection.HTTP_OK), + DISABLEECPOLICY(false, HttpURLConnection.HTTP_OK), + ALLOWSNAPSHOT(false, HttpURLConnection.HTTP_OK), DISALLOWSNAPSHOT(false, HttpURLConnection.HTTP_OK), CREATESNAPSHOT(false, HttpURLConnection.HTTP_OK), http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e10dd03/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java index 5e9d4d0..34c3788 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java @@ -54,6 +54,7 @@ import org.apache.hadoop.hdfs.web.resources.CreateParentParam; import org.apache.hadoop.hdfs.web.resources.DelegationParam; import org.apache.hadoop.hdfs.web.resources.DestinationParam; import org.apache.hadoop.hdfs.web.resources.DoAsParam; +import org.apache.hadoop.hdfs.web.resources.ECPolicyParam; import org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam; import org.apache.hadoop.hdfs.web.resources.FsActionParam; import org.apache.hadoop.hdfs.web.resources.GetOpParam; @@ -215,7 +216,8 @@ public class RouterWebHdfsMethods extends NamenodeWebHdfsMethods { final ExcludeDatanodesParam exclDatanodes, final CreateFlagParam createFlagParam, final NoRedirectParam noredirectParam, - final StoragePolicyParam policyName + final StoragePolicyParam policyName, + final ECPolicyParam ecpolicy ) throws IOException, URISyntaxException { switch(op.getValue()) { @@ -252,6 +254,8 @@ public class RouterWebHdfsMethods extends NamenodeWebHdfsMethods { case RENAMESNAPSHOT: case DISALLOWSNAPSHOT: case SETSTORAGEPOLICY: + case ENABLEECPOLICY: + case DISABLEECPOLICY: { // Whitelist operations that can handled by NamenodeWebHdfsMethods return super.put(ugi, delegation, username, doAsUser, fullpath, op, @@ -260,7 +264,7 @@ public class RouterWebHdfsMethods extends NamenodeWebHdfsMethods { accessTime, renameOptions, createParent, delegationTokenArgument, aclPermission, xattrName, xattrValue, xattrSetFlag, snapshotName, oldSnapshotName, exclDatanodes, createFlagParam, noredirectParam, - policyName); + 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e10dd03/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
index c4d3239..6940f2c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
@@ -492,14 +492,16 @@ public class NamenodeWebHdfsMethods {
       @QueryParam(NoRedirectParam.NAME) @DefaultValue(NoRedirectParam.DEFAULT)
           final NoRedirectParam noredirect,
       @QueryParam(StoragePolicyParam.NAME) @DefaultValue(StoragePolicyParam
-          .DEFAULT) final StoragePolicyParam policyName
+          .DEFAULT) final StoragePolicyParam policyName,
+      @QueryParam(ECPolicyParam.NAME) @DefaultValue(ECPolicyParam
+          .DEFAULT) final ECPolicyParam ecpolicy
       ) throws IOException, InterruptedException {
     return put(ugi, delegation, username, doAsUser, ROOT, op, destination,
         owner, group, permission, unmaskedPermission, overwrite, bufferSize,
         replication, blockSize, modificationTime, accessTime, renameOptions,
         createParent, delegationTokenArgument, aclPermission, xattrName,
         xattrValue, xattrSetFlag, snapshotName, oldSnapshotName,
-        excludeDatanodes, createFlagParam, noredirect, policyName);
+        excludeDatanodes, createFlagParam, noredirect, policyName, ecpolicy);
   }
 
   /** Validate all required params.
    */
@@ -579,7 +581,9 @@ public class NamenodeWebHdfsMethods {
       @QueryParam(NoRedirectParam.NAME) @DefaultValue(NoRedirectParam.DEFAULT)
           final NoRedirectParam noredirect,
       @QueryParam(StoragePolicyParam.NAME) @DefaultValue(StoragePolicyParam
-          .DEFAULT) final StoragePolicyParam policyName
+          .DEFAULT) final StoragePolicyParam policyName,
+      @QueryParam(ECPolicyParam.NAME) @DefaultValue(ECPolicyParam.DEFAULT)
+          final ECPolicyParam ecpolicy
       ) throws IOException, InterruptedException {
 
     init(ugi, delegation, username, doAsUser, path, op, destination, owner,
@@ -599,7 +603,7 @@ public class NamenodeWebHdfsMethods {
               renameOptions, createParent, delegationTokenArgument,
               aclPermission, xattrName, xattrValue, xattrSetFlag, snapshotName,
               oldSnapshotName, excludeDatanodes,
-              createFlagParam, noredirect, policyName);
+              createFlagParam, noredirect, policyName, ecpolicy);
         }
       });
   }
@@ -634,7 +638,8 @@ public class NamenodeWebHdfsMethods {
       final ExcludeDatanodesParam exclDatanodes,
       final CreateFlagParam createFlagParam,
       final NoRedirectParam noredirectParam,
-      final StoragePolicyParam policyName
+      final StoragePolicyParam policyName,
+      final ECPolicyParam ecpolicy
       ) throws IOException, URISyntaxException {
     final Configuration conf = (Configuration)context.getAttribute(JspHelper.CURRENT_CONF);
     final ClientProtocol cp = getRpcClientProtocol();
@@ -795,6 +800,16 @@ public class NamenodeWebHdfsMethods {
       cp.setStoragePolicy(fullpath, policyName.getValue());
       return Response.ok().type(MediaType.APPLICATION_OCTET_STREAM).build();
     }
+    case ENABLEECPOLICY:
+      validateOpParams(op, ecpolicy);
+      cp.enableErasureCodingPolicy(ecpolicy.getValue());
+      return Response.ok().type(MediaType.APPLICATION_OCTET_STREAM).build();
+
+    case DISABLEECPOLICY:
+      validateOpParams(op, ecpolicy);
+      cp.disableErasureCodingPolicy(ecpolicy.getValue());
+      return Response.ok().type(MediaType.APPLICATION_OCTET_STREAM).build();
+
     default:
       throw new UnsupportedOperationException(op + " is not supported");
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e10dd03/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/WebHDFS.md
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/WebHDFS.md b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/WebHDFS.md
index e29b932..383eda0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/WebHDFS.md
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/WebHDFS.md
@@ -69,6 +69,8 @@ The HTTP REST API supports the complete [FileSystem](../../api/org/apache/hadoop
     * [`SETXATTR`](#Set_XAttr) (see [FileSystem](../../api/org/apache/hadoop/fs/FileSystem.html).setXAttr)
     * [`REMOVEXATTR`](#Remove_XAttr) (see [FileSystem](../../api/org/apache/hadoop/fs/FileSystem.html).removeXAttr)
     * [`SETSTORAGEPOLICY`](#Set_Storage_Policy) (see [FileSystem](../../api/org/apache/hadoop/fs/FileSystem.html).setStoragePolicy)
+    * [`ENABLEECPOLICY`](#Enable_EC_Policy) (see [HDFSErasureCoding](./HDFSErasureCoding.html#Administrative_commands).enablePolicy)
+    * [`DISABLEECPOLICY`](#Disable_EC_Policy) (see [HDFSErasureCoding](./HDFSErasureCoding.html#Administrative_commands).disablePolicy)
 * HTTP POST
     * [`APPEND`](#Append_to_a_File) (see [FileSystem](../../api/org/apache/hadoop/fs/FileSystem.html).append)
     * [`CONCAT`](#Concat_Files) (see [FileSystem](../../api/org/apache/hadoop/fs/FileSystem.html).concat)
@@ -1266,6 +1268,37 @@ See also: [FileSystem](../../api/org/apache/hadoop/fs/FileSystem.html).getXAttrs
 
 See also: [FileSystem](../../api/org/apache/hadoop/fs/FileSystem.html).listXAttrs
 
+Erasure Coding Operations
+-------------------------
+
+### Enable EC Policy
+
+* Submit a HTTP PUT request.
+
+        curl -i -X PUT "http://<HOST>:<PORT>/webhdfs/v1/?op=ENABLEECPOLICY
+                              &ecpolicy=<policy>"
+
+    The client receives a response with zero content length:
+
+        HTTP/1.1 200 OK
+        Content-Length: 0
+
+See also: [HDFSErasureCoding](./HDFSErasureCoding.html#Administrative_commands).enablePolicy
+
+### Disable EC Policy
+
+* Submit a HTTP PUT request.
+
+        curl -i -X PUT "http://<HOST>:<PORT>/webhdfs/v1/?op=DISABLEECPOLICY
+                              &ecpolicy=<policy>"
+
+    The client receives a response with zero content length:
+
+        HTTP/1.1 200 OK
+        Content-Length: 0
+
+See also: [HDFSErasureCoding](./HDFSErasureCoding.html#Administrative_commands).disablePolicy
+
 Snapshot Operations
 -------------------
 
@@ -2822,6 +2855,18 @@ See also: [Create and Write to a File](#Create_and_Write_to_a_File)
 
 See also: [`SETSTORAGEPOLICY`](#Set_Storage_Policy)
 
+### Erasure Coding Policy
+
+| Name | `ecpolicy` |
+|:---- |:---- |
+| Description | The name of the erasure coding policy. |
+| Type | String |
+| Default Value | \<empty\> |
+| Valid Values | Any valid erasure coding policy name. |
+| Syntax | Any string. |
+
+See also: [`ENABLEECPOLICY`](#Enable_EC_Policy) or [`DISABLEECPOLICY`](#Disable_EC_Policy)
+
 ### Start After
 
 | Name | `startAfter` |
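
Filled-in versions of the two documented requests above, purely for illustration; namenode.example.com:9870 is a placeholder NameNode HTTP address and RS-6-3-1024k is one of the built-in policies:

    curl -i -X PUT "http://namenode.example.com:9870/webhdfs/v1/?op=ENABLEECPOLICY&ecpolicy=RS-6-3-1024k"
    curl -i -X PUT "http://namenode.example.com:9870/webhdfs/v1/?op=DISABLEECPOLICY&ecpolicy=RS-6-3-1024k"

Each call is expected to come back with HTTP/1.1 200 OK and Content-Length: 0, as documented above.
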

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e10dd03/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
index 71948d6..8173bca 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
@@ -47,6 +47,8 @@ import java.net.URL;
 import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
 import java.util.Map;
 import java.util.Random;
 
@@ -83,6 +85,7 @@ import org.apache.hadoop.hdfs.TestDFSClientRetries;
 import org.apache.hadoop.hdfs.TestFileCreation;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
@@ -1599,6 +1602,49 @@ public class TestWebHDFS {
     }
   }
 
+  private void checkECPolicyState(Collection<ErasureCodingPolicyInfo> policies,
+      String ecpolicy, String state) {
+    Iterator<ErasureCodingPolicyInfo> itr = policies.iterator();
+    boolean found = false;
+    while (itr.hasNext()) {
+      ErasureCodingPolicyInfo policy = itr.next();
+      if (policy.getPolicy().getName().equals(ecpolicy)) {
+        found = true;
+        if (state.equals("disable")) {
+          Assert.assertTrue(policy.isDisabled());
+        } else if (state.equals("enable")) {
+          Assert.assertTrue(policy.isEnabled());
+        }
+        break;
+      }
+    }
+    Assert.assertTrue(found);
+  }
+
+  // Test For Enable/Disable EC Policy in DFS.
+  @Test
+  public void testEnableDisableECPolicy() throws Exception {
+    Configuration conf = new HdfsConfiguration();
+    try (MiniDFSCluster cluster =
+        new MiniDFSCluster.Builder(conf).numDataNodes(0).build()) {
+      cluster.waitActive();
+      final DistributedFileSystem dfs = cluster.getFileSystem();
+      final WebHdfsFileSystem webHdfs = WebHdfsTestUtil
+          .getWebHdfsFileSystem(conf, WebHdfsConstants.WEBHDFS_SCHEME);
+      String policy = "RS-10-4-1024k";
+
+      // Check for Enable EC policy via WEBHDFS.
+      dfs.disableErasureCodingPolicy(policy);
+      checkECPolicyState(dfs.getAllErasureCodingPolicies(), policy, "disable");
+      webHdfs.enableECPolicy("RS-10-4-1024k");
+      checkECPolicyState(dfs.getAllErasureCodingPolicies(), policy, "enable");
+
+      // Check for Disable EC policy via WEBHDFS.
+      webHdfs.disableECPolicy(policy);
+      checkECPolicyState(dfs.getAllErasureCodingPolicies(), policy, "disable");
+    }
+  }
+
   @Test
   public void testWebHdfsAppend() throws Exception {
     MiniDFSCluster cluster = null;
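
As an aside, the same disable/enable/verify cycle the test performs can be done from the shell with the erasure coding admin commands that the WebHDFS.md section references; a sketch, using the policy name from the test:

    hdfs ec -disablePolicy -policy RS-10-4-1024k
    hdfs ec -enablePolicy -policy RS-10-4-1024k
    hdfs ec -listPolicies    # should report each policy together with its enabled/disabled state
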

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e10dd03/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java
index 9851ede..c9247df 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java
@@ -506,6 +506,14 @@ public class TestParam {
   }
 
   @Test
+  public void testECPolicyParam() {
+    ECPolicyParam p = new ECPolicyParam(ECPolicyParam.DEFAULT);
+    Assert.assertEquals(null, p.getValue());
+    p = new ECPolicyParam("RS-6-3-1024k");
+    Assert.assertEquals("RS-6-3-1024k", p.getValue());
+  }
+
+  @Test
   public void testHttpOpParams() {
     try {
       new PostOpParam("TEST");


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org