From: zhz@apache.org
To: common-commits@hadoop.apache.org
Date: Tue, 20 Jan 2015 17:44:36 -0000
Subject: [27/44] hadoop git commit: HADOOP-11261 Set custom endpoint for S3A. (Thomas Demoor via stevel)

HADOOP-11261 Set custom endpoint for S3A. (Thomas Demoor via stevel)

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bf40a1fc
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bf40a1fc
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bf40a1fc

Branch: refs/heads/HDFS-EC
Commit: bf40a1fc26cd1558645c0c8dc118f38053fbac95
Parents: e473435
Author: Steve Loughran
Authored: Fri Jan 16 10:15:22 2015 +0000
Committer: Zhe Zhang
Committed: Tue Jan 20 09:43:10 2015 -0800

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |  2 +
 .../org/apache/hadoop/fs/s3a/Constants.java     |  5 +-
 .../org/apache/hadoop/fs/s3a/S3AFileSystem.java | 19 +++++
 .../hadoop/fs/s3a/TestS3AConfiguration.java     | 85 ++++++++++++++++++++
 4 files changed, 110 insertions(+), 1 deletion(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf40a1fc/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index e1a6800..6896fe2 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -486,6 +486,8 @@ Release 2.7.0 - UNRELEASED
     HADOOP-8757. Metrics should disallow names with invalid characters
     (rchiang via rkanter)
 
+    HADOOP-11261 Set custom endpoint for S3A. (Thomas Demoor via stevel)
+
   OPTIMIZATIONS
 
     HADOOP-11323. WritableComparator#compare keeps reference to byte array.
http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf40a1fc/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
index f1b5d3d..0232961 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
@@ -28,7 +28,10 @@ public class Constants {
   // connect to s3 over ssl?
   public static final String SECURE_CONNECTIONS = "fs.s3a.connection.ssl.enabled";
   public static final boolean DEFAULT_SECURE_CONNECTIONS = true;
-
+
+  // use a custom endpoint?
+  public static final String ENDPOINT = "fs.s3a.endpoint";
+
   // number of times we should retry errors
   public static final String MAX_ERROR_RETRIES = "fs.s3a.attempts.maximum";
   public static final int DEFAULT_MAX_ERROR_RETRIES = 10;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf40a1fc/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
index e6b1557..d8cf73f 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
@@ -57,6 +57,7 @@ import com.amazonaws.services.s3.transfer.Upload;
 import com.amazonaws.event.ProgressListener;
 import com.amazonaws.event.ProgressEvent;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.lang.StringUtils;
 
 import org.apache.hadoop.conf.Configuration;
@@ -176,6 +177,16 @@ public class S3AFileSystem extends FileSystem {
       DEFAULT_SOCKET_TIMEOUT));
 
     s3 = new AmazonS3Client(credentials, awsConf);
+    String endPoint = conf.getTrimmed(ENDPOINT, "");
+    if (!endPoint.isEmpty()) {
+      try {
+        s3.setEndpoint(endPoint);
+      } catch (IllegalArgumentException e) {
+        String msg = "Incorrect endpoint: " + e.getMessage();
+        LOG.error(msg);
+        throw new IllegalArgumentException(msg, e);
+      }
+    }
 
     maxKeys = conf.getInt(MAX_PAGING_KEYS, DEFAULT_MAX_PAGING_KEYS);
     partSize = conf.getLong(MULTIPART_SIZE, DEFAULT_MULTIPART_SIZE);
@@ -262,6 +273,14 @@ public class S3AFileSystem extends FileSystem {
     return uri;
   }
 
+  /**
+   * Returns the S3 client used by this filesystem.
+   * @return AmazonS3Client
+   */
+  @VisibleForTesting
+  AmazonS3Client getAmazonS3Client() {
+    return s3;
+  }
 
   public S3AFileSystem() {
     super();
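As context for the two hunks above: Constants.ENDPOINT introduces the "fs.s3a.endpoint" key, and during filesystem initialization any non-empty value is passed to AmazonS3Client.setEndpoint(), so an invalid endpoint fails fast with a wrapped IllegalArgumentException. Below is a minimal usage sketch written for this message, not part of the patch; the bucket name and the Ireland endpoint are illustrative, and AWS credentials are assumed to be configured elsewhere.

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class S3AEndpointExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Point S3A at a region-specific (or S3-compatible) endpoint
        // instead of the default; the key is Constants.ENDPOINT.
        conf.set("fs.s3a.endpoint", "s3-eu-west-1.amazonaws.com");

        // A malformed endpoint value makes this call fail during
        // initialization, per the setEndpoint() handling above.
        // "example-bucket" is a made-up name for illustration.
        FileSystem fs = FileSystem.get(URI.create("s3a://example-bucket/"), conf);
        for (FileStatus st : fs.listStatus(new Path("/"))) {
          System.out.println(st.getPath());
        }
      }
    }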
http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf40a1fc/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AConfiguration.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AConfiguration.java
new file mode 100644
index 0000000..e4a14d0
--- /dev/null
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AConfiguration.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import com.amazonaws.services.s3.AmazonS3Client;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class TestS3AConfiguration {
+  private Configuration conf;
+  private S3AFileSystem fs;
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestS3AConfiguration.class);
+
+  private static final String TEST_ENDPOINT = "test.fs.s3a.endpoint";
+
+  @Rule
+  public Timeout testTimeout = new Timeout(30 * 60 * 1000);
+
+  /**
+   * Test if a custom endpoint is picked up.
+   * <p/>
+   * The test expects TEST_ENDPOINT to be defined in the Configuration,
+   * describing the endpoint of the bucket to which TEST_FS_S3A_NAME points
+   * (e.g. "s3-eu-west-1.amazonaws.com" if the bucket is located in Ireland).
+   * Evidently, the bucket has to be hosted in the region denoted by the
+   * endpoint for the test to succeed.
+   * <p/>
+   * More info and the list of endpoint identifiers:
+   * http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testEndpoint() throws Exception {
+    conf = new Configuration();
+    String endpoint = conf.getTrimmed(TEST_ENDPOINT, "");
+    if (endpoint.isEmpty()) {
+      LOG.warn("Custom endpoint test skipped as " + TEST_ENDPOINT
+          + " config setting was not detected");
+    } else {
+      conf.set(Constants.ENDPOINT, endpoint);
+      fs = S3ATestUtils.createTestFileSystem(conf);
+      AmazonS3Client s3 = fs.getAmazonS3Client();
+      String endPointRegion = "";
+      // Differentiate handling of "s3-" and "s3." based endpoint identifiers
+      String[] endpointParts = StringUtils.split(endpoint, '.');
+      if (endpointParts.length == 3) {
+        endPointRegion = endpointParts[0].substring(3);
+      } else if (endpointParts.length == 4) {
+        endPointRegion = endpointParts[1];
+      } else {
+        fail("Unexpected endpoint");
+      }
+      assertEquals("Endpoint config setting and bucket location differ: ",
+          endPointRegion, s3.getBucketLocation(fs.getUri().getHost()));
+    }
+  }
+}
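For readers puzzling over the parsing in testEndpoint: the assertion compares the region encoded in the configured endpoint hostname against getBucketLocation() for the test bucket. The helper below isolates that parsing convention; it is an illustration written for this note, not code from the patch. The two hostnames in main() are examples of the "s3-region" and "s3.region" endpoint styles from the AWS endpoint list linked in the javadoc.

    public class EndpointRegionDemo {
      /** Extract the region identifier from an S3 endpoint hostname. */
      static String regionOf(String endpoint) {
        String[] parts = endpoint.split("\\.");
        if (parts.length == 3) {
          // "s3-eu-west-1.amazonaws.com" -> drop the leading "s3-"
          return parts[0].substring(3);
        } else if (parts.length == 4) {
          // "s3.eu-central-1.amazonaws.com" -> region is the second label
          return parts[1];
        }
        throw new IllegalArgumentException("Unexpected endpoint: " + endpoint);
      }

      public static void main(String[] args) {
        System.out.println(regionOf("s3-eu-west-1.amazonaws.com"));    // eu-west-1
        System.out.println(regionOf("s3.eu-central-1.amazonaws.com")); // eu-central-1
      }
    }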