From common-commits-return-92334-archive-asf-public=cust-asf.ponee.io@hadoop.apache.org Mon Jan 7 10:56:17 2019
X-Original-To: archive-asf-public@cust-asf.ponee.io
Delivered-To: archive-asf-public@cust-asf.ponee.io
Received: from mail.apache.org (hermes.apache.org [140.211.11.3])
    by mx-eu-01.ponee.io (Postfix) with SMTP id DB68C180647
    for ; Mon, 7 Jan 2019 10:56:16 +0100 (CET)
Received: (qmail 53013 invoked by uid 500); 7 Jan 2019 09:56:15 -0000
Mailing-List: contact common-commits-help@hadoop.apache.org; run by ezmlm
Precedence: bulk
Delivered-To: mailing list common-commits@hadoop.apache.org
Received: (qmail 53004 invoked by uid 99); 7 Jan 2019 09:56:15 -0000
Received: from ec2-52-202-80-70.compute-1.amazonaws.com (HELO gitbox.apache.org) (52.202.80.70)
    by apache.org (qpsmtpd/0.29) with ESMTP; Mon, 07 Jan 2019 09:56:15 +0000
Received: by gitbox.apache.org (ASF Mail Server at gitbox.apache.org, from userid 33)
    id 238DD85B8C; Mon, 7 Jan 2019 09:56:15 +0000 (UTC)
Date: Mon, 07 Jan 2019 09:56:15 +0000
To: "common-commits@hadoop.apache.org"
Subject: [hadoop] branch branch-2 updated: HADOOP-16030. AliyunOSS: bring fixes back from HADOOP-15671. Contributed by wujinhu.
MIME-Version: 1.0
Content-Type: text/plain; charset=utf-8
Content-Transfer-Encoding: 8bit
Message-ID: <154685497498.10452.8883419359874621425@gitbox.apache.org>
From: wwei@apache.org
X-Git-Host: gitbox.apache.org
X-Git-Repo: hadoop
X-Git-Refname: refs/heads/branch-2
X-Git-Reftype: branch
X-Git-Oldrev: 2c4e4937bd05cffdab7953a904041b7af7afea4c
X-Git-Newrev: 2a416fe9f2dc5d1949fb098b1e976597f161035c
X-Git-Rev: 2a416fe9f2dc5d1949fb098b1e976597f161035c
X-Git-NotificationType: ref_changed_plus_diff
X-Git-Multimail-Version: 1.5.dev
Auto-Submitted: auto-generated

This is an automated email from the ASF dual-hosted git repository.

wwei pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-2 by this push:
     new 2a416fe  HADOOP-16030. AliyunOSS: bring fixes back from HADOOP-15671. Contributed by wujinhu.
2a416fe is described below

commit 2a416fe9f2dc5d1949fb098b1e976597f161035c
Author: Weiwei Yang
AuthorDate: Mon Jan 7 15:56:49 2019 +0800

    HADOOP-16030. AliyunOSS: bring fixes back from HADOOP-15671. Contributed by wujinhu.
    (cherry picked from commit f87b3b11c46704dcdb63089dd971e2a5ba1deaac)
---
 .../fs/aliyun/oss/AliyunOSSBlockOutputStream.java  |  5 +++--
 .../fs/aliyun/oss/AliyunOSSFileSystemStore.java    |  5 +++--
 .../hadoop/fs/aliyun/oss/AliyunOSSUtils.java       |  8 ++++---
 .../fs/aliyun/oss/TestAliyunCredentials.java       | 25 ++++++++++++++++++----
 4 files changed, 32 insertions(+), 11 deletions(-)

diff --git a/hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSBlockOutputStream.java b/hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSBlockOutputStream.java
index 42cb0b1..353b2da 100644
--- a/hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSBlockOutputStream.java
+++ b/hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSBlockOutputStream.java
@@ -124,7 +124,8 @@ public class AliyunOSSBlockOutputStream extends OutputStream {
         if (null == partETags) {
           throw new IOException("Failed to multipart upload to oss, abort it.");
         }
-        store.completeMultipartUpload(key, uploadId, partETags);
+        store.completeMultipartUpload(key, uploadId,
+            new ArrayList<>(partETags));
       }
     } finally {
       removePartFiles();
@@ -133,7 +134,7 @@ public class AliyunOSSBlockOutputStream extends OutputStream {
   }
 
   @Override
-  public void write(int b) throws IOException {
+  public synchronized void write(int b) throws IOException {
     singleByte[0] = (byte)b;
     write(singleByte, 0, 1);
   }
diff --git a/hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSFileSystemStore.java b/hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSFileSystemStore.java
index e38cac1..1670f6a 100644
--- a/hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSFileSystemStore.java
+++ b/hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSFileSystemStore.java
@@ -150,7 +150,7 @@ public class AliyunOSSFileSystemStore {
           "null or empty. Please set proper endpoint with 'fs.oss.endpoint'.");
     }
     CredentialsProvider provider =
-        AliyunOSSUtils.getCredentialsProvider(conf);
+        AliyunOSSUtils.getCredentialsProvider(uri, conf);
     ossClient = new OSSClient(endPoint, provider, clientConf);
     uploadPartSize = AliyunOSSUtils.getMultipartSizeProperty(conf,
         MULTIPART_UPLOAD_PART_SIZE_KEY, MULTIPART_UPLOAD_PART_SIZE_DEFAULT);
@@ -158,6 +158,8 @@ public class AliyunOSSFileSystemStore {
     serverSideEncryptionAlgorithm =
         conf.get(SERVER_SIDE_ENCRYPTION_ALGORITHM_KEY, "");
 
+    bucketName = uri.getHost();
+
     String cannedACLName = conf.get(CANNED_ACL_KEY, CANNED_ACL_DEFAULT);
     if (StringUtils.isNotEmpty(cannedACLName)) {
       CannedAccessControlList cannedACL =
@@ -167,7 +169,6 @@ public class AliyunOSSFileSystemStore {
     }
 
     maxKeys = conf.getInt(MAX_PAGING_KEYS_KEY, MAX_PAGING_KEYS_DEFAULT);
-    bucketName = uri.getHost();
   }
 
   /**
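[Editor's note, not part of the commit] The AliyunOSSBlockOutputStream hunk earlier in this diff hands completeMultipartUpload a fresh ArrayList instead of the collected list itself. The sketch below illustrates that defensive-copy pattern only; the helper name and the sort step are assumptions for illustration, not code from the repository. The point is simply that the completion step receives a mutable, ordered list even if the parts were gathered into a read-only collection.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

import com.aliyun.oss.model.PartETag;

public class PartETagCopySketch {
  /**
   * Illustrative helper: return a mutable copy of the collected part ETags,
   * ordered by part number, so the multipart-completion call can sort or
   * otherwise mutate the list without failing on an unmodifiable view.
   */
  static List<PartETag> mutableOrderedParts(List<PartETag> partETags) {
    List<PartETag> copy = new ArrayList<>(partETags);
    copy.sort(Comparator.comparingInt(PartETag::getPartNumber));
    return copy;
  }
}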
diff --git a/hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSUtils.java b/hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSUtils.java
index 2fe06c1..1f95965 100644
--- a/hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSUtils.java
+++ b/hadoop-tools/hadoop-aliyun/src/main/java/org/apache/hadoop/fs/aliyun/oss/AliyunOSSUtils.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.aliyun.oss;
 
 import java.io.File;
 import java.io.IOException;
+import java.net.URI;
 
 import com.aliyun.oss.common.auth.CredentialsProvider;
 import com.google.common.base.Preconditions;
@@ -95,13 +96,14 @@ final public class AliyunOSSUtils {
    * Create credential provider specified by configuration, or create default
    * credential provider if not specified.
    *
+   * @param uri uri passed by caller
    * @param conf configuration
    * @return a credential provider
    * @throws IOException on any problem. Class construction issues may be
    *         nested inside the IOE.
    */
-  public static CredentialsProvider getCredentialsProvider(Configuration conf)
-      throws IOException {
+  public static CredentialsProvider getCredentialsProvider(
+      URI uri, Configuration conf) throws IOException {
     CredentialsProvider credentials;
 
     String className = conf.getTrimmed(CREDENTIALS_PROVIDER_KEY);
@@ -117,7 +119,7 @@ final public class AliyunOSSUtils {
      try {
        credentials =
            (CredentialsProvider)credClass.getDeclaredConstructor(
-                Configuration.class).newInstance(conf);
+                URI.class, Configuration.class).newInstance(uri, conf);
      } catch (NoSuchMethodException | SecurityException e) {
        credentials =
            (CredentialsProvider)credClass.getDeclaredConstructor()
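[Editor's note, not part of the commit] After this change, the reflection in AliyunOSSUtils first looks for a (URI, Configuration) constructor on the class named by CREDENTIALS_PROVIDER_KEY and only then falls back to a no-argument constructor. The following is a hedged sketch of a user-supplied provider with the preferred constructor shape; the class name is hypothetical, and the use of DefaultCredentials with the ACCESS_KEY_ID/ACCESS_KEY_SECRET settings mirrors the existing AliyunCredentialsProvider rather than anything this commit adds.

import java.net.URI;

import com.aliyun.oss.common.auth.Credentials;
import com.aliyun.oss.common.auth.CredentialsProvider;
import com.aliyun.oss.common.auth.DefaultCredentials;
import com.aliyun.oss.common.auth.InvalidCredentialsException;
import org.apache.hadoop.conf.Configuration;

import static org.apache.hadoop.fs.aliyun.oss.Constants.ACCESS_KEY_ID;
import static org.apache.hadoop.fs.aliyun.oss.Constants.ACCESS_KEY_SECRET;

/**
 * Hypothetical provider showing the constructor the patched lookup tries
 * first: (URI, Configuration). Classes without it still work through the
 * no-arg fallback shown in the hunk above.
 */
public class ExampleUriCredentialsProvider implements CredentialsProvider {
  private final Credentials credentials;

  public ExampleUriCredentialsProvider(URI uri, Configuration conf) {
    // The URI lets a provider vary credentials per bucket; this sketch only
    // reads the plain access-key settings from the configuration.
    String accessKeyId = conf.getTrimmed(ACCESS_KEY_ID, "");
    String accessKeySecret = conf.getTrimmed(ACCESS_KEY_SECRET, "");
    if (accessKeyId.isEmpty() || accessKeySecret.isEmpty()) {
      throw new InvalidCredentialsException(
          "AccessKeyId and AccessKeySecret should not be null or empty.");
    }
    this.credentials = new DefaultCredentials(accessKeyId, accessKeySecret);
  }

  @Override
  public void setCredentials(Credentials creds) {
    // Credentials are fixed at construction time in this sketch.
  }

  @Override
  public Credentials getCredentials() {
    return credentials;
  }
}

A class like this would be named through the provider setting read via CREDENTIALS_PROVIDER_KEY and instantiated by AliyunOSSUtils.getCredentialsProvider(uri, conf), which is exactly what the test change below exercises.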
diff --git a/hadoop-tools/hadoop-aliyun/src/test/java/org/apache/hadoop/fs/aliyun/oss/TestAliyunCredentials.java b/hadoop-tools/hadoop-aliyun/src/test/java/org/apache/hadoop/fs/aliyun/oss/TestAliyunCredentials.java
index e08a4dc..9566901 100644
--- a/hadoop-tools/hadoop-aliyun/src/test/java/org/apache/hadoop/fs/aliyun/oss/TestAliyunCredentials.java
+++ b/hadoop-tools/hadoop-aliyun/src/test/java/org/apache/hadoop/fs/aliyun/oss/TestAliyunCredentials.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.fs.aliyun.oss;
 
 import com.aliyun.oss.common.auth.Credentials;
+import com.aliyun.oss.common.auth.CredentialsProvider;
 import com.aliyun.oss.common.auth.InvalidCredentialsException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.aliyun.oss.contract.AliyunOSSContract;
@@ -27,6 +28,8 @@ import org.apache.hadoop.fs.contract.AbstractFSContractTestBase;
 import org.junit.Test;
 
 import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.net.URI;
 
 import static org.apache.hadoop.fs.aliyun.oss.Constants.ACCESS_KEY_ID;
 import static org.apache.hadoop.fs.aliyun.oss.Constants.ACCESS_KEY_SECRET;
@@ -63,16 +66,30 @@ public class TestAliyunCredentials extends AbstractFSContractTestBase {
     validateCredential(conf);
   }
 
-  private void validateCredential(Configuration conf) {
+  private void validateCredential(URI uri, Configuration conf) {
     try {
-      AliyunCredentialsProvider provider
-          = new AliyunCredentialsProvider(conf);
+      CredentialsProvider provider =
+          AliyunOSSUtils.getCredentialsProvider(uri, conf);
       Credentials credentials = provider.getCredentials();
       fail("Expected a CredentialInitializationException, got " + credentials);
     } catch (InvalidCredentialsException expected) {
       // expected
     } catch (IOException e) {
-      fail("Unexpected exception.");
+      Throwable cause = e.getCause();
+      if (cause instanceof InvocationTargetException) {
+        boolean isInstance =
+            ((InvocationTargetException)cause).getTargetException()
+            instanceof InvalidCredentialsException;
+        if (!isInstance) {
+          fail("Unexpected exception.");
+        }
+      } else {
+        fail("Unexpected exception.");
+      }
     }
   }
+
+  private void validateCredential(Configuration conf) {
+    validateCredential(null, conf);
+  }
 }

---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org