hadoop-common-commits mailing list archives

From cnaur...@apache.org
Subject [2/2] hadoop git commit: HADOOP-13392. [Azure Data Lake] OAuth2 configuration should default to true when AdlFileSystem is configured. Contributed by Vishwajeet Dusane and Chris Douglas.
Date Fri, 22 Jul 2016 16:28:20 GMT
HADOOP-13392. [Azure Data Lake] OAuth2 configuration should default to true when AdlFileSystem is configured. Contributed by Vishwajeet Dusane and Chris Douglas.

(cherry picked from commit c60a68ce1d2b7823f78e942cb00d48975476a5a6)
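
The practical effect of this change: a client using the adl:// scheme no longer has to set DFS_WEBHDFS_OAUTH_ENABLED_KEY itself, because the ADL file system turns the flag on against an internal copy of the configuration during initialize(). The following is a minimal usage sketch, not part of the commit; it assumes org.apache.hadoop.fs.adl.AdlFileSystem (the class named in the commit title), and the refresh endpoint and account URI are hypothetical. The credential keys mirror the ones used in TestConfigurationSetting below.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.adl.AdlFileSystem;
import org.apache.hadoop.hdfs.web.oauth2.AccessTokenProvider;
import org.apache.hadoop.hdfs.web.oauth2.ConfCredentialBasedAccessTokenProvider;

import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.ACCESS_TOKEN_PROVIDER_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_OAUTH_ENABLED_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY;
import static org.apache.hadoop.hdfs.web.oauth2.CredentialBasedAccessTokenProvider.OAUTH_CREDENTIAL_KEY;

public class AdlOAuthDefaultSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // OAuth2 credential settings are still required.
    conf.set(OAUTH_REFRESH_URL_KEY, "https://login.example.com/refresh"); // hypothetical endpoint
    conf.set(OAUTH_CREDENTIAL_KEY, "credential");
    conf.set(OAUTH_CLIENT_ID_KEY, "MY_CLIENTID");
    conf.setClass(ACCESS_TOKEN_PROVIDER_KEY,
        ConfCredentialBasedAccessTokenProvider.class, AccessTokenProvider.class);
    // DFS_WEBHDFS_OAUTH_ENABLED_KEY is deliberately left unset here.

    AdlFileSystem fs = new AdlFileSystem();
    fs.initialize(new URI("adl://example.azuredatalakestore.net"), conf); // hypothetical account
    // OAuth2 was enabled on the file system's internal configuration copy.
    System.out.println(fs.getConf().getBoolean(DFS_WEBHDFS_OAUTH_ENABLED_KEY, false)); // true
    fs.close();
  }
}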


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/36e16019
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/36e16019
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/36e16019

Branch: refs/heads/branch-3.0.0-alpha1
Commit: 36e16019471524b18be5f21df00aae5bb7e83dd1
Parents: da926e8
Author: Chris Nauroth <cnauroth@apache.org>
Authored: Fri Jul 22 09:16:38 2016 -0700
Committer: Chris Nauroth <cnauroth@apache.org>
Committed: Fri Jul 22 09:21:14 2016 -0700

----------------------------------------------------------------------
 .../web/PrivateAzureDataLakeFileSystem.java     |  8 +++
 .../hdfs/web/TestConfigurationSetting.java      | 54 +++++++++++++++-----
 2 files changed, 48 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/36e16019/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/PrivateAzureDataLakeFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/PrivateAzureDataLakeFileSystem.java b/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/PrivateAzureDataLakeFileSystem.java
index c4a19d5..aa7c552 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/PrivateAzureDataLakeFileSystem.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/main/java/org/apache/hadoop/hdfs/web/PrivateAzureDataLakeFileSystem.java
@@ -54,6 +54,8 @@ import org.apache.hadoop.hdfs.web.resources.ReplicationParam;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.VersionInfo;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_OAUTH_ENABLED_KEY;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_OAUTH_ENABLED_DEFAULT;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -108,6 +110,12 @@ public class PrivateAzureDataLakeFileSystem extends SWebHdfsFileSystem
{
   @Override
   public synchronized void initialize(URI uri, Configuration conf)
       throws IOException {
+    if (!conf.getBoolean(DFS_WEBHDFS_OAUTH_ENABLED_KEY,
+                         DFS_WEBHDFS_OAUTH_ENABLED_DEFAULT)) {
+      // clone configuration, enable OAuth2
+      conf = new Configuration(conf);
+      conf.setBoolean(DFS_WEBHDFS_OAUTH_ENABLED_KEY, true);
+    }
     super.initialize(uri, conf);
     overrideOwner = getConf()
         .getBoolean(ADLConfKeys.ADL_DEBUG_OVERRIDE_LOCAL_USER_AS_OWNER,
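
Why the hunk above copies the Configuration before forcing the flag on (an inference about intent, not stated in the commit message): mutating the Configuration object passed into initialize() would leak the override to anything else the caller builds from that same object, while the copy keeps the override local to this file system instance. A standalone illustration, reusing the static DFS_WEBHDFS_OAUTH_ENABLED_KEY import from the sketch above:

Configuration callerConf = new Configuration();
callerConf.setBoolean(DFS_WEBHDFS_OAUTH_ENABLED_KEY, false);

Configuration copy = new Configuration(callerConf);   // copy constructor
copy.setBoolean(DFS_WEBHDFS_OAUTH_ENABLED_KEY, true);

callerConf.getBoolean(DFS_WEBHDFS_OAUTH_ENABLED_KEY, false);  // still false
copy.getBoolean(DFS_WEBHDFS_OAUTH_ENABLED_KEY, false);        // true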

http://git-wip-us.apache.org/repos/asf/hadoop/blob/36e16019/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/hdfs/web/TestConfigurationSetting.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/hdfs/web/TestConfigurationSetting.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/hdfs/web/TestConfigurationSetting.java
index 9a54ec2..9f33fb4 100644
--- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/hdfs/web/TestConfigurationSetting.java
+++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/hdfs/web/TestConfigurationSetting.java
@@ -19,18 +19,23 @@
 
 package org.apache.hadoop.hdfs.web;
 
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.adl.TestableAdlFileSystem;
-import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
+import org.apache.hadoop.hdfs.web.oauth2.AccessTokenProvider;
 import org.apache.hadoop.hdfs.web.oauth2.ConfCredentialBasedAccessTokenProvider;
-import org.apache.hadoop.hdfs.web.oauth2.CredentialBasedAccessTokenProvider;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.ACCESS_TOKEN_PROVIDER_KEY;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_OAUTH_ENABLED_KEY;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY;
+import static org.apache.hadoop.hdfs.web.oauth2.CredentialBasedAccessTokenProvider.OAUTH_CREDENTIAL_KEY;
+
 import org.junit.Assert;
 import org.junit.Test;
 
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-
 /**
  * This class is responsible for testing adl file system required configuration
  * and feature set keys.
@@ -41,14 +46,13 @@ public class TestConfigurationSetting {
   public void testAllConfiguration() throws URISyntaxException, IOException {
     TestableAdlFileSystem fs = new TestableAdlFileSystem();
     Configuration conf = new Configuration();
-    conf.set(HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY,
-        "http://localhost:1111/refresh");
-    conf.set(CredentialBasedAccessTokenProvider.OAUTH_CREDENTIAL_KEY,
-        "credential");
-    conf.set(HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY, "MY_CLIENTID");
-    conf.set(HdfsClientConfigKeys.ACCESS_TOKEN_PROVIDER_KEY,
-        ConfCredentialBasedAccessTokenProvider.class.getName());
-    conf.set(HdfsClientConfigKeys.DFS_WEBHDFS_OAUTH_ENABLED_KEY, "true");
+    conf.set(OAUTH_REFRESH_URL_KEY, "http://localhost:1111/refresh");
+    conf.set(OAUTH_CREDENTIAL_KEY, "credential");
+    conf.set(OAUTH_CLIENT_ID_KEY, "MY_CLIENTID");
+    conf.setClass(ACCESS_TOKEN_PROVIDER_KEY,
+        ConfCredentialBasedAccessTokenProvider.class,
+        AccessTokenProvider.class);
+    conf.setBoolean(DFS_WEBHDFS_OAUTH_ENABLED_KEY, true);
 
     URI uri = new URI("adl://localhost:1234");
     fs.initialize(uri, conf);
@@ -109,4 +113,26 @@ public class TestConfigurationSetting {
     Assert.assertEquals(false, fs.isOverrideOwnerFeatureOn());
     fs.close();
   }
+
+  @Test
+  public void testOAuthEnable() throws Exception {
+    try (TestableAdlFileSystem fs = new TestableAdlFileSystem()) {
+      Configuration conf = new Configuration();
+      conf.set(OAUTH_REFRESH_URL_KEY, "http://localhost:1111/refresh");
+      conf.set(OAUTH_CREDENTIAL_KEY, "credential");
+      conf.set(OAUTH_CLIENT_ID_KEY, "MY_CLIENTID");
+      conf.setClass(ACCESS_TOKEN_PROVIDER_KEY,
+          ConfCredentialBasedAccessTokenProvider.class,
+          AccessTokenProvider.class);
+      // disable OAuth2 in configuration, verify overridden
+      conf.setBoolean(DFS_WEBHDFS_OAUTH_ENABLED_KEY, false);
+
+      URI uri = new URI("adl://localhost:1234");
+      fs.initialize(uri, conf);
+      Assert.assertFalse(conf.getBoolean(DFS_WEBHDFS_OAUTH_ENABLED_KEY, false));
+      Assert.assertTrue(fs.getConf().getBoolean(DFS_WEBHDFS_OAUTH_ENABLED_KEY,
+            false));
+    }
+  }
+
 }
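
The new testOAuthEnable case exercises exactly the override behavior: it explicitly disables OAuth2 on the caller's Configuration, initializes the file system, and then asserts that the caller's object still reads false while fs.getConf() reads true, confirming the flag is flipped only on the internal copy.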



