hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From x...@apache.org
Subject [1/2] hadoop git commit: HADOOP-14445. Use DelegationTokenIssuer to create KMS delegation tokens that can authenticate to all KMS instances. Contributed by Daryn Sharp, Xiao Chen, Rushabh S Shah.
Date Fri, 12 Oct 2018 16:35:53 GMT
Repository: hadoop
Updated Branches:
  refs/heads/trunk 6e0e6daaf -> 5ec86b445


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5ec86b44/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/HdfsKMSUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/HdfsKMSUtil.java
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/HdfsKMSUtil.java
index de27f7e..30e8aa7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/HdfsKMSUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/HdfsKMSUtil.java
@@ -35,14 +35,12 @@ import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
-import org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension;
 import org.apache.hadoop.crypto.key.KeyProviderTokenIssuer;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileEncryptionInfo;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.KMSUtil;
 
 /**
@@ -71,32 +69,6 @@ public final class HdfsKMSUtil {
     return KMSUtil.createKeyProvider(conf, keyProviderUriKeyName);
   }
 
-  public static Token<?>[] addDelegationTokensForKeyProvider(
-      KeyProviderTokenIssuer kpTokenIssuer, final String renewer,
-      Credentials credentials, URI namenodeUri, Token<?>[] tokens)
-          throws IOException {
-    KeyProvider keyProvider = kpTokenIssuer.getKeyProvider();
-    if (keyProvider != null) {
-      KeyProviderDelegationTokenExtension keyProviderDelegationTokenExtension
-          = KeyProviderDelegationTokenExtension.
-              createKeyProviderDelegationTokenExtension(keyProvider);
-      Token<?>[] kpTokens = keyProviderDelegationTokenExtension.
-          addDelegationTokens(renewer, credentials);
-      credentials.addSecretKey(getKeyProviderMapKey(namenodeUri),
-          DFSUtilClient.string2Bytes(
-              kpTokenIssuer.getKeyProviderUri().toString()));
-      if (tokens != null && kpTokens != null) {
-        Token<?>[] all = new Token<?>[tokens.length + kpTokens.length];
-        System.arraycopy(tokens, 0, all, 0, tokens.length);
-        System.arraycopy(kpTokens, 0, all, tokens.length, kpTokens.length);
-        tokens = all;
-      } else {
-        tokens = (tokens != null) ? tokens : kpTokens;
-      }
-    }
-    return tokens;
-  }
-
   /**
    * Obtain the crypto protocol version from the provided FileEncryptionInfo,
    * checking to see if this version is supported by.
@@ -161,28 +133,36 @@ public final class HdfsKMSUtil {
     URI keyProviderUri = null;
     // Lookup the secret in credentials object for namenodeuri.
     Credentials credentials = ugi.getCredentials();
+    Text credsKey = getKeyProviderMapKey(namenodeUri);
     byte[] keyProviderUriBytes =
-        credentials.getSecretKey(getKeyProviderMapKey(namenodeUri));
+        credentials.getSecretKey(credsKey);
     if(keyProviderUriBytes != null) {
       keyProviderUri =
           URI.create(DFSUtilClient.bytes2String(keyProviderUriBytes));
-      return keyProviderUri;
     }
-
-    if (keyProviderUriStr != null) {
-      if (!keyProviderUriStr.isEmpty()) {
+    if (keyProviderUri == null) {
+      // NN is old and doesn't report provider, so use conf.
+      if (keyProviderUriStr == null) {
+        keyProviderUri = KMSUtil.getKeyProviderUri(conf, keyProviderUriKeyName);
+      } else if (!keyProviderUriStr.isEmpty()) {
         keyProviderUri = URI.create(keyProviderUriStr);
       }
-      return keyProviderUri;
+      if (keyProviderUri != null) {
+        credentials.addSecretKey(
+            credsKey, DFSUtilClient.string2Bytes(keyProviderUri.toString()));
+      }
     }
+    return keyProviderUri;
+  }
 
-    // Last thing is to trust its own conf to be backwards compatible.
-    String keyProviderUriFromConf = conf.getTrimmed(
-        CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH);
-    if (keyProviderUriFromConf != null && !keyProviderUriFromConf.isEmpty()) {
-      keyProviderUri = URI.create(keyProviderUriFromConf);
+  public static KeyProvider getKeyProvider(KeyProviderTokenIssuer issuer,
+                                           Configuration conf)
+      throws IOException {
+    URI keyProviderUri = issuer.getKeyProviderUri();
+    if (keyProviderUri != null) {
+      return KMSUtil.createKeyProviderFromUri(conf, keyProviderUri);
     }
-    return keyProviderUri;
+    return null;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5ec86b44/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index d504cfe..b7325ba 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -111,7 +111,6 @@ import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
-import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
@@ -119,6 +118,7 @@ import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenSelector;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
+import org.apache.hadoop.security.token.DelegationTokenIssuer;
 import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.KMSUtil;
 import org.apache.hadoop.util.Progressable;
@@ -1691,6 +1691,16 @@ public class WebHdfsFileSystem extends FileSystem
   }
 
   @Override
+  public DelegationTokenIssuer[] getAdditionalTokenIssuers()
+      throws IOException {
+    KeyProvider keyProvider = getKeyProvider();
+    if (keyProvider instanceof DelegationTokenIssuer) {
+      return new DelegationTokenIssuer[] {(DelegationTokenIssuer) keyProvider};
+    }
+    return null;
+  }
+
+  @Override
   public synchronized Token<?> getRenewToken() {
     return delegationToken;
   }
@@ -1725,14 +1735,6 @@ public class WebHdfsFileSystem extends FileSystem
     ).run();
   }
 
-  @Override
-  public Token<?>[] addDelegationTokens(String renewer,
-      Credentials credentials) throws IOException {
-    Token<?>[] tokens = super.addDelegationTokens(renewer, credentials);
-    return HdfsKMSUtil.addDelegationTokensForKeyProvider(this, renewer,
-        credentials, getUri(), tokens);
-  }
-
   public BlockLocation[] getFileBlockLocations(final FileStatus status,
       final long offset, final long length) throws IOException {
     if (status == null) {


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org


Mime
View raw message