From: suresh@apache.org
To: common-commits@hadoop.apache.org
Subject: svn commit: r1528301 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common/src: main/java/org/apache/hadoop/security/ main/java/org/apache/hadoop/security/token/ test/java/org/apache/hadoop/security/
Date: Wed, 02 Oct 2013 04:00:07 -0000

Author: suresh
Date: Wed Oct 2 04:00:06 2013
New Revision: 1528301

URL: http://svn.apache.org/r1528301
Log:
HADOOP-10012. Secure Oozie jobs fail with delegation token renewal exception in Namenode HA setup. Contributed by Daryn Sharp and Suresh Srinivas.
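For context on the scenario the log message describes: in an HA deployment the delegation token a client obtains is bound to the logical nameservice, and the failover proxy keeps per-NameNode copies keyed by each physical address so that RPC connections can look them up. The sketch below is illustrative only (the helper method and service names are made up, not the actual HDFS failover-proxy code); it shows how such clones can be registered as Token.PrivateToken so they remain usable locally but are never returned by UserGroupInformation#getCredentials(), which is what gets handed to a token renewer.

  import org.apache.hadoop.io.Text;
  import org.apache.hadoop.security.UserGroupInformation;
  import org.apache.hadoop.security.token.Token;
  import org.apache.hadoop.security.token.TokenIdentifier;

  public class HaTokenCloneSketch {
    // Hypothetical helper: register per-NameNode copies of a logically
    // addressed delegation token, e.g. for services "nn1:8020" and "nn2:8020".
    static <T extends TokenIdentifier> void cloneForPhysicalAddresses(
        UserGroupInformation ugi, Token<T> logicalToken, Text... physicalServices) {
      for (Text physical : physicalServices) {
        // PrivateToken marks the clone as local-only: usable for RPC to that
        // address, but skipped by ugi.getCredentials() after this change.
        Token<T> clone = new Token.PrivateToken<T>(logicalToken);
        clone.setService(physical);
        ugi.addToken(physical, clone);
      }
    }
  }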
Modified:
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1528301&r1=1528300&r2=1528301&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Wed Oct 2 04:00:06 2013
@@ -33,6 +33,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -1325,7 +1326,14 @@ public class UserGroupInformation {
    * @return Credentials of tokens associated with this user
    */
   public synchronized Credentials getCredentials() {
-    return new Credentials(getCredentialsInternal());
+    Credentials creds = new Credentials(getCredentialsInternal());
+    Iterator<Token<?>> iter = creds.getAllTokens().iterator();
+    while (iter.hasNext()) {
+      if (iter.next() instanceof Token.PrivateToken) {
+        iter.remove();
+      }
+    }
+    return creds;
   }
 
   /**
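A reading of the change above, stated as an assumption rather than something the patch itself spells out: because getCredentials() builds a new Credentials from a copy of the internal credentials, the iterator removal only trims the returned snapshot; the private clones stay inside the UGI for its own RPC use. A minimal illustration with made-up aliases (not part of the patch):

  import org.apache.hadoop.io.Text;
  import org.apache.hadoop.security.Credentials;
  import org.apache.hadoop.security.UserGroupInformation;
  import org.apache.hadoop.security.token.Token;
  import org.apache.hadoop.security.token.TokenIdentifier;

  public class GetCredentialsSnapshotDemo {
    public static void main(String[] args) throws Exception {
      UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

      // Placeholder token; in practice this is a delegation token whose
      // service is the logical HA nameservice.
      Token<TokenIdentifier> regular = new Token<TokenIdentifier>();
      ugi.addToken(new Text("logical-service"), regular);

      // A per-NameNode clone, hidden from getCredentials() by this change.
      ugi.addToken(new Text("nn1-clone"),
          new Token.PrivateToken<TokenIdentifier>(regular));

      // The snapshot handed out (e.g. copied into a job for submission and
      // later renewal) contains only the regular token.
      Credentials forSubmission = ugi.getCredentials();
      System.out.println(forSubmission.getAllTokens().size()); // prints 1

      // getCredentials() is the only accessor this patch touches; assuming
      // getTokens() still reads the internal credentials directly, it would
      // continue to show both entries.
      System.out.println(ugi.getTokens().size()); // 2, under that assumption
    }
  }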
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java?rev=1528301&r1=1528300&r2=1528301&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java Wed Oct 2 04:00:06 2013
@@ -19,31 +19,20 @@
 package org.apache.hadoop.security.token;
 
 import com.google.common.collect.Maps;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Map;
-import java.util.ServiceLoader;
-
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparator;
-import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.io.*;
 import org.apache.hadoop.util.ReflectionUtils;
 
+import java.io.*;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.ServiceLoader;
+
 /**
  * The client-side form of the token.
  */
@@ -195,6 +184,19 @@ public class Token<T extends TokenIdentifier> implements Writable {
+  public static class PrivateToken<T extends TokenIdentifier> extends Token<T> {
+    public PrivateToken(Token<T> token) {
+      super(token);
+    }
+  }
+
   @Override
   public void readFields(DataInput in) throws IOException {
     int len = WritableUtils.readVInt(in);
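One property worth noting about the new marker class, sketched under the assumption that Token's Writable methods are untouched by this patch: a PrivateToken carries no extra state and serializes exactly like its parent, and whatever is read back is an ordinary Token, so the private marker exists only in memory within a single process.

  import org.apache.hadoop.io.DataInputBuffer;
  import org.apache.hadoop.io.DataOutputBuffer;
  import org.apache.hadoop.security.token.Token;
  import org.apache.hadoop.security.token.TokenIdentifier;

  public class PrivateTokenRoundTripSketch {
    public static void main(String[] args) throws Exception {
      Token<TokenIdentifier> original = new Token<TokenIdentifier>();
      Token<TokenIdentifier> priv =
          new Token.PrivateToken<TokenIdentifier>(original);

      // Write the private clone using the inherited Writable methods.
      DataOutputBuffer out = new DataOutputBuffer();
      priv.write(out);

      // Read it back: the reader constructs a plain Token, so the
      // "private" marker does not survive (de)serialization.
      DataInputBuffer in = new DataInputBuffer();
      in.reset(out.getData(), out.getLength());
      Token<TokenIdentifier> copy = new Token<TokenIdentifier>();
      copy.readFields(in);

      System.out.println(copy instanceof Token.PrivateToken); // false
    }
  }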
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java?rev=1528301&r1=1528300&r2=1528301&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java Wed Oct 2 04:00:06 2013
@@ -16,11 +16,21 @@
  */
 package org.apache.hadoop.security;
 
-import static org.junit.Assert.*;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.ipc.TestSaslRPC;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.Shell;
 import org.junit.*;
-import static org.mockito.Mockito.*;
-
+import javax.security.auth.Subject;
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.LoginContext;
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
@@ -30,21 +40,13 @@ import java.util.Collection;
 import java.util.LinkedHashSet;
 import java.util.Set;
 
-import javax.security.auth.Subject;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.LoginContext;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.metrics2.MetricsRecordBuilder;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.apache.hadoop.security.authentication.util.KerberosName;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
-import static org.apache.hadoop.test.MetricsAsserts.*;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL;
-import org.apache.hadoop.util.Shell;
+import static org.apache.hadoop.ipc.TestSaslRPC.*;
+import static org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenIdentifier;
+import static org.apache.hadoop.test.MetricsAsserts.*;
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 public class TestUserGroupInformation {
   final private static String USER_NAME = "user1@HADOOP.APACHE.ORG";
@@ -786,4 +788,29 @@ public class TestUserGroupInformation {
     UserGroupInformation.setLoginUser(ugi);
     assertEquals(ugi, UserGroupInformation.getLoginUser());
   }
+
+  /**
+   * In some scenario, such as HA, delegation tokens are associated with a
+   * logical name. The tokens are cloned and are associated with the
+   * physical address of the server where the service is provided.
+   * This test ensures cloned delegated tokens are locally used
+   * and are not returned in {@link UserGroupInformation#getCredentials()}
+   */
+  @Test
+  public void testPrivateTokenExclusion() throws Exception {
+    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+    TestTokenIdentifier tokenId = new TestTokenIdentifier();
+    Token<TestTokenIdentifier> token = new Token<TestTokenIdentifier>(
+        tokenId.getBytes(), "password".getBytes(),
+        tokenId.getKind(), null);
+    ugi.addToken(new Text("regular-token"), token);
+
+    // Now add cloned private token
+    ugi.addToken(new Text("private-token"), new Token.PrivateToken<TestTokenIdentifier>(token));
+    ugi.addToken(new Text("private-token1"), new Token.PrivateToken<TestTokenIdentifier>(token));
+
+    // Ensure only non-private tokens are returned
+    Collection<Token<? extends TokenIdentifier>> tokens = ugi.getCredentials().getAllTokens();
+    assertEquals(1, tokens.size());
+  }
 }
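A note on the test's design: both private entries are clones of the same regular token, registered under different aliases, so the assertEquals(1) confirms that filtering happens by token type rather than by alias, and that the one surviving entry is the regular, logically named token that a renewer should see.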