hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cnaur...@apache.org
Subject svn commit: r1556097 - in /hadoop/common/branches/HDFS-4685/hadoop-common-project: hadoop-auth/ hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/ hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/ hado...
Date Tue, 07 Jan 2014 01:07:37 GMT
Author: cnauroth
Date: Tue Jan  7 01:07:33 2014
New Revision: 1556097

URL: http://svn.apache.org/r1556097
Log:
Merge trunk to HDFS-4685.

Added:
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSourceAdapter.java
      - copied unchanged from r1556096, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSourceAdapter.java
Modified:
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/pom.xml
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestPseudoAuthenticationHandler.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/UserProvider.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java
    hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/pom.xml?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/pom.xml (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/pom.xml Tue Jan  7 01:07:33 2014
@@ -92,6 +92,11 @@
       <artifactId>hadoop-minikdc</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpclient</artifactId>
+      <scope>compile</scope>
+    </dependency>
   </dependencies>
 
   <build>

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java
Tue Jan  7 01:07:33 2014
@@ -16,10 +16,15 @@ package org.apache.hadoop.security.authe
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
 
+import org.apache.http.client.utils.URLEncodedUtils;
+import org.apache.http.NameValuePair;
+
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.List;
 import java.util.Properties;
 
 /**
@@ -48,6 +53,7 @@ public class PseudoAuthenticationHandler
    */
   public static final String ANONYMOUS_ALLOWED = TYPE + ".anonymous.allowed";
 
+  private static final Charset UTF8_CHARSET = Charset.forName("UTF-8");
   private boolean acceptAnonymous;
 
   /**
@@ -114,6 +120,18 @@ public class PseudoAuthenticationHandler
     return true;
   }
 
+  private String getUserName(HttpServletRequest request) {
+    List<NameValuePair> list = URLEncodedUtils.parse(request.getQueryString(), UTF8_CHARSET);
+    if (list != null) {
+      for (NameValuePair nv : list) {
+        if (PseudoAuthenticator.USER_NAME.equals(nv.getName())) {
+          return nv.getValue();
+        }
+      }
+    }
+    return null;
+  }
+
   /**
    * Authenticates an HTTP client request.
    * <p/>
@@ -139,7 +157,7 @@ public class PseudoAuthenticationHandler
   public AuthenticationToken authenticate(HttpServletRequest request, HttpServletResponse
response)
     throws IOException, AuthenticationException {
     AuthenticationToken token;
-    String userName = request.getParameter(PseudoAuthenticator.USER_NAME);
+    String userName = getUserName(request);
     if (userName == null) {
       if (getAcceptAnonymous()) {
         token = AuthenticationToken.ANONYMOUS;

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestPseudoAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestPseudoAuthenticationHandler.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestPseudoAuthenticationHandler.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestPseudoAuthenticationHandler.java
Tue Jan  7 01:07:33 2014
@@ -94,7 +94,7 @@ public class TestPseudoAuthenticationHan
 
       HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
       HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
-      Mockito.when(request.getParameter(PseudoAuthenticator.USER_NAME)).thenReturn("user");
+      Mockito.when(request.getQueryString()).thenReturn(PseudoAuthenticator.USER_NAME + "="
+ "user");
 
       AuthenticationToken token = handler.authenticate(request, response);
 

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt Tue Jan  7 01:07:33 2014
@@ -108,6 +108,8 @@ Trunk (Unreleased)
     HADOOP-10141. Create KeyProvider API to separate encryption key storage
     from the applications. (omalley)
 
+    HADOOP-10201. Add listing to KeyProvider API. (Larry McCay via omalley)
+
   BUG FIXES
 
     HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -407,6 +409,9 @@ Release 2.4.0 - UNRELEASED
     HADOOP-10169. Remove the unnecessary synchronized in JvmMetrics class.
     (Liang Xie via jing9) 
 
+    HADOOP-10198. DomainSocket: add support for socketpair.
+    (Colin Patrick McCabe via wang)
+
   OPTIMIZATIONS
 
     HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)
@@ -416,6 +421,9 @@ Release 2.4.0 - UNRELEASED
 
     HADOOP-10172. Cache SASL server factories (daryn)
 
+    HADOOP-10173. Remove UGI from DIGEST-MD5 SASL server creation (daryn via
+    kihwal)
+
   BUG FIXES
 
     HADOOP-9964. Fix deadlocks in TestHttpServer by synchronize
@@ -489,6 +497,9 @@ Release 2.4.0 - UNRELEASED
 
     HADOOP-10171. TestRPC fails intermittently on jkd7 (Mit Desai via jeagles)
 
+    HADOOP-10147  HDFS-5678 Upgrade to commons-logging 1.1.3 to avoid potential
+    deadlock in MiniDFSCluster (stevel)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -568,6 +579,12 @@ Release 2.3.0 - UNRELEASED
     HADOOP-10175. Har files system authority should preserve userinfo.
     (Chuan Liu via cnauroth)
 
+    HADOOP-10090. Jobtracker metrics not updated properly after execution
+    of a mapreduce job. (ivanmi)
+
+    HADOOP-10193. hadoop-auth's PseudoAuthenticationHandler can consume getInputStream. 
+    (gchanan via tucu)
+
 Release 2.2.0 - 2013-10-13
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt:r1554802
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1553225-1556096

Propchange: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1553225-1556096

Propchange: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1553225-1556096

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
Tue Jan  7 01:07:33 2014
@@ -36,8 +36,11 @@ import java.security.KeyStoreException;
 import java.security.NoSuchAlgorithmException;
 import java.security.UnrecoverableKeyException;
 import java.security.cert.CertificateException;
+import java.util.ArrayList;
 import java.util.Date;
+import java.util.Enumeration;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 /**
@@ -56,6 +59,7 @@ import java.util.Map;
  */
 @InterfaceAudience.Private
 public class JavaKeyStoreProvider extends KeyProvider {
+  private static final String KEY_METADATA = "KeyMetadata";
   public static final String SCHEME_NAME = "jceks";
   public static final String KEYSTORE_PASSWORD_NAME =
       "HADOOP_KEYSTORE_PASSWORD";
@@ -118,6 +122,44 @@ public class JavaKeyStoreProvider extend
   }
 
   @Override
+  public List<String> getKeys() throws IOException {
+    ArrayList<String> list = new ArrayList<String>();
+    String alias = null;
+    try {
+      Enumeration<String> e = keyStore.aliases();
+      while (e.hasMoreElements()) {
+         alias = e.nextElement();
+         // only include the metadata key names in the list of names
+         if (!alias.contains("@")) {
+             list.add(alias);
+         }
+      }
+    } catch (KeyStoreException e) {
+      throw new IOException("Can't get key " + alias + " from " + path, e);
+    }
+    return list;
+  }
+
+  @Override
+  public List<KeyVersion> getKeyVersions(String name) throws IOException {
+    List<KeyVersion> list = new ArrayList<KeyVersion>();
+    Metadata km = getMetadata(name);
+    if (km != null) {
+      int latestVersion = km.getVersions();
+      KeyVersion v = null;
+      String versionName = null;
+      for (int i = 0; i < latestVersion; i++) {
+        versionName = buildVersionName(name, i);
+        v = getKeyVersion(versionName);
+        if (v != null) {
+          list.add(v);
+        }
+      }
+    }
+    return list;
+  }
+
+  @Override
   public Metadata getMetadata(String name) throws IOException {
     if (cache.containsKey(name)) {
       return cache.get(name);
@@ -288,7 +330,7 @@ public class JavaKeyStoreProvider extend
 
     @Override
     public String getFormat() {
-      return "KeyMetadata";
+      return KEY_METADATA;
     }
 
     @Override

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
Tue Jan  7 01:07:33 2014
@@ -255,6 +255,20 @@ public abstract class KeyProvider {
                                             ) throws IOException;
 
   /**
+   * Get the key names for all keys.
+   * @return the list of key names
+   * @throws IOException
+   */
+  public abstract List<String> getKeys() throws IOException;
+
+  /**
+   * Get the key material for all versions of a specific key name.
+   * @return the list of key material
+   * @throws IOException
+   */
+  public abstract List<KeyVersion> getKeyVersions(String name) throws IOException;
+
+  /**
    * Get the current version of the key, which should be used for encrypting new
    * data.
    * @param name the base name of the key

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/UserProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/UserProvider.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/UserProvider.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/UserProvider.java
Tue Jan  7 01:07:33 2014
@@ -20,8 +20,10 @@ package org.apache.hadoop.crypto.key;
 
 import java.io.IOException;
 import java.net.URI;
+import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -142,4 +144,32 @@ public class UserProvider extends KeyPro
       return null;
     }
   }
+
+  @Override
+  public List<String> getKeys() throws IOException {
+    List<String> list = new ArrayList<String>();
+    List<Text> keys = credentials.getAllSecretKeys();
+    for (Text key : keys) {
+      if (key.find("@") == -1) {
+        list.add(key.toString());
+      }
+    }
+    return list;
+  }
+
+  @Override
+  public List<KeyVersion> getKeyVersions(String name) throws IOException {
+      List<KeyVersion> list = new ArrayList<KeyVersion>();
+      Metadata km = getMetadata(name);
+      if (km != null) {
+        int latestVersion = km.getVersions();
+        for (int i = 0; i < latestVersion; i++) {
+          KeyVersion v = getKeyVersion(buildVersionName(name, i));
+          if (v != null) {
+            list.add(v);
+          }
+        }
+      }
+      return list;
+  }
 }

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
Tue Jan  7 01:07:33 2014
@@ -21,7 +21,6 @@ package org.apache.hadoop.fs;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.http.lib.StaticUserWebFilter;
-import org.apache.hadoop.security.authorize.Service;
 
 /** 
  * This class contains constants for configuration keys used
@@ -240,4 +239,7 @@ public class CommonConfigurationKeys ext
   /** Default value for IPC_SERVER_CONNECTION_IDLE_SCAN_INTERVAL_KEY */
   public static final int IPC_CLIENT_CONNECTION_IDLESCANINTERVAL_DEFAULT =
       10000;
+
+  public static final String HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS =
+    "hadoop.user.group.metrics.percentiles.intervals";
 }

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java
Tue Jan  7 01:07:33 2014
@@ -276,6 +276,24 @@ public class DomainSocket implements Clo
     return new DomainSocket(path, fd);
   }
 
+  /**
+   * Create a pair of UNIX domain sockets which are connected to each other
+   * by calling socketpair(2).
+   *
+   * @return                An array of two UNIX domain sockets connected to
+   *                        each other.
+   * @throws IOException    on error.
+   */
+  public static DomainSocket[] socketpair() throws IOException {
+    int fds[] = socketpair0();
+    return new DomainSocket[] {
+      new DomainSocket("(anonymous0)", fds[0]),
+      new DomainSocket("(anonymous1)", fds[1])
+    };
+  }
+
+  private static native int[] socketpair0() throws IOException;
+
   private static native int accept0(int fd) throws IOException;
 
   /**

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
Tue Jan  7 01:07:33 2014
@@ -29,7 +29,9 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -74,15 +76,6 @@ public class Credentials implements Writ
   }
   
   /**
-   * Returns the key bytes for the alias
-   * @param alias the alias for the key
-   * @return key for this alias
-   */
-  public byte[] getSecretKey(Text alias) {
-    return secretKeysMap.get(alias);
-  }
-  
-  /**
    * Returns the Token object for the alias
    * @param alias the alias for the Token
    * @return token for this alias
@@ -117,6 +110,15 @@ public class Credentials implements Writ
   public int numberOfTokens() {
     return tokenMap.size();
   }
+
+  /**
+   * Returns the key bytes for the alias
+   * @param alias the alias for the key
+   * @return key for this alias
+   */
+  public byte[] getSecretKey(Text alias) {
+    return secretKeysMap.get(alias);
+  }
   
   /**
    * @return number of keys in the in-memory map
@@ -143,6 +145,16 @@ public class Credentials implements Writ
   }
 
   /**
+   * Return all the secret key entries in the in-memory map
+   */
+  public List<Text> getAllSecretKeys() {
+    List<Text> list = new java.util.ArrayList<Text>();
+    list.addAll(secretKeysMap.keySet());
+
+    return list;
+  }
+
+  /**
    * Convenience method for reading a token storage file, and loading the Tokens
    * therein in the passed UGI
    * @param filename

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
Tue Jan  7 01:07:33 2014
@@ -138,6 +138,7 @@ public class Groups {
     List<String> groupList = impl.getGroups(user);
     long endMs = Time.monotonicNow();
     long deltaMs = endMs - startMs ;
+    UserGroupInformation.metrics.addGetGroups(deltaMs);
     if (deltaMs > warningDeltaMs) {
       LOG.warn("Potential performance problem: getGroups(user=" + user +") " +
           "took " + deltaMs + " milliseconds.");

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
Tue Jan  7 01:07:33 2014
@@ -131,7 +131,7 @@ public class SaslRpcServer {
   public SaslServer create(Connection connection,
                            SecretManager<TokenIdentifier> secretManager
       ) throws IOException, InterruptedException {
-    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+    UserGroupInformation ugi = null;
     final CallbackHandler callback;
     switch (authMethod) {
       case TOKEN: {
@@ -139,6 +139,7 @@ public class SaslRpcServer {
         break;
       }
       case KERBEROS: {
+        ugi = UserGroupInformation.getCurrentUser();
         if (serverId.isEmpty()) {
           throw new AccessControlException(
               "Kerberos principal name does NOT have the expected "
@@ -153,7 +154,9 @@ public class SaslRpcServer {
             "Server does not support SASL " + authMethod);
     }
     
-    SaslServer saslServer = ugi.doAs(
+    final SaslServer saslServer;
+    if (ugi != null) {
+      saslServer = ugi.doAs(
         new PrivilegedExceptionAction<SaslServer>() {
           @Override
           public SaslServer run() throws SaslException  {
@@ -161,6 +164,10 @@ public class SaslRpcServer {
                 SaslRpcServer.SASL_PROPS, callback);
           }
         });
+    } else {
+      saslServer = saslFactory.createSaslServer(mechanism, protocol, serverId,
+          SaslRpcServer.SASL_PROPS, callback);
+    }
     if (saslServer == null) {
       throw new AccessControlException(
           "Unable to find SASL server implementation for " + mechanism);

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
Tue Jan  7 01:07:33 2014
@@ -19,6 +19,7 @@ package org.apache.hadoop.security;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN;
 import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS;
 
 import java.io.File;
 import java.io.IOException;
@@ -58,6 +59,8 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.metrics2.annotation.Metric;
 import org.apache.hadoop.metrics2.annotation.Metrics;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MetricsRegistry;
+import org.apache.hadoop.metrics2.lib.MutableQuantiles;
 import org.apache.hadoop.metrics2.lib.MutableRate;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
@@ -92,14 +95,27 @@ public class UserGroupInformation {
    */
   @Metrics(about="User and group related metrics", context="ugi")
   static class UgiMetrics {
+    final MetricsRegistry registry = new MetricsRegistry("UgiMetrics");
+
     @Metric("Rate of successful kerberos logins and latency (milliseconds)")
     MutableRate loginSuccess;
     @Metric("Rate of failed kerberos logins and latency (milliseconds)")
     MutableRate loginFailure;
+    @Metric("GetGroups") MutableRate getGroups;
+    MutableQuantiles[] getGroupsQuantiles;
 
     static UgiMetrics create() {
       return DefaultMetricsSystem.instance().register(new UgiMetrics());
     }
+
+    void addGetGroups(long latency) {
+      getGroups.add(latency);
+      if (getGroupsQuantiles != null) {
+        for (MutableQuantiles q : getGroupsQuantiles) {
+          q.add(latency);
+        }
+      }
+    }
   }
   
   /**
@@ -250,6 +266,20 @@ public class UserGroupInformation {
       groups = Groups.getUserToGroupsMappingService(conf);
     }
     UserGroupInformation.conf = conf;
+
+    if (metrics.getGroupsQuantiles == null) {
+      int[] intervals = conf.getInts(HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS);
+      if (intervals != null && intervals.length > 0) {
+        final int length = intervals.length;
+        MutableQuantiles[] getGroupsQuantiles = new MutableQuantiles[length];
+        for (int i = 0; i < length; i++) {
+          getGroupsQuantiles[i] = metrics.registry.newQuantiles(
+            "getGroups" + intervals[i] + "s",
+            "Get groups", "ops", "latency", intervals[i]);
+        }
+        metrics.getGroupsQuantiles = getGroupsQuantiles;
+      }
+    }
   }
 
   /**

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
Tue Jan  7 01:07:33 2014
@@ -928,8 +928,10 @@ public class StringUtils {
    * @param args  List of arguments.
    * @return      null if the option was not found; the value of the 
    *              option otherwise.
+   * @throws IllegalArgumentException if the option's argument is not present
    */
-  public static String popOptionWithArgument(String name, List<String> args) {
+  public static String popOptionWithArgument(String name, List<String> args)
+      throws IllegalArgumentException {
     String val = null;
     for (Iterator<String> iter = args.iterator(); iter.hasNext(); ) {
       String cur = iter.next();
@@ -939,7 +941,7 @@ public class StringUtils {
       } else if (cur.equals(name)) {
         iter.remove();
         if (!iter.hasNext()) {
-          throw new RuntimeException("option " + name + " requires 1 " +
+          throw new IllegalArgumentException("option " + name + " requires 1 " +
               "argument.");
         }
         val = iter.next();

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocket.c
Tue Jan  7 01:07:33 2014
@@ -364,6 +364,50 @@ JNIEnv *env, jclass clazz, jstring path)
   return fd;
 }
 
+#define SOCKETPAIR_ARRAY_LEN 2
+
+JNIEXPORT jarray JNICALL
+Java_org_apache_hadoop_net_unix_DomainSocket_socketpair0(
+JNIEnv *env, jclass clazz)
+{
+  jarray arr = NULL;
+  int idx, err, fds[SOCKETPAIR_ARRAY_LEN] = { -1, -1 };
+  jthrowable jthr = NULL;
+
+  arr = (*env)->NewIntArray(env, SOCKETPAIR_ARRAY_LEN);
+  jthr = (*env)->ExceptionOccurred(env);
+  if (jthr) {
+    (*env)->ExceptionClear(env);
+    goto done;
+  }
+  if (socketpair(PF_UNIX, SOCK_STREAM, 0, fds) < 0) {
+    err = errno;
+    jthr = newSocketException(env, err,
+            "socketpair(2) error: %s", terror(err));
+    goto done;
+  }
+  (*env)->SetIntArrayRegion(env, arr, 0, SOCKETPAIR_ARRAY_LEN, fds);
+  jthr = (*env)->ExceptionOccurred(env);
+  if (jthr) {
+    (*env)->ExceptionClear(env);
+    goto done;
+  }
+
+done:
+  if (jthr) {
+    (*env)->DeleteLocalRef(env, arr);
+    arr = NULL;
+    for (idx = 0; idx < SOCKETPAIR_ARRAY_LEN; idx++) {
+      if (fds[idx] >= 0) {
+        close(fds[idx]);
+        fds[idx] = -1;
+      }
+    }
+    (*env)->Throw(env, jthr);
+  }
+  return arr;
+}
+
 JNIEXPORT jint JNICALL
 Java_org_apache_hadoop_net_unix_DomainSocket_accept0(
 JNIEnv *env, jclass clazz, jint fd)

Propchange: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1553225-1556096

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java
Tue Jan  7 01:07:33 2014
@@ -21,6 +21,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.List;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -160,6 +161,16 @@ public class TestKeyProviderFactory {
         provider.getCurrentKey("key4").getMaterial());
     assertArrayEquals(key3, provider.getCurrentKey("key3").getMaterial());
     assertEquals("key3@0", provider.getCurrentKey("key3").getVersionName());
+
+    List<String> keys = provider.getKeys();
+    assertTrue("Keys should have been returned.", keys.size() == 2);
+    assertTrue("Returned Keys should have included key3.", keys.contains("key3"));
+    assertTrue("Returned Keys should have included key4.", keys.contains("key4"));
+
+    List<KeyVersion> kvl = provider.getKeyVersions("key3");
+    assertTrue("KeyVersions should have been returned for key3.", kvl.size() == 1);
+    assertTrue("KeyVersions should have included key3@0.", kvl.get(0).getVersionName().equals("key3@0"));
+    assertArrayEquals(key3, kvl.get(0).getMaterial());
   }
 
   @Test

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java
Tue Jan  7 01:07:33 2014
@@ -420,7 +420,8 @@ public class TestDomainSocket {
    * @throws IOException
    */
   void testClientServer1(final Class<? extends WriteStrategy> writeStrategyClass,
-      final Class<? extends ReadStrategy> readStrategyClass) throws Exception {
+      final Class<? extends ReadStrategy> readStrategyClass,
+      final DomainSocket preConnectedSockets[]) throws Exception {
     final String TEST_PATH = new File(sockDir.getDir(),
         "test_sock_client_server1").getAbsolutePath();
     final byte clientMsg1[] = new byte[] { 0x1, 0x2, 0x3, 0x4, 0x5, 0x6 };
@@ -428,13 +429,15 @@ public class TestDomainSocket {
     final byte clientMsg2 = 0x45;
     final ArrayBlockingQueue<Throwable> threadResults =
         new ArrayBlockingQueue<Throwable>(2);
-    final DomainSocket serv = DomainSocket.bindAndListen(TEST_PATH);
+    final DomainSocket serv = (preConnectedSockets != null) ?
+      null : DomainSocket.bindAndListen(TEST_PATH);
     Thread serverThread = new Thread() {
       public void run(){
         // Run server
         DomainSocket conn = null;
         try {
-          conn = serv.accept();
+          conn = preConnectedSockets != null ?
+                    preConnectedSockets[0] : serv.accept();
           byte in1[] = new byte[clientMsg1.length];
           ReadStrategy reader = readStrategyClass.newInstance();
           reader.init(conn);
@@ -459,7 +462,8 @@ public class TestDomainSocket {
     Thread clientThread = new Thread() {
       public void run(){
         try {
-          DomainSocket client = DomainSocket.connect(TEST_PATH);
+          DomainSocket client = preConnectedSockets != null ?
+                preConnectedSockets[1] : DomainSocket.connect(TEST_PATH);
           WriteStrategy writer = writeStrategyClass.newInstance();
           writer.init(client);
           writer.write(clientMsg1);
@@ -487,25 +491,45 @@ public class TestDomainSocket {
     }
     serverThread.join(120000);
     clientThread.join(120000);
-    serv.close();
+    if (serv != null) {
+      serv.close();
+    }
   }
 
   @Test(timeout=180000)
   public void testClientServerOutStreamInStream() throws Exception {
     testClientServer1(OutputStreamWriteStrategy.class,
-        InputStreamReadStrategy.class);
+        InputStreamReadStrategy.class, null);
+  }
+
+  @Test(timeout=180000)
+  public void testClientServerOutStreamInStreamWithSocketpair() throws Exception {
+    testClientServer1(OutputStreamWriteStrategy.class,
+        InputStreamReadStrategy.class, DomainSocket.socketpair());
   }
 
   @Test(timeout=180000)
   public void testClientServerOutStreamInDbb() throws Exception {
     testClientServer1(OutputStreamWriteStrategy.class,
-        DirectByteBufferReadStrategy.class);
+        DirectByteBufferReadStrategy.class, null);
+  }
+
+  @Test(timeout=180000)
+  public void testClientServerOutStreamInDbbWithSocketpair() throws Exception {
+    testClientServer1(OutputStreamWriteStrategy.class,
+        DirectByteBufferReadStrategy.class, DomainSocket.socketpair());
   }
 
   @Test(timeout=180000)
   public void testClientServerOutStreamInAbb() throws Exception {
     testClientServer1(OutputStreamWriteStrategy.class,
-        ArrayBackedByteBufferReadStrategy.class);
+        ArrayBackedByteBufferReadStrategy.class, null);
+  }
+
+  @Test(timeout=180000)
+  public void testClientServerOutStreamInAbbWithSocketpair() throws Exception {
+    testClientServer1(OutputStreamWriteStrategy.class,
+        ArrayBackedByteBufferReadStrategy.class, DomainSocket.socketpair());
   }
 
   static private class PassedFile {

Modified: hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java?rev=1556097&r1=1556096&r2=1556097&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
(original)
+++ hadoop/common/branches/HDFS-4685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
Tue Jan  7 01:07:33 2014
@@ -19,7 +19,6 @@ package org.apache.hadoop.security;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.ipc.TestSaslRPC;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authentication.util.KerberosName;
@@ -40,9 +39,9 @@ import java.util.Collection;
 import java.util.LinkedHashSet;
 import java.util.Set;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL;
 import static org.apache.hadoop.ipc.TestSaslRPC.*;
-import static org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenIdentifier;
 import static org.apache.hadoop.test.MetricsAsserts.*;
 import static org.junit.Assert.*;
 import static org.mockito.Mockito.mock;
@@ -55,6 +54,8 @@ public class TestUserGroupInformation {
   final private static String GROUP3_NAME = "group3";
   final private static String[] GROUP_NAMES = 
     new String[]{GROUP1_NAME, GROUP2_NAME, GROUP3_NAME};
+  // Rollover interval of percentile metrics (in seconds)
+  private static final int PERCENTILES_INTERVAL = 1;
   private static Configuration conf;
   
   /**
@@ -80,7 +81,8 @@ public class TestUserGroupInformation {
     // doesn't matter what it is, but getGroups needs it set...
     // use HADOOP_HOME environment variable to prevent interfering with logic
     // that finds winutils.exe
-    System.setProperty("hadoop.home.dir", System.getenv("HADOOP_HOME"));
+    String home = System.getenv("HADOOP_HOME");
+    System.setProperty("hadoop.home.dir", (home != null ? home : "."));
     // fake the realm is kerberos is enabled
     System.setProperty("java.security.krb5.kdc", "");
     System.setProperty("java.security.krb5.realm", "DEFAULT.REALM");
@@ -150,11 +152,15 @@ public class TestUserGroupInformation {
   /** Test login method */
   @Test (timeout = 30000)
   public void testLogin() throws Exception {
+    conf.set(HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS,
+      String.valueOf(PERCENTILES_INTERVAL));
+    UserGroupInformation.setConfiguration(conf);
     // login from unix
     UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
     assertEquals(UserGroupInformation.getCurrentUser(),
                  UserGroupInformation.getLoginUser());
     assertTrue(ugi.getGroupNames().length >= 1);
+    verifyGroupMetrics(1);
 
     // ensure that doAs works correctly
     UserGroupInformation userGroupInfo = 
@@ -728,6 +734,21 @@ public class TestUserGroupInformation {
     }
   }
 
+  private static void verifyGroupMetrics(
+      long groups) throws InterruptedException {
+    MetricsRecordBuilder rb = getMetrics("UgiMetrics");
+    if (groups > 0) {
+      assertCounter("GetGroupsNumOps", groups, rb);
+      double avg = getDoubleGauge("GetGroupsAvgTime", rb);
+      assertTrue(avg >= 0.0);
+
+      // Sleep for an interval+slop to let the percentiles rollover
+      Thread.sleep((PERCENTILES_INTERVAL+1)*1000);
+      // Check that the percentiles were updated
+      assertQuantileGauges("GetGroups1s", rb);
+    }
+  }
+
   /**
    * Test for the case that UserGroupInformation.getCurrentUser()
    * is called when the AccessControlContext has a Subject associated



Mime
View raw message