hadoop-common-commits mailing list archives

From: szets...@apache.org
Subject: svn commit: r1618417 [1/2] - in /hadoop/common/branches/HDFS-6584/hadoop-common-project: hadoop-common/ hadoop-common/src/main/java/ hadoop-common/src/main/java/org/apache/hadoop/crypto/key/ hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/...
Date: Sat, 16 Aug 2014 21:02:24 GMT
Author: szetszwo
Date: Sat Aug 16 21:02:21 2014
New Revision: 1618417

URL: http://svn.apache.org/r1618417
Log:
Merge r1609845 through r1618416 from trunk.

Added:
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
      - copied unchanged from r1618416, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcScheduler.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcSchedulerMXBean.java
      - copied unchanged from r1618416, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/DecayRpcSchedulerMXBean.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcScheduler.java
      - copied unchanged from r1618416, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcScheduler.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java
      - copied unchanged from r1618416, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestContentSummary.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java
      - copied unchanged from r1618416, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestDecayRpcScheduler.java
      - copied unchanged from r1618416, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestDecayRpcScheduler.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestNetgroupCache.java
      - copied unchanged from r1618416, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestNetgroupCache.java
Modified:
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSExceptionsProvider.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSMDCFilter.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSACLs.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMSAudit.java

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt Sat Aug 16 21:02:21 2014
@@ -202,6 +202,10 @@ Trunk (Unreleased)
     HADOOP-10224. JavaKeyStoreProvider has to protect against corrupting 
     underlying store. (asuresh via tucu)
 
+    HADOOP-10770. KMS add delegation token support. (tucu)
+
+    HADOOP-10698. KMS, add proxyuser support. (tucu)
+
   BUG FIXES
 
     HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -427,6 +431,9 @@ Trunk (Unreleased)
     HADOOP-10862. Miscellaneous trivial corrections to KMS classes. 
     (asuresh via tucu)
 
+    HADOOP-10967. Improve DefaultCryptoExtension#generateEncryptedKey 
+    performance. (hitliuyi via tucu)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -505,8 +512,19 @@ Release 2.6.0 - UNRELEASED
     HADOOP-10820. Throw an exception in GenericOptionsParser when passed
     an empty Path. (Alex Holmes and Zhihai Xu via wang)
 
+    HADOOP-10281. Create a scheduler, which assigns schedulables a priority
+    level. (Chris Li via Arpit Agarwal)
+
+    HADOOP-8944. Shell command fs -count should include human readable option 
+    (Jonathan Allen via aw)
+
+    HADOOP-10231. Add some components in Native Libraries document (Akira 
+    AJISAKA via aw)
+
   OPTIMIZATIONS
 
+    HADOOP-10838. Byte array native checksumming. (James Thomas via todd)
+
   BUG FIXES
 
     HADOOP-10781. Unportable getgrouplist() usage breaks FreeBSD (Dmitry
@@ -563,6 +581,24 @@ Release 2.6.0 - UNRELEASED
     HADOOP-10402. Configuration.getValByRegex does not substitute for
     variables. (Robert Kanter via kasha)
 
+    HADOOP-10851. NetgroupCache does not remove group memberships. (Benoy
+    Antony via Arpit Agarwal)
+
+    HADOOP-10962. Flags for posix_fadvise are not valid in some architectures
+    (David Villegas via Colin Patrick McCabe)
+
+    HADOOP-10966. Hadoop Common native compilation broken in windows.
+    (David Villegas via Arpit Agarwal)
+
+    HADOOP-10843. TestGridmixRecord unit tests failure on PowerPC (Jinghui Wang
+    via Colin Patrick McCabe)
+
+    HADOOP-10121. Fix javadoc spelling for HadoopArchives#writeTopLevelDirs
+    (Akira AJISAKA via aw)
+
+    HADOOP-10964. Small fix for NetworkTopologyWithNodeGroup#sortByDistance.
+    (Yi Liu via wang)
+
 Release 2.5.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1617566-1618416

Propchange: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1617566-1618416

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java Sat Aug 16 21:02:21 2014
@@ -219,6 +219,13 @@ public class KeyProviderCryptoExtension 
   private static class DefaultCryptoExtension implements CryptoExtension {
 
     private final KeyProvider keyProvider;
+    private static final ThreadLocal<SecureRandom> RANDOM = 
+        new ThreadLocal<SecureRandom>() {
+      @Override
+      protected SecureRandom initialValue() {
+        return new SecureRandom();
+      }
+    };
 
     private DefaultCryptoExtension(KeyProvider keyProvider) {
       this.keyProvider = keyProvider;
@@ -233,10 +240,10 @@ public class KeyProviderCryptoExtension 
           "No KeyVersion exists for key '%s' ", encryptionKeyName);
       // Generate random bytes for new key and IV
       Cipher cipher = Cipher.getInstance("AES/CTR/NoPadding");
-      SecureRandom random = SecureRandom.getInstance("SHA1PRNG");
       final byte[] newKey = new byte[encryptionKey.getMaterial().length];
-      random.nextBytes(newKey);
-      final byte[] iv = random.generateSeed(cipher.getBlockSize());
+      RANDOM.get().nextBytes(newKey);
+      final byte[] iv = new byte[cipher.getBlockSize()];
+      RANDOM.get().nextBytes(iv);
       // Encryption key IV is derived from new key's IV
       final byte[] encryptionIV = EncryptedKeyVersion.deriveIV(iv);
       // Encrypt the new key
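
The HADOOP-10967 hunk above replaces a per-call SecureRandom.getInstance("SHA1PRNG")
with a per-thread instance, and fills the IV with nextBytes() instead of the much
slower generateSeed(). A minimal, self-contained sketch of the same per-thread
pattern (class and method names are illustrative, not from the patch):

    import java.security.SecureRandom;

    public class PerThreadRandom {
      // One SecureRandom per thread: avoids repeated instantiation and the
      // lock contention of sharing a single instance across RPC handlers.
      private static final ThreadLocal<SecureRandom> RANDOM =
          new ThreadLocal<SecureRandom>() {
            @Override
            protected SecureRandom initialValue() {
              return new SecureRandom();
            }
          };

      public static byte[] randomBytes(int len) {
        byte[] buf = new byte[len];
        RANDOM.get().nextBytes(buf);  // cheap; generateSeed() would block far longer
        return buf;
      }
    }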

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java Sat Aug 16 21:02:21 2014
@@ -20,6 +20,8 @@ package org.apache.hadoop.crypto.key;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.token.Token;
 
+import java.io.IOException;
+
 /**
  * A KeyProvider extension with the ability to add a renewer's Delegation 
  * Tokens to the provided Credentials.
@@ -45,9 +47,10 @@ public class KeyProviderDelegationTokenE
      * @param renewer the user allowed to renew the delegation tokens
      * @param credentials cache in which to add new delegation tokens
      * @return list of new delegation tokens
+     * @throws IOException thrown if an IO error occurs.
      */
     public Token<?>[] addDelegationTokens(final String renewer, 
-        Credentials credentials);
+        Credentials credentials) throws IOException;
   }
   
   /**
@@ -76,9 +79,10 @@ public class KeyProviderDelegationTokenE
    * @param renewer the user allowed to renew the delegation tokens
    * @param credentials cache in which to add new delegation tokens
    * @return list of new delegation tokens
+   * @throws IOException thrown if an IO error occurs.
    */
   public Token<?>[] addDelegationTokens(final String renewer, 
-      Credentials credentials) {
+      Credentials credentials) throws IOException {
     return getExtension().addDelegationTokens(renewer, credentials);
   }
   
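
With the signature change above, callers of the extension must now handle
IOException. A hedged usage sketch, assuming the
createKeyProviderDelegationTokenExtension factory defined in this class (the
renewer value and the helper name fetchKmsTokens are illustrative):

    import java.io.IOException;
    import org.apache.hadoop.crypto.key.KeyProvider;
    import org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.token.Token;

    static Token<?>[] fetchKmsTokens(KeyProvider provider, String renewer)
        throws IOException {
      KeyProviderDelegationTokenExtension ext =
          KeyProviderDelegationTokenExtension
              .createKeyProviderDelegationTokenExtension(provider);
      Credentials creds = new Credentials();
      // Can now fail with IOException, e.g. when the KMS is unreachable
      // or hands back a null token.
      return ext.addDelegationTokens(renewer, creds);
    }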

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java Sat Aug 16 21:02:21 2014
@@ -22,15 +22,18 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
+import org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension;
 import org.apache.hadoop.crypto.key.KeyProviderFactory;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.ProviderUtils;
-import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
-import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
 import org.apache.hadoop.security.ssl.SSLFactory;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
 import org.apache.http.client.utils.URIBuilder;
 import org.codehaus.jackson.map.ObjectMapper;
 
@@ -50,6 +53,7 @@ import java.net.URL;
 import java.net.URLEncoder;
 import java.security.GeneralSecurityException;
 import java.security.NoSuchAlgorithmException;
+import java.security.PrivilegedExceptionAction;
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Date;
@@ -69,7 +73,10 @@ import com.google.common.base.Preconditi
  * KMS client <code>KeyProvider</code> implementation.
  */
 @InterfaceAudience.Private
-public class KMSClientProvider extends KeyProvider implements CryptoExtension {
+public class KMSClientProvider extends KeyProvider implements CryptoExtension,
+    KeyProviderDelegationTokenExtension.DelegationTokenExtension {
+
+  public static final String TOKEN_KIND = "kms-dt";
 
   public static final String SCHEME_NAME = "kms";
 
@@ -229,6 +236,8 @@ public class KMSClientProvider extends K
   private String kmsUrl;
   private SSLFactory sslFactory;
   private ConnectionConfigurator configurator;
+  private DelegationTokenAuthenticatedURL.Token authToken;
+  private UserGroupInformation loginUgi;
 
   @Override
   public String toString() {
@@ -309,6 +318,8 @@ public class KMSClientProvider extends K
                 CommonConfigurationKeysPublic.
                     KMS_CLIENT_ENC_KEY_CACHE_NUM_REFILL_THREADS_DEFAULT),
             new EncryptedQueueRefiller());
+    authToken = new DelegationTokenAuthenticatedURL.Token();
+    loginUgi = UserGroupInformation.getCurrentUser();
   }
 
   private String createServiceURL(URL url) throws IOException {
@@ -325,12 +336,14 @@ public class KMSClientProvider extends K
     try {
       StringBuilder sb = new StringBuilder();
       sb.append(kmsUrl);
-      sb.append(collection);
-      if (resource != null) {
-        sb.append("/").append(URLEncoder.encode(resource, UTF8));
-      }
-      if (subResource != null) {
-        sb.append("/").append(subResource);
+      if (collection != null) {
+        sb.append(collection);
+        if (resource != null) {
+          sb.append("/").append(URLEncoder.encode(resource, UTF8));
+          if (subResource != null) {
+            sb.append("/").append(subResource);
+          }
+        }
       }
       URIBuilder uriBuilder = new URIBuilder(sb.toString());
       if (parameters != null) {
@@ -365,14 +378,29 @@ public class KMSClientProvider extends K
     return conn;
   }
 
-  private HttpURLConnection createConnection(URL url, String method)
+  private HttpURLConnection createConnection(final URL url, String method)
       throws IOException {
     HttpURLConnection conn;
     try {
-      AuthenticatedURL authUrl = new AuthenticatedURL(new PseudoAuthenticator(),
-          configurator);
-      conn = authUrl.openConnection(url, new AuthenticatedURL.Token());
-    } catch (AuthenticationException ex) {
+      // if current UGI is different from UGI at constructor time, behave as
+      // proxyuser
+      UserGroupInformation currentUgi = UserGroupInformation.getCurrentUser();
+      final String doAsUser =
+          (loginUgi.getShortUserName().equals(currentUgi.getShortUserName()))
+          ? null : currentUgi.getShortUserName();
+
+      // creating the HTTP connection using the current UGI at constructor time
+      conn = loginUgi.doAs(new PrivilegedExceptionAction<HttpURLConnection>() {
+        @Override
+        public HttpURLConnection run() throws Exception {
+          DelegationTokenAuthenticatedURL authUrl =
+              new DelegationTokenAuthenticatedURL(configurator);
+          return authUrl.openConnection(url, authToken, doAsUser);
+        }
+      });
+    } catch (IOException ex) {
+      throw ex;
+    } catch (Exception ex) {
       throw new IOException(ex);
     }
     conn.setUseCaches(false);
@@ -403,20 +431,27 @@ public class KMSClientProvider extends K
     if (status != expected) {
       InputStream es = null;
       try {
-        es = conn.getErrorStream();
-        ObjectMapper mapper = new ObjectMapper();
-        Map json = mapper.readValue(es, Map.class);
-        String exClass = (String) json.get(
-            KMSRESTConstants.ERROR_EXCEPTION_JSON);
-        String exMsg = (String)
-            json.get(KMSRESTConstants.ERROR_MESSAGE_JSON);
         Exception toThrow;
-        try {
-          ClassLoader cl = KMSClientProvider.class.getClassLoader();
-          Class klass = cl.loadClass(exClass);
-          Constructor constr = klass.getConstructor(String.class);
-          toThrow = (Exception) constr.newInstance(exMsg);
-        } catch (Exception ex) {
+        String contentType = conn.getHeaderField(CONTENT_TYPE);
+        if (contentType != null &&
+            contentType.toLowerCase().startsWith(APPLICATION_JSON_MIME)) {
+          es = conn.getErrorStream();
+          ObjectMapper mapper = new ObjectMapper();
+          Map json = mapper.readValue(es, Map.class);
+          String exClass = (String) json.get(
+              KMSRESTConstants.ERROR_EXCEPTION_JSON);
+          String exMsg = (String)
+              json.get(KMSRESTConstants.ERROR_MESSAGE_JSON);
+          try {
+            ClassLoader cl = KMSClientProvider.class.getClassLoader();
+            Class klass = cl.loadClass(exClass);
+            Constructor constr = klass.getConstructor(String.class);
+            toThrow = (Exception) constr.newInstance(exMsg);
+          } catch (Exception ex) {
+            toThrow = new IOException(MessageFormat.format(
+                "HTTP status [{0}], {1}", status, conn.getResponseMessage()));
+          }
+        } else {
           toThrow = new IOException(MessageFormat.format(
               "HTTP status [{0}], {1}", status, conn.getResponseMessage()));
         }
@@ -729,4 +764,25 @@ public class KMSClientProvider extends K
     }
   }
 
+  @Override
+  public Token<?>[] addDelegationTokens(String renewer,
+      Credentials credentials) throws IOException {
+    Token<?>[] tokens;
+    URL url = createURL(null, null, null, null);
+    DelegationTokenAuthenticatedURL authUrl =
+        new DelegationTokenAuthenticatedURL(configurator);
+    try {
+      Token<?> token = authUrl.getDelegationToken(url, authToken, renewer);
+      if (token != null) {
+        credentials.addToken(token.getService(), token);
+        tokens = new Token<?>[] { token };
+      } else {
+        throw new IOException("Got NULL as delegation token");
+      }
+    } catch (AuthenticationException ex) {
+      throw new IOException(ex);
+    }
+    return tokens;
+  }
+
 }
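
The reworked createConnection() captures the login UGI at construction time,
opens the connection under it via doAs(), and passes the current caller as a
doAs proxy user when the two differ. A minimal sketch of the exception-handling
shape of that doAs call (the helper name runAsLoginUser is illustrative):

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.security.UserGroupInformation;

    static <T> T runAsLoginUser(UserGroupInformation loginUgi,
        PrivilegedExceptionAction<T> action) throws IOException {
      try {
        // Authentication runs under the login user; the caller's identity
        // travels separately as the doAs parameter.
        return loginUgi.doAs(action);
      } catch (IOException ex) {
        throw ex;                    // pass IOExceptions through unchanged
      } catch (Exception ex) {
        throw new IOException(ex);   // wrap everything else, as the patch does
      }
    }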

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java Sat Aug 16 21:02:21 2014
@@ -24,6 +24,7 @@ import java.io.IOException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.util.StringUtils;
 
 /** Store the summary of a content (a directory or a file). */
 @InterfaceAudience.Public
@@ -102,7 +103,7 @@ public class ContentSummary implements W
    * <----12----> <----12----> <-------18------->
    *    DIR_COUNT   FILE_COUNT       CONTENT_SIZE FILE_NAME    
    */
-  private static final String STRING_FORMAT = "%12d %12d %18d ";
+  private static final String STRING_FORMAT = "%12s %12s %18s ";
   /** 
    * Output format:
    * <----12----> <----15----> <----15----> <----15----> <----12----> <----12----> <-------18------->
@@ -117,7 +118,7 @@ public class ContentSummary implements W
 
   private static final String QUOTA_HEADER = String.format(
       QUOTA_STRING_FORMAT + SPACE_QUOTA_STRING_FORMAT, 
-      "quota", "remaining quota", "space quota", "reamaining quota") +
+      "name quota", "rem name quota", "space quota", "rem space quota") +
       HEADER;
   
   /** Return the header of the output.
@@ -139,11 +140,25 @@ public class ContentSummary implements W
   /** Return the string representation of the object in the output format.
    * if qOption is false, output directory count, file count, and content size;
    * if qOption is true, output quota and remaining quota as well.
+   *
+   * @param qOption a flag indicating if quota needs to be printed or not
+   * @return the string representation of the object
+  */
+  public String toString(boolean qOption) {
+    return toString(qOption, false);
+  }
+
+  /** Return the string representation of the object in the output format.
+   * if qOption is false, output directory count, file count, and content size;
+   * if qOption is true, output quota and remaining quota as well.
+   * if hOption is false file sizes are returned in bytes;
+   * if hOption is true file sizes are returned in human readable format.
    * 
    * @param qOption a flag indicating if quota needs to be printed or not
+   * @param hOption a flag indicating if human readable output is to be used
    * @return the string representation of the object
    */
-  public String toString(boolean qOption) {
+  public String toString(boolean qOption, boolean hOption) {
     String prefix = "";
     if (qOption) {
       String quotaStr = "none";
@@ -152,19 +167,32 @@ public class ContentSummary implements W
       String spaceQuotaRem = "inf";
       
       if (quota>0) {
-        quotaStr = Long.toString(quota);
-        quotaRem = Long.toString(quota-(directoryCount+fileCount));
+        quotaStr = formatSize(quota, hOption);
+        quotaRem = formatSize(quota-(directoryCount+fileCount), hOption);
       }
       if (spaceQuota>0) {
-        spaceQuotaStr = Long.toString(spaceQuota);
-        spaceQuotaRem = Long.toString(spaceQuota - spaceConsumed);        
+        spaceQuotaStr = formatSize(spaceQuota, hOption);
+        spaceQuotaRem = formatSize(spaceQuota - spaceConsumed, hOption);
       }
       
       prefix = String.format(QUOTA_STRING_FORMAT + SPACE_QUOTA_STRING_FORMAT, 
                              quotaStr, quotaRem, spaceQuotaStr, spaceQuotaRem);
     }
     
-    return prefix + String.format(STRING_FORMAT, directoryCount, 
-                                  fileCount, length);
+    return prefix + String.format(STRING_FORMAT,
+     formatSize(directoryCount, hOption),
+     formatSize(fileCount, hOption),
+     formatSize(length, hOption));
+  }
+  /**
+   * Formats a size to be human readable or in bytes
+   * @param size value to be formatted
+   * @param humanReadable flag indicating human readable or not
+   * @return String representation of the size
+  */
+  private String formatSize(long size, boolean humanReadable) {
+    return humanReadable
+      ? StringUtils.TraditionalBinaryPrefix.long2String(size, "", 1)
+      : String.valueOf(size);
   }
 }
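
formatSize() above delegates the -h rendering to
StringUtils.TraditionalBinaryPrefix.long2String. As an illustration of what
binary-prefix formatting of this kind does (a dependency-free stand-in, not
Hadoop's helper; its exact spacing and casing may differ):

    // Illustrative stand-in: format a byte count with 1024-based prefixes.
    static String humanReadable(long size) {
      final String[] prefixes = { "", "K", "M", "G", "T", "P", "E" };
      double v = size;
      int i = 0;
      while (v >= 1024 && i < prefixes.length - 1) {
        v /= 1024;
        i++;
      }
      return i == 0 ? String.valueOf(size)
                    : String.format("%.1f %s", v, prefixes[i]);
    }

    // humanReadable(0)          -> "0"
    // humanReadable(1536)       -> "1.5 K"
    // humanReadable(1073741824) -> "1.0 G"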

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java Sat Aug 16 21:02:21 2014
@@ -42,16 +42,22 @@ public class Count extends FsCommand {
     factory.addClass(Count.class, "-count");
   }
 
+  private static final String OPTION_QUOTA = "q";
+  private static final String OPTION_HUMAN = "h";
+
   public static final String NAME = "count";
-  public static final String USAGE = "[-q] <path> ...";
+  public static final String USAGE =
+      "[-" + OPTION_QUOTA + "] [-" + OPTION_HUMAN + "] <path> ...";
   public static final String DESCRIPTION = 
       "Count the number of directories, files and bytes under the paths\n" +
       "that match the specified file pattern.  The output columns are:\n" +
       "DIR_COUNT FILE_COUNT CONTENT_SIZE FILE_NAME or\n" +
       "QUOTA REMAINING_QUOTA SPACE_QUOTA REMAINING_SPACE_QUOTA \n" +
-      "      DIR_COUNT FILE_COUNT CONTENT_SIZE FILE_NAME";
+      "      DIR_COUNT FILE_COUNT CONTENT_SIZE FILE_NAME\n" +
+      "The -h option shows file sizes in human readable format.";
   
   private boolean showQuotas;
+  private boolean humanReadable;
 
   /** Constructor */
   public Count() {}
@@ -70,17 +76,37 @@ public class Count extends FsCommand {
 
   @Override
   protected void processOptions(LinkedList<String> args) {
-    CommandFormat cf = new CommandFormat(1, Integer.MAX_VALUE, "q");
+    CommandFormat cf = new CommandFormat(1, Integer.MAX_VALUE,
+      OPTION_QUOTA, OPTION_HUMAN);
     cf.parse(args);
     if (args.isEmpty()) { // default path is the current working directory
       args.add(".");
     }
-    showQuotas = cf.getOpt("q");
+    showQuotas = cf.getOpt(OPTION_QUOTA);
+    humanReadable = cf.getOpt(OPTION_HUMAN);
   }
 
   @Override
   protected void processPath(PathData src) throws IOException {
     ContentSummary summary = src.fs.getContentSummary(src.path);
-    out.println(summary.toString(showQuotas) + src);
+    out.println(summary.toString(showQuotas, isHumanReadable()) + src);
+  }
+  
+  /**
+   * Should quotas get shown as part of the report?
+   * @return true if quotas should be shown, otherwise false
+   */
+  @InterfaceAudience.Private
+  boolean isShowQuotas() {
+    return showQuotas;
+  }
+  
+  /**
+   * Should sizes be shown in human readable format rather than bytes?
+   * @return true if human readable format
+   */
+  @InterfaceAudience.Private
+  boolean isHumanReadable() {
+    return humanReadable;
   }
 }
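
isShowQuotas() and isHumanReadable() are deliberately package-private so the
new TestCount.java (added in this merge) can inspect parsing results from the
same package. A hedged sketch of such a check (the harness below is
hypothetical, not the actual test):

    package org.apache.hadoop.fs.shell;  // same package as Count

    import java.util.Arrays;
    import java.util.LinkedList;

    public class CountOptionsDemo {      // hypothetical, not the real TestCount
      public static void main(String[] ignored) {
        LinkedList<String> args =
            new LinkedList<String>(Arrays.asList("-q", "-h", "/user"));
        Count count = new Count();
        count.processOptions(args);      // consumes flags, leaves the path
        assert count.isShowQuotas();
        assert count.isHumanReadable();
        assert args.getFirst().equals("/user");
      }
    }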

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopologyWithNodeGroup.java Sat Aug 16 21:02:21 2014
@@ -293,7 +293,7 @@ public class NetworkTopologyWithNodeGrou
         return;
       }
     }
-    super.sortByDistance(reader, nodes, nodes.length, seed,
+    super.sortByDistance(reader, nodes, activeLen, seed,
         randomizeBlockLocationsPerBlock);
   }
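
The one-line HADOOP-10964 fix matters because the nodes array can be longer
than the number of live entries: only the first activeLen slots are valid, and
sorting nodes.length entries would shuffle stale references into the live
prefix. The invariant, illustrated with plain arrays rather than the topology
classes:

    import java.util.Arrays;

    Integer[] nodes = { 30, 10, 20, 99, 99 };  // slots 3..4 are stale leftovers
    int activeLen = 3;
    Arrays.sort(nodes, 0, activeLen);          // sort only the live prefix
    // nodes is now [10, 20, 30, 99, 99]; sorting all nodes.length entries
    // would have pulled the stale 99s into the live range.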
 

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java Sat Aug 16 21:02:21 2014
@@ -27,12 +27,9 @@ import java.util.concurrent.ConcurrentHa
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 /**
  * Class that caches the netgroups and inverts group-to-user map
- * to user-to-group map, primarily intented for use with
+ * to user-to-group map, primarily intended for use with
 * netgroups (as returned by getent netgroup) which only returns
  * group to user mapping.
  */
@@ -69,9 +66,7 @@ public class NetgroupCache {
       }
     }
     if(userToNetgroupsMap.containsKey(user)) {
-      for(String netgroup : userToNetgroupsMap.get(user)) {
-        groups.add(netgroup);
-      }
+      groups.addAll(userToNetgroupsMap.get(user));
     }
   }
 
@@ -99,6 +94,7 @@ public class NetgroupCache {
    */
   public static void clear() {
     netgroupToUsersMap.clear();
+    userToNetgroupsMap.clear();
   }
 
   /**
@@ -108,12 +104,7 @@ public class NetgroupCache {
    * @param users list of users for a given group
    */
   public static void add(String group, List<String> users) {
-    if(!isCached(group)) {
-      netgroupToUsersMap.put(group, new HashSet<String>());
-      for(String user: users) {
-        netgroupToUsersMap.get(group).add(user);
-      }
-    }
+    netgroupToUsersMap.put(group, new HashSet<String>(users));
     netgroupToUsersMapUpdated = true; // at the end to avoid race
   }
 }
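
Taken together, the NetgroupCache changes fix HADOOP-10851: add() now always
replaces a group's member set (a re-added group previously kept its old users),
and clear() now empties both direction maps instead of one. The shape of the
fix, restated as a minimal sketch (field names follow the class):

    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    class TwoWayCache {
      private final ConcurrentHashMap<String, Set<String>> netgroupToUsersMap =
          new ConcurrentHashMap<String, Set<String>>();
      private final ConcurrentHashMap<String, Set<String>> userToNetgroupsMap =
          new ConcurrentHashMap<String, Set<String>>();

      void add(String group, List<String> users) {
        // Always replace; skipping already-cached groups froze their membership.
        netgroupToUsersMap.put(group, new HashSet<String>(users));
      }

      void clear() {
        // Drop both directions, or lookups keep serving stale memberships
        // out of whichever map survived.
        netgroupToUsersMap.clear();
        userToNetgroupsMap.clear();
      }
    }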

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java Sat Aug 16 21:02:21 2014
@@ -75,7 +75,7 @@ public abstract class DelegationTokenAut
 
   public static final String PREFIX = "delegation-token.";
 
-  public static final String TOKEN_KIND = PREFIX + "token-kind.sec";
+  public static final String TOKEN_KIND = PREFIX + "token-kind";
 
   public static final String UPDATE_INTERVAL = PREFIX + "update-interval.sec";
   public static final long UPDATE_INTERVAL_DEFAULT = 24 * 60 * 60;

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java Sat Aug 16 21:02:21 2014
@@ -339,6 +339,12 @@ public class DataChecksum implements Che
       byte[] data, int dataOff, int dataLen,
       byte[] checksums, int checksumsOff, String fileName,
       long basePos) throws ChecksumException {
+
+    if (NativeCrc32.isAvailable()) {
+      NativeCrc32.verifyChunkedSumsByteArray(bytesPerChecksum, type.id,
+          checksums, checksumsOff, data, dataOff, dataLen, fileName, basePos);
+      return;
+    }
     
     int remaining = dataLen;
     int dataPos = 0;

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java Sat Aug 16 21:02:21 2014
@@ -59,6 +59,16 @@ class NativeCrc32 {
         data, data.position(), data.remaining(),
         fileName, basePos);
   }
+
+  public static void verifyChunkedSumsByteArray(int bytesPerSum,
+      int checksumType, byte[] sums, int sumsOffset, byte[] data,
+      int dataOffset, int dataLength, String fileName, long basePos)
+      throws ChecksumException {
+    nativeVerifyChunkedSumsByteArray(bytesPerSum, checksumType,
+        sums, sumsOffset,
+        data, dataOffset, dataLength,
+        fileName, basePos);
+  }
   
     private static native void nativeVerifyChunkedSums(
       int bytesPerSum, int checksumType,
@@ -66,6 +76,12 @@ class NativeCrc32 {
       ByteBuffer data, int dataOffset, int dataLength,
       String fileName, long basePos);
 
+    private static native void nativeVerifyChunkedSumsByteArray(
+      int bytesPerSum, int checksumType,
+      byte[] sums, int sumsOffset,
+      byte[] data, int dataOffset, int dataLength,
+      String fileName, long basePos);
+
   // Copy the constants over from DataChecksum so that javah will pick them up
   // and make them available in the native code header.
   public static final int CHECKSUM_CRC32 = DataChecksum.CHECKSUM_CRC32;

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Sat Aug 16 21:02:21 2014
@@ -172,6 +172,39 @@ static void nioe_deinit(JNIEnv *env) {
 }
 
 /*
+ * Compatibility mapping for fadvise flags. Return the proper value from fcntl.h.
+ * If the value is not known, return the argument unchanged.
+ */
+static int map_fadvise_flag(jint flag) {
+#ifdef HAVE_POSIX_FADVISE
+  switch(flag) {
+    case org_apache_hadoop_io_nativeio_NativeIO_POSIX_POSIX_FADV_NORMAL:
+      return POSIX_FADV_NORMAL;
+      break;
+    case org_apache_hadoop_io_nativeio_NativeIO_POSIX_POSIX_FADV_RANDOM:
+      return POSIX_FADV_RANDOM;
+      break;
+    case org_apache_hadoop_io_nativeio_NativeIO_POSIX_POSIX_FADV_SEQUENTIAL:
+      return POSIX_FADV_SEQUENTIAL;
+      break;
+    case org_apache_hadoop_io_nativeio_NativeIO_POSIX_POSIX_FADV_WILLNEED:
+      return POSIX_FADV_WILLNEED;
+      break;
+    case org_apache_hadoop_io_nativeio_NativeIO_POSIX_POSIX_FADV_DONTNEED:
+      return POSIX_FADV_DONTNEED;
+      break;
+    case org_apache_hadoop_io_nativeio_NativeIO_POSIX_POSIX_FADV_NOREUSE:
+      return POSIX_FADV_NOREUSE;
+      break;
+    default:
+      return flag;
+  }
+#else
+  return flag;
+#endif
+}
+
+/*
  * private static native void initNative();
  *
  * We rely on this function rather than lazy initialization because
@@ -303,7 +336,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   PASS_EXCEPTIONS(env);
 
   int err = 0;
-  if ((err = posix_fadvise(fd, (off_t)offset, (off_t)len, flags))) {
+  if ((err = posix_fadvise(fd, (off_t)offset, (off_t)len, map_fadvise_flag(flags)))) {
 #ifdef __FreeBSD__
     throw_ioe(env, errno);
 #else
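
map_fadvise_flag() exists because the flag values Java passes down are Hadoop
compile-time constants, while the numeric values of POSIX_FADV_* vary across
platforms (the HADOOP-10962 breakage). For illustration, the Java-side
constants as mirrored from Linux (assumed here to match NativeIO.POSIX; the
container class FadvFlags is hypothetical):

    // Hadoop compile-time request values handed down to JNI; the native
    // layer remaps them to whatever the local fcntl.h actually defines.
    public final class FadvFlags {           // hypothetical container class
      public static final int POSIX_FADV_NORMAL     = 0;
      public static final int POSIX_FADV_RANDOM     = 1;
      public static final int POSIX_FADV_SEQUENTIAL = 2;
      public static final int POSIX_FADV_WILLNEED   = 3;
      public static final int POSIX_FADV_DONTNEED   = 4;
      public static final int POSIX_FADV_NOREUSE    = 5;
    }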

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c Sat Aug 16 21:02:21 2014
@@ -34,6 +34,10 @@
 
 #include "bulk_crc32.h"
 
+#define MBYTE 1048576
+#define MIN(X,Y) ((X) < (Y) ? (X) : (Y))
+#define MAX(X,Y) ((X) > (Y) ? (X) : (Y))
+
 static void throw_checksum_exception(JNIEnv *env,
     uint32_t got_crc, uint32_t expected_crc,
     jstring j_filename, jlong pos) {
@@ -177,6 +181,84 @@ JNIEXPORT void JNICALL Java_org_apache_h
   }
 }
 
+JNIEXPORT void JNICALL Java_org_apache_hadoop_util_NativeCrc32_nativeVerifyChunkedSumsByteArray
+  (JNIEnv *env, jclass clazz,
+    jint bytes_per_checksum, jint j_crc_type,
+    jarray j_sums, jint sums_offset,
+    jarray j_data, jint data_offset, jint data_len,
+    jstring j_filename, jlong base_pos)
+{
+  uint8_t *sums_addr;
+  uint8_t *data_addr;
+  uint32_t *sums;
+  uint8_t *data;
+  int crc_type;
+  crc32_error_t error_data;
+  int ret;
+  int numChecksumsPerIter;
+  int checksumNum;
+
+  if (unlikely(!j_sums || !j_data)) {
+    THROW(env, "java/lang/NullPointerException",
+      "input byte arrays must not be null");
+    return;
+  }
+  if (unlikely(sums_offset < 0 || data_offset < 0 || data_len < 0)) {
+    THROW(env, "java/lang/IllegalArgumentException",
+      "bad offsets or lengths");
+    return;
+  }
+  if (unlikely(bytes_per_checksum <= 0)) {
+    THROW(env, "java/lang/IllegalArgumentException",
+      "invalid bytes_per_checksum");
+    return;
+  }
+
+  // Convert to correct internal C constant for CRC type
+  crc_type = convert_java_crc_type(env, j_crc_type);
+  if (crc_type == -1) return; // exception already thrown
+
+  numChecksumsPerIter = MAX(1, MBYTE / bytes_per_checksum);
+  checksumNum = 0;
+  while (checksumNum * bytes_per_checksum < data_len) {
+    // Convert byte arrays to C pointers
+    sums_addr = (*env)->GetPrimitiveArrayCritical(env, j_sums, NULL);
+    data_addr = (*env)->GetPrimitiveArrayCritical(env, j_data, NULL);
+
+    if (unlikely(!sums_addr || !data_addr)) {
+      if (data_addr) (*env)->ReleasePrimitiveArrayCritical(env, j_data, data_addr, 0);
+      if (sums_addr) (*env)->ReleasePrimitiveArrayCritical(env, j_sums, sums_addr, 0);
+      THROW(env, "java/lang/OutOfMemoryError",
+        "not enough memory for byte arrays in JNI code");
+      return;
+    }
+
+    sums = (uint32_t *)(sums_addr + sums_offset) + checksumNum;
+    data = data_addr + data_offset + checksumNum * bytes_per_checksum;
+
+    // Setup complete. Actually verify checksums.
+    ret = bulk_verify_crc(data, MIN(numChecksumsPerIter * bytes_per_checksum,
+                                    data_len - checksumNum * bytes_per_checksum),
+                          sums, crc_type, bytes_per_checksum, &error_data);
+    (*env)->ReleasePrimitiveArrayCritical(env, j_data, data_addr, 0);
+    (*env)->ReleasePrimitiveArrayCritical(env, j_sums, sums_addr, 0);
+    if (unlikely(ret == INVALID_CHECKSUM_DETECTED)) {
+      long pos = base_pos + (error_data.bad_data - data) + checksumNum *
+        bytes_per_checksum;
+      throw_checksum_exception(
+        env, error_data.got_crc, error_data.expected_crc,
+        j_filename, pos);
+      return;
+    } else if (unlikely(ret != CHECKSUMS_VALID)) {
+      THROW(env, "java/lang/AssertionError",
+        "Bad response code from native bulk_verify_crc");
+      return;
+    }
+    checksumNum += numChecksumsPerIter;
+  }
+
+}
+
 /**
  * vim: sw=2: ts=2: et:
  */
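
The new JNI entry point verifies checksums in slices of at most one megabyte,
releasing the arrays between slices so GetPrimitiveArrayCritical does not pin
the Java heap for the whole buffer. The same slicing arithmetic, restated as a
runnable pure-Java check against java.util.zip.CRC32 (illustrative only: the
native code uses its own bulk CRC routines, supports multiple CRC types, and
reports richer error data):

    import java.util.zip.CRC32;

    static void verifyChunked(byte[] data, int[] sums, int bytesPerChecksum) {
      final int MBYTE = 1048576;  // mirrors the #define above
      int numPerIter = Math.max(1, MBYTE / bytesPerChecksum);
      int checksumNum = 0;
      while (checksumNum * bytesPerChecksum < data.length) {
        int off = checksumNum * bytesPerChecksum;
        int len = Math.min(numPerIter * bytesPerChecksum, data.length - off);
        // One <=1MB slice; this is where the JNI code re-acquires and
        // releases the arrays so GC is never blocked for long.
        for (int o = off; o < off + len; o += bytesPerChecksum) {
          int chunk = Math.min(bytesPerChecksum, off + len - o);
          CRC32 crc = new CRC32();
          crc.update(data, o, chunk);
          if ((int) crc.getValue() != sums[o / bytesPerChecksum]) {
            throw new IllegalStateException("bad checksum at byte offset " + o);
          }
        }
        checksumNum += numPerIter;
      }
    }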

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm Sat Aug 16 21:02:21 2014
@@ -81,36 +81,15 @@ User Commands
 
 * <<<archive>>>
 
-   Creates a hadoop archive. More information can be found at Hadoop
-   Archives.
-
-   Usage: <<<hadoop archive -archiveName NAME <src>* <dest> >>>
-
-*-------------------+-------------------------------------------------------+
-||COMMAND_OPTION    ||                   Description
-*-------------------+-------------------------------------------------------+
-| -archiveName NAME |  Name of the archive to be created.
-*-------------------+-------------------------------------------------------+
-| src               | Filesystem pathnames which work as usual with regular
-                    | expressions.
-*-------------------+-------------------------------------------------------+
-| dest              | Destination directory which would contain the archive.
-*-------------------+-------------------------------------------------------+
+   Creates a hadoop archive. More information can be found at
+   {{{../../hadoop-mapreduce-client/hadoop-mapreduce-client-core/HadoopArchives.html}
+   Hadoop Archives Guide}}.
 
 * <<<distcp>>>
 
    Copy file or directories recursively. More information can be found at
-   Hadoop DistCp Guide.
-
-   Usage: <<<hadoop distcp <srcurl> <desturl> >>>
-
-*-------------------+--------------------------------------------+
-||COMMAND_OPTION    || Description
-*-------------------+--------------------------------------------+
-| srcurl            | Source Url
-*-------------------+--------------------------------------------+
-| desturl           | Destination Url
-*-------------------+--------------------------------------------+
+   {{{../../hadoop-mapreduce-client/hadoop-mapreduce-client-core/DistCp.html}
+   Hadoop DistCp Guide}}.
 
 * <<<fs>>>
 
@@ -142,103 +121,21 @@ User Commands
 
 * <<<job>>>
 
-   Command to interact with Map Reduce Jobs.
-
-   Usage: <<<hadoop job [GENERIC_OPTIONS] [-submit <job-file>] | [-status <job-id>] | [-counter <job-id> <group-name> <counter-name>] | [-kill <job-id>] | [-events <job-id> <from-event-#> <#-of-events>] | [-history [all] <jobOutputDir>] | [-list [all]] | [-kill-task <task-id>] | [-fail-task <task-id>] | [-set-priority <job-id> <priority>]>>>
-
-*------------------------------+---------------------------------------------+
-|| COMMAND_OPTION              || Description
-*------------------------------+---------------------------------------------+
-| -submit <job-file>           | Submits the job.
-*------------------------------+---------------------------------------------+
-| -status <job-id>             | Prints the map and reduce completion
-                               | percentage and all job counters.
-*------------------------------+---------------------------------------------+
-| -counter <job-id> <group-name> <counter-name> | Prints the counter value.
-*------------------------------+---------------------------------------------+
-| -kill <job-id>               | Kills the job.
-*------------------------------+---------------------------------------------+
-| -events <job-id> <from-event-#> <#-of-events> | Prints the events' details
-                               | received by jobtracker for the given range.
-*------------------------------+---------------------------------------------+
-| -history [all]<jobOutputDir> | Prints job details, failed and killed tip
-                               | details.  More details about the job such as
-                               | successful tasks and task attempts made for
-                               | each task can be viewed by specifying the [all]
-                               | option.
-*------------------------------+---------------------------------------------+
-| -list [all]                  | Displays jobs which are yet to complete.
-                               | <<<-list all>>> displays all jobs.
-*------------------------------+---------------------------------------------+
-| -kill-task <task-id>         | Kills the task. Killed tasks are NOT counted
-                               | against failed attempts.
-*------------------------------+---------------------------------------------+
-| -fail-task <task-id>         | Fails the task. Failed tasks are counted
-                               | against failed attempts.
-*------------------------------+---------------------------------------------+
-| -set-priority <job-id> <priority> | Changes the priority of the job. Allowed
-                               | priority values are VERY_HIGH, HIGH, NORMAL,
-                               | LOW, VERY_LOW
-*------------------------------+---------------------------------------------+
+   Deprecated. Use
+   {{{../../hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapredCommands.html#job}
+   <<<mapred job>>>}} instead.
 
 * <<<pipes>>>
 
-   Runs a pipes job.
-
-   Usage: <<<hadoop pipes [-conf <path>] [-jobconf <key=value>, <key=value>,
-   ...] [-input <path>] [-output <path>] [-jar <jar file>] [-inputformat
-   <class>] [-map <class>] [-partitioner <class>] [-reduce <class>] [-writer
-   <class>] [-program <executable>] [-reduces <num>]>>>
- 
-*----------------------------------------+------------------------------------+
-|| COMMAND_OPTION                        || Description
-*----------------------------------------+------------------------------------+
-| -conf <path>                           | Configuration for job
-*----------------------------------------+------------------------------------+
-| -jobconf <key=value>, <key=value>, ... | Add/override configuration for job
-*----------------------------------------+------------------------------------+
-| -input <path>                          | Input directory
-*----------------------------------------+------------------------------------+
-| -output <path>                         | Output directory
-*----------------------------------------+------------------------------------+
-| -jar <jar file>                        | Jar filename
-*----------------------------------------+------------------------------------+
-| -inputformat <class>                   | InputFormat class
-*----------------------------------------+------------------------------------+
-| -map <class>                           | Java Map class
-*----------------------------------------+------------------------------------+
-| -partitioner <class>                   | Java Partitioner
-*----------------------------------------+------------------------------------+
-| -reduce <class>                        | Java Reduce class
-*----------------------------------------+------------------------------------+
-| -writer <class>                        | Java RecordWriter
-*----------------------------------------+------------------------------------+
-| -program <executable>                  | Executable URI
-*----------------------------------------+------------------------------------+
-| -reduces <num>                         | Number of reduces
-*----------------------------------------+------------------------------------+
+   Deprecated. Use
+   {{{../../hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapredCommands.html#pipes}
+   <<<mapred pipes>>>}} instead.
 
 * <<<queue>>>
 
-   command to interact and view Job Queue information
-
-   Usage: <<<hadoop queue [-list] | [-info <job-queue-name> [-showJobs]] | [-showacls]>>>
-
-*-----------------+-----------------------------------------------------------+
-|| COMMAND_OPTION || Description
-*-----------------+-----------------------------------------------------------+
-| -list           | Gets list of Job Queues configured in the system.
-                  | Along with scheduling information associated with the job queues.
-*-----------------+-----------------------------------------------------------+
-| -info <job-queue-name> [-showJobs] | Displays the job queue information and
-                  | associated scheduling information of particular job queue.
-                  | If <<<-showJobs>>> options is present a list of jobs
-                  | submitted to the particular job queue is displayed.
-*-----------------+-----------------------------------------------------------+
-| -showacls       | Displays the queue name and associated queue operations
-                  | allowed for the current user. The list consists of only
-                  | those queues to which the user has access.
-*-----------------+-----------------------------------------------------------+
+   Deprecated. Use
+   {{{../../hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapredCommands.html#queue}
+   <<<mapred queue>>>}} instead.
 
 * <<<version>>>
 
@@ -314,35 +211,6 @@ Administration Commands
    Deprecated, use {{{../hadoop-hdfs/HDFSCommands.html#dfsadmin}
    <<<hdfs dfsadmin>>>}} instead.
 
-* <<<mradmin>>>
-
-   Runs MR admin client
-
-   Usage: <<<hadoop mradmin [ GENERIC_OPTIONS ] [-refreshQueueAcls]>>>
-
-*-------------------+-----------------------------------------------------------+
-|| COMMAND_OPTION   || Description
-*-------------------+-----------------------------------------------------------+
-| -refreshQueueAcls | Refresh the queue acls used by hadoop, to check access
-                    | during submissions and administration of the job by the
-                    | user. The properties present in mapred-queue-acls.xml is
-                    | reloaded by the queue manager.
-*-------------------+-----------------------------------------------------------+
-
-* <<<jobtracker>>>
-
-   Runs the MapReduce job Tracker node.
-
-   Usage: <<<hadoop jobtracker [-dumpConfiguration]>>>
-
-*--------------------+-----------------------------------------------------------+
-|| COMMAND_OPTION    || Description
-*--------------------+-----------------------------------------------------------+
-| -dumpConfiguration | Dumps the configuration used by the JobTracker alongwith
-                     | queue configuration in JSON format into Standard output
-                     | used by the jobtracker and exits.
-*--------------------+-----------------------------------------------------------+
-
 * <<<namenode>>>
 
    Deprecated, use {{{../hadoop-hdfs/HDFSCommands.html#namenode}
@@ -352,9 +220,3 @@ Administration Commands
 
    Deprecated, use {{{../hadoop-hdfs/HDFSCommands.html#secondarynamenode}
    <<<hdfs secondarynamenode>>>}} instead.
-
-* <<<tasktracker>>>
-
-   Runs a MapReduce task Tracker node.
-
-   Usage: <<<hadoop tasktracker>>>

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm Sat Aug 16 21:02:21 2014
@@ -138,7 +138,7 @@ copyToLocal
 
 count
 
-   Usage: <<<hdfs dfs -count [-q] <paths> >>>
+   Usage: <<<hdfs dfs -count [-q] [-h] <paths> >>>
 
    Count the number of directories, files and bytes under the paths that match
    the specified file pattern.  The output columns with -count are: DIR_COUNT,
@@ -147,12 +147,16 @@ count
   The output columns with -count -q are: QUOTA, REMAINING_QUOTA, SPACE_QUOTA,
    REMAINING_SPACE_QUOTA, DIR_COUNT, FILE_COUNT, CONTENT_SIZE, FILE_NAME
 
+   The -h option shows sizes in human readable format.
+
    Example:
 
      * <<<hdfs dfs -count hdfs://nn1.example.com/file1 hdfs://nn2.example.com/file2>>>
 
      * <<<hdfs dfs -count -q hdfs://nn1.example.com/file1>>>
 
+     * <<<hdfs dfs -count -q -h hdfs://nn1.example.com/file1>>>
+
    Exit Code:
 
    Returns 0 on success and -1 on error.

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm Sat Aug 16 21:02:21 2014
@@ -30,6 +30,8 @@ Native Libraries Guide
    compression" could refer to all *.so's you need to compile that are
    specifically related to compression. Currently, however, this document
    only addresses the native hadoop library (<<<libhadoop.so>>>).
+   The documentation for the libhdfs library (<<<libhdfs.so>>>) is
+   available {{{../hadoop-hdfs/LibHdfs.html}here}}.
 
 * Native Hadoop Library
 
@@ -64,14 +66,16 @@ Native Libraries Guide
 
 * Components
 
-   The native hadoop library includes two components, the zlib and gzip
-   compression codecs:
+   The native hadoop library includes various components:
 
-     * zlib
+   * Compression Codecs (bzip2, lz4, snappy, zlib)
 
-     * gzip
+   * Native IO utilities for {{{../hadoop-hdfs/ShortCircuitLocalReads.html}
+     HDFS Short-Circuit Local Reads}} and
+     {{{../hadoop-hdfs/CentralizedCacheManagement.html}Centralized Cache
+     Management in HDFS}}
 
-   The native hadoop library is imperative for gzip to work.
+   * CRC32 checksum implementation
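
   A quick way to confirm whether these components are available at
   runtime is to check that libhadoop itself loaded. A minimal sketch
   using the standard NativeCodeLoader entry point (output wording is
   illustrative):

  import org.apache.hadoop.util.NativeCodeLoader;

  public class NativeCheck {
    public static void main(String[] args) {
      // True only if libhadoop.so was found on java.library.path
      // and loaded successfully.
      System.out.println("libhadoop loaded: "
          + NativeCodeLoader.isNativeCodeLoaded());
    }
  }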
 
 * Supported Platforms
 

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml Sat Aug 16 21:02:21 2014
@@ -238,7 +238,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^-count \[-q\] &lt;path&gt; \.\.\. :\s*</expected-output>
+          <expected-output>^-count \[-q\] \[-h\] &lt;path&gt; \.\.\. :( )*</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
@@ -260,6 +260,10 @@
           <type>RegexpComparator</type>
           <expected-output>^( |\t)*DIR_COUNT FILE_COUNT CONTENT_SIZE FILE_NAME( )*</expected-output>
         </comparator>
+        <comparator>
+          <type>RegexpComparator</type>
+          <expected-output>^( |\t)*The -h option shows file sizes in human readable format.( )*</expected-output>
+        </comparator>
       </comparators>
     </test>
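
The RegexpComparator entries above are anchored Java regular expressions
matched line by line against the command's help output. A small sketch of
the same match, assuming a representative help line (the sample line is
made up; XML entities are decoded):

  import java.util.regex.Pattern;

  public class UsageRegexCheck {
    public static void main(String[] args) {
      // The updated expected-output pattern for the -count usage line.
      String pattern = "^-count \\[-q\\] \\[-h\\] <path> \\.\\.\\. :( )*";
      String helpLine = "-count [-q] [-h] <path> ... :";   // sample line
      System.out.println(Pattern.compile(pattern).matcher(helpLine).find());
    }
  }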
 

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java Sat Aug 16 21:02:21 2014
@@ -25,9 +25,10 @@ import org.apache.hadoop.crypto.key.KeyP
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
 import org.apache.hadoop.crypto.key.kms.KMSRESTConstants;
 import org.apache.hadoop.security.AccessControlException;
-import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.crypto.key.kms.KMSClientProvider;
+import org.apache.hadoop.security.token.delegation.web.HttpUserGroupInformation;
 
 import javax.ws.rs.Consumes;
 import javax.ws.rs.DELETE;
@@ -38,15 +39,13 @@ import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-import javax.ws.rs.core.SecurityContext;
 
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.security.Principal;
+import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
@@ -74,15 +73,6 @@ public class KMS {
     kmsAudit= KMSWebApp.getKMSAudit();
   }
 
-  private static Principal getPrincipal(SecurityContext securityContext)
-      throws AuthenticationException{
-    Principal user = securityContext.getUserPrincipal();
-    if (user == null) {
-      throw new AuthenticationException("User must be authenticated");
-    }
-    return user;
-  }
-
 
   private static final String UNAUTHORIZED_MSG_WITH_KEY = 
       "User:%s not allowed to do '%s' on '%s'";
@@ -90,20 +80,21 @@ public class KMS {
   private static final String UNAUTHORIZED_MSG_WITHOUT_KEY = 
       "User:%s not allowed to do '%s'";
 
-  private void assertAccess(KMSACLs.Type aclType, Principal principal,
+  private void assertAccess(KMSACLs.Type aclType, UserGroupInformation ugi,
       KMSOp operation) throws AccessControlException {
-    assertAccess(aclType, principal, operation, null);
+    assertAccess(aclType, ugi, operation, null);
   }
 
-  private void assertAccess(KMSACLs.Type aclType, Principal principal,
-      KMSOp operation, String key) throws AccessControlException {
-    if (!KMSWebApp.getACLs().hasAccess(aclType, principal.getName())) {
+  private void assertAccess(KMSACLs.Type aclType,
+      UserGroupInformation ugi, KMSOp operation, String key)
+      throws AccessControlException {
+    if (!KMSWebApp.getACLs().hasAccess(aclType, ugi)) {
       KMSWebApp.getUnauthorizedCallsMeter().mark();
-      kmsAudit.unauthorized(principal, operation, key);
+      kmsAudit.unauthorized(ugi, operation, key);
       throw new AuthorizationException(String.format(
           (key != null) ? UNAUTHORIZED_MSG_WITH_KEY 
                         : UNAUTHORIZED_MSG_WITHOUT_KEY,
-          principal.getName(), operation, key));
+          ugi.getShortUserName(), operation, key));
     }
   }
 
@@ -123,15 +114,14 @@ public class KMS {
   @Consumes(MediaType.APPLICATION_JSON)
   @Produces(MediaType.APPLICATION_JSON)
   @SuppressWarnings("unchecked")
-  public Response createKey(@Context SecurityContext securityContext,
-      Map jsonKey) throws Exception {
+  public Response createKey(Map jsonKey) throws Exception {
     KMSWebApp.getAdminCallsMeter().mark();
-    Principal user = getPrincipal(securityContext);
-    String name = (String) jsonKey.get(KMSRESTConstants.NAME_FIELD);
+    UserGroupInformation user = HttpUserGroupInformation.get();
+    final String name = (String) jsonKey.get(KMSRESTConstants.NAME_FIELD);
     KMSClientProvider.checkNotEmpty(name, KMSRESTConstants.NAME_FIELD);
     assertAccess(KMSACLs.Type.CREATE, user, KMSOp.CREATE_KEY, name);
     String cipher = (String) jsonKey.get(KMSRESTConstants.CIPHER_FIELD);
-    String material = (String) jsonKey.get(KMSRESTConstants.MATERIAL_FIELD);
+    final String material = (String) jsonKey.get(KMSRESTConstants.MATERIAL_FIELD);
     int length = (jsonKey.containsKey(KMSRESTConstants.LENGTH_FIELD))
                  ? (Integer) jsonKey.get(KMSRESTConstants.LENGTH_FIELD) : 0;
     String description = (String)
@@ -142,7 +132,7 @@ public class KMS {
       assertAccess(KMSACLs.Type.SET_KEY_MATERIAL, user,
           KMSOp.CREATE_KEY, name);
     }
-    KeyProvider.Options options = new KeyProvider.Options(
+    final KeyProvider.Options options = new KeyProvider.Options(
         KMSWebApp.getConfiguration());
     if (cipher != null) {
       options.setCipher(cipher);
@@ -153,16 +143,23 @@ public class KMS {
     options.setDescription(description);
     options.setAttributes(attributes);
 
-    KeyProvider.KeyVersion keyVersion = (material != null)
-        ? provider.createKey(name, Base64.decodeBase64(material), options)
-        : provider.createKey(name, options);
-
-    provider.flush();
+    KeyProvider.KeyVersion keyVersion = user.doAs(
+        new PrivilegedExceptionAction<KeyVersion>() {
+          @Override
+          public KeyVersion run() throws Exception {
+            KeyProvider.KeyVersion keyVersion = (material != null)
+              ? provider.createKey(name, Base64.decodeBase64(material), options)
+              : provider.createKey(name, options);
+            provider.flush();
+            return keyVersion;
+          }
+        }
+    );
 
     kmsAudit.ok(user, KMSOp.CREATE_KEY, name, "UserProvidedMaterial:" +
         (material != null) + " Description:" + description);
 
-    if (!KMSWebApp.getACLs().hasAccess(KMSACLs.Type.GET, user.getName())) {
+    if (!KMSWebApp.getACLs().hasAccess(KMSACLs.Type.GET, user)) {
       keyVersion = removeKeyMaterial(keyVersion);
     }
     Map json = KMSServerJSONUtils.toJSON(keyVersion);
@@ -176,14 +173,21 @@ public class KMS {
 
   @DELETE
   @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}")
-  public Response deleteKey(@Context SecurityContext securityContext,
-      @PathParam("name") String name) throws Exception {
+  public Response deleteKey(@PathParam("name") final String name)
+      throws Exception {
     KMSWebApp.getAdminCallsMeter().mark();
-    Principal user = getPrincipal(securityContext);
+    UserGroupInformation user = HttpUserGroupInformation.get();
     assertAccess(KMSACLs.Type.DELETE, user, KMSOp.DELETE_KEY, name);
     KMSClientProvider.checkNotEmpty(name, "name");
-    provider.deleteKey(name);
-    provider.flush();
+
+    user.doAs(new PrivilegedExceptionAction<Void>() {
+      @Override
+      public Void run() throws Exception {
+        provider.deleteKey(name);
+        provider.flush();
+        return null;
+      }
+    });
 
     kmsAudit.ok(user, KMSOp.DELETE_KEY, name, "");
 
@@ -194,29 +198,36 @@ public class KMS {
   @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}")
   @Consumes(MediaType.APPLICATION_JSON)
   @Produces(MediaType.APPLICATION_JSON)
-  public Response rolloverKey(@Context SecurityContext securityContext,
-      @PathParam("name") String name, Map jsonMaterial)
-      throws Exception {
+  public Response rolloverKey(@PathParam("name") final String name,
+      Map jsonMaterial) throws Exception {
     KMSWebApp.getAdminCallsMeter().mark();
-    Principal user = getPrincipal(securityContext);
+    UserGroupInformation user = HttpUserGroupInformation.get();
     assertAccess(KMSACLs.Type.ROLLOVER, user, KMSOp.ROLL_NEW_VERSION, name);
     KMSClientProvider.checkNotEmpty(name, "name");
-    String material = (String)
+    final String material = (String)
         jsonMaterial.get(KMSRESTConstants.MATERIAL_FIELD);
     if (material != null) {
       assertAccess(KMSACLs.Type.SET_KEY_MATERIAL, user,
           KMSOp.ROLL_NEW_VERSION, name);
     }
-    KeyProvider.KeyVersion keyVersion = (material != null)
-        ? provider.rollNewVersion(name, Base64.decodeBase64(material))
-        : provider.rollNewVersion(name);
 
-    provider.flush();
+    KeyProvider.KeyVersion keyVersion = user.doAs(
+        new PrivilegedExceptionAction<KeyVersion>() {
+          @Override
+          public KeyVersion run() throws Exception {
+            KeyVersion keyVersion = (material != null)
+              ? provider.rollNewVersion(name, Base64.decodeBase64(material))
+              : provider.rollNewVersion(name);
+            provider.flush();
+            return keyVersion;
+          }
+        }
+    );
 
     kmsAudit.ok(user, KMSOp.ROLL_NEW_VERSION, name, "UserProvidedMaterial:" +
         (material != null) + " NewVersion:" + keyVersion.getVersionName());
 
-    if (!KMSWebApp.getACLs().hasAccess(KMSACLs.Type.GET, user.getName())) {
+    if (!KMSWebApp.getACLs().hasAccess(KMSACLs.Type.GET, user)) {
       keyVersion = removeKeyMaterial(keyVersion);
     }
     Map json = KMSServerJSONUtils.toJSON(keyVersion);
@@ -226,14 +237,23 @@ public class KMS {
   @GET
   @Path(KMSRESTConstants.KEYS_METADATA_RESOURCE)
   @Produces(MediaType.APPLICATION_JSON)
-  public Response getKeysMetadata(@Context SecurityContext securityContext,
-      @QueryParam(KMSRESTConstants.KEY) List<String> keyNamesList)
-      throws Exception {
+  public Response getKeysMetadata(@QueryParam(KMSRESTConstants.KEY)
+      List<String> keyNamesList) throws Exception {
     KMSWebApp.getAdminCallsMeter().mark();
-    Principal user = getPrincipal(securityContext);
-    String[] keyNames = keyNamesList.toArray(new String[keyNamesList.size()]);
+    UserGroupInformation user = HttpUserGroupInformation.get();
+    final String[] keyNames = keyNamesList.toArray(
+        new String[keyNamesList.size()]);
     assertAccess(KMSACLs.Type.GET_METADATA, user, KMSOp.GET_KEYS_METADATA);
-    KeyProvider.Metadata[] keysMeta = provider.getKeysMetadata(keyNames);
+
+    KeyProvider.Metadata[] keysMeta = user.doAs(
+        new PrivilegedExceptionAction<KeyProvider.Metadata[]>() {
+          @Override
+          public KeyProvider.Metadata[] run() throws Exception {
+            return provider.getKeysMetadata(keyNames);
+          }
+        }
+    );
+
     Object json = KMSServerJSONUtils.toJSON(keyNames, keysMeta);
     kmsAudit.ok(user, KMSOp.GET_KEYS_METADATA, "");
     return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
@@ -242,36 +262,52 @@ public class KMS {
   @GET
   @Path(KMSRESTConstants.KEYS_NAMES_RESOURCE)
   @Produces(MediaType.APPLICATION_JSON)
-  public Response getKeyNames(@Context SecurityContext securityContext)
-      throws Exception {
+  public Response getKeyNames() throws Exception {
     KMSWebApp.getAdminCallsMeter().mark();
-    Principal user = getPrincipal(securityContext);
+    UserGroupInformation user = HttpUserGroupInformation.get();
     assertAccess(KMSACLs.Type.GET_KEYS, user, KMSOp.GET_KEYS);
-    Object json = provider.getKeys();
+
+    List<String> json = user.doAs(
+        new PrivilegedExceptionAction<List<String>>() {
+          @Override
+          public List<String> run() throws Exception {
+            return provider.getKeys();
+          }
+        }
+    );
+
     kmsAudit.ok(user, KMSOp.GET_KEYS, "");
     return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
   }
 
   @GET
   @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}")
-  public Response getKey(@Context SecurityContext securityContext,
-      @PathParam("name") String name)
+  public Response getKey(@PathParam("name") String name)
       throws Exception {
-    return getMetadata(securityContext, name);
+    return getMetadata(name);
   }
 
   @GET
   @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" +
       KMSRESTConstants.METADATA_SUB_RESOURCE)
   @Produces(MediaType.APPLICATION_JSON)
-  public Response getMetadata(@Context SecurityContext securityContext,
-      @PathParam("name") String name)
+  public Response getMetadata(@PathParam("name") final String name)
       throws Exception {
-    Principal user = getPrincipal(securityContext);
+    UserGroupInformation user = HttpUserGroupInformation.get();
     KMSClientProvider.checkNotEmpty(name, "name");
     KMSWebApp.getAdminCallsMeter().mark();
     assertAccess(KMSACLs.Type.GET_METADATA, user, KMSOp.GET_METADATA, name);
-    Object json = KMSServerJSONUtils.toJSON(name, provider.getMetadata(name));
+
+    KeyProvider.Metadata metadata = user.doAs(
+        new PrivilegedExceptionAction<KeyProvider.Metadata>() {
+          @Override
+          public KeyProvider.Metadata run() throws Exception {
+            return provider.getMetadata(name);
+          }
+        }
+    );
+
+    Object json = KMSServerJSONUtils.toJSON(name, metadata);
     kmsAudit.ok(user, KMSOp.GET_METADATA, name, "");
     return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
   }
@@ -280,14 +316,23 @@ public class KMS {
   @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" +
       KMSRESTConstants.CURRENT_VERSION_SUB_RESOURCE)
   @Produces(MediaType.APPLICATION_JSON)
-  public Response getCurrentVersion(@Context SecurityContext securityContext,
-      @PathParam("name") String name)
+  public Response getCurrentVersion(@PathParam("name") final String name)
       throws Exception {
-    Principal user = getPrincipal(securityContext);
+    UserGroupInformation user = HttpUserGroupInformation.get();
     KMSClientProvider.checkNotEmpty(name, "name");
     KMSWebApp.getKeyCallsMeter().mark();
     assertAccess(KMSACLs.Type.GET, user, KMSOp.GET_CURRENT_KEY, name);
-    Object json = KMSServerJSONUtils.toJSON(provider.getCurrentKey(name));
+
+    KeyVersion keyVersion = user.doAs(
+        new PrivilegedExceptionAction<KeyVersion>() {
+          @Override
+          public KeyVersion run() throws Exception {
+            return provider.getCurrentKey(name);
+          }
+        }
+    );
+
+    Object json = KMSServerJSONUtils.toJSON(keyVersion);
     kmsAudit.ok(user, KMSOp.GET_CURRENT_KEY, name, "");
     return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
   }
@@ -295,14 +340,22 @@ public class KMS {
   @GET
   @Path(KMSRESTConstants.KEY_VERSION_RESOURCE + "/{versionName:.*}")
   @Produces(MediaType.APPLICATION_JSON)
-  public Response getKeyVersion(@Context SecurityContext securityContext,
-      @PathParam("versionName") String versionName)
-      throws Exception {
-    Principal user = getPrincipal(securityContext);
+  public Response getKeyVersion(
+      @PathParam("versionName") final String versionName) throws Exception {
+    UserGroupInformation user = HttpUserGroupInformation.get();
     KMSClientProvider.checkNotEmpty(versionName, "versionName");
     KMSWebApp.getKeyCallsMeter().mark();
-    KeyVersion keyVersion = provider.getKeyVersion(versionName);
     assertAccess(KMSACLs.Type.GET, user, KMSOp.GET_KEY_VERSION);
+
+    KeyVersion keyVersion = user.doAs(
+        new PrivilegedExceptionAction<KeyVersion>() {
+          @Override
+          public KeyVersion run() throws Exception {
+            return provider.getKeyVersion(versionName);
+          }
+        }
+    );
+
     if (keyVersion != null) {
       kmsAudit.ok(user, KMSOp.GET_KEY_VERSION, keyVersion.getName(), "");
     }
@@ -316,13 +369,12 @@ public class KMS {
       KMSRESTConstants.EEK_SUB_RESOURCE)
   @Produces(MediaType.APPLICATION_JSON)
   public Response generateEncryptedKeys(
-          @Context SecurityContext securityContext,
-          @PathParam("name") String name,
+          @PathParam("name") final String name,
           @QueryParam(KMSRESTConstants.EEK_OP) String edekOp,
           @DefaultValue("1")
-          @QueryParam(KMSRESTConstants.EEK_NUM_KEYS) int numKeys)
+          @QueryParam(KMSRESTConstants.EEK_NUM_KEYS) final int numKeys)
           throws Exception {
-    Principal user = getPrincipal(securityContext);
+    UserGroupInformation user = HttpUserGroupInformation.get();
     KMSClientProvider.checkNotEmpty(name, "name");
     KMSClientProvider.checkNotNull(edekOp, "eekOp");
 
@@ -330,12 +382,22 @@ public class KMS {
     if (edekOp.equals(KMSRESTConstants.EEK_GENERATE)) {
       assertAccess(KMSACLs.Type.GENERATE_EEK, user, KMSOp.GENERATE_EEK, name);
 
-      List<EncryptedKeyVersion> retEdeks =
+      final List<EncryptedKeyVersion> retEdeks =
           new LinkedList<EncryptedKeyVersion>();
       try {
-        for (int i = 0; i < numKeys; i ++) {
-          retEdeks.add(provider.generateEncryptedKey(name));
-        }
+
+        user.doAs(
+            new PrivilegedExceptionAction<Void>() {
+              @Override
+              public Void run() throws Exception {
+                for (int i = 0; i < numKeys; i++) {
+                  retEdeks.add(provider.generateEncryptedKey(name));
+                }
+                return null;
+              }
+            }
+        );
+
       } catch (Exception e) {
         throw new IOException(e);
       }
@@ -359,16 +421,17 @@ public class KMS {
   @Path(KMSRESTConstants.KEY_VERSION_RESOURCE + "/{versionName:.*}/" +
       KMSRESTConstants.EEK_SUB_RESOURCE)
   @Produces(MediaType.APPLICATION_JSON)
-  public Response decryptEncryptedKey(@Context SecurityContext securityContext,
-      @PathParam("versionName") String versionName,
+  public Response decryptEncryptedKey(
+      @PathParam("versionName") final String versionName,
       @QueryParam(KMSRESTConstants.EEK_OP) String eekOp,
       Map jsonPayload)
       throws Exception {
-    Principal user = getPrincipal(securityContext);
+    UserGroupInformation user = HttpUserGroupInformation.get();
     KMSClientProvider.checkNotEmpty(versionName, "versionName");
     KMSClientProvider.checkNotNull(eekOp, "eekOp");
 
-    String keyName = (String) jsonPayload.get(KMSRESTConstants.NAME_FIELD);
+    final String keyName = (String) jsonPayload.get(
+        KMSRESTConstants.NAME_FIELD);
     String ivStr = (String) jsonPayload.get(KMSRESTConstants.IV_FIELD);
     String encMaterialStr = 
         (String) jsonPayload.get(KMSRESTConstants.MATERIAL_FIELD);
@@ -376,14 +439,24 @@ public class KMS {
     if (eekOp.equals(KMSRESTConstants.EEK_DECRYPT)) {
       assertAccess(KMSACLs.Type.DECRYPT_EEK, user, KMSOp.DECRYPT_EEK, keyName);
       KMSClientProvider.checkNotNull(ivStr, KMSRESTConstants.IV_FIELD);
-      byte[] iv = Base64.decodeBase64(ivStr);
+      final byte[] iv = Base64.decodeBase64(ivStr);
       KMSClientProvider.checkNotNull(encMaterialStr,
           KMSRESTConstants.MATERIAL_FIELD);
-      byte[] encMaterial = Base64.decodeBase64(encMaterialStr);
-      KeyProvider.KeyVersion retKeyVersion =
-          provider.decryptEncryptedKey(
-              new KMSClientProvider.KMSEncryptedKeyVersion(keyName, versionName,
-                  iv, KeyProviderCryptoExtension.EEK, encMaterial));
+      final byte[] encMaterial = Base64.decodeBase64(encMaterialStr);
+
+      KeyProvider.KeyVersion retKeyVersion = user.doAs(
+          new PrivilegedExceptionAction<KeyVersion>() {
+            @Override
+            public KeyVersion run() throws Exception {
+              return provider.decryptEncryptedKey(
+                  new KMSClientProvider.KMSEncryptedKeyVersion(keyName,
+                      versionName, iv, KeyProviderCryptoExtension.EEK,
+                      encMaterial)
+              );
+            }
+          }
+      );
+
       retJSON = KMSServerJSONUtils.toJSON(retKeyVersion);
       kmsAudit.ok(user, KMSOp.DECRYPT_EEK, keyName, "");
     } else {
@@ -400,14 +473,23 @@ public class KMS {
   @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" +
       KMSRESTConstants.VERSIONS_SUB_RESOURCE)
   @Produces(MediaType.APPLICATION_JSON)
-  public Response getKeyVersions(@Context SecurityContext securityContext,
-      @PathParam("name") String name)
+  public Response getKeyVersions(@PathParam("name") final String name)
       throws Exception {
-    Principal user = getPrincipal(securityContext);
+    UserGroupInformation user = HttpUserGroupInformation.get();
     KMSClientProvider.checkNotEmpty(name, "name");
     KMSWebApp.getKeyCallsMeter().mark();
     assertAccess(KMSACLs.Type.GET, user, KMSOp.GET_KEY_VERSIONS, name);
-    Object json = KMSServerJSONUtils.toJSON(provider.getKeyVersions(name));
+
+    List<KeyVersion> ret = user.doAs(
+        new PrivilegedExceptionAction<List<KeyVersion>>() {
+          @Override
+          public List<KeyVersion> run() throws Exception {
+            return provider.getKeyVersions(name);
+          }
+        }
+    );
+
+    Object json = KMSServerJSONUtils.toJSON(ret);
     kmsAudit.ok(user, KMSOp.GET_KEY_VERSIONS, name, "");
     return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
   }
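
The recurring change in KMS.java above wraps each KeyProvider call in
user.doAs(...), so the backing provider operates as the authenticated
(possibly proxied) caller rather than the KMS process user. Distilled into
a standalone sketch (the provider parameter stands in for the KMS's own
KeyProvider field):

  import java.io.IOException;
  import java.security.PrivilegedExceptionAction;
  import java.util.List;
  import org.apache.hadoop.crypto.key.KeyProvider;
  import org.apache.hadoop.security.UserGroupInformation;
  import org.apache.hadoop.security.token.delegation.web.HttpUserGroupInformation;

  public class DoAsSketch {
    // Resolve the caller's UGI from the current HTTP request, then run
    // the provider call under that identity so ACL checks and audit
    // logs see the real caller.
    static List<String> getKeysAsCaller(final KeyProvider provider)
        throws IOException, InterruptedException {
      UserGroupInformation user = HttpUserGroupInformation.get();
      return user.doAs(new PrivilegedExceptionAction<List<String>>() {
        @Override
        public List<String> run() throws Exception {
          return provider.getKeys();
        }
      });
    }
  }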

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java Sat Aug 16 21:02:21 2014
@@ -113,8 +113,7 @@ public class KMSACLs implements Runnable
     return conf;
   }
 
-  public boolean hasAccess(Type type, String user) {
-    UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
+  public boolean hasAccess(Type type, UserGroupInformation ugi) {
     return acls.get(type).isUserAllowed(ugi);
   }
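
The simplification above works because AccessControlList checks a
UserGroupInformation directly, so building an intermediate UGI from a
user-name String inside KMSACLs is unnecessary once the caller already
holds one. A small sketch (user and group names are made up):

  import org.apache.hadoop.security.UserGroupInformation;
  import org.apache.hadoop.security.authorize.AccessControlList;

  public class AclSketch {
    public static void main(String[] args) {
      // "users groups" form: users "alice" and "bob", plus group "devs".
      AccessControlList acl = new AccessControlList("alice,bob devs");
      UserGroupInformation ugi =
          UserGroupInformation.createRemoteUser("alice");
      System.out.println(acl.isUserAllowed(ugi));   // true
    }
  }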
 

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAudit.java Sat Aug 16 21:02:21 2014
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.crypto.key.kms.server;
 
+import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -29,7 +30,6 @@ import com.google.common.cache.RemovalNo
 import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
-import java.security.Principal;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Set;
@@ -186,22 +186,22 @@ public class KMSAudit {
     }
   }
 
-  public void ok(Principal user, KMS.KMSOp op, String key,
+  public void ok(UserGroupInformation user, KMS.KMSOp op, String key,
       String extraMsg) {
-    op(OpStatus.OK, op, user.getName(), key, extraMsg);
+    op(OpStatus.OK, op, user.getShortUserName(), key, extraMsg);
   }
 
-  public void ok(Principal user, KMS.KMSOp op, String extraMsg) {
-    op(OpStatus.OK, op, user.getName(), null, extraMsg);
+  public void ok(UserGroupInformation user, KMS.KMSOp op, String extraMsg) {
+    op(OpStatus.OK, op, user.getShortUserName(), null, extraMsg);
   }
 
-  public void unauthorized(Principal user, KMS.KMSOp op, String key) {
-    op(OpStatus.UNAUTHORIZED, op, user.getName(), key, "");
+  public void unauthorized(UserGroupInformation user, KMS.KMSOp op, String key) {
+    op(OpStatus.UNAUTHORIZED, op, user.getShortUserName(), key, "");
   }
 
-  public void error(Principal user, String method, String url,
+  public void error(UserGroupInformation user, String method, String url,
       String extraMsg) {
-    op(OpStatus.ERROR, null, user.getName(), null, "Method:'" + method
+    op(OpStatus.ERROR, null, user.getShortUserName(), null, "Method:'" + method
         + "' Exception:'" + extraMsg + "'");
   }
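
A note on the switch from Principal.getName() to
UserGroupInformation.getShortUserName(): for a plain remote user the two
coincide, while for a Kerberos principal the short name is the local name
produced by the auth_to_local rules. A sketch with a made-up user:

  import org.apache.hadoop.security.UserGroupInformation;

  public class ShortNameSketch {
    public static void main(String[] args) {
      // For a simple remote user the short name is just the login name;
      // for a principal like "alice/host@REALM" it would be the
      // auth_to_local translation (typically "alice").
      UserGroupInformation ugi =
          UserGroupInformation.createRemoteUser("alice");
      System.out.println(ugi.getShortUserName());   // alice
    }
  }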
 

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java?rev=1618417&r1=1618416&r2=1618417&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java Sat Aug 16 21:02:21 2014
@@ -19,7 +19,13 @@ package org.apache.hadoop.crypto.key.kms
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.crypto.key.kms.KMSClientProvider;
+import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
+import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationFilter;
+import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationHandler;
+import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticationHandler;
+import org.apache.hadoop.security.token.delegation.web.PseudoDelegationTokenAuthenticationHandler;
 
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
@@ -38,7 +44,8 @@ import java.util.Properties;
  * file.
  */
 @InterfaceAudience.Private
-public class KMSAuthenticationFilter extends AuthenticationFilter {
+public class KMSAuthenticationFilter
+    extends DelegationTokenAuthenticationFilter {
   private static final String CONF_PREFIX = KMSConfiguration.CONFIG_PREFIX +
       "authentication.";
 
@@ -55,9 +62,30 @@ public class KMSAuthenticationFilter ext
         props.setProperty(name, value);
       }
     }
+    String authType = props.getProperty(AUTH_TYPE);
+    if (authType.equals(PseudoAuthenticationHandler.TYPE)) {
+      props.setProperty(AUTH_TYPE,
+          PseudoDelegationTokenAuthenticationHandler.class.getName());
+    } else if (authType.equals(KerberosAuthenticationHandler.TYPE)) {
+      props.setProperty(AUTH_TYPE,
+          KerberosDelegationTokenAuthenticationHandler.class.getName());
+    }
+    props.setProperty(DelegationTokenAuthenticationHandler.TOKEN_KIND,
+        KMSClientProvider.TOKEN_KIND);
     return props;
   }
 
+  protected Configuration getProxyuserConfiguration(FilterConfig filterConfig) {
+    Map<String, String> proxyuserConf = KMSWebApp.getConfiguration().
+        getValByRegex("hadoop\\.kms\\.proxyuser\\.");
+    Configuration conf = new Configuration(false);
+    for (Map.Entry<String, String> entry : proxyuserConf.entrySet()) {
+      conf.set(entry.getKey().substring("hadoop.kms.".length()),
+          entry.getValue());
+    }
+    return conf;
+  }
+
   private static class KMSResponse extends HttpServletResponseWrapper {
     public int statusCode;
     public String msg;
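
The getProxyuserConfiguration override above forwards every
hadoop.kms.proxyuser.* property to the delegation-token filter with the
hadoop.kms. prefix stripped. A sketch of that mapping (the httpfs proxy
user is a made-up example):

  import java.util.Map;
  import org.apache.hadoop.conf.Configuration;

  public class ProxyuserMappingSketch {
    public static void main(String[] args) {
      Configuration kms = new Configuration(false);
      kms.set("hadoop.kms.proxyuser.httpfs.hosts", "*");  // sample entry

      // Same transformation as getProxyuserConfiguration():
      // hadoop.kms.proxyuser.httpfs.hosts -> proxyuser.httpfs.hosts
      Configuration filterConf = new Configuration(false);
      for (Map.Entry<String, String> e
          : kms.getValByRegex("hadoop\\.kms\\.proxyuser\\.").entrySet()) {
        filterConf.set(e.getKey().substring("hadoop.kms.".length()),
            e.getValue());
      }
      System.out.println(filterConf.get("proxyuser.httpfs.hosts"));  // *
    }
  }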


