hadoop-common-commits mailing list archives

From szets...@apache.org
Subject svn commit: r1618764 - in /hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common: ./ src/main/java/ src/main/java/org/apache/hadoop/conf/ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/ipc/metrics/ src/main/java/org/...
Date Mon, 18 Aug 2014 23:41:55 GMT
Author: szetszwo
Date: Mon Aug 18 23:41:53 2014
New Revision: 1618764

URL: http://svn.apache.org/r1618764
Log:
Merge r1609845 through r1618763 from trunk.

Added:
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
      - copied unchanged from r1618763, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CacheableIPList.java
      - copied unchanged from r1618763, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CacheableIPList.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java
      - copied unchanged from r1618763, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
      - copied unchanged from r1618763, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IPList.java
      - copied unchanged from r1618763, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IPList.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestWhitelistBasedResolver.java
      - copied unchanged from r1618763, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestWhitelistBasedResolver.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestCacheableIPList.java
      - copied unchanged from r1618763, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestCacheableIPList.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFileBasedIPList.java
      - copied unchanged from r1618763, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestFileBasedIPList.java
Modified:
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/ServiceLevelAuth.apt.vm
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestServiceAuthorization.java
    hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt Mon Aug 18 23:41:53 2014
@@ -521,6 +521,15 @@ Release 2.6.0 - UNRELEASED
     HADOOP-10231. Add some components in Native Libraries document (Akira 
     AJISAKA via aw)
 
+    HADOOP-10650. Add ability to specify a reverse ACL (black list) of users
+    and groups. (Benoy Antony via Arpit Agarwal)
+
+    HADOOP-10335. An ip whitelist based implementation to resolve Sasl
+    properties per connection. (Benoy Antony via Arpit Agarwal)
+
+    HADOOP-10975. org.apache.hadoop.util.DataChecksum should support calculating
+    checksums in native code (James Thomas via Colin Patrick McCabe)
+
   OPTIMIZATIONS
 
     HADOOP-10838. Byte array native checksumming. (James Thomas via todd)
@@ -599,6 +608,19 @@ Release 2.6.0 - UNRELEASED
     HADOOP-10964. Small fix for NetworkTopologyWithNodeGroup#sortByDistance.
     (Yi Liu via wang)
 
+    HADOOP-10059. RPC authentication and authorization metrics overflow to
+    negative values on busy clusters (Tsuyoshi OZAWA and Akira AJISAKA
+    via jlowe)
+
+    HADOOP-10973. Native Libraries Guide contains format error. (Peter Klavins
+    via Arpit Agarwal)
+
+    HADOOP-10972. Native Libraries Guide contains mis-spelt build line (Peter
+    Klavins via aw)
+
+    HADOOP-10873. Fix dead link in Configuration javadoc (Akira AJISAKA 
+    via aw)
+
 Release 2.5.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1618417-1618763

Propchange: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1618417-1618763

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Mon Aug 18 23:41:53 2014
@@ -110,8 +110,9 @@ import com.google.common.base.Preconditi
  *
  * <p>Unless explicitly turned off, Hadoop by default specifies two 
  * resources, loaded in-order from the classpath: <ol>
- * <li><tt><a href="{@docRoot}/../core-default.html">core-default.xml</a>
- * </tt>: Read-only defaults for hadoop.</li>
+ * <li><tt>
+ * <a href="{@docRoot}/../hadoop-project-dist/hadoop-common/core-default.xml">
+ * core-default.xml</a></tt>: Read-only defaults for hadoop.</li>
  * <li><tt>core-site.xml</tt>: Site-specific configuration for a given hadoop
  * installation.</li>
  * </ol>

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java Mon Aug 18 23:41:53 2014
@@ -134,6 +134,9 @@ public class CommonConfigurationKeys ext
   HADOOP_SECURITY_SERVICE_AUTHORIZATION_DEFAULT_ACL = 
       "security.service.authorization.default.acl";
   public static final String 
+  HADOOP_SECURITY_SERVICE_AUTHORIZATION_DEFAULT_BLOCKED_ACL =
+      "security.service.authorization.default.acl.blocked";
+  public static final String
   HADOOP_SECURITY_SERVICE_AUTHORIZATION_REFRESH_POLICY = 
       "security.refresh.policy.protocol.acl";
   public static final String 
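
The new key names the cluster-wide default for blocked ACLs; per the ServiceAuthorizationManager change below, it falls back to the empty string (block nobody) when unset. A minimal sketch of reading it, assuming hadoop-common on the classpath:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;

    public class DefaultBlockedAcl {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Unset, the default blocked ACL falls back to "" (block nobody),
        // matching the "" default passed in ServiceAuthorizationManager.
        String blocked = conf.get(
            CommonConfigurationKeys
                .HADOOP_SECURITY_SERVICE_AUTHORIZATION_DEFAULT_BLOCKED_ACL,
            "");
        System.out.println("default blocked ACL: '" + blocked + "'");
      }
    }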

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java Mon Aug 18 23:41:53 2014
@@ -88,13 +88,13 @@ public class RpcMetrics {
   @Metric("Processsing time") MutableRate rpcProcessingTime;
   MutableQuantiles[] rpcProcessingTimeMillisQuantiles;
   @Metric("Number of authentication failures")
-  MutableCounterInt rpcAuthenticationFailures;
+  MutableCounterLong rpcAuthenticationFailures;
   @Metric("Number of authentication successes")
-  MutableCounterInt rpcAuthenticationSuccesses;
+  MutableCounterLong rpcAuthenticationSuccesses;
   @Metric("Number of authorization failures")
-  MutableCounterInt rpcAuthorizationFailures;
+  MutableCounterLong rpcAuthorizationFailures;
   @Metric("Number of authorization sucesses")
-  MutableCounterInt rpcAuthorizationSuccesses;
+  MutableCounterLong rpcAuthorizationSuccesses;
 
   @Metric("Number of open connections") public int numOpenConnections() {
     return server.getNumOpenConnections();
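
The counter widening above is the fix for HADOOP-10059: a 32-bit counter wraps to a negative value after 2^31 - 1 increments, which the authentication/authorization counters can reach on a busy cluster. A plain-Java illustration of the wrap-around (no Hadoop types involved):

    public class CounterOverflow {
      public static void main(String[] args) {
        int intCounter = Integer.MAX_VALUE; // 2147483647, MutableCounterInt at its limit
        intCounter++;                       // wraps around
        System.out.println(intCounter);     // prints -2147483648

        long longCounter = Integer.MAX_VALUE;
        longCounter++;                      // MutableCounterLong has headroom
        System.out.println(longCounter);    // prints 2147483648
      }
    }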

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java Mon Aug 18 23:41:53 2014
@@ -43,10 +43,14 @@ import com.google.common.annotations.Vis
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public class ServiceAuthorizationManager {
+  static final String BLOCKED = ".blocked";
+
   private static final String HADOOP_POLICY_FILE = "hadoop-policy.xml";
 
-  private volatile Map<Class<?>, AccessControlList> protocolToAcl =
-    new IdentityHashMap<Class<?>, AccessControlList>();
+  // For each class, first ACL in the array specifies the allowed entries
+  // and second ACL specifies blocked entries.
+  private volatile Map<Class<?>, AccessControlList[]> protocolToAcls =
+    new IdentityHashMap<Class<?>, AccessControlList[]>();
   
   /**
    * Configuration key for controlling service-level authorization for Hadoop.
@@ -80,8 +84,8 @@ public class ServiceAuthorizationManager
                                Configuration conf,
                                InetAddress addr
                                ) throws AuthorizationException {
-    AccessControlList acl = protocolToAcl.get(protocol);
-    if (acl == null) {
+    AccessControlList[] acls = protocolToAcls.get(protocol);
+    if (acls == null) {
       throw new AuthorizationException("Protocol " + protocol + 
                                        " is not known.");
     }
@@ -104,7 +108,7 @@ public class ServiceAuthorizationManager
       }
     }
     if((clientPrincipal != null && !clientPrincipal.equals(user.getUserName())) || 
-        !acl.isUserAllowed(user)) {
+       acls.length != 2  || !acls[0].isUserAllowed(user) || acls[1].isUserAllowed(user)) {
       AUDITLOG.warn(AUTHZ_FAILED_FOR + user + " for protocol=" + protocol
           + ", expected client Kerberos principal is " + clientPrincipal);
       throw new AuthorizationException("User " + user + 
@@ -129,13 +133,16 @@ public class ServiceAuthorizationManager
   @Private
   public void refreshWithLoadedConfiguration(Configuration conf,
       PolicyProvider provider) {
-    final Map<Class<?>, AccessControlList> newAcls =
-        new IdentityHashMap<Class<?>, AccessControlList>();
+    final Map<Class<?>, AccessControlList[]> newAcls =
+      new IdentityHashMap<Class<?>, AccessControlList[]>();
     
     String defaultAcl = conf.get(
         CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_DEFAULT_ACL,
         AccessControlList.WILDCARD_ACL_VALUE);
 
+    String defaultBlockedAcl = conf.get(
+      CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_DEFAULT_BLOCKED_ACL, "");
+
     // Parse the config file
     Service[] services = provider.getServices();
     if (services != null) {
@@ -145,21 +152,30 @@ public class ServiceAuthorizationManager
                 conf.get(service.getServiceKey(),
                     defaultAcl)
             );
-        newAcls.put(service.getProtocol(), acl);
+        AccessControlList blockedAcl =
+           new AccessControlList(
+           conf.get(service.getServiceKey() + BLOCKED,
+           defaultBlockedAcl));
+        newAcls.put(service.getProtocol(), new AccessControlList[] {acl, blockedAcl});
       }
     }
 
     // Flip to the newly parsed permissions
-    protocolToAcl = newAcls;
+    protocolToAcls = newAcls;
   }
 
   @VisibleForTesting
   public Set<Class<?>> getProtocolsWithAcls() {
-    return protocolToAcl.keySet();
+    return protocolToAcls.keySet();
   }
 
   @VisibleForTesting
   public AccessControlList getProtocolsAcls(Class<?> className) {
-    return protocolToAcl.get(className);
+    return protocolToAcls.get(className)[0];
+  }
+
+  @VisibleForTesting
+  public AccessControlList getProtocolsBlockedAcls(Class<?> className) {
+    return protocolToAcls.get(className)[1];
   }
 }
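
The reworked authorize() above reduces to one rule: access is granted only when the caller is on the allow ACL and absent from the blocked ACL. A standalone sketch of that decision using the public AccessControlList API (the wrapper class and main method here are illustrative):

    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.authorize.AccessControlList;

    public class AllowBlockCheck {
      // Mirrors the check in authorize(): acls[0] allows, acls[1] blocks.
      static boolean isAuthorized(AccessControlList[] acls,
                                  UserGroupInformation user) {
        return acls.length == 2
            && acls[0].isUserAllowed(user)     // on the allow list
            && !acls[1].isUserAllowed(user);   // and not on the blocked list
      }

      public static void main(String[] args) {
        AccessControlList allow = new AccessControlList("drwho group1");
        AccessControlList block = new AccessControlList("drwho2 group3");
        UserGroupInformation drwho =
            UserGroupInformation.createRemoteUser("drwho");
        // Expected: true (user is allowed and not blocked).
        System.out.println(isAuthorized(
            new AccessControlList[] {allow, block}, drwho));
      }
    }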

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java Mon Aug 18 23:41:53 2014
@@ -390,6 +390,12 @@ public class DataChecksum implements Che
           checksums.array(), checksums.arrayOffset() + checksums.position());
       return;
     }
+
+    if (NativeCrc32.isAvailable()) {
+      NativeCrc32.calculateChunkedSums(bytesPerChecksum, type.id,
+          checksums, data);
+      return;
+    }
     
     data.mark();
     checksums.mark();
@@ -412,10 +418,16 @@ public class DataChecksum implements Che
    * Implementation of chunked calculation specifically on byte arrays. This
    * is to avoid the copy when dealing with ByteBuffers that have array backing.
    */
-  private void calculateChunkedSums(
+  public void calculateChunkedSums(
       byte[] data, int dataOffset, int dataLength,
       byte[] sums, int sumsOffset) {
 
+    if (NativeCrc32.isAvailable()) {
+      NativeCrc32.calculateChunkedSumsByteArray(bytesPerChecksum, type.id,
+          sums, sumsOffset, data, dataOffset, dataLength);
+      return;
+    }
+
     int remaining = dataLength;
     while (remaining > 0) {
       int n = Math.min(remaining, bytesPerChecksum);
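
Both calculateChunkedSums overloads now follow the same check-then-dispatch idiom: take the JNI fast path when the native library is loaded, otherwise fall through to the pure-Java loop. A generic sketch of the idiom (method names are illustrative stand-ins, not the Hadoop API):

    public class NativeFallback {
      // Stand-in for NativeCrc32.isAvailable(): was libhadoop loaded?
      static boolean nativeAvailable() { return false; }

      static void nativeSums(byte[] data, byte[] sums) {
        // would be a JNI call in the real code
      }

      static void javaSums(byte[] data, byte[] sums) {
        // pure-Java fallback: always available, usually slower
      }

      static void calculateChunkedSums(byte[] data, byte[] sums) {
        if (nativeAvailable()) {
          nativeSums(data, sums);  // fast path
          return;
        }
        javaSums(data, sums);      // portable path
      }

      public static void main(String[] args) {
        calculateChunkedSums(new byte[512], new byte[4]);
      }
    }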

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java Mon Aug 18 23:41:53 2014
@@ -37,7 +37,7 @@ import com.google.common.net.InetAddress
 /**
  * Container class which holds a list of ip/host addresses and 
  * answers membership queries.
- * .
+ *
  * Accepts list of ip addresses, ip addreses in CIDR format and/or 
  * host addresses.
  */
@@ -71,8 +71,15 @@ public class MachineList {
    * @param hostEntries comma separated ip/cidr/host addresses
    */
   public MachineList(String hostEntries) {
-    this(StringUtils.getTrimmedStringCollection(hostEntries),
-        InetAddressFactory.S_INSTANCE);
+    this(StringUtils.getTrimmedStringCollection(hostEntries));
+  }
+
+  /**
+   *
+   * @param hostEntries collection of ip/cidr/host addresses
+   */
+  public MachineList(Collection<String> hostEntries) {
+    this(hostEntries, InetAddressFactory.S_INSTANCE);
   }
 
   /**
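
The added constructor lets callers that already hold a parsed collection of entries skip re-joining and re-splitting a string. A hedged usage sketch (assumes hadoop-common on the classpath; MachineList is audience-private, so this is for illustration only):

    import java.util.Arrays;
    import org.apache.hadoop.util.MachineList;

    public class MachineListExample {
      public static void main(String[] args) {
        // Existing form: one comma-separated string.
        MachineList fromString = new MachineList("10.1.1.1,10.2.0.0/16");
        // New form: a pre-split collection of ip/cidr/host entries.
        MachineList fromList =
            new MachineList(Arrays.asList("10.1.1.1", "10.2.0.0/16"));
        // Expected: true, via the 10.2.0.0/16 CIDR range.
        System.out.println(fromList.includes("10.2.3.4"));
      }
    }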

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java Mon Aug 18 23:41:53 2014
@@ -54,33 +54,50 @@ class NativeCrc32 {
   public static void verifyChunkedSums(int bytesPerSum, int checksumType,
       ByteBuffer sums, ByteBuffer data, String fileName, long basePos)
       throws ChecksumException {
-    nativeVerifyChunkedSums(bytesPerSum, checksumType,
+    nativeComputeChunkedSums(bytesPerSum, checksumType,
         sums, sums.position(),
         data, data.position(), data.remaining(),
-        fileName, basePos);
+        fileName, basePos, true);
   }
 
   public static void verifyChunkedSumsByteArray(int bytesPerSum,
       int checksumType, byte[] sums, int sumsOffset, byte[] data,
       int dataOffset, int dataLength, String fileName, long basePos)
       throws ChecksumException {
-    nativeVerifyChunkedSumsByteArray(bytesPerSum, checksumType,
+    nativeComputeChunkedSumsByteArray(bytesPerSum, checksumType,
         sums, sumsOffset,
         data, dataOffset, dataLength,
-        fileName, basePos);
+        fileName, basePos, true);
+  }
+
+  public static void calculateChunkedSums(int bytesPerSum, int checksumType,
+      ByteBuffer sums, ByteBuffer data) {
+    nativeComputeChunkedSums(bytesPerSum, checksumType,
+        sums, sums.position(),
+        data, data.position(), data.remaining(),
+        "", 0, false);
+  }
+
+  public static void calculateChunkedSumsByteArray(int bytesPerSum,
+      int checksumType, byte[] sums, int sumsOffset, byte[] data,
+      int dataOffset, int dataLength) {
+    nativeComputeChunkedSumsByteArray(bytesPerSum, checksumType,
+        sums, sumsOffset,
+        data, dataOffset, dataLength,
+        "", 0, false);
   }
   
-    private static native void nativeVerifyChunkedSums(
+    private static native void nativeComputeChunkedSums(
       int bytesPerSum, int checksumType,
       ByteBuffer sums, int sumsOffset,
       ByteBuffer data, int dataOffset, int dataLength,
-      String fileName, long basePos);
+      String fileName, long basePos, boolean verify);
 
-    private static native void nativeVerifyChunkedSumsByteArray(
+    private static native void nativeComputeChunkedSumsByteArray(
       int bytesPerSum, int checksumType,
       byte[] sums, int sumsOffset,
       byte[] data, int dataOffset, int dataLength,
-      String fileName, long basePos);
+      String fileName, long basePos, boolean verify);
 
   // Copy the constants over from DataChecksum so that javah will pick them up
   // and make them available in the native code header.
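
After this change both public operations funnel into a single native entry point, selected by a trailing boolean: verify-and-throw versus compute-and-store. A Java-only sketch of the resulting shape (bodies elided; in the real class the worker is the native method above):

    public class ComputeOrVerify {
      // One worker replaces the two former native methods; 'verify'
      // chooses between comparing against 'sums' and writing into it.
      private static void computeChunkedSums(byte[] sums, byte[] data,
          String fileName, long basePos, boolean verify) {
        // native CRC computation happens here in the real code
      }

      public static void verifyChunkedSums(byte[] sums, byte[] data,
          String fileName, long basePos) {
        computeChunkedSums(sums, data, fileName, basePos, true);
      }

      public static void calculateChunkedSums(byte[] sums, byte[] data) {
        // No file context is needed when only calculating, hence the
        // "" and 0 placeholders, matching what NativeCrc32 passes.
        computeChunkedSums(sums, data, "", 0, false);
      }
    }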

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c Mon Aug 18 23:41:53 2014
@@ -117,12 +117,12 @@ static int convert_java_crc_type(JNIEnv 
   }
 }
 
-JNIEXPORT void JNICALL Java_org_apache_hadoop_util_NativeCrc32_nativeVerifyChunkedSums
+JNIEXPORT void JNICALL Java_org_apache_hadoop_util_NativeCrc32_nativeComputeChunkedSums
   (JNIEnv *env, jclass clazz,
     jint bytes_per_checksum, jint j_crc_type,
     jobject j_sums, jint sums_offset,
     jobject j_data, jint data_offset, jint data_len,
-    jstring j_filename, jlong base_pos)
+    jstring j_filename, jlong base_pos, jboolean verify)
 {
   uint8_t *sums_addr;
   uint8_t *data_addr;
@@ -166,27 +166,27 @@ JNIEXPORT void JNICALL Java_org_apache_h
   if (crc_type == -1) return; // exception already thrown
 
   // Setup complete. Actually verify checksums.
-  ret = bulk_verify_crc(data, data_len, sums, crc_type,
-                            bytes_per_checksum, &error_data);
-  if (likely(ret == CHECKSUMS_VALID)) {
+  ret = bulk_crc(data, data_len, sums, crc_type,
+                            bytes_per_checksum, verify ? &error_data : NULL);
+  if (likely(verify && ret == CHECKSUMS_VALID || !verify && ret == 0)) {
     return;
-  } else if (unlikely(ret == INVALID_CHECKSUM_DETECTED)) {
+  } else if (unlikely(verify && ret == INVALID_CHECKSUM_DETECTED)) {
     long pos = base_pos + (error_data.bad_data - data);
     throw_checksum_exception(
       env, error_data.got_crc, error_data.expected_crc,
       j_filename, pos);
   } else {
     THROW(env, "java/lang/AssertionError",
-      "Bad response code from native bulk_verify_crc");
+      "Bad response code from native bulk_crc");
   }
 }
 
-JNIEXPORT void JNICALL Java_org_apache_hadoop_util_NativeCrc32_nativeVerifyChunkedSumsByteArray
+JNIEXPORT void JNICALL Java_org_apache_hadoop_util_NativeCrc32_nativeComputeChunkedSumsByteArray
   (JNIEnv *env, jclass clazz,
     jint bytes_per_checksum, jint j_crc_type,
     jarray j_sums, jint sums_offset,
     jarray j_data, jint data_offset, jint data_len,
-    jstring j_filename, jlong base_pos)
+    jstring j_filename, jlong base_pos, jboolean verify)
 {
   uint8_t *sums_addr;
   uint8_t *data_addr;
@@ -237,21 +237,21 @@ JNIEXPORT void JNICALL Java_org_apache_h
     data = data_addr + data_offset + checksumNum * bytes_per_checksum;
 
     // Setup complete. Actually verify checksums.
-    ret = bulk_verify_crc(data, MIN(numChecksumsPerIter * bytes_per_checksum,
-                                    data_len - checksumNum * bytes_per_checksum),
-                          sums, crc_type, bytes_per_checksum, &error_data);
+    ret = bulk_crc(data, MIN(numChecksumsPerIter * bytes_per_checksum,
+                             data_len - checksumNum * bytes_per_checksum),
+                   sums, crc_type, bytes_per_checksum, verify ? &error_data : NULL);
     (*env)->ReleasePrimitiveArrayCritical(env, j_data, data_addr, 0);
     (*env)->ReleasePrimitiveArrayCritical(env, j_sums, sums_addr, 0);
-    if (unlikely(ret == INVALID_CHECKSUM_DETECTED)) {
+    if (unlikely(verify && ret == INVALID_CHECKSUM_DETECTED)) {
       long pos = base_pos + (error_data.bad_data - data) + checksumNum *
         bytes_per_checksum;
       throw_checksum_exception(
         env, error_data.got_crc, error_data.expected_crc,
         j_filename, pos);
       return;
-    } else if (unlikely(ret != CHECKSUMS_VALID)) {
+    } else if (unlikely(verify && ret != CHECKSUMS_VALID || !verify && ret != 0)) {
       THROW(env, "java/lang/AssertionError",
-        "Bad response code from native bulk_verify_crc");
+        "Bad response code from native bulk_crc");
       return;
     }
     checksumNum += numChecksumsPerIter;

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c Mon Aug 18 23:41:53 2014
@@ -55,40 +55,23 @@ static void pipelined_crc32c(uint32_t *c
 static int cached_cpu_supports_crc32; // initialized by constructor below
 static uint32_t crc32c_hardware(uint32_t crc, const uint8_t* data, size_t length);
 
-int bulk_calculate_crc(const uint8_t *data, size_t data_len,
-                    uint32_t *sums, int checksum_type,
-                    int bytes_per_checksum) {
-  uint32_t crc;
-  crc_update_func_t crc_update_func;
-
-  switch (checksum_type) {
-    case CRC32_ZLIB_POLYNOMIAL:
-      crc_update_func = crc32_zlib_sb8;
-      break;
-    case CRC32C_POLYNOMIAL:
-      crc_update_func = crc32c_sb8;
-      break;
-    default:
-      return -EINVAL;
-      break;
+static inline int store_or_verify(uint32_t *sums, uint32_t crc,
+                                   int is_verify) {
+  if (!is_verify) {
+    *sums = crc;
+    return 1;
+  } else {
+    return crc == *sums;
   }
-  while (likely(data_len > 0)) {
-    int len = likely(data_len >= bytes_per_checksum) ? bytes_per_checksum : data_len;
-    crc = CRC_INITIAL_VAL;
-    crc = crc_update_func(crc, data, len);
-    *sums = ntohl(crc_val(crc));
-    data += len;
-    data_len -= len;
-    sums++;
-  }
-  return 0;
 }
 
-int bulk_verify_crc(const uint8_t *data, size_t data_len,
-                    const uint32_t *sums, int checksum_type,
+int bulk_crc(const uint8_t *data, size_t data_len,
+                    uint32_t *sums, int checksum_type,
                     int bytes_per_checksum,
                     crc32_error_t *error_info) {
 
+  int is_verify = error_info != NULL;
+
 #ifdef USE_PIPELINED
   uint32_t crc1, crc2, crc3;
   int n_blocks = data_len / bytes_per_checksum;
@@ -112,7 +95,7 @@ int bulk_verify_crc(const uint8_t *data,
       }
       break;
     default:
-      return INVALID_CHECKSUM_TYPE;
+      return is_verify ? INVALID_CHECKSUM_TYPE : -EINVAL;
   }
 
 #ifdef USE_PIPELINED
@@ -122,16 +105,15 @@ int bulk_verify_crc(const uint8_t *data,
       crc1 = crc2 = crc3 = CRC_INITIAL_VAL;
       pipelined_crc32c(&crc1, &crc2, &crc3, data, bytes_per_checksum, 3);
 
-      crc = ntohl(crc_val(crc1));
-      if ((crc = ntohl(crc_val(crc1))) != *sums)
+      if (unlikely(!store_or_verify(sums, (crc = ntohl(crc_val(crc1))), is_verify)))
         goto return_crc_error;
       sums++;
       data += bytes_per_checksum;
-      if ((crc = ntohl(crc_val(crc2))) != *sums)
+      if (unlikely(!store_or_verify(sums, (crc = ntohl(crc_val(crc2))), is_verify)))
         goto return_crc_error;
       sums++;
       data += bytes_per_checksum;
-      if ((crc = ntohl(crc_val(crc3))) != *sums)
+      if (unlikely(!store_or_verify(sums, (crc = ntohl(crc_val(crc3))), is_verify)))
         goto return_crc_error;
       sums++;
       data += bytes_per_checksum;
@@ -143,12 +125,12 @@ int bulk_verify_crc(const uint8_t *data,
       crc1 = crc2 = crc3 = CRC_INITIAL_VAL;
       pipelined_crc32c(&crc1, &crc2, &crc3, data, bytes_per_checksum, n_blocks);
 
-      if ((crc = ntohl(crc_val(crc1))) != *sums)
+      if (unlikely(!store_or_verify(sums, (crc = ntohl(crc_val(crc1))), is_verify)))
         goto return_crc_error;
       data += bytes_per_checksum;
       sums++;
       if (n_blocks == 2) {
-        if ((crc = ntohl(crc_val(crc2))) != *sums)
+        if (unlikely(!store_or_verify(sums, (crc = ntohl(crc_val(crc2))), is_verify)))
           goto return_crc_error;
         sums++;
         data += bytes_per_checksum;
@@ -160,10 +142,10 @@ int bulk_verify_crc(const uint8_t *data,
       crc1 = crc2 = crc3 = CRC_INITIAL_VAL;
       pipelined_crc32c(&crc1, &crc2, &crc3, data, remainder, 1);
 
-      if ((crc = ntohl(crc_val(crc1))) != *sums)
+      if (unlikely(!store_or_verify(sums, (crc = ntohl(crc_val(crc1))), is_verify)))
         goto return_crc_error;
     }
-    return CHECKSUMS_VALID;
+    return is_verify ? CHECKSUMS_VALID : 0;
   }
 #endif
 
@@ -172,14 +154,14 @@ int bulk_verify_crc(const uint8_t *data,
     crc = CRC_INITIAL_VAL;
     crc = crc_update_func(crc, data, len);
     crc = ntohl(crc_val(crc));
-    if (unlikely(crc != *sums)) {
+    if (unlikely(!store_or_verify(sums, crc, is_verify))) {
       goto return_crc_error;
     }
     data += len;
     data_len -= len;
     sums++;
   }
-  return CHECKSUMS_VALID;
+  return is_verify ? CHECKSUMS_VALID : 0;
 
 return_crc_error:
   if (error_info != NULL) {
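
The store_or_verify helper is what lets one loop serve both modes: in calculate mode it writes the fresh CRC into the current sums slot and always reports success; in verify mode it compares instead, and a false return routes to the error path. A Java rendering of the same helper (a sketch; the original is C in bulk_crc32.c):

    public class StoreOrVerify {
      // Returns false only on a verify-mode mismatch, like the C helper.
      static boolean storeOrVerify(int[] sums, int idx, int crc,
                                   boolean isVerify) {
        if (!isVerify) {
          sums[idx] = crc;        // calculate mode: record the checksum
          return true;
        }
        return crc == sums[idx];  // verify mode: compare with stored value
      }

      public static void main(String[] args) {
        int[] sums = new int[1];
        storeOrVerify(sums, 0, 0xCAFEBABE, false);                     // store
        System.out.println(storeOrVerify(sums, 0, 0xCAFEBABE, true));  // true
        System.out.println(storeOrVerify(sums, 0, 0xDEADBEEF, true));  // false
      }
    }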

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h Mon Aug 18 23:41:53 2014
@@ -42,49 +42,32 @@ typedef struct crc32_error {
 
 
 /**
- * Verify a buffer of data which is checksummed in chunks
- * of bytes_per_checksum bytes. The checksums are each 32 bits
- * and are stored in sequential indexes of the 'sums' array.
+ * Either calculates checksums for or verifies a buffer of data.
+ * Checksums performed in chunks of bytes_per_checksum bytes. The checksums
+ * are each 32 bits and are stored in sequential indexes of the 'sums' array.
+ * Verification is done (sums is assumed to already contain the checksums)
+ * if error_info is non-null; otherwise calculation is done and checksums
+ * are stored into sums.
  *
  * @param data                  The data to checksum
  * @param dataLen               Length of the data buffer
- * @param sums                  (out param) buffer to write checksums into.
- *                              It must contain at least dataLen * 4 bytes.
+ * @param sums                  (out param) buffer to write checksums into or
+ *                              where checksums are already stored.
+ *                              It must contain at least
+ *                              ((dataLen - 1) / bytes_per_checksum + 1) * 4 bytes.
  * @param checksum_type         One of the CRC32 algorithm constants defined 
  *                              above
  * @param bytes_per_checksum    How many bytes of data to process per checksum.
- * @param error_info            If non-NULL, will be filled in if an error
- *                              is detected
+ * @param error_info            If non-NULL, verification will be performed and
+ *                              it will be filled in if an error
+ *                              is detected. Otherwise calculation is performed.
  *
  * @return                      0 for success, non-zero for an error, result codes
- *                              for which are defined above
+ *                              for verification are defined above
  */
-extern int bulk_verify_crc(const uint8_t *data, size_t data_len,
-    const uint32_t *sums, int checksum_type,
+extern int bulk_crc(const uint8_t *data, size_t data_len,
+    uint32_t *sums, int checksum_type,
     int bytes_per_checksum,
     crc32_error_t *error_info);
 
-/**
- * Calculate checksums for some data.
- *
- * The checksums are each 32 bits and are stored in sequential indexes of the
- * 'sums' array.
- *
- * This function is not (yet) optimized.  It is provided for testing purposes
- * only.
- *
- * @param data                  The data to checksum
- * @param dataLen               Length of the data buffer
- * @param sums                  (out param) buffer to write checksums into.
- *                              It must contain at least dataLen * 4 bytes.
- * @param checksum_type         One of the CRC32 algorithm constants defined 
- *                              above
- * @param bytesPerChecksum      How many bytes of data to process per checksum.
- *
- * @return                      0 for success, non-zero for an error
- */
-int bulk_calculate_crc(const uint8_t *data, size_t data_len,
-                    uint32_t *sums, int checksum_type,
-                    int bytes_per_checksum);
-
 #endif

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c Mon Aug 18 23:41:53 2014
@@ -48,9 +48,9 @@ static int testBulkVerifyCrc(int dataLen
   sums = calloc(sizeof(uint32_t),
                 (dataLen + bytesPerChecksum - 1) / bytesPerChecksum);
 
-  EXPECT_ZERO(bulk_calculate_crc(data, dataLen, sums, crcType,
-                                 bytesPerChecksum));
-  EXPECT_ZERO(bulk_verify_crc(data, dataLen, sums, crcType,
+  EXPECT_ZERO(bulk_crc(data, dataLen, sums, crcType,
+                                 bytesPerChecksum, NULL));
+  EXPECT_ZERO(bulk_crc(data, dataLen, sums, crcType,
                             bytesPerChecksum, &errorData));
   free(data);
   free(sums);

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/NativeLibraries.apt.vm Mon Aug 18 23:41:53 2014
@@ -56,10 +56,12 @@ Native Libraries Guide
 
     [[4]] Install the compression codec development packages (>zlib-1.2,
        >gzip-1.2):
-          + If you download the library, install one or more development
+
+          * If you download the library, install one or more development
             packages - whichever compression codecs you want to use with
             your deployment.
-          + If you build the library, it is mandatory to install both
+
+          * If you build the library, it is mandatory to install both
             development packages.
 
     [[5]] Check the runtime log files.
@@ -127,7 +129,7 @@ Native Libraries Guide
    library:
 
 ----
-   $ mvn package -Pdist,native -Dskiptests -Dtar
+   $ mvn package -Pdist,native -DskipTests -Dtar
 ----
 
    You should see the newly-built library in:

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/ServiceLevelAuth.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/ServiceLevelAuth.apt.vm?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/ServiceLevelAuth.apt.vm (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/site/apt/ServiceLevelAuth.apt.vm Mon Aug 18 23:41:53 2014
@@ -110,6 +110,27 @@ security.ha.service.protocol.acl      | 
    <<<security.service.authorization.default.acl>>> is applied. If 
    <<<security.service.authorization.default.acl>>> is not defined, <<<*>>>  is applied.
 
+ ** Blocked Access Control Lists
+
+   In some cases, it is necessary to specify a blocked access control list for a service: the list
+   of users and groups who are not authorized to access the service. The format of the blocked
+   access control list is the same as that of the access control list. The blocked access
+   control list can be specified via <<<${HADOOP_CONF_DIR}/hadoop-policy.xml>>>. The property name
+   is derived by suffixing the service's ACL property name with ".blocked".
+
+   Example: The property name of the blocked access control list for <<<security.client.protocol.acl>>>
+   will be <<<security.client.protocol.acl.blocked>>>.
+
+   For a service, it is possible to specify both an access control list and a blocked access control
+   list. A user is authorized to access the service if the user is in the access control list and not
+   in the blocked access control list.
+
+   If the blocked access control list is not defined for a service, the value of
+   <<<security.service.authorization.default.acl.blocked>>> is applied. If
+   <<<security.service.authorization.default.acl.blocked>>> is not defined, an
+   empty blocked access control list is applied.
+
+
 ** Refreshing Service Level Authorization Configuration
 
    The service-level authorization configuration for the NameNode and
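
The lookup order documented above (per-service ".blocked" key, then the cluster-wide default, then an empty list) mirrors refreshWithLoadedConfiguration in ServiceAuthorizationManager. A small sketch of that precedence, assuming the standard Configuration API:

    import org.apache.hadoop.conf.Configuration;

    public class BlockedAclLookup {
      // Per-service ".blocked" key first, then the cluster default,
      // then "" (block nobody): the documented fallback chain.
      static String blockedAclFor(Configuration conf, String serviceKey) {
        String defaultBlocked = conf.get(
            "security.service.authorization.default.acl.blocked", "");
        return conf.get(serviceKey + ".blocked", defaultBlocked);
      }

      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("security.service.authorization.default.acl.blocked",
                 "user2 group2");
        // No per-service override is set, so the default applies:
        System.out.println(
            blockedAclFor(conf, "security.client.protocol.acl"));
      }
    }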

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java Mon Aug 18 23:41:53 2014
@@ -583,14 +583,14 @@ public class TestRPC {
       }
       MetricsRecordBuilder rb = getMetrics(server.rpcMetrics.name());
       if (expectFailure) {
-        assertCounter("RpcAuthorizationFailures", 1, rb);
+        assertCounter("RpcAuthorizationFailures", 1L, rb);
       } else {
-        assertCounter("RpcAuthorizationSuccesses", 1, rb);
+        assertCounter("RpcAuthorizationSuccesses", 1L, rb);
       }
       //since we don't have authentication turned ON, we should see 
       // 0 for the authentication successes and 0 for failure
-      assertCounter("RpcAuthenticationFailures", 0, rb);
-      assertCounter("RpcAuthenticationSuccesses", 0, rb);
+      assertCounter("RpcAuthenticationFailures", 0L, rb);
+      assertCounter("RpcAuthenticationSuccesses", 0L, rb);
     }
   }
   

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestServiceAuthorization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestServiceAuthorization.java?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestServiceAuthorization.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestServiceAuthorization.java Mon Aug 18 23:41:53 2014
@@ -18,16 +18,22 @@
 package org.apache.hadoop.security.authorize;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.ipc.TestRPC.TestProtocol;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.Test;
 
 public class TestServiceAuthorization {
 
   private static final String ACL_CONFIG = "test.protocol.acl";
   private static final String ACL_CONFIG1 = "test.protocol1.acl";
+  private static final String ADDRESS =  "0.0.0.0";
 
   public interface TestProtocol1 extends TestProtocol {};
 
@@ -64,4 +70,115 @@ public class TestServiceAuthorization {
     acl = serviceAuthorizationManager.getProtocolsAcls(TestProtocol1.class);
     assertEquals("user2 group2", acl.getAclString());
   }
+
+  @Test
+  public void testBlockedAcl() throws UnknownHostException {
+    UserGroupInformation drwho =
+        UserGroupInformation.createUserForTesting("drwho@EXAMPLE.COM",
+            new String[] { "group1", "group2" });
+
+    ServiceAuthorizationManager serviceAuthorizationManager =
+        new ServiceAuthorizationManager();
+    Configuration conf = new Configuration ();
+
+    //test without setting a blocked acl
+    conf.set(ACL_CONFIG, "user1 group1");
+    serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+    try {
+      serviceAuthorizationManager.authorize(drwho, TestProtocol.class, conf,
+          InetAddress.getByName(ADDRESS));
+    } catch (AuthorizationException e) {
+      fail();
+    }
+    //now set a blocked acl with another user and another group
+    conf.set(ACL_CONFIG + ServiceAuthorizationManager.BLOCKED, "drwho2 group3");
+    serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+    try {
+      serviceAuthorizationManager.authorize(drwho, TestProtocol.class, conf,
+          InetAddress.getByName(ADDRESS));
+    } catch (AuthorizationException e) {
+      fail();
+    }
+    //now set a blocked acl with the user and another group
+    conf.set(ACL_CONFIG + ServiceAuthorizationManager.BLOCKED, "drwho group3");
+    serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+    try {
+      serviceAuthorizationManager.authorize(drwho, TestProtocol.class, conf,
+          InetAddress.getByName(ADDRESS));
+      fail();
+    } catch (AuthorizationException e) {
+
+    }
+    //now set a blocked acl with another user and another group
+    conf.set(ACL_CONFIG + ServiceAuthorizationManager.BLOCKED, "drwho2 group3");
+    serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+    try {
+      serviceAuthorizationManager.authorize(drwho, TestProtocol.class, conf,
+          InetAddress.getByName(ADDRESS));
+    } catch (AuthorizationException e) {
+      fail();
+    }
+    //now set a blocked acl with another user and group that the user belongs to
+    conf.set(ACL_CONFIG + ServiceAuthorizationManager.BLOCKED, "drwho2 group2");
+    serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+    try {
+      serviceAuthorizationManager.authorize(drwho, TestProtocol.class, conf,
+          InetAddress.getByName(ADDRESS));
+      fail();
+    } catch (AuthorizationException e) {
+      //expects Exception
+    }
+    //reset blocked acl so that there is no blocked ACL
+    conf.set(ACL_CONFIG + ServiceAuthorizationManager.BLOCKED, "");
+    serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+    try {
+      serviceAuthorizationManager.authorize(drwho, TestProtocol.class, conf,
+          InetAddress.getByName(ADDRESS));
+    } catch (AuthorizationException e) {
+      fail();
+    }
+  }
+
+  @Test
+  public void testDefaultBlockedAcl() throws UnknownHostException {
+    UserGroupInformation drwho =
+        UserGroupInformation.createUserForTesting("drwho@EXAMPLE.COM",
+            new String[] { "group1", "group2" });
+
+    ServiceAuthorizationManager serviceAuthorizationManager =
+        new ServiceAuthorizationManager();
+    Configuration conf = new Configuration ();
+
+    //test without setting a default blocked acl
+    serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+    try {
+      serviceAuthorizationManager.authorize(drwho, TestProtocol1.class, conf,
+          InetAddress.getByName(ADDRESS));
+    } catch (AuthorizationException e) {
+      fail();
+    }
+
+    //set a restrictive default blocked acl and a non-restricting blocked acl for TestProtocol
+    conf.set(
+        CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_DEFAULT_BLOCKED_ACL,
+        "user2 group2");
+    conf.set(ACL_CONFIG + ServiceAuthorizationManager.BLOCKED, "user2");
+    serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+    //drwho is authorized to access TestProtocol
+    try {
+      serviceAuthorizationManager.authorize(drwho, TestProtocol.class, conf,
+          InetAddress.getByName(ADDRESS));
+    } catch (AuthorizationException e) {
+      fail();
+    }
+    //drwho is not authorized to access TestProtocol1 because it uses the default blocked acl.
+    try {
+      serviceAuthorizationManager.authorize(drwho, TestProtocol1.class, conf,
+          InetAddress.getByName(ADDRESS));
+      fail();
+    } catch (AuthorizationException e) {
+      //expects Exception
+    }
+  }
+
 }

Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java?rev=1618764&r1=1618763&r2=1618764&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java Mon Aug 18 23:41:53 2014
@@ -19,6 +19,9 @@ package org.apache.hadoop.util;
 
 import java.nio.ByteBuffer;
 import java.util.Random;
+import java.util.concurrent.TimeUnit;
+
+import com.google.common.base.Stopwatch;
 
 import org.apache.hadoop.fs.ChecksumException;
 import org.junit.Test;
@@ -53,68 +56,113 @@ public class TestDataChecksum {
       }
     }
   }
-  
-  private void doBulkTest(DataChecksum checksum, int dataLength,
-      boolean useDirect) throws Exception {
-    System.err.println("Testing bulk checksums of length " + 
-        dataLength + " with " +
-        (useDirect ? "direct" : "array-backed") + " buffers");
-    int numSums = (dataLength - 1)/checksum.getBytesPerChecksum() + 1;
-    int sumsLength = numSums * checksum.getChecksumSize();
-    
-    byte data[] = new byte[dataLength +
-                           DATA_OFFSET_IN_BUFFER +
-                           DATA_TRAILER_IN_BUFFER];
-    new Random().nextBytes(data);
-    ByteBuffer dataBuf = ByteBuffer.wrap(
+
+  private static class Harness {
+    final DataChecksum checksum;
+    final int dataLength, sumsLength, numSums;
+    ByteBuffer dataBuf, checksumBuf;
+
+    Harness(DataChecksum checksum, int dataLength, boolean useDirect) {
+      this.checksum = checksum;
+      this.dataLength = dataLength;
+
+      numSums = (dataLength - 1)/checksum.getBytesPerChecksum() + 1;
+      sumsLength = numSums * checksum.getChecksumSize();
+
+      byte data[] = new byte[dataLength +
+                             DATA_OFFSET_IN_BUFFER +
+                             DATA_TRAILER_IN_BUFFER];
+      new Random().nextBytes(data);
+      dataBuf = ByteBuffer.wrap(
         data, DATA_OFFSET_IN_BUFFER, dataLength);
 
-    byte checksums[] = new byte[SUMS_OFFSET_IN_BUFFER + sumsLength];
-    ByteBuffer checksumBuf = ByteBuffer.wrap(
+      byte checksums[] = new byte[SUMS_OFFSET_IN_BUFFER + sumsLength];
+      checksumBuf = ByteBuffer.wrap(
         checksums, SUMS_OFFSET_IN_BUFFER, sumsLength);
-    
-    // Swap out for direct buffers if requested.
-    if (useDirect) {
-      dataBuf = directify(dataBuf);
-      checksumBuf = directify(checksumBuf);
+
+      // Swap out for direct buffers if requested.
+      if (useDirect) {
+        dataBuf = directify(dataBuf);
+        checksumBuf = directify(checksumBuf);
+      }
     }
-    
-    // calculate real checksum, make sure it passes
-    checksum.calculateChunkedSums(dataBuf, checksumBuf);
-    checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
-
-    // Change a byte in the header and in the trailer, make sure
-    // it doesn't affect checksum result
-    corruptBufferOffset(checksumBuf, 0);
-    checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
-    corruptBufferOffset(dataBuf, 0);
-    dataBuf.limit(dataBuf.limit() + 1);
-    corruptBufferOffset(dataBuf, dataLength + DATA_OFFSET_IN_BUFFER);
-    dataBuf.limit(dataBuf.limit() - 1);
-    checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);    
-    
-    // Make sure bad checksums fail - error at beginning of array
-    corruptBufferOffset(checksumBuf, SUMS_OFFSET_IN_BUFFER);
-    try {
+
+    void testCorrectness() throws ChecksumException {
+      // calculate real checksum, make sure it passes
+      checksum.calculateChunkedSums(dataBuf, checksumBuf);
       checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
-      fail("Did not throw on bad checksums");
-    } catch (ChecksumException ce) {
-      assertEquals(0, ce.getPos());
-    }
 
-    // Make sure bad checksums fail - error at end of array
-    uncorruptBufferOffset(checksumBuf, SUMS_OFFSET_IN_BUFFER);
-    corruptBufferOffset(checksumBuf, SUMS_OFFSET_IN_BUFFER + sumsLength - 1);
-    try {
+      // Change a byte in the header and in the trailer, make sure
+      // it doesn't affect checksum result
+      corruptBufferOffset(checksumBuf, 0);
+      checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
+      corruptBufferOffset(dataBuf, 0);
+      dataBuf.limit(dataBuf.limit() + 1);
+      corruptBufferOffset(dataBuf, dataLength + DATA_OFFSET_IN_BUFFER);
+      dataBuf.limit(dataBuf.limit() - 1);
       checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
-      fail("Did not throw on bad checksums");
-    } catch (ChecksumException ce) {
-      int expectedPos = checksum.getBytesPerChecksum() * (numSums - 1);
-      assertEquals(expectedPos, ce.getPos());
-      assertTrue(ce.getMessage().contains("fake file"));
+
+      // Make sure bad checksums fail - error at beginning of array
+      corruptBufferOffset(checksumBuf, SUMS_OFFSET_IN_BUFFER);
+      try {
+        checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
+        fail("Did not throw on bad checksums");
+      } catch (ChecksumException ce) {
+        assertEquals(0, ce.getPos());
+      }
+
+      // Make sure bad checksums fail - error at end of array
+      uncorruptBufferOffset(checksumBuf, SUMS_OFFSET_IN_BUFFER);
+      corruptBufferOffset(checksumBuf, SUMS_OFFSET_IN_BUFFER + sumsLength - 1);
+      try {
+        checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
+        fail("Did not throw on bad checksums");
+      } catch (ChecksumException ce) {
+        int expectedPos = checksum.getBytesPerChecksum() * (numSums - 1);
+        assertEquals(expectedPos, ce.getPos());
+        assertTrue(ce.getMessage().contains("fake file"));
+      }
     }
   }
-  
+
+  private void doBulkTest(DataChecksum checksum, int dataLength,
+      boolean useDirect) throws Exception {
+    System.err.println("Testing bulk checksums of length " +
+        dataLength + " with " +
+        (useDirect ? "direct" : "array-backed") + " buffers");
+
+    new Harness(checksum, dataLength, useDirect).testCorrectness();
+  }
+
+  /**
+   * Simple performance test for the "common case" checksum usage in HDFS:
+   * computing and verifying CRC32C with 512 byte chunking on native
+   * buffers.
+   */
+  @Test
+  public void commonUsagePerfTest() throws Exception {
+    final int NUM_RUNS = 5;
+    final DataChecksum checksum = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 512);
+    final int dataLength = 512 * 1024 * 1024;
+    Harness h = new Harness(checksum, dataLength, true);
+
+    for (int i = 0; i < NUM_RUNS; i++) {
+      Stopwatch s = new Stopwatch().start();
+      // calculate real checksum, make sure it passes
+      checksum.calculateChunkedSums(h.dataBuf, h.checksumBuf);
+      s.stop();
+      System.err.println("Calculate run #" + i + ": " +
+                         s.elapsedTime(TimeUnit.MICROSECONDS) + "us");
+
+      s = new Stopwatch().start();
+      // verify the checksums computed above
+      checksum.verifyChunkedSums(h.dataBuf, h.checksumBuf, "fake file", 0);
+      s.stop();
+      System.err.println("Verify run #" + i + ": " +
+                         s.elapsedTime(TimeUnit.MICROSECONDS) + "us");
+    }
+  }
+
   @Test
   public void testEquality() {
     assertEquals(


