hbase-commits mailing list archives

From apurt...@apache.org
Subject [8/9] git commit: HBASE-11589 AccessControlException should be a not retriable exception (Qiang Tian)
Date Sat, 09 Aug 2014 02:36:34 GMT
HBASE-11589 AccessControlException should be a not retriable exception (Qiang Tian)

Amending-Author: Andrew Purtell <apurtell@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b8f2f67d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b8f2f67d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b8f2f67d

Branch: refs/heads/branch-1
Commit: b8f2f67d4c79138b94dc6c330944cc709049ce05
Parents: 2df6b05
Author: Andrew Purtell <apurtell@apache.org>
Authored: Fri Aug 8 19:10:56 2014 -0700
Committer: Andrew Purtell <apurtell@apache.org>
Committed: Fri Aug 8 19:12:09 2014 -0700

----------------------------------------------------------------------
 .../hbase/security/AccessDeniedException.java   |  5 ++++
 .../org/apache/hadoop/hbase/ipc/RpcServer.java  | 24 ++++++++++----------
 .../org/apache/hadoop/hbase/util/FSUtils.java   |  6 ++---
 .../org/apache/hadoop/hbase/util/HBaseFsck.java | 12 +++++-----
 4 files changed, 26 insertions(+), 21 deletions(-)
----------------------------------------------------------------------
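
The substance of the change: AccessDeniedException already extends DoNotRetryIOException, so throwing it (rather than Hadoop's AccessControlException, which is a plain IOException) tells HBase clients to fail immediately on a request that can never succeed. A minimal sketch of that client-side contract, assuming a generic retry loop rather than HBase's actual RpcRetryingCaller:

import java.io.IOException;
import java.util.concurrent.Callable;

import org.apache.hadoop.hbase.DoNotRetryIOException;

public class RetrySketch {
  static <T> T callWithRetries(Callable<T> call, int maxAttempts) throws IOException {
    IOException last = null;
    for (int attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        return call.call();
      } catch (DoNotRetryIOException dnrioe) {
        throw dnrioe; // AccessDeniedException now lands here: fail fast, no retries
      } catch (IOException ioe) {
        last = ioe;   // presumed transient: loop and try again
      } catch (Exception e) {
        throw new IOException(e);
      }
    }
    throw last;
  }
}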


http://git-wip-us.apache.org/repos/asf/hbase/blob/b8f2f67d/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java
index 482faef..f7b07e5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java
@@ -41,4 +41,9 @@ public class AccessDeniedException extends DoNotRetryIOException {
   public AccessDeniedException(String s) {
     super(s);
   }
+
+  public AccessDeniedException(Throwable cause) {
+    super(cause);
+  }
+
 }
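
A hedged usage sketch of the constructor added above: passing the cause preserves the original stack trace, which the String constructor visible in the same hunk cannot carry on its own. The helper below is illustrative, not part of the patch:

import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.security.AccessControlException;

public class CauseWrapSketch {
  // Hypothetical helper: convert a Hadoop-level access failure into the
  // non-retriable HBase exception while keeping the original stack trace.
  static AccessDeniedException asDenied(AccessControlException ace) {
    return new AccessDeniedException(ace);
  }
}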

http://git-wip-us.apache.org/repos/asf/hbase/blob/b8f2f67d/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
index ef4df3e..7d9c569 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase.ipc;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION;
+import io.netty.util.internal.ConcurrentSet;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -63,8 +64,6 @@ import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
 
-import io.netty.util.internal.ConcurrentSet;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -91,6 +90,7 @@ import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.security.AuthMethod;
 import org.apache.hadoop.hbase.security.HBasePolicyProvider;
 import org.apache.hadoop.hbase.security.HBaseSaslRpcServer;
@@ -108,7 +108,6 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.AuthorizationException;
@@ -119,8 +118,8 @@ import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.StringUtils;
-import org.htrace.TraceInfo;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.htrace.TraceInfo;
 
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import com.google.protobuf.BlockingService;
@@ -1235,7 +1234,7 @@ public class RpcServer implements RpcServerInterface {
             secretManager);
         UserGroupInformation ugi = tokenId.getUser();
         if (ugi == null) {
-          throw new AccessControlException(
+          throw new AccessDeniedException(
               "Can't retrieve username from tokenIdentifier.");
         }
         ugi.addTokenIdentifier(tokenId);
@@ -1265,7 +1264,7 @@ public class RpcServer implements RpcServerInterface {
             switch (authMethod) {
             case DIGEST:
               if (secretManager == null) {
-                throw new AccessControlException(
+                throw new AccessDeniedException(
                     "Server is not configured to do DIGEST authentication.");
               }
               saslServer = Sasl.createSaslServer(AuthMethod.DIGEST
@@ -1282,7 +1281,7 @@ public class RpcServer implements RpcServerInterface {
               }
               final String names[] = SaslUtil.splitKerberosName(fullName);
               if (names.length != 3) {
-                throw new AccessControlException(
+                throw new AccessDeniedException(
                     "Kerberos principal name does NOT have the expected "
                         + "hostname part: " + fullName);
               }
@@ -1297,7 +1296,7 @@ public class RpcServer implements RpcServerInterface {
               });
             }
             if (saslServer == null)
-              throw new AccessControlException(
+              throw new AccessDeniedException(
                   "Unable to find SASL server implementation for "
                       + authMethod.getMechanismName());
             if (LOG.isDebugEnabled()) {
@@ -1421,7 +1420,7 @@ public class RpcServer implements RpcServerInterface {
         return doBadPreambleHandling(msg, new BadAuthException(msg));
       }
       if (isSecurityEnabled && authMethod == AuthMethod.SIMPLE) {
-        AccessControlException ae = new AccessControlException("Authentication is required");
+        AccessDeniedException ae = new AccessDeniedException("Authentication is required");
         setupResponse(authFailedResponse, authFailedCall, ae, ae.getMessage());
         responder.doRespond(authFailedCall);
         throw ae;
@@ -1581,7 +1580,7 @@ public class RpcServer implements RpcServerInterface {
             && (!protocolUser.getUserName().equals(user.getUserName()))) {
           if (authMethod == AuthMethod.DIGEST) {
             // Not allowed to doAs if token authentication is used
-            throw new AccessControlException("Authenticated user (" + user
+            throw new AccessDeniedException("Authenticated user (" + user
                 + ") doesn't match what the client claims to be ("
                 + protocolUser + ")");
           } else {
@@ -1669,7 +1668,7 @@ public class RpcServer implements RpcServerInterface {
         if (!authorizeConnection()) {
           // Throw FatalConnectionException wrapping ACE so client does right thing and closes
          // down the connection instead of trying to read non-existent return.
-          throw new AccessControlException("Connection from " + this + " for service " +
+          throw new AccessDeniedException("Connection from " + this + " for service " +
             connectionHeader.getServiceName() + " is unauthorized for user: " + user);
         }
       }
@@ -1778,7 +1777,8 @@ public class RpcServer implements RpcServerInterface {
           LOG.debug("Connection authorization failed: " + ae.getMessage(), ae);
         }
         metrics.authorizationFailure();
-        setupResponse(authFailedResponse, authFailedCall, ae, ae.getMessage());
+        setupResponse(authFailedResponse, authFailedCall,
+          new AccessDeniedException(ae), ae.getMessage());
         responder.doRespond(authFailedCall);
         return false;
       }
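
Condensed, the pattern in the final RpcServer hunk: Hadoop's service-level authorization still throws AuthorizationException, so the server wraps it before responding and the client receives a DoNotRetryIOException subtype. The authorize/respond stand-ins below are hypothetical; only the wrapping call mirrors the patch:

import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.security.authorize.AuthorizationException;

public class AuthorizeConnectionSketch {
  boolean authorizeConnection() {
    try {
      authorize(); // stand-in for the ServiceAuthorizationManager check
      return true;
    } catch (AuthorizationException ae) {
      // Wrap so the response carries the non-retriable exception type
      respond(new AccessDeniedException(ae));
      return false;
    }
  }

  void authorize() throws AuthorizationException {}
  void respond(AccessDeniedException ade) {}
}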

http://git-wip-us.apache.org/repos/asf/hbase/blob/b8f2f67d/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index 53f5874..b569f4b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -66,6 +66,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.RegionPlacementMaintainer;
+import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.FSProtos;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -73,7 +74,6 @@ import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -1669,7 +1669,7 @@ public abstract class FSUtils {
    *          the action
    */
   public static void checkAccess(UserGroupInformation ugi, FileStatus file,
-      FsAction action) throws AccessControlException {
+      FsAction action) throws AccessDeniedException {
     if (ugi.getShortUserName().equals(file.getOwner())) {
       if (file.getPermission().getUserAction().implies(action)) {
         return;
@@ -1681,7 +1681,7 @@ public abstract class FSUtils {
     } else if (file.getPermission().getOtherAction().implies(action)) {
       return;
     }
-    throw new AccessControlException("Permission denied:" + " action=" + action
+    throw new AccessDeniedException("Permission denied:" + " action=" + action
         + " path=" + file.getPath() + " user=" + ugi.getShortUserName());
   }
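
For completeness, a small sketch of how a caller sees the reworked checkAccess signature: the thrown type is now the HBase AccessDeniedException. The canWrite wrapper is illustrative only:

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.security.UserGroupInformation;

public class CheckAccessSketch {
  // Hypothetical wrapper: true if the user passes the owner/group/other check
  static boolean canWrite(UserGroupInformation ugi, FileStatus file) {
    try {
      FSUtils.checkAccess(ugi, file, FsAction.WRITE);
      return true;
    } catch (AccessDeniedException ade) {
      return false;
    }
  }
}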
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b8f2f67d/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 3450516..982417e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -106,8 +106,8 @@ import org.apache.hadoop.hbase.util.hbck.TableLockChecker;
 import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
 import org.apache.hadoop.hbase.zookeeper.ZKTableStateClientSideReader;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
@@ -1565,7 +1565,7 @@ public class HBaseFsck extends Configured {
     }
   }
 
-  private void preCheckPermission() throws IOException, AccessControlException {
+  private void preCheckPermission() throws IOException, AccessDeniedException {
     if (shouldIgnorePreCheckPermission()) {
       return;
     }
@@ -1578,12 +1578,12 @@ public class HBaseFsck extends Configured {
     for (FileStatus file : files) {
       try {
         FSUtils.checkAccess(ugi, file, FsAction.WRITE);
-      } catch (AccessControlException ace) {
-        LOG.warn("Got AccessControlException when preCheckPermission ", ace);
+      } catch (AccessDeniedException ace) {
+        LOG.warn("Got AccessDeniedException when preCheckPermission ", ace);
         errors.reportError(ERROR_CODE.WRONG_USAGE, "Current user " + ugi.getUserName()
           + " does not have write perms to " + file.getPath()
           + ". Please rerun hbck as hdfs user " + file.getOwner());
-        throw new AccessControlException(ace);
+        throw ace;
       }
     }
   }
@@ -4014,7 +4014,7 @@ public class HBaseFsck extends Configured {
     // pre-check current user has FS write permission or not
     try {
       preCheckPermission();
-    } catch (AccessControlException ace) {
+    } catch (AccessDeniedException ace) {
       Runtime.getRuntime().exit(-1);
     } catch (IOException ioe) {
       Runtime.getRuntime().exit(-1);
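
One subtlety in the last hunk: AccessDeniedException extends DoNotRetryIOException, which extends IOException, so the specific catch must come before the general one (the reverse order would not compile). A self-contained sketch of that ordering:

import java.io.IOException;

import org.apache.hadoop.hbase.security.AccessDeniedException;

public class CatchOrderSketch {
  public static void main(String[] args) {
    try {
      throw new AccessDeniedException("denied");
    } catch (AccessDeniedException ace) {
      System.out.println("fail fast: " + ace.getMessage());
    } catch (IOException ioe) {
      System.out.println("other I/O error: " + ioe.getMessage());
    }
  }
}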

