From: apurtell@apache.org
To: commits@hbase.apache.org
Date: Sat, 09 Aug 2014 02:36:35 -0000
Message-Id: <455edecab89b48fd9510c21dc268efc6@git.apache.org>
In-Reply-To: <1d97830785ec4687bf2bc239c5cd5cc0@git.apache.org>
References: <1d97830785ec4687bf2bc239c5cd5cc0@git.apache.org>
Subject: [9/9] git commit: HBASE-11589 AccessControlException should be a not retriable exception (Qiang Tian)

HBASE-11589 AccessControlException should be a not retriable exception (Qiang Tian)

Amending-Author: Andrew Purtell

Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/67c23232
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/67c23232
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/67c23232

Branch: refs/heads/0.98
Commit: 67c232326d16d64bbd7551ec2ef3c796f365d16e
Parents: cb4ac0d
Author: Andrew Purtell
Authored: Fri Aug 8 19:10:56 2014 -0700
Committer: Andrew Purtell
Committed: Fri Aug 8 19:17:28 2014 -0700

----------------------------------------------------------------------
 .../hbase/security/AccessDeniedException.java   |  5 +++++
 .../org/apache/hadoop/hbase/ipc/RpcServer.java  | 19 ++++++++++---------
 .../org/apache/hadoop/hbase/util/FSUtils.java   |  6 +++---
 .../org/apache/hadoop/hbase/util/HBaseFsck.java | 12 ++++++------
 4 files changed, 24 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/67c23232/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java
index 482faef..f7b07e5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java
@@ -41,4 +41,9 @@ public class AccessDeniedException extends DoNotRetryIOException {
   public AccessDeniedException(String s) {
     super(s);
   }
+
+  public AccessDeniedException(Throwable cause) {
+    super(cause);
+  }
+
 }
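The new Throwable-accepting constructor lets server-side code wrap a lower-level authorization failure while still surfacing a subclass of DoNotRetryIOException to clients (the hunk header above shows AccessDeniedException already extends DoNotRetryIOException). The following sketch is hypothetical and not part of this commit; the class name and the authorize() helper are made up for illustration, but the wrapping pattern matches what the RpcServer change below does with new AccessDeniedException(ae).

// Hypothetical illustration, not part of this commit's diff: wrap a Hadoop
// AuthorizationException so HBase clients see a non-retriable error.
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.security.authorize.AuthorizationException;

public class WrapAuthFailureSketch {
  // authorize(...) stands in for whatever permission check the caller performs.
  static void authorize(String user, boolean allowed) throws AccessDeniedException {
    try {
      if (!allowed) {
        throw new AuthorizationException("User " + user + " is not authorized");
      }
    } catch (AuthorizationException ae) {
      // The original cause is preserved for server-side diagnostics, but the
      // client receives a DoNotRetryIOException subclass and fails fast
      // instead of retrying an operation that can never succeed.
      throw new AccessDeniedException(ae);
    }
  }
}
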
http://git-wip-us.apache.org/repos/asf/hbase/blob/67c23232/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
index 8ff70fe..6f5b6a7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
@@ -84,6 +84,7 @@ import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.security.AuthMethod;
 import org.apache.hadoop.hbase.security.HBasePolicyProvider;
 import org.apache.hadoop.hbase.security.HBaseSaslRpcServer;
@@ -100,7 +101,6 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.AuthorizationException;
@@ -1247,7 +1247,7 @@ public class RpcServer implements RpcServerInterface {
           secretManager);
       UserGroupInformation ugi = tokenId.getUser();
       if (ugi == null) {
-        throw new AccessControlException(
+        throw new AccessDeniedException(
             "Can't retrieve username from tokenIdentifier.");
       }
       ugi.addTokenIdentifier(tokenId);
@@ -1277,7 +1277,7 @@ public class RpcServer implements RpcServerInterface {
       switch (authMethod) {
       case DIGEST:
         if (secretManager == null) {
-          throw new AccessControlException(
+          throw new AccessDeniedException(
               "Server is not configured to do DIGEST authentication.");
         }
         saslServer = Sasl.createSaslServer(AuthMethod.DIGEST
@@ -1294,7 +1294,7 @@ public class RpcServer implements RpcServerInterface {
               }
               final String names[] = SaslUtil.splitKerberosName(fullName);
               if (names.length != 3) {
-                throw new AccessControlException(
+                throw new AccessDeniedException(
                     "Kerberos principal name does NOT have the expected "
                         + "hostname part: " + fullName);
               }
@@ -1309,7 +1309,7 @@ public class RpcServer implements RpcServerInterface {
             });
       }
       if (saslServer == null)
-        throw new AccessControlException(
+        throw new AccessDeniedException(
            "Unable to find SASL server implementation for "
                + authMethod.getMechanismName());
       if (LOG.isDebugEnabled()) {
@@ -1453,7 +1453,7 @@ public class RpcServer implements RpcServerInterface {
         return doBadPreambleHandling(msg, new BadAuthException(msg));
       }
       if (isSecurityEnabled && authMethod == AuthMethod.SIMPLE) {
-        AccessControlException ae = new AccessControlException("Authentication is required");
+        AccessDeniedException ae = new AccessDeniedException("Authentication is required");
         setupResponse(authFailedResponse, authFailedCall, ae, ae.getMessage());
         responder.doRespond(authFailedCall);
         throw ae;
@@ -1566,7 +1566,7 @@ public class RpcServer implements RpcServerInterface {
           && (!protocolUser.getUserName().equals(user.getUserName()))) {
         if (authMethod == AuthMethod.DIGEST) {
           // Not allowed to doAs if token authentication is used
-          throw new AccessControlException("Authenticated user (" + user
+          throw new AccessDeniedException("Authenticated user (" + user
               + ") doesn't match what the client claims to be ("
              + protocolUser + ")");
         } else {
@@ -1655,7 +1655,7 @@ public class RpcServer implements RpcServerInterface {
       if (!authorizeConnection()) {
         // Throw FatalConnectionException wrapping ACE so client does right thing and closes
         // down the connection instead of trying to read non-existent retun.
-        throw new AccessControlException("Connection from " + this + " for service " +
+        throw new AccessDeniedException("Connection from " + this + " for service " +
           connectionHeader.getServiceName() + " is unauthorized for user: " + user);
       }
     }
@@ -1765,7 +1765,8 @@ public class RpcServer implements RpcServerInterface {
     } catch (AuthorizationException ae) {
       LOG.debug("Connection authorization failed: " + ae.getMessage(), ae);
       metrics.authorizationFailure();
-      setupResponse(authFailedResponse, authFailedCall, ae, ae.getMessage());
+      setupResponse(authFailedResponse, authFailedCall,
+        new AccessDeniedException(ae), ae.getMessage());
       responder.doRespond(authFailedCall);
       return false;
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/67c23232/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index c418ec3..ade9940 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -66,6 +66,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.RegionPlacementMaintainer;
+import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.FSProtos;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -73,7 +74,6 @@ import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -1684,7 +1684,7 @@ public abstract class FSUtils {
   *          the action
   */
  public static void checkAccess(UserGroupInformation ugi, FileStatus file,
-      FsAction action) throws AccessControlException {
+      FsAction action) throws AccessDeniedException {
    if (ugi.getShortUserName().equals(file.getOwner())) {
      if (file.getPermission().getUserAction().implies(action)) {
        return;
@@ -1696,7 +1696,7 @@ public abstract class FSUtils {
    } else if (file.getPermission().getOtherAction().implies(action)) {
      return;
    }
-    throw new AccessControlException("Permission denied:" + " action=" + action
+    throw new AccessDeniedException("Permission denied:" + " action=" + action
        + " path=" + file.getPath() + " user=" + ugi.getShortUserName());
  }
http://git-wip-us.apache.org/repos/asf/hbase/blob/67c23232/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 783b3da..1ca7f39 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -104,8 +104,8 @@ import org.apache.hadoop.hbase.util.hbck.TableLockChecker;
 import org.apache.hadoop.hbase.zookeeper.MetaRegionTracker;
 import org.apache.hadoop.hbase.zookeeper.ZKTableReadOnly;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
@@ -1560,7 +1560,7 @@ public class HBaseFsck extends Configured {
     }
   }

-  private void preCheckPermission() throws IOException, AccessControlException {
+  private void preCheckPermission() throws IOException, AccessDeniedException {
     if (shouldIgnorePreCheckPermission()) {
       return;
     }
@@ -1573,12 +1573,12 @@ public class HBaseFsck extends Configured {
     for (FileStatus file : files) {
       try {
         FSUtils.checkAccess(ugi, file, FsAction.WRITE);
-      } catch (AccessControlException ace) {
-        LOG.warn("Got AccessControlException when preCheckPermission ", ace);
+      } catch (AccessDeniedException ace) {
+        LOG.warn("Got AccessDeniedException when preCheckPermission ", ace);
         errors.reportError(ERROR_CODE.WRONG_USAGE, "Current user " + ugi.getUserName()
           + " does not have write perms to " + file.getPath()
           + ". Please rerun hbck as hdfs user " + file.getOwner());
-        throw new AccessControlException(ace);
+        throw ace;
       }
     }
   }
@@ -3999,7 +3999,7 @@ public class HBaseFsck extends Configured {
     // pre-check current user has FS write permission or not
     try {
       preCheckPermission();
-    } catch (AccessControlException ace) {
+    } catch (AccessDeniedException ace) {
       Runtime.getRuntime().exit(-1);
     } catch (IOException ioe) {
       Runtime.getRuntime().exit(-1);
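Because AccessDeniedException extends DoNotRetryIOException, the practical effect of the change is that callers give up immediately on authorization failures (as hbck does above by exiting) rather than cycling through retries that can never succeed. The sketch below is illustrative only and is not HBase's actual client retry code; the callWithRetries() helper and its parameters are made up to show how a DoNotRetryIOException subclass short-circuits a typical retry loop.

// Illustrative only: a generic retry loop showing why a DoNotRetryIOException
// subclass such as AccessDeniedException should not be retried. This is not
// HBase's actual client implementation.
import java.io.IOException;
import java.util.concurrent.Callable;

import org.apache.hadoop.hbase.DoNotRetryIOException;

public class RetrySketch {
  static <T> T callWithRetries(Callable<T> call, int maxAttempts) throws IOException {
    IOException lastTransient = null;
    for (int attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        return call.call();
      } catch (DoNotRetryIOException e) {
        // Access was denied (or another permanent failure occurred); retrying
        // the same call cannot succeed, so fail fast.
        throw e;
      } catch (IOException e) {
        // Possibly transient; remember it and try the next attempt.
        lastTransient = e;
      } catch (Exception e) {
        throw new IOException(e);
      }
    }
    throw lastTransient != null ? lastTransient
        : new IOException("no attempts were made");
  }
}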