ranger-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ab...@apache.org
Subject ranger git commit: RANGER-2297: getContentSummary validation failure
Date Thu, 29 Nov 2018 20:33:04 GMT
Repository: ranger
Updated Branches:
  refs/heads/ranger-1 762db6beb -> 1c797e52a


RANGER-2297: getContentSummary validation failure


Project: http://git-wip-us.apache.org/repos/asf/ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/ranger/commit/1c797e52
Tree: http://git-wip-us.apache.org/repos/asf/ranger/tree/1c797e52
Diff: http://git-wip-us.apache.org/repos/asf/ranger/diff/1c797e52

Branch: refs/heads/ranger-1
Commit: 1c797e52adf351e0e347c7a5abf256a0767603ae
Parents: 762db6b
Author: Abhay Kulkarni <akulkarni@hortonworks.com>
Authored: Thu Nov 29 11:54:40 2018 -0800
Committer: Abhay Kulkarni <akulkarni@hortonworks.com>
Committed: Thu Nov 29 12:32:52 2018 -0800

----------------------------------------------------------------------
 .../hadoop/RangerHdfsAuthorizer.java            | 44 +++++++++++++++++---
 1 file changed, 39 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ranger/blob/1c797e52/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/RangerHdfsAuthorizer.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/RangerHdfsAuthorizer.java
b/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/RangerHdfsAuthorizer.java
index 65a397d..466f22a 100644
--- a/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/RangerHdfsAuthorizer.java
+++ b/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/RangerHdfsAuthorizer.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.hdfs.server.namenode.INode;
 import org.apache.hadoop.hdfs.server.namenode.INodeAttributeProvider;
 import org.apache.hadoop.hdfs.server.namenode.INodeAttributes;
 import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
+import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
 import org.apache.hadoop.hdfs.util.ReadOnlyList;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.security.AccessControlException;
@@ -241,10 +242,14 @@ public class RangerHdfsAuthorizer extends INodeAttributeProvider {
 				INode   parent              = null;
 				INode   inode               = null;
 
+				boolean useDefaultAuthorizerOnly = false;
+				boolean doNotGenerateAuditRecord = false;
+
 				if(plugin != null && !ArrayUtils.isEmpty(inodes)) {
 					int sz = inodeAttrs.length;
 					if (LOG.isDebugEnabled()) {
 						LOG.debug("Size of INodeAttrs array:[" + sz + "]");
+						LOG.debug("Size of INodes array:[" + inodes.length + "]");
 					}
 					byte[][] components = new byte[sz][];
 
@@ -259,11 +264,40 @@ public class RangerHdfsAuthorizer extends INodeAttributeProvider {
 					if (i != sz) {
 						if (LOG.isDebugEnabled()) {
 							LOG.debug("Input INodeAttributes array contains null at position " + i);
-							LOG.debug("Will use only first [" + i + "] components to build resourcePath");
+							LOG.debug("Will use only first [" + i + "] components");
 						}
 					}
 
-					resourcePath = DFSUtil.byteArray2PathString(components, 0, i);
+					if (sz == 1 && inodes.length == 1 && inodes[0].getParent() != null) {
+
+						doNotGenerateAuditRecord = true;
+
+						if (LOG.isDebugEnabled()) {
+							LOG.debug("Using the only inode in the array to figure out path to resource. No audit record will be generated for this authorization request");
+						}
+
+						resourcePath = inodes[0].getFullPathName();
+
+						if (snapshotId != Snapshot.CURRENT_STATE_ID) {
+
+							useDefaultAuthorizerOnly = true;
+
+							if (LOG.isDebugEnabled()) {
+								LOG.debug("path:[" + resourcePath + "] is for a snapshot, id=[" + snapshotId +"], default Authorizer will be used to authorize this request");
+							}
+						} else {
+							if (LOG.isDebugEnabled()) {
+								LOG.debug("path:[" + resourcePath + "] is not for a snapshot, id=[" + snapshotId +"]. It will be used to authorize this request");
+							}
+						}
+					} else {
+
+						resourcePath = DFSUtil.byteArray2PathString(components, 0, i);
+
+						if (LOG.isDebugEnabled()) {
+							LOG.debug("INodeAttributes array is used to figure out path to resource, resourcePath:[" + resourcePath +"]");
+						}
+					}
 
 					if(ancestorIndex >= inodes.length) {
 						ancestorIndex = inodes.length - 1;
@@ -271,13 +305,13 @@ public class RangerHdfsAuthorizer extends INodeAttributeProvider {
 
 					for(; ancestorIndex >= 0 && inodes[ancestorIndex] == null; ancestorIndex--);
 
-					authzStatus = AuthzStatus.ALLOW;
+					authzStatus = useDefaultAuthorizerOnly ? AuthzStatus.NOT_DETERMINED : AuthzStatus.ALLOW;
 
 					ancestor = inodes.length > ancestorIndex && ancestorIndex >= 0 ? inodes[ancestorIndex] : null;
 					parent   = inodes.length > 1 ? inodes[inodes.length - 2] : null;
 					inode    = inodes[inodes.length - 1]; // could be null while creating a new file
 
-					auditHandler = new RangerHdfsAuditHandler(resourcePath, isTraverseOnlyCheck);
+					auditHandler = doNotGenerateAuditRecord ? null : new RangerHdfsAuditHandler(resourcePath, isTraverseOnlyCheck);
 
 					/* Hadoop versions prior to 2.8.0 didn't ask for authorization of parent/ancestor traversal for
 					 * reading or writing a file. However, Hadoop version 2.8.0 and later ask traversal authorization for
@@ -294,7 +328,7 @@ public class RangerHdfsAuthorizer extends INodeAttributeProvider {
 					 * This approach would ensure that Ranger authorization will continue to work with existing policies,
 					 * without requiring policy migration/update, for the changes in behaviour in Hadoop 2.8.0.
 					 */
-					if(isTraverseOnlyCheck) {
+					if(authzStatus == AuthzStatus.ALLOW && isTraverseOnlyCheck) {
 						authzStatus = traverseOnlyCheck(inode, inodeAttrs, resourcePath, components, parent, ancestor, ancestorIndex, user, groups, plugin, auditHandler);
 					}
 


Mime
View raw message