ranger-commits mailing list archives

From: omal...@apache.org
Subject: [37/44] ARGUS-1. Initial code commit (Selvamohan Neethiraj via omalley)
Date: Thu, 14 Aug 2014 20:50:48 GMT
http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/hive-agent/src/main/java/com/xasecure/authorization/hive/authorizer/XaSecureHiveAuthorizerBase.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/com/xasecure/authorization/hive/authorizer/XaSecureHiveAuthorizerBase.java b/hive-agent/src/main/java/com/xasecure/authorization/hive/authorizer/XaSecureHiveAuthorizerBase.java
new file mode 100644
index 0000000..92a3bb8
--- /dev/null
+++ b/hive-agent/src/main/java/com/xasecure/authorization/hive/authorizer/XaSecureHiveAuthorizerBase.java
@@ -0,0 +1,165 @@
+package com.xasecure.authorization.hive.authorizer;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
+import org.apache.hadoop.security.UserGroupInformation;
+
+public class XaSecureHiveAuthorizerBase implements HiveAuthorizer {
+
+	private HiveMetastoreClientFactory mMetastoreClientFactory;
+	private HiveConf                   mHiveConf;
+	private HiveAuthenticationProvider mHiveAuthenticator;
+	private UserGroupInformation       mUgi;
+	  
+	public XaSecureHiveAuthorizerBase(HiveMetastoreClientFactory metastoreClientFactory,
+									  HiveConf                   hiveConf,
+									  HiveAuthenticationProvider hiveAuthenticator) {
+		mMetastoreClientFactory = metastoreClientFactory;
+		mHiveConf               = hiveConf;
+		mHiveAuthenticator      = hiveAuthenticator;
+
+		String userName = mHiveAuthenticator == null ? null : mHiveAuthenticator.getUserName();
+
+		mUgi = userName == null ? null : UserGroupInformation.createRemoteUser(userName);
+	}
+
+	public HiveMetastoreClientFactory getMetastoreClientFactory() {
+		return mMetastoreClientFactory;
+	}
+
+	public HiveConf getHiveConf() {
+		return mHiveConf;
+	}
+
+	public HiveAuthenticationProvider getHiveAuthenticator() {
+		return mHiveAuthenticator;
+	}
+
+	public UserGroupInformation getCurrentUserGroupInfo() {
+		return mUgi;
+	}
+
+	@Override
+	public void applyAuthorizationConfigPolicy(HiveConf arg0) {
+		// TODO Auto-generated method stub
+	}
+
+	@Override
+	public void checkPrivileges(HiveOperationType         hiveOpType,
+								List<HivePrivilegeObject> inputsHObjs,
+								List<HivePrivilegeObject> outputHObjs,
+								HiveAuthzContext          context)
+										throws HiveAuthzPluginException, HiveAccessControlException {
+		// TODO Auto-generated method stub
+	}
+
+	@Override
+	public void createRole(String arg0, HivePrincipal arg1)
+			throws HiveAuthzPluginException, HiveAccessControlException {
+		// TODO Auto-generated method stub
+		
+	}
+
+	@Override
+	public void dropRole(String arg0) throws HiveAuthzPluginException,
+			HiveAccessControlException {
+		// TODO Auto-generated method stub
+		
+	}
+
+	@Override
+	public List<String> getAllRoles() throws HiveAuthzPluginException,
+			HiveAccessControlException {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	@Override
+	public List<String> getCurrentRoleNames() throws HiveAuthzPluginException {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	@Override
+	public List<HiveRoleGrant> getPrincipalGrantInfoForRole(String arg0)
+			throws HiveAuthzPluginException, HiveAccessControlException {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	@Override
+	public List<HiveRoleGrant> getRoleGrantInfoForPrincipal(HivePrincipal arg0)
+			throws HiveAuthzPluginException, HiveAccessControlException {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	@Override
+	public VERSION getVersion() {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	@Override
+	public void grantPrivileges(List<HivePrincipal> arg0,
+			List<HivePrivilege> arg1, HivePrivilegeObject arg2,
+			HivePrincipal arg3, boolean arg4) throws HiveAuthzPluginException,
+			HiveAccessControlException {
+		// TODO Auto-generated method stub
+		
+	}
+
+	@Override
+	public void grantRole(List<HivePrincipal> arg0, List<String> arg1,
+			boolean arg2, HivePrincipal arg3) throws HiveAuthzPluginException,
+			HiveAccessControlException {
+		// TODO Auto-generated method stub
+		
+	}
+
+	@Override
+	public void revokePrivileges(List<HivePrincipal> arg0,
+			List<HivePrivilege> arg1, HivePrivilegeObject arg2,
+			HivePrincipal arg3, boolean arg4) throws HiveAuthzPluginException,
+			HiveAccessControlException {
+		// TODO Auto-generated method stub
+		
+	}
+
+	@Override
+	public void revokeRole(List<HivePrincipal> arg0, List<String> arg1,
+			boolean arg2, HivePrincipal arg3) throws HiveAuthzPluginException,
+			HiveAccessControlException {
+		// TODO Auto-generated method stub
+		
+	}
+
+	@Override
+	public void setCurrentRole(String arg0) throws HiveAccessControlException,
+			HiveAuthzPluginException {
+		// TODO Auto-generated method stub
+		
+	}
+
+	@Override
+	public List<HivePrivilegeInfo> showPrivileges(HivePrincipal arg0,
+			HivePrivilegeObject arg1) throws HiveAuthzPluginException,
+			HiveAccessControlException {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/hive-agent/src/main/java/com/xasecure/authorization/hive/authorizer/XaSecureHiveAuthorizerFactory.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/com/xasecure/authorization/hive/authorizer/XaSecureHiveAuthorizerFactory.java b/hive-agent/src/main/java/com/xasecure/authorization/hive/authorizer/XaSecureHiveAuthorizerFactory.java
new file mode 100644
index 0000000..4a2fc40
--- /dev/null
+++ b/hive-agent/src/main/java/com/xasecure/authorization/hive/authorizer/XaSecureHiveAuthorizerFactory.java
@@ -0,0 +1,18 @@
+package com.xasecure.authorization.hive.authorizer;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
+
+public class XaSecureHiveAuthorizerFactory implements HiveAuthorizerFactory {
+	@Override
+	public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
+											   HiveConf conf,
+											   HiveAuthenticationProvider hiveAuthenticator)
+													   throws HiveAuthzPluginException {
+		return new XaSecureHiveAuthorizer(metastoreClientFactory, conf, hiveAuthenticator);
+	}
+}

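XaSecureHiveAuthorizer, the class the factory instantiates, is not included in this hunk. As a rough sketch of how XaSecureHiveAuthorizerBase is meant to be extended (illustrative only, with a hypothetical class name; the real implementation would evaluate XASecure policies), a subclass might look like:

    // Same package and imports as XaSecureHiveAuthorizerBase above.
    // Hypothetical example, not part of this commit.
    public class LoggingHiveAuthorizer extends XaSecureHiveAuthorizerBase {
        public LoggingHiveAuthorizer(HiveMetastoreClientFactory factory,
                                     HiveConf conf,
                                     HiveAuthenticationProvider authenticator) {
            super(factory, conf, authenticator);
        }

        @Override
        public void checkPrivileges(HiveOperationType hiveOpType,
                                    List<HivePrivilegeObject> inputHObjs,
                                    List<HivePrivilegeObject> outputHObjs,
                                    HiveAuthzContext context)
                throws HiveAuthzPluginException, HiveAccessControlException {
            // Allow everything, logging the operation; a policy-based
            // implementation would throw HiveAccessControlException to deny.
            System.out.println("checkPrivileges: " + hiveOpType);
        }
    }

In HiveServer2, a HiveAuthorizerFactory implementation such as XaSecureHiveAuthorizerFactory is typically wired in through the hive.security.authorization.manager property in hive-site.xml.
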
http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/hive-agent/src/main/java/com/xasecure/authorization/hive/constants/XaSecureHiveConstants.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/com/xasecure/authorization/hive/constants/XaSecureHiveConstants.java b/hive-agent/src/main/java/com/xasecure/authorization/hive/constants/XaSecureHiveConstants.java
new file mode 100644
index 0000000..063c060
--- /dev/null
+++ b/hive-agent/src/main/java/com/xasecure/authorization/hive/constants/XaSecureHiveConstants.java
@@ -0,0 +1,9 @@
+package com.xasecure.authorization.hive.constants;
+
+public final class XaSecureHiveConstants {
+	public static final String WILDCARD_OBJECT = "*" ;
+	public static final String HAS_ANY_PERMISSION = "any" ;
+	public static final String SHOW_META_INFO_PERMISSION = "show" ;
+	public static final String PUBLIC_ACCESS_ROLE = "public" ;
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/knox-agent/conf/xasecure-audit-changes.cfg
----------------------------------------------------------------------
diff --git a/knox-agent/conf/xasecure-audit-changes.cfg b/knox-agent/conf/xasecure-audit-changes.cfg
new file mode 100644
index 0000000..e3e09da
--- /dev/null
+++ b/knox-agent/conf/xasecure-audit-changes.cfg
@@ -0,0 +1,4 @@
+xasecure.audit.jpa.javax.persistence.jdbc.url	jdbc:mysql://%XAAUDIT.DB.HOSTNAME%/%XAAUDIT.DB.DATABASE_NAME%	mod create-if-not-exists
+xasecure.audit.jpa.javax.persistence.jdbc.user	%XAAUDIT.DB.USER_NAME% mod create-if-not-exists
+xasecure.audit.jpa.javax.persistence.jdbc.password	%XAAUDIT.DB.PASSWORD% mod create-if-not-exists
+xasecure.audit.repository.name	%REPOSITORY_NAME% mod create-if-not-exists

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/knox-agent/conf/xasecure-audit.xml
----------------------------------------------------------------------
diff --git a/knox-agent/conf/xasecure-audit.xml b/knox-agent/conf/xasecure-audit.xml
new file mode 100644
index 0000000..996d06f
--- /dev/null
+++ b/knox-agent/conf/xasecure-audit.xml
@@ -0,0 +1,85 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration xmlns:xi="http://www.w3.org/2001/XInclude">
+
+	<property>
+		<name>xasecure.audit.provider.factory</name>
+		<value>com.xasecure.audit.provider.AuditProviderFactory</value>
+	</property>
+
+	<!--  Properties whose names begin with "xasecure.audit.jpa." are used to configure JPA -->
+	<property>
+		<name>xasecure.audit.jpa.javax.persistence.jdbc.url</name>
+		<value>jdbc:mysql://localhost:3306/xa_db</value>
+	</property>
+
+	<property>
+		<name>xasecure.audit.jpa.javax.persistence.jdbc.user</name>
+		<value>xaaudit</value>
+	</property>
+
+	<property>
+		<name>xasecure.audit.jpa.javax.persistence.jdbc.password</name>
+		<value>xaaudit</value>
+	</property>
+
+	<property>
+		<name>xasecure.audit.jpa.javax.persistence.jdbc.driver</name>
+		<value>com.mysql.jdbc.Driver</value>
+	</property>
+
+	<property>
+		<name>xasecure.audit.repository.name</name>
+		<value>knoxdev</value>
+	</property>	
+	
+	<property>
+		<name>xasecure.audit.is.enabled</name>
+		<value>true</value>
+	</property>	
+
+	<property>
+		<name>xasecure.audit.log4j.is.enabled</name>
+		<value>false</value>
+	</property>	
+
+	<property>
+		<name>xasecure.audit.log4j.is.async</name>
+		<value>false</value>
+	</property>	
+	
+	<property>
+		<name>xasecure.audit.log4j.async.max.queue.size</name>
+		<value>10240</value>
+	</property>	
+
+	<property>
+		<name>xasecure.audit.log4j.async.max.flush.interval.ms</name>
+		<value>30000</value>
+	</property>	
+	
+	<property>
+		<name>xasecure.audit.db.is.enabled</name>
+		<value>true</value>
+	</property>	
+	
+	<property>
+		<name>xasecure.audit.db.is.async</name>
+		<value>true</value>
+	</property>	
+	
+	<property>
+		<name>xasecure.audit.db.async.max.queue.size</name>
+		<value>10240</value>
+	</property>	
+
+	<property>
+		<name>xasecure.audit.db.async.max.flush.interval.ms</name>
+		<value>30000</value>
+	</property>	
+
+	<property>
+		<name>xasecure.audit.db.batch.size</name>
+		<value>100</value>
+	</property>	
+</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/knox-agent/conf/xasecure-knox-security-changes.cfg
----------------------------------------------------------------------
diff --git a/knox-agent/conf/xasecure-knox-security-changes.cfg b/knox-agent/conf/xasecure-knox-security-changes.cfg
new file mode 100644
index 0000000..e7dd10b
--- /dev/null
+++ b/knox-agent/conf/xasecure-knox-security-changes.cfg
@@ -0,0 +1,9 @@
+#
+# Change the original policy parameters to work with the policy-manager-based authorizer.
+# 
+#
+knox.authorization.verifier.classname					com.xasecure.pdp.knox.XASecureAuthorizer							mod	create-if-not-exists
+xasecure.knox.policymgr.url							%POLICY_MGR_URL%/service/assets/policyList/%REPOSITORY_NAME% 	    mod create-if-not-exists
+xasecure.knox.policymgr.url.saveAsFile				   	/tmp/knox%REPOSITORY_NAME%_json  									mod create-if-not-exists
+xasecure.knox.policymgr.url.reloadIntervalInMillis 	30000 																mod create-if-not-exists
+xasecure.knox.policymgr.ssl.config						/etc/knox/conf/xasecure-policymgr-ssl.xml							mod create-if-not-exists

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/knox-agent/conf/xasecure-knox-security.xml
----------------------------------------------------------------------
diff --git a/knox-agent/conf/xasecure-knox-security.xml b/knox-agent/conf/xasecure-knox-security.xml
new file mode 100644
index 0000000..7d3536d
--- /dev/null
+++ b/knox-agent/conf/xasecure-knox-security.xml
@@ -0,0 +1,53 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration xmlns:xi="http://www.w3.org/2001/XInclude">
+
+
+	<!--  The following property is used to select the appropriate XASecure authorizer module (file-based or policy-manager-based) -->
+	<property>
+		<name>knox.authorization.verifier.classname</name>
+		<value>com.xasecure.pdp.knox.XASecureAuthorizer</value>
+		<description>
+			Class name of the authorization module
+		</description>
+	</property>
+
+
+	<!-- The following properties are used only when the PolicyManager is used as
+		the main storage for all policies -->
+	<property>
+		<name>xasecure.knox.policymgr.url</name>
+		<value>http://policymanagerhost:port/service/assets/dev-knox</value>
+		<description>
+			URL from which XASecure role-based authorization info is
+			retrieved.
+		</description>
+	</property>
+	<property>
+		<name>xasecure.knox.policymgr.url.saveAsFile</name>
+		<value>/tmp/xasecure-knox-policy.json</value>
+		<description>
+			Location where XASecure Role Based Authorization Info is
+			saved after successful retrieval from policymanager
+		</description>
+	</property>
+	<property>
+		<name>xasecure.knox.policymgr.url.laststoredfile</name>
+		<value>/home/knox/last_xasecure-knox-policy.json</value>
+		<description>
+			Location and file where last XASecure Role Based Authorization Info
+		    is saved after successful retrieval from policymanager.
+		</description>
+	</property>
+	<property>
+		<name>xasecure.knox.policymgr.url.reloadIntervalInMillis</name>
+		<value>30000</value>
+		<description>
+			How often (in milliseconds) to check the authorization URL for
+			changes and reload it into memory (reloaded only if there are
+			changes)
+		</description>
+	</property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/knox-agent/conf/xasecure-policymgr-ssl-changes.cfg
----------------------------------------------------------------------
diff --git a/knox-agent/conf/xasecure-policymgr-ssl-changes.cfg b/knox-agent/conf/xasecure-policymgr-ssl-changes.cfg
new file mode 100644
index 0000000..99900d7
--- /dev/null
+++ b/knox-agent/conf/xasecure-policymgr-ssl-changes.cfg
@@ -0,0 +1,7 @@
+#
+# SSL Params
+#
+xasecure.policymgr.clientssl.keystore				%SSL_KEYSTORE_FILE_PATH%												mod create-if-not-exists
+xasecure.policymgr.clientssl.keystore.password		%SSL_KEYSTORE_PASSWORD%													mod create-if-not-exists
+xasecure.policymgr.clientssl.truststore				%SSL_TRUSTSTORE_FILE_PATH%												mod create-if-not-exists
+xasecure.policymgr.clientssl.truststore.password	%SSL_TRUSTSTORE_PASSWORD%												mod create-if-not-exists
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/knox-agent/conf/xasecure-policymgr-ssl.xml
----------------------------------------------------------------------
diff --git a/knox-agent/conf/xasecure-policymgr-ssl.xml b/knox-agent/conf/xasecure-policymgr-ssl.xml
new file mode 100644
index 0000000..60f5387
--- /dev/null
+++ b/knox-agent/conf/xasecure-policymgr-ssl.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration xmlns:xi="http://www.w3.org/2001/XInclude">
+	<!--  The following properties are used for 2-way SSL client server validation -->
+	<property>
+		<name>xasecure.policymgr.clientssl.keystore</name>
+		<value>knoxdev-clientcert.jks</value>
+		<description> 
+			Java keystore file
+		</description>
+	</property>
+	<property>
+		<name>xasecure.policymgr.clientssl.keystore.password</name>
+		<value>xasecure</value>
+		<description> 
+			password for keystore 
+		</description>
+	</property>
+	<property>
+		<name>xasecure.policymgr.clientssl.truststore</name>
+		<value>cacerts-xasecure.jks</value>
+		<description> 
+			Java truststore file
+		</description>
+	</property>
+	<property>
+		<name>xasecure.policymgr.clientssl.truststore.password</name>
+		<value>changeit</value>
+		<description> 
+			Java truststore password
+		</description>
+	</property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/knox-agent/pom.xml
----------------------------------------------------------------------
diff --git a/knox-agent/pom.xml b/knox-agent/pom.xml
new file mode 100644
index 0000000..c8ee2ad
--- /dev/null
+++ b/knox-agent/pom.xml
@@ -0,0 +1,48 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>security_agents.knox-agent</groupId>
+  <artifactId>knox-agent</artifactId>
+  <name>Knox Security Agent</name>
+  <description>Knox Security Agents</description>
+  <packaging>jar</packaging>
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+  <parent>
+     <groupId>com.hortonworks.hadoop.security</groupId>
+     <artifactId>argus</artifactId>
+     <version>3.5.000</version>
+     <relativePath>..</relativePath>
+  </parent>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop.gateway</groupId>
+      <artifactId>gateway-site</artifactId>
+      <version>${knox.gateway.version}</version>
+      <scope>system</scope>
+      <systemPath>${local.lib.dir}/gateway-spi-0.5.0-SNAPSHOT.jar</systemPath>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop.gateway</groupId>
+      <artifactId>gateway-site-util</artifactId>
+      <version>${knox.gateway.version}</version>
+      <scope>system</scope>
+      <systemPath>${local.lib.dir}/gateway-util-urltemplate-0.5.0-SNAPSHOT.jar</systemPath>
+    </dependency>
+    <dependency>
+      <groupId>javax.servlet</groupId>
+      <artifactId>javax.servlet-api</artifactId>
+      <version>${javax.servlet.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>security_agents.agents-common</groupId>
+      <artifactId>agents-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>security_agents.agents-audit</groupId>
+      <artifactId>agents-audit</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/knox-agent/scripts/install.properties
----------------------------------------------------------------------
diff --git a/knox-agent/scripts/install.properties b/knox-agent/scripts/install.properties
new file mode 100644
index 0000000..548cab9
--- /dev/null
+++ b/knox-agent/scripts/install.properties
@@ -0,0 +1,63 @@
+#
+# Location of Policy Manager URL  
+#
+#
+# Example:
+# POLICY_MGR_URL=http://policymanager.xasecure.net:6080
+#
+
+POLICY_MGR_URL=http://localhost:6080
+
+#
+# Location of mysql client library (please check the location of the jar file)
+#
+MYSQL_CONNECTOR_JAR=/usr/share/java/mysql-connector-java.jar
+
+#
+# This is the repository name created within policy manager
+#
+# Example:
+# REPOSITORY_NAME=knoxdev
+#
+
+REPOSITORY_NAME=knoxdev
+
+# KNOX_HOME directory, would contain conf/, ext/ subdirectories
+KNOX_HOME=/usr/lib/knox
+
+#
+# AUDIT DB Configuration
+# 
+#  This information should match with the one you specified during the PolicyManager Installation
+# 
+# Example:
+# XAAUDIT.DB.HOSTNAME=localhost
+# XAAUDIT.DB.DATABASE_NAME=xasecure
+# XAAUDIT.DB.USER_NAME=xalogger
+# XAAUDIT.DB.PASSWORD=
+#
+#
+
+XAAUDIT.DB.HOSTNAME=localhost
+XAAUDIT.DB.DATABASE_NAME=xasecure
+XAAUDIT.DB.USER_NAME=xalogger
+XAAUDIT.DB.PASSWORD=xalogger
+
+
+#
+# SSL Client Certificate Information
+#
+# Example:
+# SSL_KEYSTORE_FILE_PATH=/etc/xasecure/conf/xasecure-hadoop-client.jks
+# SSL_KEYSTORE_PASSWORD=clientdb01
+# SSL_TRUSTSTORE_FILE_PATH=/etc/xasecure/conf/xasecure-truststore.jks
+# SSL_TRUSTSTORE_PASSWORD=changeit
+
+#
+# You do not need to use SSL between the agent and the security admin tool; please leave these sample values as they are.
+#
+
+SSL_KEYSTORE_FILE_PATH=agentKey.jks
+SSL_KEYSTORE_PASSWORD=myKeyFilePassword
+SSL_TRUSTSTORE_FILE_PATH=cacert
+SSL_TRUSTSTORE_PASSWORD=changeit

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/knox-agent/scripts/install.sh
----------------------------------------------------------------------
diff --git a/knox-agent/scripts/install.sh b/knox-agent/scripts/install.sh
new file mode 100644
index 0000000..753e71d
--- /dev/null
+++ b/knox-agent/scripts/install.sh
@@ -0,0 +1,187 @@
+#!/bin/bash
+
+# TODO: change <name>AclsAuthz</name> to <name>XASecurePDPKnox</name> for provider <role>authorization</role>
+
+MY_ID=`id -u`
+
+if [ "${MY_ID}" -ne 0 ]
+then
+  echo "ERROR: You must run the installation as root user."
+  exit 1
+fi
+
+install_dir=`dirname $0`
+
+[ "${install_dir}" = "." ] && install_dir=`pwd`
+
+#echo "Current Install Directory: [${install_dir}]"
+
+#verify mysql-connector path is valid
+MYSQL_CONNECTOR_JAR=`grep '^MYSQL_CONNECTOR_JAR'  ${install_dir}/install.properties | awk -F= '{ print $2 }'`
+echo "[I] Checking MYSQL CONNECTOR FILE : $MYSQL_CONNECTOR_JAR" 
+if test -f "$MYSQL_CONNECTOR_JAR"; then
+	echo "[I] MYSQL CONNECTOR FILE : $MYSQL_CONNECTOR_JAR file found" 
+else
+	echo "[E] MYSQL CONNECTOR FILE : $MYSQL_CONNECTOR_JAR not found, aborting installation"
+  exit 1
+fi
+
+KNOX_HOME=`grep '^KNOX_HOME'  ${install_dir}/install.properties | awk -F= '{ print $2 }'`
+if [ "${KNOX_HOME}" == "" ]
+then
+  echo "ERROR: KNOX_HOME property not defined, aborting installation"
+  exit 1
+fi
+
+if [ ! -d ${KNOX_HOME} ]
+then
+  echo "ERROR: directory ${KNOX_HOME} does not exist"
+  exit 1
+fi
+
+KNOX_EXT=${KNOX_HOME}/ext
+if [ ! -d ${KNOX_EXT} ]
+then
+  echo "ERROR: Knox ext directory ${KNOX_EXT} does not exist"
+  exit 1
+fi
+
+KNOX_CONF=${KNOX_HOME}/conf
+if [ ! -d ${KNOX_CONF} ]
+then
+  echo "ERROR: Knox conf directory ${KNOX_CONF} does not exist"
+  exit 1
+fi
+
+# copy lib, dist jar files in to KNOX_EXT
+echo "Copying knox agent lib, dist jars to ${KNOX_EXT}"
+cp lib/*.jar ${KNOX_EXT}
+cp dist/*.jar ${KNOX_EXT}
+
+# copy mysql connector jar  in to KNOX_EXT
+echo "Copying mysql connector jar to ${KNOX_EXT}"
+cp ${MYSQL_CONNECTOR_JAR} ${KNOX_EXT}
+
+CONFIG_FILE_OWNER="knox:hadoop"
+
+# --- Backup current configuration for backup - START
+
+COMPONENT_NAME=knox
+
+XASECURE_VERSION=`cat ${install_dir}/version`
+
+CFG_DIR=${KNOX_CONF}
+XASECURE_ROOT=/etc/xasecure/${COMPONENT_NAME}
+BACKUP_TYPE=pre
+CUR_VERSION_FILE=${XASECURE_ROOT}/.current_version
+CUR_CFG_DIR_FILE=${XASECURE_ROOT}/.config_dir
+PRE_INSTALL_CONFIG=${XASECURE_ROOT}/${BACKUP_TYPE}-${XASECURE_VERSION}
+
+backup_dt=`date '+%Y%m%d%H%M%S'`
+
+if [ -d "${PRE_INSTALL_CONFIG}" ]
+then
+	PRE_INSTALL_CONFIG="${PRE_INSTALL_CONFIG}.${backup_dt}"
+fi
+
+# back up the current configuration before installing
+if [ -d ${CFG_DIR} ]
+then
+	( cd ${CFG_DIR} ; find . -print | cpio -pdm ${PRE_INSTALL_CONFIG} )
+	[ -f ${CUR_VERSION_FILE} ] && mv ${CUR_VERSION_FILE} ${CUR_VERSION_FILE}-${backup_dt}
+	echo ${XASECURE_VERSION} > ${CUR_VERSION_FILE}
+	echo ${CFG_DIR} > ${CUR_CFG_DIR_FILE}
+else
+	echo "ERROR: Unable to find configuration directory: [${CFG_DIR}]"
+	exit 1
+fi
+
+cp -f ${install_dir}/uninstall.sh ${XASECURE_ROOT}/
+
+# --- Backup current configuration for backup  - END
+
+
+
+dt=`date '+%Y%m%d%H%M%S'`
+for f in ${install_dir}/conf/*
+do
+	if [ -f ${f} ]
+	then
+		fn=`basename $f`
+		if [ ! -f ${KNOX_CONF}/${fn} ]
+		then
+			echo "+cp ${f} ${KNOX_CONF}/${fn}"
+			cp ${f} ${KNOX_CONF}/${fn}
+		else
+			echo "WARN: ${fn} already exists in the ${KNOX_CONF} - Using existing configuration ${fn}"
+		fi
+	fi
+done
+
+
+# create new config files based on *-changes.cfg files
+
+PROP_ARGS="-p  ${install_dir}/install.properties"
+
+for f in ${install_dir}/installer/conf/*-changes.cfg
+do
+	if [ -f ${f} ]
+	then
+		fn=`basename $f`
+		orgfn=`echo $fn | sed -e 's:-changes.cfg:.xml:'`
+		fullpathorgfn="${KNOX_CONF}/${orgfn}"
+		if [ ! -f ${fullpathorgfn} ]
+		then
+			echo "ERROR: Unable to find ${fullpathorgfn}"
+			exit 1
+		fi
+		archivefn="${KNOX_CONF}/.${orgfn}.${dt}"
+		newfn="${KNOX_CONF}/.${orgfn}-new.${dt}"
+		cp ${fullpathorgfn} ${archivefn}
+		if [ $? -eq 0 ]
+		then
+			cp="${install_dir}/installer/lib/*:/usr/lib/hadoop/*:/usr/lib/hadoop/lib/*"
+			java -cp "${cp}" com.xasecure.utils.install.XmlConfigChanger -i ${archivefn} -o ${newfn} -c ${f} ${PROP_ARGS}
+			if [ $? -eq 0 ]
+			then
+				diff -w ${newfn} ${fullpathorgfn} > /dev/null 2>&1
+				if [ $? -ne 0 ]
+				then
+					#echo "Changing config file:  ${fullpathorgfn} with following changes:"
+					#echo "==============================================================="
+					#diff -w ${newfn} ${fullpathorgfn}
+					#echo "==============================================================="
+					echo "NOTE: Current config file: ${fullpathorgfn} is being saved as ${archivefn}"
+					#echo "==============================================================="
+					cp ${newfn} ${fullpathorgfn}
+				fi
+			else
+				echo "ERROR: Unable to make changes to config. file: ${fullpathorgfn}"
+				echo "exiting ...."
+				exit 1
+			fi
+		else
+			echo "ERROR: Unable to save config. file: ${fullpathorgfn}  to ${archivefn}"
+			echo "exiting ...."
+			exit 1
+		fi
+	fi
+done
+
+chmod go-rwx ${KNOX_CONF}/xasecure-policymgr-ssl.xml
+chown ${CONFIG_FILE_OWNER} ${KNOX_CONF}/xasecure-policymgr-ssl.xml
+
+# update topology files - replace <name>AclsAuthz</name> with <name>XASecurePDPKnox</name>
+# ${PRE_INSTALL_CONFIG}/topologies/*.xml
+for fn in `ls ${PRE_INSTALL_CONFIG}/topologies/*.xml`
+do
+  tn=`basename ${fn}`
+  echo "Updating topology file ${KNOX_CONF}/topologies/${tn}"
+  cat $fn | sed -e 's-<name>AclsAuthz</name>-<name>XASecurePDPKnox</name>-' > ${KNOX_CONF}/topologies/$tn
+done
+
+echo "Restarting Knox"
+su -l knox ${KNOX_HOME}/bin/gateway.sh stop
+su -l knox ${KNOX_HOME}/bin/gateway.sh start
+
+exit 0

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/knox-agent/scripts/uninstall.sh
----------------------------------------------------------------------
diff --git a/knox-agent/scripts/uninstall.sh b/knox-agent/scripts/uninstall.sh
new file mode 100644
index 0000000..9a5c6bc
--- /dev/null
+++ b/knox-agent/scripts/uninstall.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+COMPONENT_NAME=knox
+CFG_DIR=/etc/${COMPONENT_NAME}/conf
+XASECURE_ROOT=/etc/xasecure/${COMPONENT_NAME}
+BACKUP_TYPE=pre
+CUR_VERSION_FILE=${XASECURE_ROOT}/.current_version
+CUR_CFG_DIR_FILE=${XASECURE_ROOT}/.config_dir
+if [ -f ${CUR_VERSION_FILE} ]
+then
+	XASECURE_VERSION=`cat ${CUR_VERSION_FILE}`
+	PRE_INSTALL_CONFIG=${XASECURE_ROOT}/${BACKUP_TYPE}-${XASECURE_VERSION}
+	dt=`date '+%Y%m%d%H%M%S'`
+	if [ -d "${PRE_INSTALL_CONFIG}" ]
+	then
+		if [ -f ${CUR_CFG_DIR_FILE} ] 
+		then
+			CFG_DIR=`cat ${CUR_CFG_DIR_FILE}`
+		fi 
+		[ -d ${CFG_DIR} ] && mv ${CFG_DIR} ${CFG_DIR}-${dt}
+		( cd ${PRE_INSTALL_CONFIG} ; find . -print | cpio -pdm ${CFG_DIR} )
+		[ -f ${CUR_VERSION_FILE} ] && mv ${CUR_VERSION_FILE} ${CUR_VERSION_FILE}-uninstalled-${dt}
+		echo "XASecure version - ${XASECURE_VERSION} has been uninstalled successfully."
+	else
+		echo "ERROR: Unable to find pre-install configuration directory: [${PRE_INSTALL_CONFIG}]"
+		exit 1
+	fi
+else
+	cd ${CFG_DIR}
+	saved_files=`find . -type f -name '.*' |  sort | grep -v -- '-new.' | grep '[0-9]*$' | grep -v -- '-[0-9]*$' | sed -e 's:\.[0-9]*$::' | sed -e 's:^./::' | sort -u`
+	dt=`date '+%Y%m%d%H%M%S'`
+	if [ "${saved_files}" != "" ]
+	then
+	        for f in ${saved_files}
+	        do
+	                oldf=`ls ${f}.[0-9]* | sort | head -1`
+	                if [ -f "${oldf}" ]
+	                then
+	                        nf=`echo ${f} | sed -e 's:^\.::'`
+	                        if [ -f "${nf}" ]
+	                        then
+	                                echo "+cp -p ${nf} .${nf}-${dt}"
+	                                cp -p ${nf} .${nf}-${dt}
+	                                echo "+cp ${oldf} ${nf}"
+	                                cp ${oldf} ${nf}
+	                        else
+	                                echo "ERROR: ${nf} not found to save. However, old file is being recovered."
+	                                echo "+cp -p ${oldf} ${nf}"
+	                                cp -p ${oldf} ${nf}
+	                        fi
+	                fi
+	        done
+	        echo "XASecure configuration has been uninstalled successfully."
+	fi
+fi

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/knox-agent/src/main/java/com/xasecure/authorization/knox/KnoxAccessVerifier.java
----------------------------------------------------------------------
diff --git a/knox-agent/src/main/java/com/xasecure/authorization/knox/KnoxAccessVerifier.java b/knox-agent/src/main/java/com/xasecure/authorization/knox/KnoxAccessVerifier.java
new file mode 100644
index 0000000..91f2b90
--- /dev/null
+++ b/knox-agent/src/main/java/com/xasecure/authorization/knox/KnoxAccessVerifier.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.xasecure.authorization.knox;
+
+import java.util.Set;
+
+public interface KnoxAccessVerifier {
+
+	
+	public boolean isAccessAllowed(String topologyName, String serviceName, String accessTypes, String userName, Set<String> groups, String requestIp) ;
+
+	public boolean isAuditEnabled(String topologyName, String serviceName) ;
+	
+}

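A minimal implementation sketch of this interface (illustrative only; the policy-manager-backed implementation referenced in the configuration above, com.xasecure.pdp.knox.XASecureAuthorizer, is not part of this hunk):

    package com.xasecure.authorization.knox;

    import java.util.Set;

    // Hypothetical allow-all verifier, for illustration only.
    public class AllowAllKnoxAccessVerifier implements KnoxAccessVerifier {

        @Override
        public boolean isAccessAllowed(String topologyName, String serviceName,
                String accessTypes, String userName, Set<String> groups,
                String requestIp) {
            return true; // a real verifier would consult XASecure policies
        }

        @Override
        public boolean isAuditEnabled(String topologyName, String serviceName) {
            return true;
        }
    }
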
http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/knox-agent/src/main/java/com/xasecure/authorization/knox/KnoxAccessVerifierFactory.java
----------------------------------------------------------------------
diff --git a/knox-agent/src/main/java/com/xasecure/authorization/knox/KnoxAccessVerifierFactory.java b/knox-agent/src/main/java/com/xasecure/authorization/knox/KnoxAccessVerifierFactory.java
new file mode 100644
index 0000000..112d94b
--- /dev/null
+++ b/knox-agent/src/main/java/com/xasecure/authorization/knox/KnoxAccessVerifierFactory.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.xasecure.authorization.knox;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import com.xasecure.authorization.hadoop.config.XaSecureConfiguration;
+import com.xasecure.authorization.hadoop.constants.XaSecureHadoopConstants;
+
+public class KnoxAccessVerifierFactory {
+
+	private static final Log LOG = LogFactory.getLog(KnoxAccessVerifierFactory.class) ;
+
+	private static volatile KnoxAccessVerifier knoxAccessVerifier = null ;
+	
+	public static KnoxAccessVerifier getInstance() {
+		if (knoxAccessVerifier == null) {
+			synchronized(KnoxAccessVerifierFactory.class) {
+				if (knoxAccessVerifier == null) {
+					String knoxAccessVerifierClassName = XaSecureConfiguration.getInstance().get(XaSecureHadoopConstants.KNOX_ACCESS_VERIFIER_CLASS_NAME_PROP, XaSecureHadoopConstants.KNOX_ACCESS_VERIFIER_CLASS_NAME_DEFAULT_VALUE ) ;
+
+					if (knoxAccessVerifierClassName != null) {
+						LOG.info("Knox Access Verification class [" + knoxAccessVerifierClassName + "] - being built");
+						try {
+							knoxAccessVerifier = (KnoxAccessVerifier) (Class.forName(knoxAccessVerifierClassName).newInstance()) ;
+							LOG.info("Created a new instance of class: [" + knoxAccessVerifierClassName + "] for Knox Access verification.");
+						} catch (InstantiationException e) {
+							LOG.error("Unable to create KnoxAccess Verifier: [" +  knoxAccessVerifierClassName + "]", e);
+						} catch (IllegalAccessException e) {
+							LOG.error("Unable to create KnoxAccess Verifier: [" +  knoxAccessVerifierClassName + "]", e);
+						} catch (ClassNotFoundException e) {
+							LOG.error("Unable to create KnoxAccess Verifier: [" +  knoxAccessVerifierClassName + "]", e);
+						} catch (Throwable t) {
+							LOG.error("Unable to create KnoxAccess Verifier: [" +  knoxAccessVerifierClassName + "]", t);
+						}
+					}
+					else {
+						LOG.error("Unable to obtain knoxAccessVerifier [" +  XaSecureHadoopConstants.KNOX_ACCESS_VERIFIER_CLASS_NAME_PROP + "]");
+					}
+				}
+			}
+		}
+		return knoxAccessVerifier ;
+	}
+}

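Note that getInstance() returns null when the configured class cannot be loaded (the errors are only logged), so callers should null-check. An illustrative call-site fragment, with hypothetical topology, service, user, and IP values:

    Set<String> userGroups = Collections.singleton("users"); // hypothetical groups
    KnoxAccessVerifier verifier = KnoxAccessVerifierFactory.getInstance();
    if (verifier != null
            && verifier.isAccessAllowed("sandbox", "WEBHDFS", "allow",
                                        "guest", userGroups, "127.0.0.1")) {
        // dispatch the request
    }
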
http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/lookup-client/pom.xml
----------------------------------------------------------------------
diff --git a/lookup-client/pom.xml b/lookup-client/pom.xml
new file mode 100644
index 0000000..8fe53d1
--- /dev/null
+++ b/lookup-client/pom.xml
@@ -0,0 +1,47 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>com.hortonworks.hadoop.security</groupId>
+  <artifactId>lookup-client</artifactId>
+  <version>3.5.000</version>
+  <name>Resource Lookup API Implementation</name>
+  <description>Resource Lookup API Implementation</description>
+  <packaging>jar</packaging>
+  <parent>
+     <groupId>com.hortonworks.hadoop.security</groupId>
+     <artifactId>argus</artifactId>
+     <version>3.5.000</version>
+     <relativePath>..</relativePath>
+  </parent>
+  <dependencies>
+    <dependency>
+        <groupId>commons-logging</groupId>
+        <artifactId>commons-logging</artifactId>
+        <version>${commons.logging.version}</version>
+    </dependency>
+    <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-common</artifactId>
+        <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdfs</artifactId>
+        <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-server</artifactId>
+        <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+        <groupId>org.apache.hive</groupId>
+        <artifactId>hive-common</artifactId>
+        <version>${hive.version}</version>
+    </dependency>
+    <dependency>
+        <groupId>org.apache.hive</groupId>
+        <artifactId>hive-service</artifactId>
+        <version>${hive.version}</version>
+    </dependency>
+  </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/lookup-client/src/main/java/com/xasecure/hadoop/client/HadoopFS.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hadoop/client/HadoopFS.java b/lookup-client/src/main/java/com/xasecure/hadoop/client/HadoopFS.java
new file mode 100644
index 0000000..239a85f
--- /dev/null
+++ b/lookup-client/src/main/java/com/xasecure/hadoop/client/HadoopFS.java
@@ -0,0 +1,107 @@
+package com.xasecure.hadoop.client;
+
+import java.io.IOException;
+import java.security.PrivilegedAction;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import javax.security.auth.Subject;
+
+import org.apache.commons.io.FilenameUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import com.xasecure.hadoop.client.config.BaseClient;
+import com.xasecure.hadoop.client.exceptions.HadoopException;
+
+public class HadoopFS extends BaseClient {
+	
+	public HadoopFS(String dataSource) {
+		super(dataSource) ;
+	}
+	
+	public HadoopFS(String dataSource, HashMap<String,String> connectionProperties) {
+		super(dataSource,connectionProperties) ;
+	}
+	
+	
+	private List<String> listFilesInternal(String baseDir, String fileMatching) {
+		List<String> fileList = new ArrayList<String>() ;
+		ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
+		try {
+			Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
+			String dirPrefix = (baseDir.endsWith("/") ? baseDir : (baseDir + "/")) ;
+			String filterRegEx = null;
+			if (fileMatching != null && fileMatching.trim().length() > 0) {
+				filterRegEx = fileMatching.trim() ;
+			}
+			Configuration conf = new Configuration() ;
+			FileSystem fs = null ;
+			try {
+				fs = FileSystem.get(conf) ;
+				FileStatus[] fileStats = fs.listStatus(new Path(baseDir)) ;
+				if (fileStats != null) {
+					for(FileStatus stat : fileStats) {
+						Path path = stat.getPath() ;
+						String pathComponent = path.getName() ;
+						if (filterRegEx == null) {
+							fileList.add(dirPrefix + pathComponent) ;
+						}
+						else if (FilenameUtils.wildcardMatch(pathComponent, filterRegEx)) {
+							fileList.add(dirPrefix + pathComponent) ;
+						}
+					}
+				}
+			}
+			finally {
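+				// NOTE: FileSystem.get() returns a cached, JVM-wide instance; it is intentionally not closed here.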
+			}
+		}
+		catch(IOException ioe) {
+			throw new HadoopException("Unable to get listing of files for directory [" + baseDir + "] from Hadoop environment [" + getDataSource() + "]", ioe) ;
+		}
+		finally {
+			Thread.currentThread().setContextClassLoader(prevCl);
+		}
+		return fileList ;
+	}
+
+	
+	public List<String> listFiles(final String baseDir, final String fileMatching) {
+		PrivilegedAction<List<String>> action = new PrivilegedAction<List<String>>() {
+			@Override
+			public List<String> run() {
+				return listFilesInternal(baseDir, fileMatching) ;
+			}
+			
+		};
+		return Subject.doAs(getLoginSubject(),action) ;
+	}
+	
+	
+	public static final void main(String[] args) {
+		
+		if (args.length < 2) {
+			System.err.println("USAGE: java " + HadoopFS.class.getName() + " repositoryName  basedirectory  [filenameToMatch]") ;
+			System.exit(1) ;
+		}
+		
+		String repositoryName = args[0] ;
+		String baseDir = args[1] ;
+		String fileNameToMatch = (args.length == 2 ? null : args[2]) ;
+		
+		HadoopFS fs = new HadoopFS(repositoryName) ;
+		List<String> fsList = fs.listFiles(baseDir, fileNameToMatch) ;
+		if (fsList != null && fsList.size() > 0) {
+			for(String s : fsList) {
+				System.out.println(s) ;
+			}
+		}
+		else {
+			System.err.println("Unable to get file listing for [" + baseDir + (baseDir.endsWith("/") ? "" : "/") + fileNameToMatch + "]  in repository [" + repositoryName + "]") ;
+		}
+	}
+
+}

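Note that the fileMatching argument is a wildcard pattern handled by Apache Commons IO, not a regular expression, despite the filterRegEx variable name. For example:

    FilenameUtils.wildcardMatch("hbase-site.xml", "*.xml");   // true
    FilenameUtils.wildcardMatch("hbase-site.xml", "hdfs-*");  // false
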
http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/lookup-client/src/main/java/com/xasecure/hadoop/client/HadoopFSTester.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hadoop/client/HadoopFSTester.java b/lookup-client/src/main/java/com/xasecure/hadoop/client/HadoopFSTester.java
new file mode 100644
index 0000000..2a91124
--- /dev/null
+++ b/lookup-client/src/main/java/com/xasecure/hadoop/client/HadoopFSTester.java
@@ -0,0 +1,42 @@
+package com.xasecure.hadoop.client;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Properties;
+
+public class HadoopFSTester {
+
+	public static void main(String[] args) throws Throwable {
+		if (args.length < 3) {
+			System.err.println("USAGE: java " + HadoopFS.class.getName() + " repositoryName propertyFile basedirectory  [filenameToMatch]") ;
+			System.exit(1) ;
+		}
+		
+		String repositoryName = args[0] ;
+		String propFile = args[1] ;
+		String baseDir = args[2] ;
+		String fileNameToMatch = (args.length == 3 ? null : args[3]) ;
+
+		Properties conf = new Properties() ;
+		conf.load(HadoopFSTester.class.getClassLoader().getResourceAsStream(propFile));
+		
+		HashMap<String,String> prop = new HashMap<String,String>() ;
+		for(Object key : conf.keySet()) {
+			Object val = conf.get(key) ;
+			prop.put((String)key, (String)val) ;
+		}
+		
+		HadoopFS fs = new HadoopFS(repositoryName, prop) ;
+		List<String> fsList = fs.listFiles(baseDir, fileNameToMatch) ;
+		if (fsList != null && fsList.size() > 0) {
+			for(String s : fsList) {
+				System.out.println(s) ;
+			}
+		}
+		else {
+			System.err.println("Unable to get file listing for [" + baseDir + (baseDir.endsWith("/") ? "" : "/") + fileNameToMatch + "]  in repository [" + repositoryName + "]") ;
+		}
+
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/lookup-client/src/main/java/com/xasecure/hadoop/client/config/BaseClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hadoop/client/config/BaseClient.java b/lookup-client/src/main/java/com/xasecure/hadoop/client/config/BaseClient.java
new file mode 100644
index 0000000..3323298
--- /dev/null
+++ b/lookup-client/src/main/java/com/xasecure/hadoop/client/config/BaseClient.java
@@ -0,0 +1,94 @@
+package com.xasecure.hadoop.client.config;
+
+import java.io.IOException;
+import java.util.HashMap;
+
+import javax.security.auth.Subject;
+
+import org.apache.hadoop.security.SecureClientLogin;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import com.xasecure.hadoop.client.exceptions.HadoopException;
+
+public abstract class BaseClient {
+	
+	private String dataSource ;
+	private Subject loginSubject ;
+	private HadoopConfigHolder configHolder;
+	
+	protected HashMap<String,String> connectionProperties ;
+	
+	public BaseClient(String dataSource) {
+		this.dataSource = dataSource ;
+		init() ;
+		login() ;
+	}
+	
+	public BaseClient(String dataSource, HashMap<String,String> connectionProperties) {
+		this.dataSource = dataSource ;
+		this.connectionProperties = connectionProperties ;
+		init() ;
+		login() ;
+	}
+	
+	
+	private void init() {
+		if (connectionProperties == null) {
+			configHolder = HadoopConfigHolder.getInstance(dataSource) ;
+		}
+		else {
+			configHolder = HadoopConfigHolder.getInstance(dataSource,connectionProperties) ;
+		}
+	}
+	
+	
+	protected void login() {
+		ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
+		try {
+			Thread.currentThread().setContextClassLoader(configHolder.getClassLoader());
+			String userName = configHolder.getUserName() ;
+			if (userName == null) {
+				throw new HadoopException("Unable to find login username for hadoop environment, [" + dataSource + "]") ;
+			}
+			String keyTabFile = configHolder.getKeyTabFile() ;
+			if (keyTabFile != null) {
+				if ( UserGroupInformation.isSecurityEnabled() ) {
+					loginSubject = SecureClientLogin.loginUserFromKeytab(userName, keyTabFile) ;
+				}
+				else {
+					loginSubject = SecureClientLogin.login(userName) ;
+				}
+			}
+			else {
+				String password = configHolder.getPassword() ;
+				if ( UserGroupInformation.isSecurityEnabled() ) {
+					loginSubject = SecureClientLogin.loginUserWithPassword(userName, password) ;
+				}
+				else {
+					loginSubject = SecureClientLogin.login(userName) ;
+				}
+			}
+		}
+		catch(IOException ioe) {
+			throw new HadoopException("Unable to login to Hadoop environment [" + dataSource + "]", ioe) ;
+		}
+		finally {
+			Thread.currentThread().setContextClassLoader(prevCl);
+		}
+	}
+	
+	public String getDataSource() {
+		return dataSource ;
+	}
+
+	protected Subject getLoginSubject() {
+		return loginSubject;
+	}
+
+	protected HadoopConfigHolder getConfigHolder() {
+		return configHolder;
+	}
+	
+	
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/lookup-client/src/main/java/com/xasecure/hadoop/client/config/HadoopClassLoader.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hadoop/client/config/HadoopClassLoader.java b/lookup-client/src/main/java/com/xasecure/hadoop/client/config/HadoopClassLoader.java
new file mode 100644
index 0000000..76c75f9
--- /dev/null
+++ b/lookup-client/src/main/java/com/xasecure/hadoop/client/config/HadoopClassLoader.java
@@ -0,0 +1,85 @@
+package com.xasecure.hadoop.client.config;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URL;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import com.xasecure.hadoop.client.exceptions.HadoopException;
+
+public class HadoopClassLoader extends ClassLoader {
+	
+	private static final Log LOG = LogFactory.getLog(HadoopClassLoader.class) ;
+	
+	private HadoopConfigHolder confHolder ;
+	
+	public HadoopClassLoader(HadoopConfigHolder confHolder) {
+		super(Thread.currentThread().getContextClassLoader()) ;
+		this.confHolder = confHolder;
+	}
+	
+	
+	@Override
+	protected URL findResource(String resourceName) {
+		LOG.debug("findResource(" + resourceName + ") is called.") ;
+		URL ret = null;
+	
+		if (confHolder.hasResourceExists(resourceName)) {
+			ret = buildResourceFile(resourceName) ;
+		}
+		else {
+			ret = super.findResource(resourceName);
+		}
+		LOG.debug("findResource(" + resourceName + ") is returning [" + ret + "]") ;
+		return ret ;
+	}
+	
+	
+	@SuppressWarnings("deprecation")
+	private URL buildResourceFile(String aResourceName) {
+		URL ret = null ;
+		String prefix = aResourceName ;
+		String suffix = ".txt" ;
+
+		Properties prop = confHolder.getProperties(aResourceName) ;
+		
+		if (prop != null && prop.size() > 0) {
+
+			if (aResourceName.contains(".")) {
+				int lastDotFound = aResourceName.lastIndexOf(".") ; 
+				prefix = aResourceName.substring(0,lastDotFound) + "-" ;
+				suffix = aResourceName.substring(lastDotFound) ;
+			}
+			
+			try {
+				File tempFile = File.createTempFile(prefix, suffix) ;
+				tempFile.deleteOnExit();
+				PrintWriter out = new PrintWriter(new FileWriter(tempFile)) ;
+				out.println("<?xml version=\"1.0\"?>") ;
+				out.println("<?xml-stylesheet type=\"text/xsl\" href=\"configuration.xsl\"?>") ;
+				out.println("<configuration xmlns:xi=\"http://www.w3.org/2001/XInclude\">") ;
+				for(Object keyobj : prop.keySet()) {
+					String key = (String)keyobj;
+					String val = prop.getProperty(key) ;
+					out.println("<property><name>" + key.trim() + "</name><value>" + val + "</value></property>") ;
+				}
+				out.println("</configuration>") ;
+				out.close() ;
+				ret = tempFile.toURL() ;
+			} catch (IOException e) {
+				throw new HadoopException("Unable to load create hadoop configuration file [" + aResourceName + "]", e) ;
+			}
+			
+		}
+		
+		return ret ;
+
+	}
+	
+
+}

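For illustration, if the configuration holder carries a single property for the resource core-site.xml (property name and value hypothetical), buildResourceFile() writes a temp file of this shape and hands its URL to the Hadoop Configuration loader:

    <?xml version="1.0"?>
    <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
    <configuration xmlns:xi="http://www.w3.org/2001/XInclude">
    <property><name>fs.default.name</name><value>hdfs://namenode:8020</value></property>
    </configuration>
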
http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/lookup-client/src/main/java/com/xasecure/hadoop/client/config/HadoopConfigHolder.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hadoop/client/config/HadoopConfigHolder.java b/lookup-client/src/main/java/com/xasecure/hadoop/client/config/HadoopConfigHolder.java
new file mode 100644
index 0000000..6200217
--- /dev/null
+++ b/lookup-client/src/main/java/com/xasecure/hadoop/client/config/HadoopConfigHolder.java
@@ -0,0 +1,312 @@
+package com.xasecure.hadoop.client.config;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Properties;
+
+import com.xasecure.hadoop.client.exceptions.HadoopException;
+
+
+public class HadoopConfigHolder  {
+		
+	public static final String GLOBAL_LOGIN_PARAM_PROP_FILE = "hadoop-login.properties" ;
+	public static final String DEFAULT_DATASOURCE_PARAM_PROP_FILE = "datasource.properties" ;
+	public static final String RESOURCEMAP_PROP_FILE = "resourcenamemap.properties" ;
+	public static final String DEFAULT_RESOURCE_NAME = "core-site.xml" ;
+	public static final String XASECURE_SECTION_NAME = "xalogin.xml" ;
+	public static final String XASECURE_LOGIN_USER_NAME_PROP = "username" ;
+	public static final String XASECURE_LOGIN_KEYTAB_FILE_PROP = "keytabfile" ;
+	public static final String XASECURE_LOGIN_PASSWORD = "password" ;
+
+	private static boolean initialized = false ;
+	private static HashMap<String,HashMap<String,Properties>> dataSource2ResourceListMap = new HashMap<String,HashMap<String,Properties>>() ;
+	private static Properties globalLoginProp = new Properties() ;
+	private static HashMap<String,HadoopConfigHolder> dataSource2HadoopConfigHolder = new HashMap<String,HadoopConfigHolder>() ;
+	private static Properties resourcemapProperties = null ;
+	
+	
+	private String datasourceName ;
+	private String userName ;
+	private String keyTabFile ;
+	private String password ;
+	private boolean isKerberosAuth ;
+	
+	private HadoopClassLoader classLoader ;
+	private HashMap<String,String>  connectionProperties; 
+	
+	public static HadoopConfigHolder getInstance(String aDatasourceName) {
+		HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
+		if (ret == null) {
+			synchronized(HadoopConfigHolder.class) {
+				ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
+				if (ret == null) {
+					ret = new HadoopConfigHolder(aDatasourceName) ;
+					dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
+				}
+			}
+		}
+		return ret ;
+	}
+	
+	public static HadoopConfigHolder getInstance(String aDatasourceName, HashMap<String,String> connectionProperties) {
+		HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
+		if (ret == null) {
+			synchronized(HadoopConfigHolder.class) {
+				ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
+				if (ret == null) {
+					ret = new HadoopConfigHolder(aDatasourceName,connectionProperties) ;
+					dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
+				}
+			}
+		}
+		else {
+			if (connectionProperties != null  &&  !connectionProperties.equals(ret.connectionProperties)) {
+				ret = new HadoopConfigHolder(aDatasourceName,connectionProperties) ;
+				dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
+			}
+		}
+		return ret ;
+	}
+	
+	
+
+	private HadoopConfigHolder(String aDatasourceName) {
+		datasourceName = aDatasourceName;
+		if ( ! initialized ) {
+			init() ;
+		}
+		initLoginInfo();
+		initClassLoader() ;
+	}
+	
+	private HadoopConfigHolder(String aDatasourceName, HashMap<String,String> connectionProperties) {
+		datasourceName = aDatasourceName;
+		this.connectionProperties = connectionProperties ;
+		initConnectionProp() ;
+		initLoginInfo();
+		initClassLoader() ;
+	}
+	
+	private void initConnectionProp() {
+		for(String key : connectionProperties.keySet()) {
+			String resourceName = getResourceName(key) ;
+			if (resourceName == null) {
+				resourceName = XASECURE_SECTION_NAME ;
+			}
+			String val = connectionProperties.get(key) ;
+			addConfiguration(datasourceName, resourceName, key, val );
+		}
+	}
+	
+	private String getResourceName(String key) {
+		
+		if (resourcemapProperties == null) {
+			initResourceMap();
+		}
+		
+		if (resourcemapProperties != null) {
+			return resourcemapProperties.getProperty(key);
+		}
+		else {
+			return null;
+		}
+	}
+
+	public static void initResourceMap() {
+		if (resourcemapProperties == null) {
+			resourcemapProperties = new Properties() ;
+			InputStream in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(RESOURCEMAP_PROP_FILE) ;
+			if (in != null) {
+				try {
+					resourcemapProperties.load(in);
+				} catch (IOException e) {
+					throw new HadoopException("Unable to load resource map properties from [" + RESOURCEMAP_PROP_FILE + "]", e);
+				}
+			}
+			else {
+				throw new HadoopException("Unable to locate resource map properties from [" + RESOURCEMAP_PROP_FILE + "] in the class path.");
+			}
+		}
+	}
+
+	
+	
+	private static synchronized void init() {
+
+		if (initialized) {
+			return ;
+		}
+
+		try {
+			InputStream in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(DEFAULT_DATASOURCE_PARAM_PROP_FILE) ;
+			if (in != null) {
+				Properties prop = new Properties() ;
+				try {
+					prop.load(in) ;
+				} catch (IOException e) {
+					throw new HadoopException("Unable to get configuration information for Hadoop environments", e);
+				}
+				finally {
+					try {
+						in.close();
+					} catch (IOException e) {
+						// Ignored exception when the stream is closed.
+					} 
+				}
+	
+				if (prop.size() == 0) 
+					return ;
+				
+				for(Object keyobj : prop.keySet()) {
+					String key = (String)keyobj;
+					String val = prop.getProperty(key) ;
+					
+					int dotLocatedAt = key.indexOf(".") ;
+					
+					if (dotLocatedAt == -1) {
+						continue ;
+					}
+					
+					String dataSource = key.substring(0,dotLocatedAt) ;
+					
+					String propKey = key.substring(dotLocatedAt+1) ;
+					int resourceFoundAt =  propKey.indexOf(".") ;
+					if (resourceFoundAt > -1) {
+						String resourceName = propKey.substring(0, resourceFoundAt) + ".xml" ; 
+						propKey = propKey.substring(resourceFoundAt+1) ;
+						addConfiguration(dataSource, resourceName, propKey, val) ;
+					}
+					
+				}
+			}
+			
+			in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(GLOBAL_LOGIN_PARAM_PROP_FILE) ;
+			if (in != null) {
+				Properties tempLoginProp = new Properties() ;
+				try {
+					tempLoginProp.load(in) ;
+				} catch (IOException e) {
+					throw new HadoopException("Unable to get login configuration information for Hadoop environments from file: [" + GLOBAL_LOGIN_PARAM_PROP_FILE + "]", e);
+				}
+				finally {
+					try {
+						in.close();
+					} catch (IOException e) {
+						// Ignored exception when the stream is closed.
+					} 
+				}
+				globalLoginProp = tempLoginProp ;
+			}
+		}
+		finally {
+			initialized = true ;
+		}
+	}
+	
+	
+	private void initLoginInfo() {
+		Properties prop = this.getXASecureSection() ;
+		if (prop != null) {
+			userName = prop.getProperty(XASECURE_LOGIN_USER_NAME_PROP) ;
+			keyTabFile = prop.getProperty(XASECURE_LOGIN_KEYTAB_FILE_PROP) ;
+			password = prop.getProperty(XASECURE_LOGIN_PASSWORD) ;
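+			// Heuristic: a user name containing '@' is treated as a Kerberos
+			// principal (user@REALM), which enables Kerberos authentication.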
+			isKerberosAuth = (userName != null) && (userName.indexOf("@") > -1) ;
+		}
+		
+	}
+	
+	private void initClassLoader() {
+		classLoader = new HadoopClassLoader(this) ;
+	}
+	
+	
+	public Properties getXASecureSection() {
+		Properties prop = this.getProperties(XASECURE_SECTION_NAME) ;
+		if (prop == null) {
+			prop = globalLoginProp ;
+		}
+		return prop ;
+	}
+
+
+
+	private static void addConfiguration(String dataSource, String resourceName, String propertyName, String value) {
+
+		if (dataSource == null || dataSource.isEmpty()) {
+			return ;
+		}
+		
+		if (propertyName == null || propertyName.isEmpty()) {
+			return ;
+		}
+		
+		if (resourceName == null) {
+			resourceName = DEFAULT_RESOURCE_NAME ;
+		}
+		
+		
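+		// Configuration is held as a two-level map: dataSource -> (resourceName -> Properties).
+		// A null value removes the property from its resource bucket (see below).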
+		HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(dataSource) ;
+		
+		if (resourceName2PropertiesMap == null) {
+			resourceName2PropertiesMap = new HashMap<String,Properties>() ;
+			dataSource2ResourceListMap.put(dataSource, resourceName2PropertiesMap) ;
+		}
+		
+		Properties prop = resourceName2PropertiesMap.get(resourceName) ;
+		if (prop == null) {
+			prop = new Properties() ;
+			resourceName2PropertiesMap.put(resourceName, prop) ;
+		}
+		if (value == null) {
+			prop.remove(propertyName) ;
+		}
+		else {
+			prop.put(propertyName, value) ;
+		}
+	}
+	
+	
+	public String getDatasourceName() {
+		return datasourceName ;
+	}
+	
+	public boolean hasResourceExists(String aResourceName) {
+		HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(datasourceName) ;
+		return (resourceName2PropertiesMap != null && resourceName2PropertiesMap.containsKey(aResourceName)) ;
+ 	}
+
+	public Properties getProperties(String aResourceName) {
+		Properties ret = null ;
+		HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(datasourceName) ;
+		if (resourceName2PropertiesMap != null) {
+			ret =  resourceName2PropertiesMap.get(aResourceName) ;
+		}
+		return ret ;
+ 	}
+	
+	public String getUserName() {
+		return userName;
+	}
+
+	public String getKeyTabFile() {
+		return keyTabFile;
+	}
+
+	public String getPassword() {
+		return password;
+	}
+
+	public HadoopClassLoader getClassLoader() {
+		return classLoader;
+	}
+
+	public boolean isKerberosAuthentication() {
+		return isKerberosAuth;
+	}
+
+
+	
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/lookup-client/src/main/java/com/xasecure/hadoop/client/exceptions/HadoopException.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hadoop/client/exceptions/HadoopException.java b/lookup-client/src/main/java/com/xasecure/hadoop/client/exceptions/HadoopException.java
new file mode 100644
index 0000000..692e834
--- /dev/null
+++ b/lookup-client/src/main/java/com/xasecure/hadoop/client/exceptions/HadoopException.java
@@ -0,0 +1,28 @@
+package com.xasecure.hadoop.client.exceptions;
+
+public class HadoopException extends RuntimeException {
+
+	private static final long serialVersionUID = 8872734935128535649L;
+
+	public HadoopException() {
+		super();
+	}
+
+	public HadoopException(String message, Throwable cause) {
+		super(message, cause);
+	}
+
+	public HadoopException(String message) {
+		super(message);
+	}
+
+	public HadoopException(Throwable cause) {
+		super(cause);
+	}
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClient.java b/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClient.java
new file mode 100644
index 0000000..6a19cbf
--- /dev/null
+++ b/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClient.java
@@ -0,0 +1,182 @@
+package com.xasecure.hbase.client;
+
+import java.io.IOException;
+import java.security.PrivilegedAction;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import javax.security.auth.Subject;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.security.SecureClientLogin;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import com.xasecure.hadoop.client.config.BaseClient;
+
+public class HBaseClient extends BaseClient {
+
+	private static final Log LOG = LogFactory.getLog(HBaseClient.class) ;
+
+	private Subject subj = null ;
+
+	public HBaseClient(String dataSource) {
+		super(dataSource) ;
+		initHBase() ;
+	}
+
+	public HBaseClient(String dataSource,HashMap<String,String> connectionProp) {
+		super(dataSource, addDefaultHBaseProp(connectionProp)) ;
+		initHBase() ;
+	}
+	
+	// TODO: temporary workaround until these settings can be configured from the UI for HBase
+	private static HashMap<String,String> addDefaultHBaseProp(HashMap<String,String> connectionProp) {
+		if (connectionProp != null) {
+			
+			String param = "zookeeper.znode.parent" ;
+			String unsecuredPath = "/hbase-unsecure" ;
+			String authParam = "hadoop.security.authorization" ;
+			
+			String ret = connectionProp.get(param) ;
+			LOG.info("HBase connection has [" + param + "] with value [" + ret + "]");
+			if (ret == null) {
+				ret = connectionProp.get(authParam) ;
+				LOG.info("HBase connection has [" + authParam + "] with value [" + ret + "]");
+				if (ret != null && ret.trim().equalsIgnoreCase("false")) {
+					LOG.info("HBase connection is resetting [" + param + "] with value [" + unsecuredPath + "]");
+					connectionProp.put(param, unsecuredPath) ;
+				}
+			}
+		}
+		return connectionProp;
+	}
+
+	
+	public void initHBase() {
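+		// Log in up front: with Hadoop security enabled, authenticate with a
+		// password or keytab; otherwise fall back to a simple (unsecured) login
+		// for the configured user.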
+		try {
+			if (UserGroupInformation.isSecurityEnabled()) {
+				if (getConfigHolder().getKeyTabFile() == null) {
+						subj = SecureClientLogin.loginUserWithPassword(getConfigHolder().getUserName(), getConfigHolder().getPassword()) ;
+				}
+				else {
+					subj = SecureClientLogin.loginUserFromKeytab(getConfigHolder().getUserName() , getConfigHolder().getKeyTabFile()) ;
+				}
+			}
+			else {
+				subj = SecureClientLogin.login(getConfigHolder().getUserName()) ;
+			}
+		} catch (IOException e) {
+			LOG.error("Unable to perform secure login to HBase environment [" + getConfigHolder().getDatasourceName() + "]", e);
+		}
+	}
+	
+	
+	
+	public List<String> getTableList(final String tableNameMatching) {
+		List<String> ret = null ;
+		
+		if (subj != null) {
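+			// Swap in the datasource-specific class loader so that
+			// HBaseConfiguration.create() resolves this datasource's *-site.xml
+			// settings, and run the lookup as the authenticated subject.
+			// Note: HBaseAdmin.listTables() treats tableNameMatching as a regex.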
+			ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
+			try {
+				Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
+	
+				ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
+		
+					@Override
+					public List<String> run() {
+						
+						List<String> tableList = new ArrayList<String>() ;
+						HBaseAdmin admin = null ;
+						try {
+							
+							Configuration conf = HBaseConfiguration.create() ;
+							admin = new HBaseAdmin(conf) ;
+							for (HTableDescriptor htd : admin.listTables(tableNameMatching)) {
+								tableList.add(htd.getNameAsString()) ;
+							}
+						}
+						catch(Throwable t) {
+							LOG.error("Unable to get HBase table List for [repository:" + getConfigHolder().getDatasourceName() + ",table-match:" + tableNameMatching + "]", t);
+						}
+						finally {
+							if (admin != null) {
+								try {
+									admin.close() ;
+								} catch (IOException e) {
+									LOG.error("Unable to close HBase connection [" + getConfigHolder().getDatasourceName() + "]", e);
+								}
+							}
+						}
+						return tableList ;
+					}
+					
+				}) ;
+			}
+			finally {
+				Thread.currentThread().setContextClassLoader(prevCl);
+			}
+		}
+		return ret ;
+	}
+	
+	
+	public List<String> getColumnFamilyList(final String tableName, final String columnFamilyMatching) {
+		List<String> ret = null ;		
+		if (subj != null) {
+			ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
+			try {
+				Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
+				
+				ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
+		
+					@Override
+					public List<String> run() {
+						
+						List<String> colfList = new ArrayList<String>() ;
+						HBaseAdmin admin = null ;
+						try {
+							Configuration conf = HBaseConfiguration.create();
+							admin = new HBaseAdmin(conf) ;
+							HTableDescriptor htd = admin.getTableDescriptor(tableName.getBytes()) ;
+							if (htd != null) {
+								for (HColumnDescriptor hcd : htd.getColumnFamilies()) {
+									String colf = hcd.getNameAsString() ;
+									if (colf.matches(columnFamilyMatching)) {
+										if (!colfList.contains(colf)) {
+											colfList.add(colf) ;
+										}
+									}
+								}
+							}
+						}
+						catch(Throwable t) {
+							LOG.error("Unable to get HBase column family list for [repository:" + getConfigHolder().getDatasourceName() + ", table:" + tableName + ", column-family-match:" + columnFamilyMatching + "]", t);
+						}
+						finally {
+							if (admin != null) {
+								try {
+									admin.close() ;
+								} catch (IOException e) {
+									LOG.error("Unable to close HBase connection [" + getConfigHolder().getDatasourceName() + "]", e);
+								}
+							}
+						}
+						return colfList ;
+					}
+					
+				}) ;
+			}
+			finally {
+				Thread.currentThread().setContextClassLoader(prevCl);
+			}
+		}
+		return ret ;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClientTester.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClientTester.java b/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClientTester.java
new file mode 100644
index 0000000..431d978
--- /dev/null
+++ b/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClientTester.java
@@ -0,0 +1,59 @@
+package com.xasecure.hbase.client;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class HBaseClientTester {
+
+	private static final Log LOG = LogFactory.getLog(HBaseClientTester.class) ;
+
+	public static void main(String[] args) throws Throwable {
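+		// A hypothetical invocation (datasource and file names are placeholders):
+		//   java com.xasecure.hbase.client.HBaseClientTester dev-hbase hbase-conn.properties ".*"
+		// Three arguments list matching tables; a fourth lists column families.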
+
+		HBaseClient hc = null;
+
+		if (args.length <= 2) {
+			System.err.println("USAGE: java " + HBaseClientTester.class.getName() + " dataSourceName propertyFile <tableName> <columnFamilyName>");
+			System.exit(1);
+		}
+		
+		LOG.info("Starting ...");
+
+		Properties conf = new Properties();
+		
+		conf.load(HBaseClientTester.class.getClassLoader().getResourceAsStream(args[1]));
+
+		HashMap<String, String> prop = new HashMap<String, String>();
+		for (Object key : conf.keySet()) {
+			Object val = conf.get(key);
+			prop.put((String) key, (String) val);
+		}
+
+		hc = new HBaseClient(args[0], prop);
+
+		if (args.length == 3) {
+			List<String> dbList = hc.getTableList(args[2]);
+			if (dbList.size() == 0) {
+				System.out.println("No tables found with db filter [" + args[2] + "]");
+			} else {
+				for (String str : dbList) {
+					System.out.println("table: " + str);
+				}
+			}
+		} else if (args.length == 4) {
+			List<String> tableList = hc.getColumnFamilyList(args[2], args[3]);
+			if (tableList.size() == 0) {
+				System.out.println("No column families found under table [" + args[2] + "] with columnfamily filter [" + args[3] + "]");
+			} else {
+				for (String str : tableList) {
+					System.out.println("ColumnFamily: " + str);
+				}
+			}
+		}
+
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/lookup-client/src/main/java/com/xasecure/hive/client/HiveClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hive/client/HiveClient.java b/lookup-client/src/main/java/com/xasecure/hive/client/HiveClient.java
new file mode 100644
index 0000000..2f6950b
--- /dev/null
+++ b/lookup-client/src/main/java/com/xasecure/hive/client/HiveClient.java
@@ -0,0 +1,333 @@
+package com.xasecure.hive.client;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.security.PrivilegedAction;
+import java.sql.Connection;
+import java.sql.Driver;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Properties;
+
+import javax.security.auth.Subject;
+
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.security.SecureClientLogin;
+
+import com.xasecure.hadoop.client.config.BaseClient;
+import com.xasecure.hadoop.client.exceptions.HadoopException;
+
+public class HiveClient extends BaseClient implements Closeable {
+
+	private static final Log LOG = LogFactory.getLog(HiveClient.class) ;
+	
+	Connection con = null ;
+
+	public HiveClient(String dataSource) {
+		super(dataSource) ;
+		initHive() ;
+	}
+
+	public HiveClient(String dataSource,HashMap<String,String> connectionProp) {
+		super(dataSource,connectionProp) ;
+		initHive() ;
+	}
+
+	
+	public void initHive() {
+		try {
+			Subject subj = null ;
+			
+			if (getConfigHolder().isKerberosAuthentication()) {
+				if (getConfigHolder().getKeyTabFile() != null) {
+					LOG.info("Keytab file is provided; using SecureClientLogin with keytab-based login");
+					subj = SecureClientLogin.loginUserFromKeytab(getConfigHolder().getUserName() , getConfigHolder().getKeyTabFile()) ;
+				}
+				else {
+					LOG.info("Keytab file is not provided; using SecureClientLogin with password-based login");
+					subj = SecureClientLogin.loginUserWithPassword(getConfigHolder().getUserName() , getConfigHolder().getPassword()) ;
+				}
+				
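+				// Open the JDBC connection inside doAs() so the handshake runs
+				// with the Kerberos credentials of the logged-in subject.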
+				Subject.doAs(subj,  new PrivilegedAction<Object>() {
+					public Object run() {
+						initConnection();
+						return null;
+					}
+				}) ;
+				
+			}
+			else {
+				LOG.info("Kerberos authentication is not enabled; connecting with username and password");
+				String userName = getConfigHolder().getUserName() ;
+				String password = getConfigHolder().getPassword() ;
+				initConnection(userName,password);
+			}
+		} catch (IOException e) {
+			LOG.error("Unable to perform secure login to Hive environment [" + getConfigHolder().getDatasourceName() + "]", e);
+		}
+	}
+	
+	
+	
+	public List<String> getDatabaseList(String databaseMatching) {
+		List<String> ret = new ArrayList<String>() ;
+		if (con != null) {
+			Statement stat =  null ;
+			ResultSet rs = null ;
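+			// Hive's SHOW DATABASES LIKE takes a quoted wildcard pattern; the
+			// caller-supplied filter is interpolated into the statement as-is.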
+			String sql = "show databases" ;
+			if (databaseMatching != null && ! databaseMatching.isEmpty()) {
+				sql = sql + " like \"" + databaseMatching  + "\"" ;
+			}
+			try {
+				stat =  con.createStatement()  ;
+				rs = stat.executeQuery(sql) ;
+				while (rs.next()) {
+					ret.add(rs.getString(1)) ;
+				}
+ 			}
+			catch(SQLException sqle) {
+				throw new HadoopException("Unable to execute SQL [" + sql + "]", sqle);
+			}
+			finally {
+				close(rs) ;
+				close(stat) ;
+			}
+			
+		}
+		return ret ;
+	}
+
+	public List<String> getTableList(String database, String tableNameMatching) {
+		List<String> ret = new ArrayList<String>() ;
+		if (con != null) {
+			Statement stat =  null ;
+			ResultSet rs = null ;
+			
+			String sql = null ;
+			
+			try {
+				sql = "use " + database;
+				
+				try {
+					stat = con.createStatement() ;
+					stat.execute(sql) ;
+				}
+				finally {
+					close(stat) ;
+				}
+				
+				sql = "show tables" ;
+				if (tableNameMatching != null && ! tableNameMatching.isEmpty()) {
+					sql = sql + " like \"" + tableNameMatching  + "\"" ;
+				}
+				stat =  con.createStatement()  ;
+				rs = stat.executeQuery(sql) ;
+				while (rs.next()) {
+					ret.add(rs.getString(1)) ;
+				}
+ 			}
+			catch(SQLException sqle) {
+				throw new HadoopException("Unable to execute SQL [" + sql + "]", sqle);
+			}
+			finally {
+				close(rs) ;
+				close(stat) ;
+			}
+			
+		}
+		return ret ;
+	}
+
+	public List<String> getViewList(String database, String viewNameMatching) {
+		// Not implemented yet: view lookup is unsupported, so callers receive null.
+		List<String> ret = null ;
+		return ret ;
+	}
+
+	public List<String> getUDFList(String database, String udfMatching) {
+		// Not implemented yet: UDF lookup is unsupported, so callers receive null.
+		List<String> ret = null ;
+		return ret ;
+	}
+
+	public List<String> getColumnList(String database, String tableName, String columnNameMatching) {
+		List<String> ret = new ArrayList<String>() ;
+		if (con != null) {
+			
+			String columnNameMatchingRegEx = null ;
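+			// Despite the variable name, the filter is applied below with
+			// FilenameUtils.wildcardMatch(), i.e. '*'/'?' wildcards rather
+			// than a regular expression.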
+			
+			if (columnNameMatching != null && ! columnNameMatching.isEmpty()) {
+				columnNameMatchingRegEx = columnNameMatching ;
+			}
+			
+			Statement stat =  null ;
+			ResultSet rs = null ;
+			
+			String sql = null ;
+			
+			try {
+				sql = "use " + database;
+				
+				try {
+					stat = con.createStatement() ;
+					stat.execute(sql) ;
+				}
+				finally {
+					close(stat) ;
+				}
+				
+				sql = "describe " + tableName ;
+				stat =  con.createStatement()  ;
+				rs = stat.executeQuery(sql) ;
+				while (rs.next()) {
+					String columnName = rs.getString(1) ;
+					if (columnNameMatchingRegEx == null) {
+						ret.add(columnName) ;
+					}
+					else if (FilenameUtils.wildcardMatch(columnName,columnNameMatchingRegEx)) {
+						ret.add(columnName) ;
+					}
+				}
+ 			}
+			catch(SQLException sqle) {
+				throw new HadoopException("Unable to execute SQL [" + sql + "]", sqle);
+			}
+			finally {
+				close(rs) ;
+				close(stat) ;
+			}
+			
+		}
+		return ret ;
+	}
+	
+	
+	public void close() {
+		close(con) ;
+	}
+	
+	private void close(Statement aStat) {
+		try {
+			if (aStat != null) {
+				aStat.close();
+			}
+		} catch (SQLException e) {
+			LOG.error("Unable to close SQL statement", e);
+		}
+	}
+
+	private void close(ResultSet aResultSet) {
+		try {
+			if (aResultSet != null) {
+				aResultSet.close();
+			}
+		} catch (SQLException e) {
+			LOG.error("Unable to close ResultSet", e);
+		}
+	}
+
+	private void close(Connection aCon) {
+		try {
+			if (aCon != null) {
+				aCon.close();
+			}
+		} catch (SQLException e) {
+			LOG.error("Unable to close SQL Connection", e);
+		}
+	}
+
+	private void initConnection() {
+		initConnection(null,null) ;
+	}
+
+	
+	private void initConnection(String userName, String password) {
+		Properties prop = getConfigHolder().getXASecureSection() ;
+		
+		String driverClassName = prop.getProperty("jdbc.driverClassName") ;
+		String url =  prop.getProperty("jdbc.url") ;
+		
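+		// Load and register the configured JDBC driver before opening the
+		// connection; jdbc.url is expected to point at the HiveServer endpoint.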
+		if (driverClassName != null) {
+			try {
+				Driver driver = (Driver)Class.forName(driverClassName).newInstance() ;
+				DriverManager.registerDriver(driver);
+			} catch (Throwable t) {
+				throw new HadoopException("Unable to load or register JDBC driver [" + driverClassName + "]", t) ;
+			}
+		}
+		
+		try {
+			if (userName == null && password == null) {
+				con = DriverManager.getConnection(url) ;
+			}
+			else {
+				con = DriverManager.getConnection(url, userName, password) ;
+			}
+		} catch (SQLException e) {
+			throw new HadoopException("Unable to connect to Hive Thrift Server instance", e) ;
+		}
+		
+	}
+	
+	
+	
+	public static void main(String[] args) {
+		
+		HiveClient hc = null ;
+		
+		if (args.length == 0) {
+			System.err.println("USAGE: java " + HiveClient.class.getName() + " dataSourceName <databaseName> <tableName> <columnName>") ;
+			System.exit(1) ;
+		}
+		
+		
+		try {
+			hc = new HiveClient(args[0]) ;
+			
+			if (args.length == 2) {
+				List<String> dbList = hc.getDatabaseList(args[1]) ;
+				if (dbList.size() == 0) {
+					System.out.println("No database found with db filter [" + args[1] + "]") ;
+				}
+				else {
+					for (String str : dbList ) {
+						System.out.println("database: " + str ) ;
+					}
+				}
+			}
+			else if (args.length == 3) {
+				List<String> tableList = hc.getTableList(args[1], args[2]) ;
+				if (tableList.size() == 0) {
+					System.out.println("No tables found under database[" + args[1] + "] with table filter [" + args[2] + "]") ;
+				}
+				else {
+					for(String str : tableList) {
+						System.out.println("Table: " + str) ;
+					}
+				}
+			}
+			else if (args.length == 4) {
+				List<String> columnList = hc.getColumnList(args[1], args[2], args[3]) ;
+				if (columnList.size() == 0) {
+					System.out.println("No columns found for db:" + args[1] + ", table: [" + args[2] + "], with column filter [" + args[3] + "]") ;
+				}
+				else {
+					for (String str : columnList ) {
+						System.out.println("Column: " + str) ;
+					}
+				}
+			}
+			
+		}
+		finally {
+			if (hc != null) {
+				hc.close();
+			}
+		}	
+	}	
+}

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7defc061/lookup-client/src/main/java/com/xasecure/hive/client/HiveClientTester.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hive/client/HiveClientTester.java b/lookup-client/src/main/java/com/xasecure/hive/client/HiveClientTester.java
new file mode 100644
index 0000000..7c48068
--- /dev/null
+++ b/lookup-client/src/main/java/com/xasecure/hive/client/HiveClientTester.java
@@ -0,0 +1,78 @@
+package com.xasecure.hive.client;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Properties;
+
+public class HiveClientTester  {
+
+	public static void main(String[] args) throws Throwable {
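+		// A hypothetical invocation (datasource and file names are placeholders):
+		//   java com.xasecure.hive.client.HiveClientTester dev-hive hive-conn.properties default
+		// Three arguments list matching databases; four list tables; five list columns.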
+		
+		HiveClient hc = null ;
+		
+		if (args.length <= 2) {
+			System.err.println("USAGE: java " + HiveClientTester.class.getName() + " dataSourceName propertyFile <databaseName> <tableName> <columnName>") ;
+			System.exit(1) ;
+		}
+		
+		
+		try {
+			
+			Properties conf = new Properties() ;
+			conf.load(HiveClientTester.class.getClassLoader().getResourceAsStream(args[1]));
+			
+			HashMap<String,String> prop = new HashMap<String,String>() ;
+			for(Object key : conf.keySet()) {
+				Object val = conf.get(key) ;
+				prop.put((String)key, (String)val) ;
+			}
+
+			
+			hc = new HiveClient(args[0], prop) ;
+			
+			
+			if (args.length == 3) {
+				List<String> dbList = hc.getDatabaseList(args[2]) ;
+				if (dbList.size() == 0) {
+					System.out.println("No database found with db filter [" + args[2] + "]") ;
+				}
+				else {
+					for (String str : dbList ) {
+						System.out.println("database: " + str ) ;
+					}
+				}
+			}
+			else if (args.length == 4) {
+				List<String> tableList = hc.getTableList(args[2], args[3]) ;
+				if (tableList.size() == 0) {
+					System.out.println("No tables found under database[" + args[2] + "] with table filter [" + args[3] + "]") ;
+				}
+				else {
+					for(String str : tableList) {
+						System.out.println("Table: " + str) ;
+					}
+				}
+			}
+			else if (args.length == 5) {
+				List<String> columnList = hc.getColumnList(args[2], args[3], args[4]) ;
+				if (columnList.size() == 0) {
+					System.out.println("No columns found for db:" + args[2] + ", table: [" + args[3] + "], with column filter [" + args[4] + "]") ;
+				}
+				else {
+					for (String str : columnList ) {
+						System.out.println("Column: " + str) ;
+					}
+				}
+			}
+			
+		}
+		finally {
+			if (hc != null) {
+				hc.close();
+			}
+		}
+		
+	}
+	
+
+}

