ranger-commits mailing list archives

From ank...@apache.org
Subject ranger git commit: RANGER-1483 : Ranger hive service definition to use hive metastore directly
Date Fri, 31 Mar 2017 17:19:54 GMT
Repository: ranger
Updated Branches:
  refs/heads/master 84e90c132 -> 6cfb01883


RANGER-1483 : Ranger hive service definition to use hive metastore directly


Project: http://git-wip-us.apache.org/repos/asf/ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/ranger/commit/6cfb0188
Tree: http://git-wip-us.apache.org/repos/asf/ranger/tree/6cfb0188
Diff: http://git-wip-us.apache.org/repos/asf/ranger/diff/6cfb0188

Branch: refs/heads/master
Commit: 6cfb01883fb97bd98e5e5b7baacb3cdd85a15b68
Parents: 84e90c1
Author: Ankita Sinha <ankita@apache.org>
Authored: Fri Mar 31 14:35:34 2017 +0530
Committer: ankita <ankita@apache.org>
Committed: Fri Mar 31 22:49:21 2017 +0530

----------------------------------------------------------------------
 .../plugin/client/HadoopConfigHolder.java       | 126 +++--
 .../ranger/services/hive/client/HiveClient.java | 484 +++++++++++++------
 security-admin/pom.xml                          |   5 +
 .../scripts/views/service/ConfigurationList.js  |   6 +-
 4 files changed, 419 insertions(+), 202 deletions(-)
----------------------------------------------------------------------
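Context for the diff below: the patch adds two new connection properties to the Hive service definition, enable.hive.metastore.lookup (defaulted to "false") and hive.site.file.path, both resolved in HadoopConfigHolder. As a rough illustration only — the lookup user and file path are made-up example values, not part of this commit — a Hive service configured for metastore lookup could carry properties like this:

import java.util.HashMap;
import java.util.Map;

// Hypothetical illustration of the new connection properties; the property names
// match the constants added to HadoopConfigHolder below, the values are example-only.
public class HiveLookupConfigSketch {
    public static void main(String[] args) {
        Map<String, String> connectionProperties = new HashMap<>();
        connectionProperties.put("username", "rangerlookup");                             // example lookup user
        connectionProperties.put("enable.hive.metastore.lookup", "true");                 // switch to metastore lookup
        connectionProperties.put("hive.site.file.path", "/etc/hive/conf/hive-site.xml");  // assumed path

        // With the flag left unset, initConnectionProp() defaults it to "false",
        // so existing services keep using the JDBC/HiveServer2 lookup path.
        System.out.println(connectionProperties);
    }
}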


http://git-wip-us.apache.org/repos/asf/ranger/blob/6cfb0188/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
index 8e79706..b78930f 100644
--- a/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
+++ b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
@@ -27,6 +27,7 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.security.SecureClientLogin;
@@ -53,23 +54,27 @@ public class HadoopConfigHolder  {
 	public static final String HADOOP_SECURITY_AUTHENTICATION_METHOD = "kerberos";
 	public static final String HADOOP_RPC_PROTECTION = "hadoop.rpc.protection";
 	
+	public static final String ENABLE_HIVE_METASTORE_LOOKUP = "enable.hive.metastore.lookup";
+	public static final String HIVE_SITE_FILE_PATH = "hive.site.file.path";
+
 	private static boolean initialized;
 	private static Map<String,HashMap<String,Properties>> dataSource2ResourceListMap = new HashMap<>();
-	private static Properties globalLoginProp = new Properties();
 	private static Map<String,HadoopConfigHolder> dataSource2HadoopConfigHolder = new HashMap<>();
+	private static Properties globalLoginProp = new Properties();
 	private static Properties resourcemapProperties;
 	
-	
 	private String datasourceName;
 	private String defaultConfigFile;
 	private String userName;
 	private String keyTabFile;
 	private String password;
-	private boolean isKerberosAuth;
 	private String lookupPrincipal;
 	private String lookupKeytab;
 	private String nameRules;
 	private String authType;
+	private String hiveSiteFilePath;
+	private boolean isKerberosAuth;
+	private boolean enableHiveMetastoreLookup;
 	
 	private Map<String,String>  connectionProperties;
 
@@ -78,7 +83,7 @@ public class HadoopConfigHolder  {
 	public static HadoopConfigHolder getInstance(String aDatasourceName) {
 		HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName);
 		if (ret == null) {
-			synchronized(HadoopConfigHolder.class) {
+			synchronized (HadoopConfigHolder.class) {
 				HadoopConfigHolder temp = ret;
 				if (temp == null) {
 					ret = new HadoopConfigHolder(aDatasourceName);
@@ -97,7 +102,7 @@ public class HadoopConfigHolder  {
                                                String defaultConfigFile) {
 		HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName);
 		if (ret == null) {
-			synchronized(HadoopConfigHolder.class) {
+			synchronized (HadoopConfigHolder.class) {
 				HadoopConfigHolder temp = ret;
 				if (temp == null) {
 					ret = new HadoopConfigHolder(aDatasourceName,connectionProperties, defaultConfigFile);
@@ -120,27 +125,35 @@ public class HadoopConfigHolder  {
 
 	private HadoopConfigHolder(String aDatasourceName) {
 		datasourceName = aDatasourceName;
-		if ( ! initialized ) {
+		if (!initialized) {
 			init();
 		}
 		initLoginInfo();
 	}
 
-  private HadoopConfigHolder(String aDatasourceName, Map<String,String> connectionProperties) {
-   this(aDatasourceName, connectionProperties, null);
-  }
+	private HadoopConfigHolder(String aDatasourceName,
+			Map<String, String> connectionProperties) {
+		this(aDatasourceName, connectionProperties, null);
+	}
 
 	private HadoopConfigHolder(String aDatasourceName, Map<String,String> connectionProperties,
                              String defaultConfigFile) {
 		datasourceName = aDatasourceName;
 		this.connectionProperties = connectionProperties;
-    this.defaultConfigFile = defaultConfigFile;
+		this.defaultConfigFile = defaultConfigFile;
 		initConnectionProp();
 		initLoginInfo();
 	}
 	
 	private void initConnectionProp() {
-		for(Map.Entry<String,String> entry : connectionProperties.entrySet()) {
+		if (!connectionProperties.containsKey(ENABLE_HIVE_METASTORE_LOOKUP)) {
+			 connectionProperties.put(ENABLE_HIVE_METASTORE_LOOKUP, "false");
+		 }
+		if (!connectionProperties.containsKey(HIVE_SITE_FILE_PATH)) {
+			 connectionProperties.put(HIVE_SITE_FILE_PATH, null);
+		 }
+
+		for (Map.Entry<String, String> entry : connectionProperties.entrySet()) {
 			String key = entry.getKey();
 			String resourceName = getResourceName(key);
 			
@@ -159,10 +172,9 @@ public class HadoopConfigHolder  {
 		}
 
 		if (resourcemapProperties != null) {
-      String rn = resourcemapProperties.getProperty(key);
-      return ( rn != null)  ? rn : defaultConfigFile;
-		}
-		else {
+			String rn = resourcemapProperties.getProperty(key);
+			return ( rn != null)  ? rn : defaultConfigFile;
+		} else {
 			return defaultConfigFile;
 		}
 	}
@@ -174,13 +186,14 @@ public class HadoopConfigHolder  {
 			if (in != null) {
 				try {
 					resourcemapProperties.load(in);
-		          for (Map.Entry<Object, Object> entry : resourcemapProperties.entrySet() ) {
-		            String key = (String)entry.getKey();
-		            String value = (String)entry.getValue();
-		            if (RANGER_SECTION_NAME.equals(value))  {
-		              rangerInternalPropertyKeys.add(key);
-		            }
-		          }
+					for (Map.Entry<Object, Object> entry : resourcemapProperties
+							.entrySet()) {
+						String key = (String) entry.getKey();
+						String value = (String) entry.getValue();
+						if (RANGER_SECTION_NAME.equals(value)) {
+							rangerInternalPropertyKeys.add(key);
+						}
+					}
 				} catch (IOException e) {
 					throw new HadoopException("Unable to load resource map properties from [" + RESOURCEMAP_PROP_FILE + "]", e);
 				}
@@ -188,14 +201,12 @@ public class HadoopConfigHolder  {
 					if (in != null) {
 						try {
 							in.close();
-						}
-						catch(IOException ioe) {
+						} catch (IOException ioe) {
 							// Ignore IOException during close of stream
 						}
 					}
 				}
-			}
-			else {
+			} else {
 				throw new HadoopException("Unable to locate resource map properties from [" + RESOURCEMAP_PROP_FILE + "] in the class path.");
 			}
 		}
@@ -230,7 +241,7 @@ public class HadoopConfigHolder  {
 					return;
 				}
 				
-				for(Object keyobj : prop.keySet()) {
+				for (Object keyobj : prop.keySet()) {
 					String key = (String)keyobj;
 					String val = prop.getProperty(key);
 					
@@ -282,10 +293,24 @@ public class HadoopConfigHolder  {
 		if (prop != null) {
 			userName = prop.getProperty(RANGER_LOGIN_USER_NAME_PROP);
 			keyTabFile = prop.getProperty(RANGER_LOGIN_KEYTAB_FILE_PROP);
+			if (!StringUtils.isEmpty(prop.getProperty(ENABLE_HIVE_METASTORE_LOOKUP).trim())) {
+				try {
+					enableHiveMetastoreLookup = Boolean.valueOf(prop.getProperty(ENABLE_HIVE_METASTORE_LOOKUP,"false").trim());
+				} catch (Exception e) {
+					enableHiveMetastoreLookup = false;
+					LOG.error("Error while getting " + ENABLE_HIVE_METASTORE_LOOKUP + " : " + e.getMessage());
+				}
+			}
+			if (!StringUtils.isEmpty(prop.getProperty(HIVE_SITE_FILE_PATH))) {
+				hiveSiteFilePath = prop.getProperty(HIVE_SITE_FILE_PATH).trim();
+			} else {
+				hiveSiteFilePath = null;
+			}
+
 			String plainTextPwd = prop.getProperty(RANGER_LOGIN_PASSWORD);
 			try {
 				password = PasswordUtils.encryptPassword(plainTextPwd);
-			}catch (IOException e) {
+			} catch (IOException e) {
 				throw new HadoopException("Unable to initialize login info", e);
 			}
 
@@ -296,10 +321,9 @@ public class HadoopConfigHolder  {
 			
 			String hadoopSecurityAuthentication =  getHadoopSecurityAuthentication();
 
-			if ( hadoopSecurityAuthentication != null) {
+			if (hadoopSecurityAuthentication != null) {
 				isKerberosAuth = HADOOP_SECURITY_AUTHENTICATION_METHOD.equalsIgnoreCase(hadoopSecurityAuthentication);
-			}
-			else {
+			} else {
 				isKerberosAuth = (((userName != null) && (userName.indexOf("@") > -1)) || (SecureClientLogin.isKerberosCredentialExists(lookupPrincipal, lookupKeytab)));
 			}
 		}
@@ -345,8 +369,7 @@ public class HadoopConfigHolder  {
 		}
 		if (value == null) {
 			prop.remove(propertyName);
-		}
-		else {
+		} else {
 			prop.put(propertyName, value);
 		}
 	}
@@ -374,17 +397,17 @@ public class HadoopConfigHolder  {
 		String ret = null;
 		String sectionName = RANGER_SECTION_NAME;
 
-		if ( defaultConfigFile != null) {
+		if (defaultConfigFile != null) {
 			sectionName = defaultConfigFile;
 		}
 
-		if ( LOG.isDebugEnabled() ) {
+		if (LOG.isDebugEnabled()) {
 			LOG.debug("==> HadoopConfigHolder.getHadoopSecurityAuthentication( " + " DataSource : " + sectionName + " Property : " +  HADOOP_SECURITY_AUTHENTICATION + ")" );
 		}
 
 		ret = getProperties(sectionName,HADOOP_SECURITY_AUTHENTICATION);
 		
-		if ( LOG.isDebugEnabled() ) {
+		if (LOG.isDebugEnabled()) {
 			LOG.debug("<== HadoopConfigHolder.getHadoopSecurityAuthentication(" + " DataSource : " + sectionName + " Property : " +  HADOOP_SECURITY_AUTHENTICATION  + " Value : " + ret + ")" );
 		}
 
@@ -407,30 +430,37 @@ public class HadoopConfigHolder  {
 		return isKerberosAuth;
 	}
 	
-	public String getLookupPrincipal(){
+	public String getLookupPrincipal() {
 		return lookupPrincipal;
 	}
 
-	public String getLookupKeytab(){
+	public String getLookupKeytab() {
 		return lookupKeytab;
 	}
 
-	public String getNameRules(){
+	public String getNameRules() {
 		return nameRules;
 	}
-	
-	public String getAuthType(){
+
+	public String getAuthType() {
 		return authType;
 	}
 
-  public Set<String> getRangerInternalPropertyKeys() {
-    return rangerInternalPropertyKeys;
+	public boolean isEnableHiveMetastoreLookup() {
+		return enableHiveMetastoreLookup;
+	}
 
-  }
+	public String getHiveSiteFilePath() {
+		return hiveSiteFilePath;
+	}
+
+	public Set<String> getRangerInternalPropertyKeys() {
+		return rangerInternalPropertyKeys;
+	}
 
 	private String getProperties(String sectionName, String property) {
 
-		if ( LOG.isDebugEnabled() ) {
+		if (LOG.isDebugEnabled()) {
 			LOG.debug("==> HadoopConfigHolder.getProperties( " + " DataSource : " + sectionName + " Property : " +  property + ")" );
 		}
 
@@ -439,15 +469,15 @@ public class HadoopConfigHolder  {
 
 		HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(this.getDatasourceName());
 
-		if ( resourceName2PropertiesMap != null) {
+		if (resourceName2PropertiesMap != null) {
 			repoParam=resourceName2PropertiesMap.get(sectionName);
 		}
 
-		if ( repoParam != null ) {
+		if (repoParam != null) {
 			ret = (String)repoParam.get(property);
 		}
 
-		if ( LOG.isDebugEnabled() ) {
+		if (LOG.isDebugEnabled()) {
 			LOG.debug("<== HadoopConfigHolder.getProperties( " + " DataSource : " + sectionName + " Property : " +  property + " Value : " + ret);
 		}
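To make the defaulting behaviour added in initConnectionProp() and initLoginInfo() above easier to follow, here is a small standalone sketch of roughly the effective result (class and method names are illustrative, not from the patch): a missing or unparsable enable.hive.metastore.lookup value resolves to false, and a blank hive.site.file.path resolves to null.

import java.util.Properties;

import org.apache.commons.lang.StringUtils;

// Illustrative only: approximates how HadoopConfigHolder resolves the two new
// properties, with the same fallbacks (false / null) when values are absent.
public class LookupPropertyDefaultsSketch {
    static boolean resolveEnableLookup(Properties prop) {
        String raw = prop.getProperty("enable.hive.metastore.lookup", "false");
        if (StringUtils.isEmpty(raw.trim())) {
            return false;
        }
        // Boolean.valueOf() never throws; anything other than "true" becomes false
        return Boolean.valueOf(raw.trim());
    }

    static String resolveHiveSitePath(Properties prop) {
        String raw = prop.getProperty("hive.site.file.path");
        return StringUtils.isEmpty(raw) ? null : raw.trim();
    }

    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.setProperty("enable.hive.metastore.lookup", "true");
        prop.setProperty("hive.site.file.path", "/etc/hive/conf/hive-site.xml"); // example path
        System.out.println(resolveEnableLookup(prop) + " / " + resolveHiveSitePath(prop));
    }
}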
 

http://git-wip-us.apache.org/repos/asf/ranger/blob/6cfb0188/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java b/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
index e3074ce..6cc62a7 100644
--- a/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
+++ b/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
@@ -20,6 +20,8 @@
  package org.apache.ranger.services.hive.client;
 
 import java.io.Closeable;
+import java.io.File;
+import java.net.MalformedURLException;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
 import java.sql.Connection;
@@ -39,17 +41,30 @@ import javax.security.auth.Subject;
 
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.ranger.plugin.client.BaseClient;
 import org.apache.ranger.plugin.client.HadoopException;
+import org.apache.thrift.TException;
 
 public class HiveClient extends BaseClient implements Closeable {
 
 	private static final Log LOG = LogFactory.getLog(HiveClient.class);
 	
-	Connection con = null;
-	boolean isKerberosAuth=false;
+	private static final String ERR_MSG = "You can still save the repository and start creating "
+			+ "policies, but you would not be able to use autocomplete for "
+			+ "resource names. Check ranger_admin.log for more info.";
+
+	private Connection con;
+	private HiveMetaStoreClient hiveClient;
+	private String hiveSiteFilePath;
+	private boolean isKerberosAuth;
+	private boolean enableHiveMetastoreLookup;
 
 	public HiveClient(String serviceName) throws Exception {
 		super(serviceName, null);
@@ -62,6 +77,8 @@ public class HiveClient extends BaseClient implements Closeable {
 	}
 
 	public void initHive() throws Exception {
+		enableHiveMetastoreLookup = getConfigHolder().isEnableHiveMetastoreLookup();
+		hiveSiteFilePath = getConfigHolder().getHiveSiteFilePath();
 		isKerberosAuth = getConfigHolder().isKerberosAuthentication();
 		if (isKerberosAuth) {
 			LOG.info("Secured Mode: JDBC Connection done with preAuthenticated Subject");
@@ -90,8 +107,12 @@ public class HiveClient extends BaseClient implements Closeable {
 			public List<String>  run() {
 				List<String> ret = null;
 				try {
-					ret = getDBList(dbMatching,dbList);
-				} catch ( HadoopException he) {
+					if (enableHiveMetastoreLookup) {
+						ret = getDBListFromHM(dbMatching,dbList);
+					} else {
+						ret = getDBList(dbMatching,dbList);
+					}
+				} catch (HadoopException he) {
 					LOG.error("<== HiveClient getDatabaseList() :Unable to get the Database List", he);
 					throw he;
 				}
@@ -100,21 +121,56 @@ public class HiveClient extends BaseClient implements Closeable {
 		});
 		return dblist;
 	}
+
+	private List<String> getDBListFromHM(String databaseMatching, List<String>dbList) throws  HadoopException {
+		if (LOG.isDebugEnabled()) {
+			LOG.debug("==> HiveClient getDBListFromHM databaseMatching : " + databaseMatching + " ExcludedbList : " + dbList);
+		}
+		List<String> ret = new ArrayList<String>();
+		try {
+			List<String> hiveDBList = null;
+			if (hiveClient != null) {
+				if (databaseMatching.equalsIgnoreCase("*")) {
+					hiveDBList = hiveClient.getAllDatabases();
+				} else {
+					hiveDBList = hiveClient.getDatabases(databaseMatching);
+				}
+			}
+			if (hiveDBList != null) {
+				for (String dbName : hiveDBList) {
+					if (dbList != null && dbList.contains(dbName)) {
+						continue;
+					}
+					ret.add(dbName);
+				}
+			}
+		} catch (MetaException e) {
+			String msgDesc = "Unable to get Database";
+			HadoopException hdpException = new HadoopException(msgDesc, e);
+			hdpException.generateResponseDataMap(false, getMessage(e),
+					msgDesc + ERR_MSG, null, null);
+			if (LOG.isDebugEnabled()) {
+				LOG.debug("<== HiveClient.getDBListFromHM() Error : " , e);
+			}
+			throw hdpException;
+		}
+		if (LOG.isDebugEnabled()) {
+			LOG.debug("<== HiveClient.getDBListFromHM(): " + ret);
+		}
+		return ret;
+	}
 		
 	private List<String> getDBList(String databaseMatching, List<String>dbList) throws  HadoopException {
-		if(LOG.isDebugEnabled()) {
+		if (LOG.isDebugEnabled()) {
 			LOG.debug("==> HiveClient getDBList databaseMatching : " + databaseMatching + " ExcludedbList :" + dbList);
 		}
 
 		List<String> ret = new ArrayList<String>();
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check ranger_admin.log for more info.";
 		if (con != null) {
 			Statement stat =  null;
 			ResultSet rs = null;
 			String sql = "show databases";
-			if (databaseMatching != null && ! databaseMatching.isEmpty()) {
+			if (databaseMatching != null && !databaseMatching.isEmpty()) {
 				sql = sql + " like \"" + databaseMatching  + "\"";
 			}
 			try {
@@ -122,7 +178,7 @@ public class HiveClient extends BaseClient implements Closeable {
 				rs = stat.executeQuery(sql);
 				while (rs.next()) {
 					String dbName = rs.getString(1);
-					if ( dbList != null && dbList.contains(dbName)) {
+					if (dbList != null && dbList.contains(dbName)) {
 						continue;
 					}
 					ret.add(rs.getString(1));
@@ -133,8 +189,8 @@ public class HiveClient extends BaseClient implements Closeable {
 				HadoopException hdpException = new HadoopException(msgDesc,
 						sqlt);
 				hdpException.generateResponseDataMap(false, getMessage(sqlt),
-						msgDesc + errMsg, null, null);
-				if(LOG.isDebugEnabled()) {
+						msgDesc + ERR_MSG, null, null);
+				if (LOG.isDebugEnabled()) {
 					LOG.debug("<== HiveClient.getDBList() Error : ",  sqlt);
 				}
 				throw hdpException;
@@ -143,8 +199,8 @@ public class HiveClient extends BaseClient implements Closeable {
 				HadoopException hdpException = new HadoopException(msgDesc,
 						sqle);
 				hdpException.generateResponseDataMap(false, getMessage(sqle),
-						msgDesc + errMsg, null, null);
-				if(LOG.isDebugEnabled()) {
+						msgDesc + ERR_MSG, null, null);
+				if (LOG.isDebugEnabled()) {
 					LOG.debug("<== HiveClient.getDBList() Error : " , sqle);
 				}
 				throw hdpException;
@@ -155,7 +211,7 @@ public class HiveClient extends BaseClient implements Closeable {
 			
 		}
 
-		if(LOG.isDebugEnabled()) {
+		if (LOG.isDebugEnabled()) {
 			  LOG.debug("<== HiveClient.getDBList(): " + ret);
 		}
 
@@ -169,10 +225,14 @@ public class HiveClient extends BaseClient implements Closeable {
 
 		List<String> tableList = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() {
 			public List<String>  run() {
-				 List<String> ret = null;
+				List<String> ret = null;
 				try {
-					ret = getTblList(tblNameMatching,dbList,tblList);
-				} catch(HadoopException he) {
+					if (enableHiveMetastoreLookup) {
+						ret = getTblListFromHM(tblNameMatching,dbList,tblList);
+					} else {
+						ret = getTblList(tblNameMatching,dbList,tblList);
+					}
+				} catch (HadoopException he) {
 					LOG.error("<== HiveClient getTblList() :Unable to get the Table List", he);
 					throw he;
 				}
@@ -183,15 +243,44 @@ public class HiveClient extends BaseClient implements Closeable {
 		return tableList;
 	}
 
+	private List<String> getTblListFromHM(String tableNameMatching, List<String> dbList, List<String> tblList) throws HadoopException {
+		if (LOG.isDebugEnabled()) {
+			LOG.debug("==> HiveClient getTblListFromHM() tableNameMatching : " + tableNameMatching + " ExcludedbList :" + dbList + "ExcludeTableList :" + tblList);
+		}
+		List<String> ret = new ArrayList<String>();
+		if (hiveClient != null && dbList != null && !dbList.isEmpty()) {
+			for (String dbName : dbList) {
+				try {
+					List<String> hiveTblList = hiveClient.getTables(dbName, tableNameMatching);
+					for (String tblName : hiveTblList) {
+						if (tblList != null && tblList.contains(tblName)) {
+							continue;
+						}
+						ret.add(tblName);
+					}
+				} catch (MetaException e) {
+					String msgDesc = "Unable to get Table.";
+					HadoopException hdpException = new HadoopException(msgDesc,e);
+					hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
+					if (LOG.isDebugEnabled()) {
+						LOG.debug("<== HiveClient.getTblListFromHM() Error : " , e);
+					}
+					throw hdpException;
+				}
+			}
+		}
+		if (LOG.isDebugEnabled()) {
+			LOG.debug("<== HiveClient getTblListFromHM() " +  ret);
+		}
+		return ret;
+	}
+
 	private List<String> getTblList(String tableNameMatching, List<String> dbList, List<String> tblList) throws HadoopException {
-		if(LOG.isDebugEnabled()) {
+		if (LOG.isDebugEnabled()) {
 			LOG.debug("==> HiveClient getTblList() tableNameMatching : " + tableNameMatching + " ExcludedbList :" + dbList + "ExcludeTableList :" + tblList);
 		}
 
 		List<String> ret = new ArrayList<String>();
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check ranger_admin.log for more info.";
 		if (con != null) {
 			Statement stat =  null;
 			ResultSet rs = null;
@@ -200,7 +289,7 @@ public class HiveClient extends BaseClient implements Closeable {
 
 			try {
 				if (dbList != null && !dbList.isEmpty()) {
-					for ( String db: dbList) {
+					for (String db : dbList) {
 						sql = "use " + db;
 						
 						try {
@@ -213,15 +302,15 @@ public class HiveClient extends BaseClient implements Closeable {
 						}
 						
 						sql = "show tables ";
-						if (tableNameMatching != null && ! tableNameMatching.isEmpty()) {
+						if (tableNameMatching != null && !tableNameMatching.isEmpty()) {
 							sql = sql + " like \"" + tableNameMatching  + "\"";
 						}
                         try {
                             stat = con.createStatement();
                             rs = stat.executeQuery(sql);
-                            while (rs.next()) {
+							while (rs.next()) {
                                 String tblName = rs.getString(1);
-                                if (tblList != null && tblList.contains(tblName)) {
+								if (tblList != null	&& tblList.contains(tblName)) {
                                     continue;
                                 }
                                 ret.add(tblName);
@@ -240,8 +329,8 @@ public class HiveClient extends BaseClient implements Closeable {
 				HadoopException hdpException = new HadoopException(msgDesc,
 						sqlt);
 				hdpException.generateResponseDataMap(false, getMessage(sqlt),
-						msgDesc + errMsg, null, null);
-				if(LOG.isDebugEnabled()) {
+						msgDesc + ERR_MSG, null, null);
+				if (LOG.isDebugEnabled()) {
 					LOG.debug("<== HiveClient.getTblList() Error : " , sqlt);
 				}
 				throw hdpException;
@@ -250,8 +339,8 @@ public class HiveClient extends BaseClient implements Closeable {
 				HadoopException hdpException = new HadoopException(msgDesc,
 						sqle);
 				hdpException.generateResponseDataMap(false, getMessage(sqle),
-						msgDesc + errMsg, null, null);
-				if(LOG.isDebugEnabled()) {
+						msgDesc + ERR_MSG, null, null);
+				if (LOG.isDebugEnabled()) {
 					LOG.debug("<== HiveClient.getTblList() Error : " , sqle);
 				}
 				throw hdpException;
@@ -259,7 +348,7 @@ public class HiveClient extends BaseClient implements Closeable {
 			
 		}
 
-		if(LOG.isDebugEnabled()) {
+		if (LOG.isDebugEnabled()) {
 			LOG.debug("<== HiveClient getTblList() " +  ret);
 		}
 
@@ -282,11 +371,15 @@ public class HiveClient extends BaseClient implements Closeable {
 		final List<String> tableList    = tblList;
 		final List<String> clmList 	= colList;
 		List<String> columnList = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() {
-			public List<String>  run() {
+			public List<String> run() {
 				    List<String> ret = null;
 					try {
-						ret = getClmList(clmNameMatching,databaseList,tableList,clmList);
-					} catch ( HadoopException he) {
+						if (enableHiveMetastoreLookup) {
+							ret = getClmListFromHM(clmNameMatching,databaseList,tableList,clmList);
+						} else {
+							ret = getClmList(clmNameMatching,databaseList,tableList,clmList);
+						}
+					} catch (HadoopException he) {
 						LOG.error("<== HiveClient getColumnList() :Unable to get the Column List", he);
 						throw he;
 					}
@@ -295,21 +388,63 @@ public class HiveClient extends BaseClient implements Closeable {
 			});
 		return columnList;
 	}
-	
+
+	private List<String> getClmListFromHM(String columnNameMatching,List<String> dbList, List<String> tblList, List<String> colList) throws HadoopException {
+		if (LOG.isDebugEnabled()) {
+			LOG.debug("==> HiveClient.getClmListFromHM() columnNameMatching: " + columnNameMatching + " dbList :" + dbList +  " tblList: " + tblList + " colList: " + colList);
+		}
+		List<String> ret = new ArrayList<String>();
+		String columnNameMatchingRegEx = null;
+
+		if (columnNameMatching != null && !columnNameMatching.isEmpty()) {
+			columnNameMatchingRegEx = columnNameMatching;
+		}
+		if (hiveClient != null && dbList != null && !dbList.isEmpty() && tblList != null && !tblList.isEmpty()) {
+			for (String db : dbList) {
+				for (String tbl : tblList) {
+					try {
+						List<FieldSchema> hiveSch = hiveClient.getFields(db, tbl);
+						for (FieldSchema sch : hiveSch) {
+							String columnName = sch.getName();
+							if (colList != null && colList.contains(columnName)) {
+								continue;
+							}
+							if (columnNameMatchingRegEx == null) {
+								ret.add(columnName);
+							}
+							else if (FilenameUtils.wildcardMatch(columnName,columnNameMatchingRegEx)) {
+								ret.add(columnName);
+							}
+						}
+					} catch (TException e) {
+						String msgDesc = "Unable to get Columns.";
+						HadoopException hdpException = new HadoopException(msgDesc, e);
+						hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
+						if (LOG.isDebugEnabled()) {
+							LOG.debug("<== HiveClient.getClmListFromHM() Error : " ,e);
+						}
+						throw hdpException;
+					}
+				}
+			}
+		}
+		if (LOG.isDebugEnabled()) {
+			LOG.debug("<== HiveClient.getClmListFromHM() " + ret );
+		}
+		return ret;
+	}
+
 	private List<String> getClmList(String columnNameMatching,List<String> dbList, List<String> tblList, List<String> colList) throws HadoopException {
-		if(LOG.isDebugEnabled()) {
-			LOG.debug("<== HiveClient.getClmList() columnNameMatching: " + columnNameMatching + " dbList :" + dbList +  " tblList: " + tblList + " colList: " + colList);
+		if (LOG.isDebugEnabled()) {
+			LOG.debug("==> HiveClient.getClmList() columnNameMatching: " + columnNameMatching + " dbList :" + dbList +  " tblList: " + tblList + " colList: " + colList);
 		}
 
 		List<String> ret = new ArrayList<String>();
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check ranger_admin.log for more info.";
 		if (con != null) {
 			
 			String columnNameMatchingRegEx = null;
 			
-			if (columnNameMatching != null && ! columnNameMatching.isEmpty()) {
+			if (columnNameMatching != null && !columnNameMatching.isEmpty()) {
 				columnNameMatchingRegEx = columnNameMatching;
 			}
 			
@@ -320,8 +455,8 @@ public class HiveClient extends BaseClient implements Closeable {
 
 			if (dbList != null && !dbList.isEmpty() &&
 				tblList != null && !tblList.isEmpty()) {
-				for (String db: dbList) {
-					for(String tbl:tblList) {
+				for (String db : dbList) {
+					for (String tbl : tblList) {
 						try {
 							sql = "use " + db;
 							
@@ -355,8 +490,8 @@ public class HiveClient extends BaseClient implements Closeable {
 								HadoopException hdpException = new HadoopException(msgDesc,
 										sqlt);
 								hdpException.generateResponseDataMap(false, getMessage(sqlt),
-										msgDesc + errMsg, null, null);
-								if(LOG.isDebugEnabled()) {
+										msgDesc + ERR_MSG, null, null);
+								if (LOG.isDebugEnabled()) {
 									LOG.debug("<== HiveClient.getClmList() Error : " ,sqlt);
 								}
 								throw hdpException;
@@ -365,8 +500,8 @@ public class HiveClient extends BaseClient implements Closeable {
 								HadoopException hdpException = new HadoopException(msgDesc,
 										sqle);
 								hdpException.generateResponseDataMap(false, getMessage(sqle),
-										msgDesc + errMsg, null, null);
-								if(LOG.isDebugEnabled()) {
+										msgDesc + ERR_MSG, null, null);
+								if (LOG.isDebugEnabled()) {
 									LOG.debug("<== HiveClient.getClmList() Error : " ,sqle);
 								}
 								throw hdpException;
@@ -379,7 +514,7 @@ public class HiveClient extends BaseClient implements Closeable {
 			}
 		}
 
-		if(LOG.isDebugEnabled()) {
+		if (LOG.isDebugEnabled()) {
 			LOG.debug("<== HiveClient.getClmList() " + ret );
 		}
 
@@ -437,122 +572,172 @@ public class HiveClient extends BaseClient implements Closeable {
 
 	
 	private void initConnection(String userName, String password) throws HadoopException  {
-	
-		Properties prop = getConfigHolder().getRangerSection();
-		String driverClassName = prop.getProperty("jdbc.driverClassName");
-		String url =  prop.getProperty("jdbc.url");	
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check ranger_admin.log for more info.";
-	
-		if (driverClassName != null) {
+		if (enableHiveMetastoreLookup) {
 			try {
-				Driver driver = (Driver)Class.forName(driverClassName).newInstance();
-				DriverManager.registerDriver(driver);
-			} catch (SQLException e) {
-				String msgDesc = "initConnection: Caught SQLException while registering "
-						+ "Hive driver, so Unable to connect to Hive Thrift Server instance.";
-				HadoopException hdpException = new HadoopException(msgDesc, e);
-				hdpException.generateResponseDataMap(false, getMessage(e),
-						msgDesc + errMsg, null, null);
-				if ( LOG.isDebugEnabled()) {
+				HiveConf conf = new HiveConf();
+				if (!StringUtils.isEmpty(hiveSiteFilePath)) {
+					File f = new File(hiveSiteFilePath);
+					if (f.exists()) {
+						conf.addResource(f.toURI().toURL());
+					} else {
+						if(LOG.isDebugEnabled()) {
+							LOG.debug("Hive site conf file path " + hiveSiteFilePath + " does not exists for Hive Metastore lookup");
+						}
+					}
+				} else {
+					if (LOG.isDebugEnabled()) {
+						LOG.debug("Hive site conf file path property not found for Hive Metastore lookup");
+					}
+				}
+				hiveClient = new HiveMetaStoreClient(conf);
+			} catch (HadoopException he) {
+				String msgDesc = "initConnection: Class or its nullary constructor might not accessible."
+						+ "So unable to initiate connection to hive thrift server instance.";
+				HadoopException hdpException = new HadoopException(msgDesc, he);
+				hdpException.generateResponseDataMap(false, getMessage(he),
+						msgDesc + ERR_MSG, null, null);
+				if (LOG.isDebugEnabled()) {
 					LOG.debug(msgDesc, hdpException);
 				}
 				throw hdpException;
-			} catch (IllegalAccessException ilae) {
-				String msgDesc = "initConnection: Class or its nullary constructor might not accessible."
+			} catch (MalformedURLException e) {
+				String msgDesc = "initConnection: URL might be malformed."
 						+ "So unable to initiate connection to hive thrift server instance.";
-				HadoopException hdpException = new HadoopException(msgDesc, ilae);
-				hdpException.generateResponseDataMap(false, getMessage(ilae),
-						msgDesc + errMsg, null, null);
-				if ( LOG.isDebugEnabled()) {
+				HadoopException hdpException = new HadoopException(msgDesc, e);
+				hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
+				if (LOG.isDebugEnabled()) {
 					LOG.debug(msgDesc, hdpException);
 				}
 				throw hdpException;
-			} catch (InstantiationException ie) {
-				String msgDesc = "initConnection: Class may not have its nullary constructor or "
-						+ "may be the instantiation fails for some other reason."
+			} catch (MetaException e) {
+				String msgDesc = "initConnection: Meta info is not proper."
 						+ "So unable to initiate connection to hive thrift server instance.";
-				HadoopException hdpException = new HadoopException(msgDesc, ie);
-				hdpException.generateResponseDataMap(false, getMessage(ie),
-						msgDesc + errMsg, null, null);
-				if ( LOG.isDebugEnabled()) {
+				HadoopException hdpException = new HadoopException(msgDesc, e);
+				hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
+				if (LOG.isDebugEnabled()) {
 					LOG.debug(msgDesc, hdpException);
 				}
 				throw hdpException;
+			} catch ( Throwable t) {
+				String msgDesc = "Unable to connect to Hive Thrift Server instance";
+				HadoopException hdpException = new HadoopException(msgDesc, t);
+				hdpException.generateResponseDataMap(false, getMessage(t), msgDesc + ERR_MSG, null, null);
+				if (LOG.isDebugEnabled()) {
+					LOG.debug(msgDesc, hdpException);
+				}
+		        throw hdpException;
+			}
+		} else {
+			Properties prop = getConfigHolder().getRangerSection();
+			String driverClassName = prop.getProperty("jdbc.driverClassName");
+			String url =  prop.getProperty("jdbc.url");
+
+			if (driverClassName != null) {
+				try {
+					Driver driver = (Driver)Class.forName(driverClassName).newInstance();
+					DriverManager.registerDriver(driver);
+				} catch (SQLException e) {
+					String msgDesc = "initConnection: Caught SQLException while registering "
+							+ "Hive driver, so Unable to connect to Hive Thrift Server instance.";
+					HadoopException hdpException = new HadoopException(msgDesc, e);
+					hdpException.generateResponseDataMap(false, getMessage(e),
+							msgDesc + ERR_MSG, null, null);
+					if (LOG.isDebugEnabled()) {
+						LOG.debug(msgDesc, hdpException);
+					}
+					throw hdpException;
+				} catch (IllegalAccessException ilae) {
+					String msgDesc = "initConnection: Class or its nullary constructor might not accessible."
+							+ "So unable to initiate connection to hive thrift server instance.";
+					HadoopException hdpException = new HadoopException(msgDesc, ilae);
+					hdpException.generateResponseDataMap(false, getMessage(ilae),
+							msgDesc + ERR_MSG, null, null);
+					if (LOG.isDebugEnabled()) {
+						LOG.debug(msgDesc, hdpException);
+					}
+					throw hdpException;
+				} catch (InstantiationException ie) {
+					String msgDesc = "initConnection: Class may not have its nullary constructor or "
+							+ "may be the instantiation fails for some other reason."
+							+ "So unable to initiate connection to hive thrift server instance.";
+					HadoopException hdpException = new HadoopException(msgDesc, ie);
+					hdpException.generateResponseDataMap(false, getMessage(ie),
+							msgDesc + ERR_MSG, null, null);
+					if (LOG.isDebugEnabled()) {
+						LOG.debug(msgDesc, hdpException);
+					}
+					throw hdpException;
+				} catch (ExceptionInInitializerError eie) {
+					String msgDesc = "initConnection: Got ExceptionInInitializerError, "
+							+ "The initialization provoked by this method fails."
+							+ "So unable to initiate connection to hive thrift server instance.";
+					HadoopException hdpException = new HadoopException(msgDesc, eie);
+					hdpException.generateResponseDataMap(false, getMessage(eie),
+							msgDesc + ERR_MSG, null, null);
+					if (LOG.isDebugEnabled()) {
+						LOG.debug(msgDesc, hdpException);
+					}
+					throw hdpException;
+				} catch (SecurityException se) {
+					String msgDesc = "initConnection: unable to initiate connection to hive thrift server instance,"
+							+ " The caller's class loader is not the same as or an ancestor "
+							+ "of the class loader for the current class and invocation of "
+							+ "s.checkPackageAccess() denies access to the package of this class.";
+					HadoopException hdpException = new HadoopException(msgDesc, se);
+					hdpException.generateResponseDataMap(false, getMessage(se),
+							msgDesc + ERR_MSG, null, null);
+					if (LOG.isDebugEnabled()) {
+						LOG.debug(msgDesc, hdpException);
+					}
+					throw hdpException;
+				} catch (Throwable t) {
+					String msgDesc = "initConnection: Unable to connect to Hive Thrift Server instance, "
+							+ "please provide valid value of field : {jdbc.driverClassName}.";
+					HadoopException hdpException = new HadoopException(msgDesc, t);
+					hdpException.generateResponseDataMap(false, getMessage(t),
+							msgDesc + ERR_MSG, null, "jdbc.driverClassName");
+					if (LOG.isDebugEnabled()) {
+						LOG.debug(msgDesc, hdpException);
+					}
+					throw hdpException;
+				}
+			}
+
+			try {
 				
-			} catch (ExceptionInInitializerError eie) {
-				String msgDesc = "initConnection: Got ExceptionInInitializerError, "
-						+ "The initialization provoked by this method fails."
-						+ "So unable to initiate connection to hive thrift server instance.";
-				HadoopException hdpException = new HadoopException(msgDesc, eie);
-				hdpException.generateResponseDataMap(false, getMessage(eie),
-						msgDesc + errMsg, null, null);
-				if ( LOG.isDebugEnabled()) {
+				if (userName == null && password == null) {
+					con = DriverManager.getConnection(url);
+				} else {
+					con = DriverManager.getConnection(url, userName, password);
+				}
+			} catch (SQLException e) {
+				String msgDesc = "Unable to connect to Hive Thrift Server instance.";
+				HadoopException hdpException = new HadoopException(msgDesc, e);
+				hdpException.generateResponseDataMap(false, getMessage(e), msgDesc
+						+ ERR_MSG, null, null);
+				if (LOG.isDebugEnabled()) {
 					LOG.debug(msgDesc, hdpException);
 				}
 				throw hdpException;
 			} catch (SecurityException se) {
-				String msgDesc = "initConnection: unable to initiate connection to hive thrift server instance,"
-						+ " The caller's class loader is not the same as or an ancestor "
-						+ "of the class loader for the current class and invocation of "
-						+ "s.checkPackageAccess() denies access to the package of this class.";
+				String msgDesc = "Unable to connect to Hive Thrift Server instance.";
 				HadoopException hdpException = new HadoopException(msgDesc, se);
-				hdpException.generateResponseDataMap(false, getMessage(se),
-						msgDesc + errMsg, null, null);
-				if ( LOG.isDebugEnabled()) {
+				hdpException.generateResponseDataMap(false, getMessage(se), msgDesc
+						+ ERR_MSG, null, null);
+				if (LOG.isDebugEnabled()) {
 					LOG.debug(msgDesc, hdpException);
 				}
 				throw hdpException;
-			} catch (Throwable t) {
-				String msgDesc = "initConnection: Unable to connect to Hive Thrift Server instance, "
-						+ "please provide valid value of field : {jdbc.driverClassName}.";
+			} catch ( Throwable t) {
+				String msgDesc = "Unable to connect to Hive Thrift Server instance";
 				HadoopException hdpException = new HadoopException(msgDesc, t);
 				hdpException.generateResponseDataMap(false, getMessage(t),
-						msgDesc + errMsg, null, "jdbc.driverClassName");
-				if ( LOG.isDebugEnabled()) {
+						msgDesc + ERR_MSG, null, url);
+				if (LOG.isDebugEnabled()) {
 					LOG.debug(msgDesc, hdpException);
 				}
-				throw hdpException;
-			}
-		}
-		
-		try {
-			
-			if (userName == null && password == null) {
-				con = DriverManager.getConnection(url);
-			}
-			else {			
-				con = DriverManager.getConnection(url, userName, password);
-			}
-		
-		} catch (SQLException e) {
-			String msgDesc = "Unable to connect to Hive Thrift Server instance.";
-			HadoopException hdpException = new HadoopException(msgDesc, e);
-			hdpException.generateResponseDataMap(false, getMessage(e), msgDesc
-					+ errMsg, null, null);
-			if ( LOG.isDebugEnabled()) {
-				LOG.debug(msgDesc, hdpException);
-			}
-			throw hdpException;
-		} catch (SecurityException se) {
-			String msgDesc = "Unable to connect to Hive Thrift Server instance.";
-			HadoopException hdpException = new HadoopException(msgDesc, se);
-			hdpException.generateResponseDataMap(false, getMessage(se), msgDesc
-					+ errMsg, null, null);
-			if ( LOG.isDebugEnabled()) {
-				LOG.debug(msgDesc, hdpException);
-			}
-			throw hdpException;
-		} catch ( Throwable t) {
-			String msgDesc = "Unable to connect to Hive Thrift Server instance";
-			HadoopException hdpException = new HadoopException(msgDesc, t);
-			hdpException.generateResponseDataMap(false, getMessage(t),
-					msgDesc + errMsg, null, url);
-			if ( LOG.isDebugEnabled()) {
-				LOG.debug(msgDesc, hdpException);
+		        throw hdpException;
 			}
-	        throw hdpException;
 		}
 	}
 
@@ -581,7 +766,7 @@ public class HiveClient extends BaseClient implements Closeable {
 				}
 				else {
 					if (CollectionUtils.isNotEmpty(dbList)) {
-						for (String str : dbList ) {
+						for (String str : dbList) {
 							System.out.println("database: " + str );
 						}
 					}
@@ -591,9 +776,8 @@ public class HiveClient extends BaseClient implements Closeable {
 				List<String> tableList = hc.getTableList(args[2],null,null);
 				if (tableList.size() == 0) {
 					System.out.println("No tables found under database[" + args[1] + "] with table filter [" + args[2] + "]");
-				}
-				else {
-					for(String str : tableList) {
+				} else {
+					for (String str : tableList) {
 						System.out.println("Table: " + str);
 					}
 				}
@@ -602,9 +786,8 @@ public class HiveClient extends BaseClient implements Closeable {
 				List<String> columnList = hc.getColumnList(args[3],null,null,null);
 				if (columnList.size() == 0) {
 					System.out.println("No columns found for db:" + args[1] + ", table: [" + args[2] + "], with column filter [" + args[3] + "]");
-				}
-				else {
-					for (String str : columnList ) {
+				} else {
+					for (String str : columnList) {
 						System.out.println("Column: " + str);
 					}
 				}
@@ -625,9 +808,6 @@ public class HiveClient extends BaseClient implements Closeable {
 		HiveClient connectionObj = null;
 		Map<String, Object> responseData = new HashMap<String, Object>();
 		boolean connectivityStatus = false;
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check ranger_admin.log for more info.";
 		List<String> testResult = null;
 		try {
 			connectionObj = new HiveClient(serviceName,	connectionProperties);
@@ -642,14 +822,14 @@ public class HiveClient extends BaseClient implements Closeable {
 						null, null, responseData);
 				} else {
 					String failureMsg = "Unable to retrieve any databases using given parameters.";
-					generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + errMsg,
+					generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + ERR_MSG,
 						null, null, responseData);
 				}
 			}
-		} catch ( Exception e) {
+		} catch (Exception e) {
 			throw e;
-		} finally  {
-			if ( connectionObj != null) {
+		} finally {
+			if (connectionObj != null) {
 				connectionObj.close();
 			}
 		}
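For reference, the metastore-backed lookup that HiveClient switches to when enable.hive.metastore.lookup is set boils down to the HiveMetaStoreClient calls used above (getAllDatabases/getDatabases, getTables, getFields). A minimal self-contained sketch, assuming a reachable metastore and an example hive-site.xml location (in the patch the path comes from hive.site.file.path):

import java.io.File;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;

// Sketch only: walks databases, tables and columns through the metastore API,
// the same calls the patched HiveClient uses for resource lookup.
public class MetastoreLookupSketch {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        File hiveSite = new File("/etc/hive/conf/hive-site.xml");   // example path
        if (hiveSite.exists()) {
            conf.addResource(hiveSite.toURI().toURL());
        }

        HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
        try {
            for (String db : client.getAllDatabases()) {
                for (String tbl : client.getTables(db, "*")) {       // "*" = no table filter
                    List<FieldSchema> cols = client.getFields(db, tbl);
                    System.out.println(db + "." + tbl + " : " + cols.size() + " columns");
                }
            }
        } finally {
            client.close();
        }
    }
}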

http://git-wip-us.apache.org/repos/asf/ranger/blob/6cfb0188/security-admin/pom.xml
----------------------------------------------------------------------
diff --git a/security-admin/pom.xml b/security-admin/pom.xml
index d2a9e18..caf3576 100644
--- a/security-admin/pom.xml
+++ b/security-admin/pom.xml
@@ -267,6 +267,11 @@
                 </exclusion>
             </exclusions>
         </dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-mapreduce-client-core</artifactId>
+			<version>${hadoop.version}</version>
+		</dependency>
         <dependency>
             <groupId>com.sun.jersey.contribs</groupId>
             <artifactId>jersey-multipart</artifactId>

http://git-wip-us.apache.org/repos/asf/ranger/blob/6cfb0188/security-admin/src/main/webapp/scripts/views/service/ConfigurationList.js
----------------------------------------------------------------------
diff --git a/security-admin/src/main/webapp/scripts/views/service/ConfigurationList.js b/security-admin/src/main/webapp/scripts/views/service/ConfigurationList.js
index dcc85ab..961d5d8 100644
--- a/security-admin/src/main/webapp/scripts/views/service/ConfigurationList.js
+++ b/security-admin/src/main/webapp/scripts/views/service/ConfigurationList.js
@@ -57,10 +57,12 @@ define(function(require) {
 		onRender : function() {
 		},
 		onInputNameChange : function(e) {
-			this.model.set('name', $(e.currentTarget).val());
+			this.model.set('name', $(e.currentTarget).val().trim());
+			this.ui.name.val($(e.currentTarget).val().trim());
 		},
 		onInputValueChange : function(e) {
-			this.model.set('value', $(e.currentTarget).val());
+			this.model.set('value', $(e.currentTarget).val().trim());
+			this.ui.value.val($(e.currentTarget).val().trim());
 		},
 		evDelete : function(){
 			var that = this;

