accumulo-commits mailing list archives

From vi...@apache.org
Subject svn commit: r1438354 [4/12] - in /accumulo/trunk: ./ assemble/ bin/ conf/examples/1GB/native-standalone/ conf/examples/1GB/standalone/ conf/examples/2GB/native-standalone/ conf/examples/2GB/standalone/ conf/examples/3GB/native-standalone/ conf/examples...
Date Fri, 25 Jan 2013 07:04:29 GMT
Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java?rev=1438354&r1=1438353&r2=1438354&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapred/AccumuloOutputFormat.java Fri Jan 25 07:04:25 2013
@@ -40,6 +40,7 @@ import org.apache.accumulo.core.data.Key
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.security.ColumnVisibility;
 import org.apache.accumulo.core.security.thrift.SecurityErrorCode;
+import org.apache.accumulo.core.security.tokens.AccumuloToken;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
@@ -57,7 +58,7 @@ import org.apache.log4j.Logger;
  * The user must specify the following via static configurator methods:
  * 
  * <ul>
- * <li>{@link AccumuloOutputFormat#setConnectorInfo(JobConf, String, byte[])}
+ * <li>{@link AccumuloOutputFormat#setConnectorInfo(JobConf, AccumuloToken)}
  * <li>{@link AccumuloOutputFormat#setZooKeeperInstance(JobConf, String, String)} OR {@link AccumuloOutputFormat#setMockInstance(JobConf, String)}
  * </ul>
  * 
@@ -73,14 +74,12 @@ public class AccumuloOutputFormat implem
    * 
    * @param job
    *          the Hadoop job instance to be configured
-   * @param user
-   *          a valid Accumulo user name (user must have Table.CREATE permission if {@link #setCreateTables(JobConf, boolean)} is set to true)
-   * @param passwd
-   *          the user's password
+   * @param token
+   *          a valid AccumuloToken (user must have Table.CREATE permission if {@link #setCreateTables(JobConf, boolean)} is set to true)
    * @since 1.5.0
    */
-  public static void setConnectorInfo(JobConf job, String user, byte[] passwd) {
-    OutputConfigurator.setConnectorInfo(CLASS, job, user, passwd);
+  public static void setConnectorInfo(JobConf job, AccumuloToken<?,?> token) {
+    OutputConfigurator.setConnectorInfo(CLASS, job, token);
   }
   
   /**
@@ -90,37 +89,24 @@ public class AccumuloOutputFormat implem
    *          the Hadoop context for the configured job
    * @return true if the connector has been configured, false otherwise
    * @since 1.5.0
-   * @see #setConnectorInfo(JobConf, String, byte[])
+   * @see #setConnectorInfo(JobConf, AccumuloToken)
    */
   protected static Boolean isConnectorInfoSet(JobConf job) {
     return OutputConfigurator.isConnectorInfoSet(CLASS, job);
   }
   
   /**
-   * Gets the user name from the configuration.
+   * Gets the AccumuloToken from the configuration. WARNING: The serialized token is stored in the Configuration and shared with all MapReduce tasks; It is
+   * BASE64 encoded to provide a charset safe conversion to a string, and is not intended to be secure.
    * 
    * @param job
    *          the Hadoop context for the configured job
-   * @return the user name
+   * @return the decoded user token
    * @since 1.5.0
-   * @see #setConnectorInfo(JobConf, String, byte[])
+   * @see #setConnectorInfo(JobConf, AccumuloToken)
    */
-  protected static String getUsername(JobConf job) {
-    return OutputConfigurator.getUsername(CLASS, job);
-  }
-  
-  /**
-   * Gets the password from the configuration. WARNING: The password is stored in the Configuration and shared with all MapReduce tasks; It is BASE64 encoded to
-   * provide a charset safe conversion to a string, and is not intended to be secure.
-   * 
-   * @param job
-   *          the Hadoop context for the configured job
-   * @return the decoded user password
-   * @since 1.5.0
-   * @see #setConnectorInfo(JobConf, String, byte[])
-   */
-  protected static byte[] getPassword(JobConf job) {
-    return OutputConfigurator.getPassword(CLASS, job);
+  protected static AccumuloToken<?,?> getToken(JobConf job) {
+    return OutputConfigurator.getToken(CLASS, job);
   }
   
   /**
@@ -335,7 +321,7 @@ public class AccumuloOutputFormat implem
       this.defaultTableName = (tname == null) ? null : new Text(tname);
       
       if (!simulate) {
-        this.conn = getInstance(job).getConnector(getUsername(job), getPassword(job));
+        this.conn = getInstance(job).getConnector(getToken(job));
         mtbw = conn.createMultiTableBatchWriter(getBatchWriterOptions(job));
       }
     }
@@ -469,8 +455,8 @@ public class AccumuloOutputFormat implem
       throw new IOException("Connector info has not been set.");
     try {
       // if the instance isn't configured, it will complain here
-      Connector c = getInstance(job).getConnector(getUsername(job), getPassword(job));
-      if (!c.securityOperations().authenticateUser(getUsername(job), getPassword(job)))
+      Connector c = getInstance(job).getConnector(getToken(job));
+      if (!c.securityOperations().authenticateUser(getToken(job)))
         throw new IOException("Unable to authenticate user");
     } catch (AccumuloException e) {
       throw new IOException(e);
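
For context, a minimal sketch of how a mapred job would use the token-based configurator methods introduced above. setConnectorInfo, setZooKeeperInstance, and setCreateTables follow the signatures shown in this diff; the UserPassToken(String, byte[]) constructor appears later in this commit; the instance name, zookeeper host, credentials, and key/value class choices are illustrative placeholders, not part of the commit.

import org.apache.accumulo.core.client.mapred.AccumuloOutputFormat;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.security.tokens.UserPassToken;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;

public class TokenOutputSetup {
  public static void configure(JobConf job) {
    // One token argument replaces the old (user, byte[] passwd) pair.
    AccumuloOutputFormat.setConnectorInfo(job, new UserPassToken("writer", "secret".getBytes()));
    AccumuloOutputFormat.setZooKeeperInstance(job, "myInstance", "zkhost:2181");
    // Per the javadoc above, the token's principal needs Table.CREATE when this is true.
    AccumuloOutputFormat.setCreateTables(job, true);
    job.setOutputFormat(AccumuloOutputFormat.class);
    job.setOutputKeyClass(Text.class);       // key: destination table name
    job.setOutputValueClass(Mutation.class); // value: the mutation to write
  }
}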

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java?rev=1438354&r1=1438353&r2=1438354&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java Fri Jan 25 07:04:25 2013
@@ -18,7 +18,6 @@ package org.apache.accumulo.core.client.
 
 import java.io.IOException;
 import java.net.InetAddress;
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -54,7 +53,7 @@ import org.apache.accumulo.core.data.Ran
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.master.state.tables.TableState;
 import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.core.security.thrift.AuthInfo;
+import org.apache.accumulo.core.security.tokens.AccumuloToken;
 import org.apache.accumulo.core.util.Pair;
 import org.apache.accumulo.core.util.UtilWaitThread;
 import org.apache.hadoop.io.Text;
@@ -86,14 +85,12 @@ public abstract class InputFormatBase<K,
    * 
    * @param job
    *          the Hadoop job instance to be configured
-   * @param user
-   *          a valid Accumulo user name (user must have Table.CREATE permission)
-   * @param passwd
-   *          the user's password
+   * @param token
+   *          a valid AccumuloToken (user must have Table.CREATE permission)
    * @since 1.5.0
    */
-  public static void setConnectorInfo(JobConf job, String user, byte[] passwd) {
-    InputConfigurator.setConnectorInfo(CLASS, job, user, passwd);
+  public static void setConnectorInfo(JobConf job, AccumuloToken<?,?> token) {
+    InputConfigurator.setConnectorInfo(CLASS, job, token);
   }
   
   /**
@@ -103,37 +100,24 @@ public abstract class InputFormatBase<K,
    *          the Hadoop context for the configured job
    * @return true if the connector has been configured, false otherwise
    * @since 1.5.0
-   * @see #setConnectorInfo(JobConf, String, byte[])
+   * @see #setConnectorInfo(JobConf, AccumuloToken)
    */
   protected static Boolean isConnectorInfoSet(JobConf job) {
     return InputConfigurator.isConnectorInfoSet(CLASS, job);
   }
   
   /**
-   * Gets the user name from the configuration.
+   * Gets the AccumuloToken from the configuration. WARNING: The serialized token is stored in the Configuration and shared with all MapReduce tasks; It is
+   * BASE64 encoded to provide a charset safe conversion to a string, and is not intended to be secure.
    * 
    * @param job
    *          the Hadoop context for the configured job
-   * @return the user name
+   * @return the decoded user token
    * @since 1.5.0
-   * @see #setConnectorInfo(JobConf, String, byte[])
+   * @see #setConnectorInfo(JobConf, AccumuloToken)
    */
-  protected static String getUsername(JobConf job) {
-    return InputConfigurator.getUsername(CLASS, job);
-  }
-  
-  /**
-   * Gets the password from the configuration. WARNING: The password is stored in the Configuration and shared with all MapReduce tasks; It is BASE64 encoded to
-   * provide a charset safe conversion to a string, and is not intended to be secure.
-   * 
-   * @param job
-   *          the Hadoop context for the configured job
-   * @return the decoded user password
-   * @since 1.5.0
-   * @see #setConnectorInfo(JobConf, String, byte[])
-   */
-  protected static byte[] getPassword(JobConf job) {
-    return InputConfigurator.getPassword(CLASS, job);
+  protected static AccumuloToken<?,?> getToken(JobConf job) {
+    return InputConfigurator.getToken(CLASS, job);
   }
   
   /**
@@ -541,18 +525,16 @@ public abstract class InputFormatBase<K,
       split = (RangeInputSplit) inSplit;
       log.debug("Initializing input split: " + split.getRange());
       Instance instance = getInstance(job);
-      String user = getUsername(job);
-      byte[] password = getPassword(job);
+      AccumuloToken<?,?> token = getToken(job);
       Authorizations authorizations = getScanAuthorizations(job);
       
       try {
-        log.debug("Creating connector with user: " + user);
-        Connector conn = instance.getConnector(user, password);
+        log.debug("Creating connector with user: " + token.getPrincipal());
+        Connector conn = instance.getConnector(token);
         log.debug("Creating scanner for table: " + getInputTableName(job));
         log.debug("Authorizations are: " + authorizations);
         if (isOfflineScan(job)) {
-          scanner = new OfflineScanner(instance, new AuthInfo(user, ByteBuffer.wrap(password), instance.getInstanceID()), Tables.getTableId(instance,
-              getInputTableName(job)), authorizations);
+          scanner = new OfflineScanner(instance, token, Tables.getTableId(instance, getInputTableName(job)), authorizations);
         } else {
           scanner = conn.createScanner(getInputTableName(job), authorizations);
         }
@@ -613,7 +595,7 @@ public abstract class InputFormatBase<K,
     Map<String,Map<KeyExtent,List<Range>>> binnedRanges = new HashMap<String,Map<KeyExtent,List<Range>>>();
     
     Instance instance = getInstance(job);
-    Connector conn = instance.getConnector(getUsername(job), getPassword(job));
+    Connector conn = instance.getConnector(getToken(job));
     String tableId = Tables.getTableId(instance, tableName);
     
     if (Tables.getTableState(instance, tableId) != TableState.OFFLINE) {
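
The input side mirrors this. A sketch under the same assumptions, additionally assuming AccumuloInputFormat as the concrete subclass of InputFormatBase and the setInputTableName/setScanAuthorizations configurators implied by the getters used in this diff; table name and authorizations are placeholders.

import org.apache.accumulo.core.client.mapred.AccumuloInputFormat;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.tokens.UserPassToken;
import org.apache.hadoop.mapred.JobConf;

public class TokenInputSetup {
  public static void configure(JobConf job) {
    AccumuloInputFormat.setConnectorInfo(job, new UserPassToken("reader", "secret".getBytes()));
    AccumuloInputFormat.setZooKeeperInstance(job, "myInstance", "zkhost:2181");
    AccumuloInputFormat.setInputTableName(job, "input_table");
    // The record reader above logs the token's principal and opens either a
    // live Scanner or, when isOfflineScan(job) is set, an OfflineScanner
    // constructed directly with the token.
    AccumuloInputFormat.setScanAuthorizations(job, new Authorizations("public"));
    job.setInputFormat(AccumuloInputFormat.class);
  }
}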

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java?rev=1438354&r1=1438353&r2=1438354&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java Fri Jan 25 07:04:25 2013
@@ -41,6 +41,8 @@ import org.apache.accumulo.core.data.Key
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.security.ColumnVisibility;
 import org.apache.accumulo.core.security.thrift.SecurityErrorCode;
+import org.apache.accumulo.core.security.tokens.AccumuloToken;
+import org.apache.accumulo.core.security.tokens.UserPassToken;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Job;
@@ -76,14 +78,12 @@ public class AccumuloOutputFormat extend
    * 
    * @param job
    *          the Hadoop job instance to be configured
-   * @param user
-   *          a valid Accumulo user name (user must have Table.CREATE permission if {@link #setCreateTables(Job, boolean)} is set to true)
-   * @param passwd
-   *          the user's password
+   * @param token
+   *          a valid AccumuloToken (principal must have Table.CREATE permission if {@link #setCreateTables(Job, boolean)} is set to true)
    * @since 1.5.0
    */
-  public static void setConnectorInfo(Job job, String user, byte[] passwd) {
-    OutputConfigurator.setConnectorInfo(CLASS, job.getConfiguration(), user, passwd);
+  public static void setConnectorInfo(Job job, AccumuloToken<?,?> token) {
+    OutputConfigurator.setConnectorInfo(CLASS, job.getConfiguration(), token);
   }
   
   /**
@@ -104,28 +104,14 @@ public class AccumuloOutputFormat extend
    * 
    * @param context
    *          the Hadoop context for the configured job
-   * @return the user name
+   * @return the AccumuloToken
    * @since 1.5.0
-   * @see #setConnectorInfo(Job, String, byte[])
+   * @see #setConnectorInfo(Job, AccumuloToken)
    */
-  protected static String getUsername(JobContext context) {
-    return OutputConfigurator.getUsername(CLASS, context.getConfiguration());
+  protected static AccumuloToken<?,?> getToken(JobContext context) {
+    return OutputConfigurator.getToken(CLASS, context.getConfiguration());
   }
-  
-  /**
-   * Gets the password from the configuration. WARNING: The password is stored in the Configuration and shared with all MapReduce tasks; It is BASE64 encoded to
-   * provide a charset safe conversion to a string, and is not intended to be secure.
-   * 
-   * @param context
-   *          the Hadoop context for the configured job
-   * @return the decoded user password
-   * @since 1.5.0
-   * @see #setConnectorInfo(Job, String, byte[])
-   */
-  protected static byte[] getPassword(JobContext context) {
-    return OutputConfigurator.getPassword(CLASS, context.getConfiguration());
-  }
-  
+
   /**
    * Configures a {@link ZooKeeperInstance} for this job.
    * 
@@ -338,7 +324,7 @@ public class AccumuloOutputFormat extend
       this.defaultTableName = (tname == null) ? null : new Text(tname);
       
       if (!simulate) {
-        this.conn = getInstance(context).getConnector(getUsername(context), getPassword(context));
+        this.conn = getInstance(context).getConnector(getToken(context));
         mtbw = conn.createMultiTableBatchWriter(getBatchWriterOptions(context));
       }
     }
@@ -472,8 +458,8 @@ public class AccumuloOutputFormat extend
       throw new IOException("Connector info has not been set.");
     try {
       // if the instance isn't configured, it will complain here
-      Connector c = getInstance(job).getConnector(getUsername(job), getPassword(job));
-      if (!c.securityOperations().authenticateUser(getUsername(job), getPassword(job)))
+      Connector c = getInstance(job).getConnector(getToken(job));
+      if (!c.securityOperations().authenticateUser(getToken(job)))
         throw new IOException("Unable to authenticate user");
     } catch (AccumuloException e) {
       throw new IOException(e);
@@ -506,7 +492,7 @@ public class AccumuloOutputFormat extend
    */
   @Deprecated
   public static void setOutputInfo(Configuration conf, String user, byte[] passwd, boolean createTables, String defaultTable) {
-    OutputConfigurator.setConnectorInfo(CLASS, conf, user, passwd);
+    OutputConfigurator.setConnectorInfo(CLASS, conf, new UserPassToken(user, passwd));
     OutputConfigurator.setCreateTables(CLASS, conf, createTables);
     OutputConfigurator.setDefaultTableName(CLASS, conf, defaultTable);
   }
@@ -578,7 +564,7 @@ public class AccumuloOutputFormat extend
    */
   @Deprecated
   protected static String getUsername(Configuration conf) {
-    return OutputConfigurator.getUsername(CLASS, conf);
+    return OutputConfigurator.getToken(CLASS, conf).getPrincipal();
   }
   
   /**
@@ -586,7 +572,12 @@ public class AccumuloOutputFormat extend
    */
   @Deprecated
   protected static byte[] getPassword(Configuration conf) {
-    return OutputConfigurator.getPassword(CLASS, conf);
+    AccumuloToken<?,?> token = OutputConfigurator.getToken(CLASS, conf);
+    if (token instanceof UserPassToken) {
+      UserPassToken upt = (UserPassToken) token;
+      return upt.getPassword();
+    }
+    throw new RuntimeException("Not applicable for non-UserPassTokens");
   }
   
   /**
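
The same change in the new-API (org.apache.hadoop.mapreduce) flavor, as a sketch. setConnectorInfo and setCreateTables follow the diff; the Job-flavored setZooKeeperInstance is assumed to parallel the mapred one; names are placeholders.

import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
import org.apache.accumulo.core.security.tokens.UserPassToken;
import org.apache.hadoop.mapreduce.Job;

public class NewApiOutputSetup {
  public static void configure(Job job) {
    AccumuloOutputFormat.setConnectorInfo(job, new UserPassToken("writer", "secret".getBytes()));
    AccumuloOutputFormat.setZooKeeperInstance(job, "myInstance", "zkhost:2181");
    AccumuloOutputFormat.setCreateTables(job, true);
    job.setOutputFormatClass(AccumuloOutputFormat.class);
    // After this commit the deprecated getPassword(Configuration) only works
    // when the stored token is a UserPassToken; any other token type makes it
    // throw RuntimeException("Not applicable for non-UserPassTokens").
  }
}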

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java?rev=1438354&r1=1438353&r2=1438354&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java Fri Jan 25 07:04:25 2013
@@ -24,7 +24,6 @@ import java.math.BigInteger;
 import java.net.InetAddress;
 import java.net.URLDecoder;
 import java.net.URLEncoder;
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -63,7 +62,8 @@ import org.apache.accumulo.core.data.Val
 import org.apache.accumulo.core.iterators.user.VersioningIterator;
 import org.apache.accumulo.core.master.state.tables.TableState;
 import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.core.security.thrift.AuthInfo;
+import org.apache.accumulo.core.security.tokens.AccumuloToken;
+import org.apache.accumulo.core.security.tokens.UserPassToken;
 import org.apache.accumulo.core.util.Pair;
 import org.apache.accumulo.core.util.UtilWaitThread;
 import org.apache.hadoop.conf.Configuration;
@@ -98,14 +98,12 @@ public abstract class InputFormatBase<K,
    * 
    * @param job
    *          the Hadoop job instance to be configured
-   * @param user
-   *          a valid Accumulo user name (user must have Table.CREATE permission)
-   * @param passwd
-   *          the user's password
+   * @param token
+   *          a valid AccumuloToken (principal must have Table.CREATE permission)
    * @since 1.5.0
    */
-  public static void setConnectorInfo(Job job, String user, byte[] passwd) {
-    InputConfigurator.setConnectorInfo(CLASS, job.getConfiguration(), user, passwd);
+  public static void setConnectorInfo(Job job, AccumuloToken<?,?> token) {
+    InputConfigurator.setConnectorInfo(CLASS, job.getConfiguration(), token);
   }
   
   /**
@@ -115,7 +113,7 @@ public abstract class InputFormatBase<K,
    *          the Hadoop context for the configured job
    * @return true if the connector has been configured, false otherwise
    * @since 1.5.0
-   * @see #setConnectorInfo(Job, String, byte[])
+   * @see #setConnectorInfo(Job, AccumuloToken)
    */
   protected static Boolean isConnectorInfoSet(JobContext context) {
     return InputConfigurator.isConnectorInfoSet(CLASS, context.getConfiguration());
@@ -128,26 +126,12 @@ public abstract class InputFormatBase<K,
    *          the Hadoop context for the configured job
    * @return the user name
    * @since 1.5.0
-   * @see #setConnectorInfo(Job, String, byte[])
+   * @see #setConnectorInfo(Job, AccumuloToken)
    */
-  protected static String getUsername(JobContext context) {
-    return InputConfigurator.getUsername(CLASS, context.getConfiguration());
+  protected static AccumuloToken<?,?> getToken(JobContext context) {
+    return InputConfigurator.getToken(CLASS, context.getConfiguration());
   }
-  
-  /**
-   * Gets the password from the configuration. WARNING: The password is stored in the Configuration and shared with all MapReduce tasks; It is BASE64 encoded to
-   * provide a charset safe conversion to a string, and is not intended to be secure.
-   * 
-   * @param context
-   *          the Hadoop context for the configured job
-   * @return the decoded user password
-   * @since 1.5.0
-   * @see #setConnectorInfo(Job, String, byte[])
-   */
-  protected static byte[] getPassword(JobContext context) {
-    return InputConfigurator.getPassword(CLASS, context.getConfiguration());
-  }
-  
+
   /**
    * Configures a {@link ZooKeeperInstance} for this job.
    * 
@@ -556,18 +540,17 @@ public abstract class InputFormatBase<K,
       split = (RangeInputSplit) inSplit;
       log.debug("Initializing input split: " + split.range);
       Instance instance = getInstance(attempt);
-      String user = getUsername(attempt);
-      byte[] password = getPassword(attempt);
+      AccumuloToken<?,?> token = getToken(attempt);
       Authorizations authorizations = getScanAuthorizations(attempt);
       
       try {
-        log.debug("Creating connector with user: " + user);
-        Connector conn = instance.getConnector(user, password);
+        log.debug("Creating connector with user: " + token.getPrincipal());
+        Connector conn = instance.getConnector(token);
         log.debug("Creating scanner for table: " + getInputTableName(attempt));
         log.debug("Authorizations are: " + authorizations);
         if (isOfflineScan(attempt)) {
-          scanner = new OfflineScanner(instance, new AuthInfo(user, ByteBuffer.wrap(password), instance.getInstanceID()), Tables.getTableId(instance,
-              getInputTableName(attempt)), authorizations);
+          scanner = new OfflineScanner(instance, token, Tables.getTableId(instance, getInputTableName(attempt)),
+              authorizations);
         } else {
           scanner = conn.createScanner(getInputTableName(attempt), authorizations);
         }
@@ -635,7 +618,7 @@ public abstract class InputFormatBase<K,
     Map<String,Map<KeyExtent,List<Range>>> binnedRanges = new HashMap<String,Map<KeyExtent,List<Range>>>();
     
     Instance instance = getInstance(context);
-    Connector conn = instance.getConnector(getUsername(context), getPassword(context));
+    Connector conn = instance.getConnector(getToken(context));
     String tableId = Tables.getTableId(instance, tableName);
     
     if (Tables.getTableState(instance, tableId) != TableState.OFFLINE) {
@@ -963,12 +946,12 @@ public abstract class InputFormatBase<K,
   }
   
   /**
-   * @deprecated since 1.5.0; Use {@link #setConnectorInfo(Job, String, byte[])}, {@link #setInputTableName(Job, String)}, and
+   * @deprecated since 1.5.0; Use {@link #setConnectorInfo(Job, AccumuloToken)}, {@link #setInputTableName(Job, String)}, and
    *             {@link #setScanAuthorizations(Job, Authorizations)} instead.
    */
   @Deprecated
   public static void setInputInfo(Configuration conf, String user, byte[] passwd, String table, Authorizations auths) {
-    InputConfigurator.setConnectorInfo(CLASS, conf, user, passwd);
+    InputConfigurator.setConnectorInfo(CLASS, conf, new UserPassToken(user, passwd));
     InputConfigurator.setInputTableName(CLASS, conf, table);
     InputConfigurator.setScanAuthorizations(CLASS, conf, auths);
   }
@@ -1068,7 +1051,7 @@ public abstract class InputFormatBase<K,
    */
   @Deprecated
   protected static String getUsername(Configuration conf) {
-    return InputConfigurator.getUsername(CLASS, conf);
+    return InputConfigurator.getToken(CLASS, conf).getPrincipal();
   }
   
   /**
@@ -1076,7 +1059,12 @@ public abstract class InputFormatBase<K,
    */
   @Deprecated
   protected static byte[] getPassword(Configuration conf) {
-    return InputConfigurator.getPassword(CLASS, conf);
+    AccumuloToken<?,?> token = InputConfigurator.getToken(CLASS, conf);
+    if (token instanceof UserPassToken) {
+      UserPassToken upt = (UserPassToken) token;
+      return upt.getPassword();
+    }
+    throw new RuntimeException("Not applicable for non-UserPassTokens");
   }
   
   /**
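
Per the @deprecated notes above, the old single-call setInputInfo(Configuration, ...) decomposes into three calls on the Job, with the user/password pair wrapped in a UserPassToken. A sketch of that migration, again assuming the AccumuloInputFormat subclass; the table and credential values are placeholders.

import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.tokens.UserPassToken;
import org.apache.hadoop.mapreduce.Job;

public class InputMigration {
  public static void configure(Job job, String user, byte[] passwd) {
    // Pre-1.5: setInputInfo(conf, user, passwd, "table", auths) did all of this.
    AccumuloInputFormat.setConnectorInfo(job, new UserPassToken(user, passwd));
    AccumuloInputFormat.setInputTableName(job, "table");
    AccumuloInputFormat.setScanAuthorizations(job, new Authorizations());
  }
}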

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/ConfiguratorBase.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/ConfiguratorBase.java?rev=1438354&r1=1438353&r2=1438354&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/ConfiguratorBase.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/ConfiguratorBase.java Fri Jan 25 07:04:25 2013
@@ -16,13 +16,12 @@
  */
 package org.apache.accumulo.core.client.mapreduce.lib.util;
 
-import java.nio.charset.Charset;
-
 import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.client.mock.MockInstance;
+import org.apache.accumulo.core.security.tokens.AccumuloToken;
+import org.apache.accumulo.core.security.tokens.TokenHelper;
 import org.apache.accumulo.core.util.ArgumentChecker;
-import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.log4j.Level;
@@ -39,7 +38,7 @@ public class ConfiguratorBase {
    * @since 1.5.0
    */
   public static enum ConnectorInfo {
-    IS_CONFIGURED, USER_NAME, PASSWORD
+    IS_CONFIGURED, TOKEN
   }
   
   /**
@@ -81,20 +80,17 @@ public class ConfiguratorBase {
    *          the class whose name will be used as a prefix for the property configuration key
    * @param conf
    *          the Hadoop configuration object to configure
-   * @param user
-   *          a valid Accumulo user name
-   * @param passwd
-   *          the user's password
+   * @param token
+   *          a valid AccumuloToken
    * @since 1.5.0
    */
-  public static void setConnectorInfo(Class<?> implementingClass, Configuration conf, String user, byte[] passwd) {
+  public static void setConnectorInfo(Class<?> implementingClass, Configuration conf, AccumuloToken<?,?> token) {
     if (isConnectorInfoSet(implementingClass, conf))
       throw new IllegalStateException("Connector info for " + implementingClass.getSimpleName() + " can only be set once per job");
     
-    ArgumentChecker.notNull(user, passwd);
+    ArgumentChecker.notNull(token);
     conf.setBoolean(enumToConfKey(implementingClass, ConnectorInfo.IS_CONFIGURED), true);
-    conf.set(enumToConfKey(implementingClass, ConnectorInfo.USER_NAME), user);
-    conf.set(enumToConfKey(implementingClass, ConnectorInfo.PASSWORD), new String(Base64.encodeBase64(passwd), Charset.forName("UTF-8")));
+    conf.set(enumToConfKey(implementingClass, ConnectorInfo.TOKEN), TokenHelper.asBase64String(token));
   }
   
   /**
@@ -112,34 +108,19 @@ public class ConfiguratorBase {
   }
   
   /**
-   * Gets the user name from the configuration.
-   * 
-   * @param implementingClass
-   *          the class whose name will be used as a prefix for the property configuration key
-   * @param conf
-   *          the Hadoop configuration object to configure
-   * @return the user name
-   * @since 1.5.0
-   * @see #setConnectorInfo(Class, Configuration, String, byte[])
-   */
-  public static String getUsername(Class<?> implementingClass, Configuration conf) {
-    return conf.get(enumToConfKey(implementingClass, ConnectorInfo.USER_NAME));
-  }
-  
-  /**
-   * Gets the password from the configuration. WARNING: The password is stored in the Configuration and shared with all MapReduce tasks; It is BASE64 encoded to
-   * provide a charset safe conversion to a string, and is not intended to be secure.
+   * Gets the AccumuloToken from the configuration. WARNING: The serialized Token is stored in the Configuration and shared with all MapReduce tasks; It is
+   * BASE64 encoded to provide a charset safe conversion to a string, and is not intended to be secure.
    * 
    * @param implementingClass
    *          the class whose name will be used as a prefix for the property configuration key
    * @param conf
    *          the Hadoop configuration object to configure
-   * @return the decoded user password
+   * @return the AccumuloToken
    * @since 1.5.0
-   * @see #setConnectorInfo(Class, Configuration, String, byte[])
+   * @see #setConnectorInfo(Class, Configuration, AccumuloToken)
    */
-  public static byte[] getPassword(Class<?> implementingClass, Configuration conf) {
-    return Base64.decodeBase64(conf.get(enumToConfKey(implementingClass, ConnectorInfo.PASSWORD), "").getBytes(Charset.forName("UTF-8")));
+  public static AccumuloToken<?,?> getToken(Class<?> implementingClass, Configuration conf) {
+    return TokenHelper.fromBase64String(conf.get(enumToConfKey(implementingClass, ConnectorInfo.TOKEN)));
   }
   
   /**
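
A sketch of the round trip ConfiguratorBase now implements, using only the methods shown in this diff. As the WARNING says, Base64 is a charset-safe encoding, not encryption: the serialized token is readable by anything that can read the job's Configuration.

import org.apache.accumulo.core.client.mapreduce.lib.util.ConfiguratorBase;
import org.apache.accumulo.core.security.tokens.AccumuloToken;
import org.apache.accumulo.core.security.tokens.UserPassToken;
import org.apache.hadoop.conf.Configuration;

public class TokenRoundTrip {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Serializes the token into the Configuration under the TOKEN key,
    // Base64-encoded via TokenHelper; calling this a second time throws
    // IllegalStateException ("can only be set once per job").
    ConfiguratorBase.setConnectorInfo(TokenRoundTrip.class, conf, new UserPassToken("u", "pw".getBytes()));
    // Task side: rehydrate the same token from the shared Configuration.
    AccumuloToken<?,?> token = ConfiguratorBase.getToken(TokenRoundTrip.class, conf);
    System.out.println(token.getPrincipal()); // prints "u"
  }
}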

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java?rev=1438354&r1=1438353&r2=1438354&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java Fri Jan 25 07:04:25 2013
@@ -21,7 +21,6 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -47,7 +46,7 @@ import org.apache.accumulo.core.data.Ran
 import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.security.TablePermission;
-import org.apache.accumulo.core.security.thrift.AuthInfo;
+import org.apache.accumulo.core.security.tokens.AccumuloToken;
 import org.apache.accumulo.core.util.ArgumentChecker;
 import org.apache.accumulo.core.util.Pair;
 import org.apache.accumulo.core.util.TextUtil;
@@ -481,11 +480,9 @@ public class InputConfigurator extends C
     if ("MockInstance".equals(instanceType))
       return new MockTabletLocator();
     Instance instance = getInstance(implementingClass, conf);
-    String username = getUsername(implementingClass, conf);
-    byte[] password = getPassword(implementingClass, conf);
+    AccumuloToken<?,?> token = getToken(implementingClass, conf);
     String tableName = getInputTableName(implementingClass, conf);
-    return TabletLocator.getInstance(instance, new AuthInfo(username, ByteBuffer.wrap(password), instance.getInstanceID()),
-        new Text(Tables.getTableId(instance, tableName)));
+    return TabletLocator.getInstance(instance, token, new Text(Tables.getTableId(instance, tableName)));
   }
   
   // InputFormat doesn't have the equivalent of OutputFormat's checkOutputSpecs(JobContext job)
@@ -508,10 +505,10 @@ public class InputConfigurator extends C
       throw new IOException("Instance info has not been set.");
     // validate that we can connect as configured
     try {
-      Connector c = getInstance(implementingClass, conf).getConnector(getUsername(implementingClass, conf), getPassword(implementingClass, conf));
-      if (!c.securityOperations().authenticateUser(getUsername(implementingClass, conf), getPassword(implementingClass, conf)))
+      Connector c = getInstance(implementingClass, conf).getConnector(getToken(implementingClass, conf));
+      if (!c.securityOperations().authenticateUser(getToken(implementingClass, conf)))
         throw new IOException("Unable to authenticate user");
-      if (!c.securityOperations().hasTablePermission(getUsername(implementingClass, conf), getInputTableName(implementingClass, conf), TablePermission.READ))
+      if (!c.securityOperations().hasTablePermission(getToken(implementingClass, conf).getPrincipal(), getInputTableName(implementingClass, conf), TablePermission.READ))
         throw new IOException("Unable to access table");
       
       if (!conf.getBoolean(enumToConfKey(implementingClass, Features.USE_LOCAL_ITERATORS), false)) {

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mock/MockInstance.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mock/MockInstance.java?rev=1438354&r1=1438353&r2=1438354&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mock/MockInstance.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mock/MockInstance.java Fri Jan 25 07:04:25 2013
@@ -31,6 +31,9 @@ import org.apache.accumulo.core.client.I
 import org.apache.accumulo.core.conf.AccumuloConfiguration;
 import org.apache.accumulo.core.security.thrift.AuthInfo;
 import org.apache.accumulo.core.security.thrift.SecurityErrorCode;
+import org.apache.accumulo.core.security.tokens.AccumuloToken;
+import org.apache.accumulo.core.security.tokens.InstanceTokenWrapper;
+import org.apache.accumulo.core.security.tokens.UserPassToken;
 import org.apache.accumulo.core.util.ByteBufferUtil;
 import org.apache.accumulo.core.util.CachedConfiguration;
 import org.apache.accumulo.core.util.TextUtil;
@@ -112,22 +115,42 @@ public class MockInstance implements Ins
     return 30 * 1000;
   }
   
-  @Override
+  /**
+   * @deprecated @since 1.5, use {@link #getConnector(AccumuloToken)}
+   * @Override
+   */
   public Connector getConnector(String user, byte[] pass) throws AccumuloException, AccumuloSecurityException {
-    Connector conn = new MockConnector(user, acu, this);
-    if (!acu.users.containsKey(user))
-      conn.securityOperations().createUser(user, pass);
-    else if (!Arrays.equals(acu.users.get(user).password, pass))
-        throw new AccumuloSecurityException(user, SecurityErrorCode.BAD_CREDENTIALS);
+    return getConnector(new UserPassToken(user, ByteBuffer.wrap(pass)));
+  }
+  
+  public Connector getConnector(AccumuloToken<?,?> token) throws AccumuloException, AccumuloSecurityException {
+    if (!(token instanceof UserPassToken))
+      throw new AccumuloException("Mock only accepts UserPassTokens");
+    UserPassToken upt = (UserPassToken) token;
+    
+    Connector conn = new MockConnector(token.getPrincipal(), acu, this);
+    if (!acu.users.containsKey(token.getPrincipal()))
+      conn.securityOperations().createUser(upt);
+    else if (!Arrays.equals(acu.users.get(upt.getPrincipal()).password, upt.getPassword()))
+      throw new AccumuloSecurityException(upt.getPrincipal(), SecurityErrorCode.BAD_CREDENTIALS);
     return conn;
   }
   
-  @Override
+  public Connector getConnector(InstanceTokenWrapper token) throws AccumuloException, AccumuloSecurityException {
+    return getConnector(token.getToken());
+  }
+  /**
+   * @deprecated @since 1.5, use {@link #getConnector(AccumuloToken)}
+   * @Override
+   */
   public Connector getConnector(String user, ByteBuffer pass) throws AccumuloException, AccumuloSecurityException {
     return getConnector(user, ByteBufferUtil.toBytes(pass));
   }
   
-  @Override
+  /**
+   * @deprecated @since 1.5, use {@link #getConnector(AccumuloToken)}
+   * @Override
+   */
   public Connector getConnector(String user, CharSequence pass) throws AccumuloException, AccumuloSecurityException {
     return getConnector(user, TextUtil.getBytes(new Text(pass.toString())));
   }
@@ -146,8 +169,16 @@ public class MockInstance implements Ins
     this.conf = conf;
   }
   
-  @Override
+  /**
+   * @deprecated @since 1.5, use {@link #getConnector(AccumuloToken)}
+   * @Override
+   */
   public Connector getConnector(AuthInfo auth) throws AccumuloException, AccumuloSecurityException {
-    return getConnector(auth.user, auth.password);
+    return getConnector(UserPassToken.convertAuthInfo(auth));
+  }
+
+  @Override
+  public String getSecurityTokenClass() throws AccumuloException {
+    return UserPassToken.class.getCanonicalName();
   }
 }
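
A sketch exercising the mock path above: the legacy (user, byte[]) overload now wraps into a UserPassToken, unknown principals are created on first connect, and a wrong password surfaces as BAD_CREDENTIALS. The MockInstance(String) constructor and Connector.whoami() are assumptions; the token calls come from the diff.

import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.security.tokens.UserPassToken;

public class MockTokenDemo {
  public static void main(String[] args) throws Exception {
    MockInstance instance = new MockInstance("test");
    // First connect for an unknown principal creates the mock user.
    Connector conn = instance.getConnector(new UserPassToken("alice", "pw".getBytes()));
    System.out.println(conn.whoami()); // prints "alice"
    try {
      // Same principal, different password: rejected as BAD_CREDENTIALS.
      instance.getConnector(new UserPassToken("alice", "nope".getBytes()));
    } catch (AccumuloSecurityException expected) {
      System.out.println("rejected: " + expected.getMessage());
    }
  }
}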

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mock/MockSecurityOperations.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mock/MockSecurityOperations.java?rev=1438354&r1=1438353&r2=1438354&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mock/MockSecurityOperations.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/client/mock/MockSecurityOperations.java Fri Jan 25 07:04:25 2013
@@ -27,6 +27,9 @@ import org.apache.accumulo.core.security
 import org.apache.accumulo.core.security.SystemPermission;
 import org.apache.accumulo.core.security.TablePermission;
 import org.apache.accumulo.core.security.thrift.SecurityErrorCode;
+import org.apache.accumulo.core.security.tokens.AccumuloToken;
+import org.apache.accumulo.core.security.tokens.UserPassToken;
+import org.apache.accumulo.core.util.ByteBufferUtil;
 
 public class MockSecurityOperations implements SecurityOperations {
   
@@ -37,20 +40,41 @@ public class MockSecurityOperations impl
   }
   
   /**
-   * @deprecated Use {@link #createUser(String,byte[])} instead
+   * @deprecated Use {@link #createUser(AccumuloToken)} instead
    */
   @Deprecated
   @Override
   public void createUser(String user, byte[] password, Authorizations authorizations) throws AccumuloException, AccumuloSecurityException {
-    this.acu.users.put(user, new MockUser(user, password, authorizations));
+    createUser(new UserPassToken(user, password), authorizations);
   }
-
-  @Override
+  
+  /**
+   * @deprecated @since 1.5, use {@link #createUser(AccumuloToken)}
+   * @param user
+   * @param password
+   * @throws AccumuloException
+   * @throws AccumuloSecurityException
+   */
   public void createUser(String user, byte[] password) throws AccumuloException, AccumuloSecurityException {
     createUser(user, password, new Authorizations());
   }
   
   @Override
+  public void createUser(AccumuloToken<?,?> token, Authorizations authorization) throws AccumuloException, AccumuloSecurityException {
+    if (token instanceof UserPassToken) {
+      UserPassToken upt = (UserPassToken) token;
+      this.acu.users.put(upt.getPrincipal(), new MockUser(upt.getPrincipal(), upt.getPassword(), authorization));
+    }
+    else
+      throw new AccumuloSecurityException(token.getPrincipal(), SecurityErrorCode.INVALID_TOKEN);
+  }
+  
+  @Override
+  public void createUser(AccumuloToken<?,?> token) throws AccumuloException, AccumuloSecurityException {
+    createUser(token, new Authorizations());
+  }
+  
+  @Override
   public void dropUser(String user) throws AccumuloException, AccumuloSecurityException {
     this.acu.users.remove(user);
   }
@@ -62,14 +86,36 @@ public class MockSecurityOperations impl
       return false;
     return Arrays.equals(user.password, password);
   }
-  
+
+  @Override
+  public boolean authenticateUser(AccumuloToken<?,?> token) throws AccumuloException, AccumuloSecurityException {
+    MockUser user = acu.users.get(token.getPrincipal());
+    if (user == null)
+      return false;
+    return Arrays.equals(user.password, ((UserPassToken) token).getPassword());
+  }
+
+  /**
+   * @deprecated @since 1.5, use {@link #changeUserPassword(AccumuloToken)}
+   */
   @Override
   public void changeUserPassword(String name, byte[] password) throws AccumuloException, AccumuloSecurityException {
-    MockUser user = acu.users.get(name);
-    if (user != null)
-      user.password = Arrays.copyOf(password, password.length);
+    changeUserPassword(new UserPassToken(name, password));
+  }
+  
+  @Override
+  public void changeUserPassword(AccumuloToken<?,?> token) throws AccumuloException, AccumuloSecurityException {
+    MockUser user = acu.users.get(token.getPrincipal());
+    if (user != null){
+      if (token instanceof UserPassToken) {
+        UserPassToken upt = (UserPassToken) token;
+        // want to copy the password
+        user.password = ByteBufferUtil.toBytes(upt.password);
+      }
+      else throw new AccumuloSecurityException(token.getPrincipal(), SecurityErrorCode.INVALID_TOKEN);
+    }
     else
-      throw new AccumuloSecurityException(name, SecurityErrorCode.USER_DOESNT_EXIST);
+      throw new AccumuloSecurityException(token.getPrincipal(), SecurityErrorCode.USER_DOESNT_EXIST);
   }
   
   @Override
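
And a sketch of the token-based user management MockSecurityOperations now exposes. createUser(AccumuloToken, Authorizations), authenticateUser(AccumuloToken), and changeUserPassword(AccumuloToken) are taken from the diff; the root bootstrap token and user names are illustrative.

import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.tokens.UserPassToken;

public class MockSecurityDemo {
  public static void main(String[] args) throws Exception {
    Connector conn = new MockInstance("test")
        .getConnector(new UserPassToken("root", new byte[0]));
    // Create a user from a token; non-UserPassTokens fail with INVALID_TOKEN.
    conn.securityOperations().createUser(new UserPassToken("bob", "pw1".getBytes()), new Authorizations());
    System.out.println(conn.securityOperations().authenticateUser(new UserPassToken("bob", "pw1".getBytes()))); // true
    // Password changes also ride on a token now.
    conn.securityOperations().changeUserPassword(new UserPassToken("bob", "pw2".getBytes()));
    System.out.println(conn.securityOperations().authenticateUser(new UserPassToken("bob", "pw1".getBytes()))); // false
  }
}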

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/conf/Property.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/conf/Property.java?rev=1438354&r1=1438353&r2=1438354&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/conf/Property.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/conf/Property.java Fri Jan 25 07:04:25 2013
@@ -41,6 +41,12 @@ public enum Property {
       "A secret unique to a given instance that all servers must know in order to communicate with one another."
           + " Change it before initialization. To change it later use ./bin/accumulo accumulo.server.util.ChangeSecret [oldpasswd] [newpasswd], "
           + " and then update conf/accumulo-site.xml everywhere."),
+  INSTANCE_SECURITY_AUTHENTICATOR("instance.security.authenticator", "org.apache.accumulo.server.security.handler.ZKAuthenticator", PropertyType.CLASSNAME,
+      "The authenticator class that accumulo will use to determine if a user has privilege to perform an action"),
+  INSTANCE_SECURITY_AUTHORIZOR("instance.security.authorizor", "org.apache.accumulo.server.security.handler.ZKAuthorizor", PropertyType.CLASSNAME,
+      "The authorizor class that accumulo will use to determine what labels a user has privilege to see"),
+  INSTANCE_SECURITY_PERMISSION_HANDLER("instance.security.permissionHandler", "org.apache.accumulo.server.security.handler.ZKPermHandler",
+      PropertyType.CLASSNAME, "The permission handler class that accumulo will use to determine if a user has privilege to perform an action"),
   
   // general properties
   GENERAL_PREFIX("general.", null, PropertyType.PREFIX,
@@ -127,12 +133,12 @@ public enum Property {
       "tserver.monitor.fs",
       "true",
       PropertyType.BOOLEAN,
-      "When enabled the tserver will monitor file systems and kill itself when one switches from rw to ro.  This is usually and indication that Linux has detected a bad disk."),
-  TSERV_MEMDUMP_DIR(
-      "tserver.dir.memdump",
-      "/tmp",
-      PropertyType.PATH,
-      "A long running scan could possibly hold memory that has been minor compacted.  To prevent this, the in memory map is dumped to a local file and the scan is switched to that local file.  We can not switch to the minor compacted file because it may have been modified by iterators.  The file dumped to the local dir is an exact copy of what was in memory."),
+      "When enabled the tserver will monitor file systems and kill itself when one switches from rw to ro.  This is usually and indication that Linux has"
+          + " detected a bad disk."),
+  TSERV_MEMDUMP_DIR("tserver.dir.memdump", "/tmp", PropertyType.PATH,
+      "A long running scan could possibly hold memory that has been minor compacted.  To prevent this, the in memory map is dumped to a local file and the "
+          + "scan is switched to that local file.  We can not switch to the minor compacted file because it may have been modified by iterators.  The file "
+          + "dumped to the local dir is an exact copy of what was in memory."),
   TSERV_LOCK_MEMORY("tserver.memory.lock", "false", PropertyType.BOOLEAN,
       "The tablet server must communicate with zookeeper frequently to maintain its locks.  If the tablet server's memory is swapped out"
           + " the java garbage collector can stop all processing for long periods.  Change this property to true and the tablet server will "
@@ -203,11 +209,9 @@ public enum Property {
       + "in zookeeper. Restarting accumulo tablet servers after setting these properties in the site file "
       + "will cause the global setting to take effect. However, you must use the API or the shell to change "
       + "properties in zookeeper that are set on a table."),
-  TABLE_MAJC_RATIO(
-      "table.compaction.major.ratio",
-      "3",
-      PropertyType.FRACTION,
-      "minimum ratio of total input size to maximum input file size for running a major compaction.   When adjusting this property you may want to also adjust table.file.max.  Want to avoid the situation where only merging minor compactions occur."),
+  TABLE_MAJC_RATIO("table.compaction.major.ratio", "3", PropertyType.FRACTION,
+      "minimum ratio of total input size to maximum input file size for running a major compaction.   When adjusting this property you may want to also "
+          + "adjust table.file.max.  Want to avoid the situation where only merging minor compactions occur."),
   TABLE_MAJC_COMPACTALL_IDLETIME("table.compaction.major.everything.idle", "1h", PropertyType.TIMEDURATION,
       "After a tablet has been idle (no mutations) for this time period it may have all "
           + "of its map file compacted into one.  There is no guarantee an idle tablet will be compacted. "
@@ -227,12 +231,12 @@ public enum Property {
       "This property can be set to allow the LoadBalanceByTable load balancer to change the called Load Balancer for this table"),
   TABLE_FILE_COMPRESSION_TYPE("table.file.compress.type", "gz", PropertyType.STRING, "One of gz,lzo,none"),
   TABLE_FILE_COMPRESSED_BLOCK_SIZE("table.file.compress.blocksize", "100K", PropertyType.MEMORY,
-      "Overrides the hadoop io.seqfile.compress.blocksize setting so that map files have better query performance. " + "The maximum value for this is "
+      "Overrides the hadoop io.seqfile.compress.blocksize setting so that map files have better query performance. The maximum value for this is "
           + Integer.MAX_VALUE),
   TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX("table.file.compress.blocksize.index", "128K", PropertyType.MEMORY,
       "Determines how large index blocks can be in files that support multilevel indexes. The maximum value for this is " + Integer.MAX_VALUE),
   TABLE_FILE_BLOCK_SIZE("table.file.blocksize", "0B", PropertyType.MEMORY,
-      "Overrides the hadoop dfs.block.size setting so that map files have better query performance. " + "The maximum value for this is " + Integer.MAX_VALUE),
+      "Overrides the hadoop dfs.block.size setting so that map files have better query performance. The maximum value for this is " + Integer.MAX_VALUE),
   TABLE_FILE_REPLICATION("table.file.replication", "0", PropertyType.COUNT, "Determines how many replicas to keep of a tables map files in HDFS. "
       + "When this value is LTE 0, HDFS defaults are used."),
   TABLE_FILE_MAX(
@@ -253,29 +257,25 @@ public enum Property {
       PropertyType.CLASSNAME,
       "A function that can transform the key prior to insertion and check of bloom filter.  org.apache.accumulo.core.file.keyfunctor.RowFunctor,"
           + ",org.apache.accumulo.core.file.keyfunctor.ColumnFamilyFunctor, and org.apache.accumulo.core.file.keyfunctor.ColumnQualifierFunctor are allowable values."
-          + " One can extend any of the above mentioned classes to perform specialized parsing of the key. "),
-  TABLE_BLOOM_HASHTYPE("table.bloom.hash.type", "murmur", PropertyType.STRING, "The bloom filter hash type"),
-  TABLE_FAILURES_IGNORE("table.failures.ignore", "false", PropertyType.BOOLEAN,
+          + " One can extend any of the above mentioned classes to perform specialized parsing of the key. "), TABLE_BLOOM_HASHTYPE("table.bloom.hash.type",
+      "murmur", PropertyType.STRING, "The bloom filter hash type"), TABLE_FAILURES_IGNORE("table.failures.ignore", "false", PropertyType.BOOLEAN,
       "If you want queries for your table to hang or fail when data is missing from the system, "
           + "then set this to false. When this set to true missing data will be reported but queries "
-          + "will still run possibly returning a subset of the data."),
-  TABLE_DEFAULT_SCANTIME_VISIBILITY("table.security.scan.visibility.default", "", PropertyType.STRING,
-      "The security label that will be assumed at scan time if an entry does not have a visibility set.<br />"
+          + "will still run possibly returning a subset of the data."), TABLE_DEFAULT_SCANTIME_VISIBILITY("table.security.scan.visibility.default", "",
+      PropertyType.STRING, "The security label that will be assumed at scan time if an entry does not have a visibility set.<br />"
           + "Note: An empty security label is displayed as []. The scan results will show an empty visibility even if "
           + "the visibility from this setting is applied to the entry.<br />"
           + "CAUTION: If a particular key has an empty security label AND its table's default visibility is also empty, "
           + "access will ALWAYS be granted for users with permission to that table. Additionally, if this field is changed, "
-          + "all existing data with an empty visibility label will be interpreted with the new label on the next scan."),
-  TABLE_LOCALITY_GROUPS("table.groups.enabled", "", PropertyType.STRING, "A comma separated list of locality group names to enable for this table."),
-  TABLE_CONSTRAINT_PREFIX("table.constraint.", null, PropertyType.PREFIX,
-      "Properties in this category are per-table properties that add constraints to a table. "
+          + "all existing data with an empty visibility label will be interpreted with the new label on the next scan."), TABLE_LOCALITY_GROUPS(
+      "table.groups.enabled", "", PropertyType.STRING, "A comma separated list of locality group names to enable for this table."), TABLE_CONSTRAINT_PREFIX(
+      "table.constraint.", null, PropertyType.PREFIX, "Properties in this category are per-table properties that add constraints to a table. "
           + "These properties start with the category prefix, followed by a number, and their values "
           + "correspond to a fully qualified Java class that implements the Constraint interface.<br />"
           + "For example, table.constraint.1 = org.apache.accumulo.core.constraints.MyCustomConstraint "
-          + "and table.constraint.2 = my.package.constraints.MySecondConstraint"),
-  TABLE_INDEXCACHE_ENABLED("table.cache.index.enable", "true", PropertyType.BOOLEAN, "Determines whether index cache is enabled."),
-  TABLE_BLOCKCACHE_ENABLED("table.cache.block.enable", "false", PropertyType.BOOLEAN, "Determines whether file block cache is enabled."),
-  TABLE_ITERATOR_PREFIX("table.iterator.", null, PropertyType.PREFIX,
+          + "and table.constraint.2 = my.package.constraints.MySecondConstraint"), TABLE_INDEXCACHE_ENABLED("table.cache.index.enable", "true",
+      PropertyType.BOOLEAN, "Determines whether index cache is enabled."), TABLE_BLOCKCACHE_ENABLED("table.cache.block.enable", "false", PropertyType.BOOLEAN,
+      "Determines whether file block cache is enabled."), TABLE_ITERATOR_PREFIX("table.iterator.", null, PropertyType.PREFIX,
       "Properties in this category specify iterators that are applied at various stages (scopes) of interaction "
           + "with a table. These properties start with the category prefix, followed by a scope (minc, majc, scan, etc.), "
           + "followed by a period, followed by a name, as in table.iterator.scan.vers, or table.iterator.scan.custom. "
@@ -283,8 +283,7 @@ public enum Property {
           + "such as table.iterator.scan.vers = 10,org.apache.accumulo.core.iterators.VersioningIterator<br /> "
           + "These iterators can take options if additional properties are set that look like this property, "
           + "but are suffixed with a period, followed by 'opt' followed by another period, and a property name.<br />"
-          + "For example, table.iterator.minc.vers.opt.maxVersions = 3"),
-  TABLE_LOCALITY_GROUP_PREFIX("table.group.", null, PropertyType.PREFIX,
+          + "For example, table.iterator.minc.vers.opt.maxVersions = 3"), TABLE_LOCALITY_GROUP_PREFIX("table.group.", null, PropertyType.PREFIX,
       "Properties in this category are per-table properties that define locality groups in a table. These properties start "
           + "with the category prefix, followed by a name, followed by a period, and followed by a property for that group.<br />"
           + "For example table.group.group1=x,y,z sets the column families for a group called group1. Once configured, "

Modified: accumulo/trunk/core/src/main/java/org/apache/accumulo/core/gc/thrift/GCMonitorService.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/core/src/main/java/org/apache/accumulo/core/gc/thrift/GCMonitorService.java?rev=1438354&r1=1438353&r2=1438354&view=diff
==============================================================================
--- accumulo/trunk/core/src/main/java/org/apache/accumulo/core/gc/thrift/GCMonitorService.java (original)
+++ accumulo/trunk/core/src/main/java/org/apache/accumulo/core/gc/thrift/GCMonitorService.java Fri Jan 25 07:04:25 2013
@@ -50,13 +50,13 @@ import org.slf4j.LoggerFactory;
 
   public interface Iface {
 
-    public GCStatus getStatus(org.apache.accumulo.cloudtrace.thrift.TInfo tinfo, org.apache.accumulo.core.security.thrift.AuthInfo credentials) throws org.apache.accumulo.core.security.thrift.ThriftSecurityException, org.apache.thrift.TException;
+    public GCStatus getStatus(org.apache.accumulo.cloudtrace.thrift.TInfo tinfo, org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper credentials) throws org.apache.accumulo.core.security.thrift.ThriftSecurityException, org.apache.thrift.TException;
 
   }
 
   public interface AsyncIface {
 
-    public void getStatus(org.apache.accumulo.cloudtrace.thrift.TInfo tinfo, org.apache.accumulo.core.security.thrift.AuthInfo credentials, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.getStatus_call> resultHandler) throws org.apache.thrift.TException;
+    public void getStatus(org.apache.accumulo.cloudtrace.thrift.TInfo tinfo, org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper credentials, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.getStatus_call> resultHandler) throws org.apache.thrift.TException;
 
   }
 
@@ -80,13 +80,13 @@ import org.slf4j.LoggerFactory;
       super(iprot, oprot);
     }
 
-    public GCStatus getStatus(org.apache.accumulo.cloudtrace.thrift.TInfo tinfo, org.apache.accumulo.core.security.thrift.AuthInfo credentials) throws org.apache.accumulo.core.security.thrift.ThriftSecurityException, org.apache.thrift.TException
+    public GCStatus getStatus(org.apache.accumulo.cloudtrace.thrift.TInfo tinfo, org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper credentials) throws org.apache.accumulo.core.security.thrift.ThriftSecurityException, org.apache.thrift.TException
     {
       send_getStatus(tinfo, credentials);
       return recv_getStatus();
     }
 
-    public void send_getStatus(org.apache.accumulo.cloudtrace.thrift.TInfo tinfo, org.apache.accumulo.core.security.thrift.AuthInfo credentials) throws org.apache.thrift.TException
+    public void send_getStatus(org.apache.accumulo.cloudtrace.thrift.TInfo tinfo, org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper credentials) throws org.apache.thrift.TException
     {
       getStatus_args args = new getStatus_args();
       args.setTinfo(tinfo);
@@ -125,7 +125,7 @@ import org.slf4j.LoggerFactory;
       super(protocolFactory, clientManager, transport);
     }
 
-    public void getStatus(org.apache.accumulo.cloudtrace.thrift.TInfo tinfo, org.apache.accumulo.core.security.thrift.AuthInfo credentials, org.apache.thrift.async.AsyncMethodCallback<getStatus_call> resultHandler) throws org.apache.thrift.TException {
+    public void getStatus(org.apache.accumulo.cloudtrace.thrift.TInfo tinfo, org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper credentials, org.apache.thrift.async.AsyncMethodCallback<getStatus_call> resultHandler) throws org.apache.thrift.TException {
       checkReady();
       getStatus_call method_call = new getStatus_call(tinfo, credentials, resultHandler, this, ___protocolFactory, ___transport);
       this.___currentMethod = method_call;
@@ -134,8 +134,8 @@ import org.slf4j.LoggerFactory;
 
     public static class getStatus_call extends org.apache.thrift.async.TAsyncMethodCall {
       private org.apache.accumulo.cloudtrace.thrift.TInfo tinfo;
-      private org.apache.accumulo.core.security.thrift.AuthInfo credentials;
-      public getStatus_call(org.apache.accumulo.cloudtrace.thrift.TInfo tinfo, org.apache.accumulo.core.security.thrift.AuthInfo credentials, org.apache.thrift.async.AsyncMethodCallback<getStatus_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+      private org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper credentials;
+      public getStatus_call(org.apache.accumulo.cloudtrace.thrift.TInfo tinfo, org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper credentials, org.apache.thrift.async.AsyncMethodCallback<getStatus_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
         super(client, protocolFactory, transport, resultHandler, false);
         this.tinfo = tinfo;
         this.credentials = credentials;
@@ -216,7 +216,7 @@ import org.slf4j.LoggerFactory;
     }
 
     public org.apache.accumulo.cloudtrace.thrift.TInfo tinfo; // required
-    public org.apache.accumulo.core.security.thrift.AuthInfo credentials; // required
+    public org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper credentials; // required
 
     /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
     @SuppressWarnings("all") public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -286,7 +286,7 @@ import org.slf4j.LoggerFactory;
       tmpMap.put(_Fields.TINFO, new org.apache.thrift.meta_data.FieldMetaData("tinfo", org.apache.thrift.TFieldRequirementType.DEFAULT, 
           new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, org.apache.accumulo.cloudtrace.thrift.TInfo.class)));
       tmpMap.put(_Fields.CREDENTIALS, new org.apache.thrift.meta_data.FieldMetaData("credentials", org.apache.thrift.TFieldRequirementType.DEFAULT, 
-          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, org.apache.accumulo.core.security.thrift.AuthInfo.class)));
+          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper.class)));
       metaDataMap = Collections.unmodifiableMap(tmpMap);
       org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getStatus_args.class, metaDataMap);
     }
@@ -296,7 +296,7 @@ import org.slf4j.LoggerFactory;
 
     public getStatus_args(
       org.apache.accumulo.cloudtrace.thrift.TInfo tinfo,
-      org.apache.accumulo.core.security.thrift.AuthInfo credentials)
+      org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper credentials)
     {
       this();
       this.tinfo = tinfo;
@@ -311,7 +311,7 @@ import org.slf4j.LoggerFactory;
         this.tinfo = new org.apache.accumulo.cloudtrace.thrift.TInfo(other.tinfo);
       }
       if (other.isSetCredentials()) {
-        this.credentials = new org.apache.accumulo.core.security.thrift.AuthInfo(other.credentials);
+        this.credentials = new org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper(other.credentials);
       }
     }
 
@@ -349,11 +349,11 @@ import org.slf4j.LoggerFactory;
       }
     }
 
-    public org.apache.accumulo.core.security.thrift.AuthInfo getCredentials() {
+    public org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper getCredentials() {
       return this.credentials;
     }
 
-    public getStatus_args setCredentials(org.apache.accumulo.core.security.thrift.AuthInfo credentials) {
+    public getStatus_args setCredentials(org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper credentials) {
       this.credentials = credentials;
       return this;
     }
@@ -387,7 +387,7 @@ import org.slf4j.LoggerFactory;
         if (value == null) {
           unsetCredentials();
         } else {
-          setCredentials((org.apache.accumulo.core.security.thrift.AuthInfo)value);
+          setCredentials((org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper)value);
         }
         break;
 
@@ -583,7 +583,7 @@ import org.slf4j.LoggerFactory;
               break;
             case 1: // CREDENTIALS
               if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
-                struct.credentials = new org.apache.accumulo.core.security.thrift.AuthInfo();
+                struct.credentials = new org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper();
                 struct.credentials.read(iprot);
                 struct.setCredentialsIsSet(true);
               } else { 
@@ -658,7 +658,7 @@ import org.slf4j.LoggerFactory;
           struct.setTinfoIsSet(true);
         }
         if (incoming.get(1)) {
-          struct.credentials = new org.apache.accumulo.core.security.thrift.AuthInfo();
+          struct.credentials = new org.apache.accumulo.core.security.thrift.ThriftInstanceTokenWrapper();
           struct.credentials.read(iprot);
           struct.setCredentialsIsSet(true);
         }


