accumulo-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From vi...@apache.org
Subject svn commit: r1433166 [3/20] - in /accumulo/branches/ACCUMULO-259: ./ assemble/ assemble/platform/ assemble/scripts/ assemble/scripts/init.d/ bin/ conf/examples/1GB/native-standalone/ conf/examples/1GB/standalone/ conf/examples/2GB/native-standalone/ co...
Date Mon, 14 Jan 2013 22:03:34 GMT
Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/admin/SecurityOperations.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/admin/SecurityOperations.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/admin/SecurityOperations.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/admin/SecurityOperations.java Mon Jan 14 22:03:24 2013
@@ -44,8 +44,23 @@ public interface SecurityOperations {
    *           if a general error occurs
    * @throws AccumuloSecurityException
    *           if the user does not have permission to create a user
+   * @deprecated Use {@link #createUser(String,byte[])} instead
    */
   public void createUser(String user, byte[] password, Authorizations authorizations) throws AccumuloException, AccumuloSecurityException;
+
+  /**
+   * Create a user
+   * 
+   * @param user
+   *          the name of the user to create
+   * @param password
+   *          the plaintext password for the user
+   * @throws AccumuloException
+   *           if a general error occurs
+   * @throws AccumuloSecurityException
+   *           if the user does not have permission to create a user
+   */
+  public void createUser(String user, byte[] password) throws AccumuloException, AccumuloSecurityException;
   
   /**
    * Delete a user

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/admin/SecurityOperationsImpl.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/admin/SecurityOperationsImpl.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/admin/SecurityOperationsImpl.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/admin/SecurityOperationsImpl.java Mon Jan 14 22:03:24 2013
@@ -105,6 +105,7 @@ public class SecurityOperationsImpl impl
    *           if a general error occurs
    * @throws AccumuloSecurityException
    *           if the user does not have permission to create a user
+   * @deprecated Use {@link #createUser(String,byte[])} instead
    */
   public void createUser(final String user, final byte[] password, final Authorizations authorizations) throws AccumuloException, AccumuloSecurityException {
     ArgumentChecker.notNull(user, password, authorizations);
@@ -115,6 +116,22 @@ public class SecurityOperationsImpl impl
       }
     });
   }
+
+  /**
+   * Create a user
+   * 
+   * @param user
+   *          the name of the user to create
+   * @param password
+   *          the plaintext password for the user
+   * @throws AccumuloException
+   *           if a general error occurs
+   * @throws AccumuloSecurityException
+   *           if the user does not have permission to create a user
+   */
+  public void createUser(final String user, final byte[] password) throws AccumuloException, AccumuloSecurityException {
+    createUser(user, password, new Authorizations());
+  }
   
   /**
    * Delete a user

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperationsImpl.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperationsImpl.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperationsImpl.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/admin/TableOperationsImpl.java Mon Jan 14 22:03:24 2013
@@ -20,7 +20,6 @@ import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -109,8 +108,6 @@ public class TableOperationsImpl extends
   private AuthInfo credentials;
   
   private static final Logger log = Logger.getLogger(TableOperationsImpl.class);
-
-  private static final Charset utf8 = Charset.forName("UTF8");
   
   /**
    * @param instance
@@ -191,7 +188,7 @@ public class TableOperationsImpl extends
   public void create(String tableName, boolean limitVersion, TimeType timeType) throws AccumuloException, AccumuloSecurityException, TableExistsException {
     ArgumentChecker.notNull(tableName, timeType);
     
-    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes(utf8)), ByteBuffer.wrap(timeType.name().getBytes(utf8)));
+    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes()), ByteBuffer.wrap(timeType.name().getBytes()));
     
     Map<String,String> opts = IteratorUtil.generateInitialTableProperties(limitVersion);
     
@@ -488,7 +485,7 @@ public class TableOperationsImpl extends
     
     ArgumentChecker.notNull(tableName);
     ByteBuffer EMPTY = ByteBuffer.allocate(0);
-    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes(utf8)), start == null ? EMPTY : TextUtil.getByteBuffer(start), end == null ? EMPTY
+    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes()), start == null ? EMPTY : TextUtil.getByteBuffer(start), end == null ? EMPTY
         : TextUtil.getByteBuffer(end));
     Map<String,String> opts = new HashMap<String,String>();
     try {
@@ -503,7 +500,7 @@ public class TableOperationsImpl extends
     
     ArgumentChecker.notNull(tableName);
     ByteBuffer EMPTY = ByteBuffer.allocate(0);
-    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes(utf8)), start == null ? EMPTY : TextUtil.getByteBuffer(start), end == null ? EMPTY
+    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes()), start == null ? EMPTY : TextUtil.getByteBuffer(start), end == null ? EMPTY
         : TextUtil.getByteBuffer(end));
     Map<String,String> opts = new HashMap<String,String>();
     try {
@@ -600,7 +597,7 @@ public class TableOperationsImpl extends
   public void delete(String tableName) throws AccumuloException, AccumuloSecurityException, TableNotFoundException {
     ArgumentChecker.notNull(tableName);
     
-    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes(utf8)));
+    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes()));
     Map<String,String> opts = new HashMap<String,String>();
     
     try {
@@ -626,7 +623,7 @@ public class TableOperationsImpl extends
     if (!Collections.disjoint(propertiesToExclude, propertiesToSet.keySet()))
       throw new IllegalArgumentException("propertiesToSet and propertiesToExclude not disjoint");
     
-    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(srcTableId.getBytes(utf8)), ByteBuffer.wrap(newTableName.getBytes(utf8)));
+    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(srcTableId.getBytes()), ByteBuffer.wrap(newTableName.getBytes()));
     Map<String,String> opts = new HashMap<String,String>();
     opts.putAll(propertiesToSet);
     for (String prop : propertiesToExclude)
@@ -654,7 +651,7 @@ public class TableOperationsImpl extends
   public void rename(String oldTableName, String newTableName) throws AccumuloSecurityException, TableNotFoundException, AccumuloException,
       TableExistsException {
     
-    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(oldTableName.getBytes(utf8)), ByteBuffer.wrap(newTableName.getBytes(utf8)));
+    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(oldTableName.getBytes()), ByteBuffer.wrap(newTableName.getBytes()));
     Map<String,String> opts = new HashMap<String,String>();
     doTableOperation(TableOperation.RENAME, args, opts);
   }
@@ -703,7 +700,7 @@ public class TableOperationsImpl extends
     if (flush)
       _flush(tableId, start, end, true);
     
-    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableId.getBytes(utf8)), start == null ? EMPTY : TextUtil.getByteBuffer(start), end == null ? EMPTY
+    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableId.getBytes()), start == null ? EMPTY : TextUtil.getByteBuffer(start), end == null ? EMPTY
         : TextUtil.getByteBuffer(end), ByteBuffer.wrap(IteratorUtil.encodeIteratorSettings(iterators)));
 
     Map<String,String> opts = new HashMap<String,String>();
@@ -1020,8 +1017,8 @@ public class TableOperationsImpl extends
       throw new AccumuloException("Bulk import failure directory " + failPath + " is not empty");
     }
     
-    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes(utf8)), ByteBuffer.wrap(dir.getBytes(utf8)), ByteBuffer.wrap(failureDir.getBytes(utf8)),
-        ByteBuffer.wrap((setTime + "").getBytes(utf8)));
+    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes()), ByteBuffer.wrap(dir.getBytes()), ByteBuffer.wrap(failureDir.getBytes()),
+        ByteBuffer.wrap((setTime + "").getBytes()));
     Map<String,String> opts = new HashMap<String,String>();
     
     try {
@@ -1047,7 +1044,7 @@ public class TableOperationsImpl extends
   public void offline(String tableName) throws AccumuloSecurityException, AccumuloException, TableNotFoundException {
     
     ArgumentChecker.notNull(tableName);
-    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes(utf8)));
+    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes()));
     Map<String,String> opts = new HashMap<String,String>();
     
     try {
@@ -1070,7 +1067,7 @@ public class TableOperationsImpl extends
    */
   public void online(String tableName) throws AccumuloSecurityException, AccumuloException, TableNotFoundException {
     ArgumentChecker.notNull(tableName);
-    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes(utf8)));
+    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes()));
     Map<String,String> opts = new HashMap<String,String>();
     
     try {
@@ -1155,7 +1152,7 @@ public class TableOperationsImpl extends
       Logger.getLogger(this.getClass()).warn("Failed to check if imported table references external java classes : " + ioe.getMessage());
     }
     
-    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes(utf8)), ByteBuffer.wrap(importDir.getBytes(utf8)));
+    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes()), ByteBuffer.wrap(importDir.getBytes()));
     
     Map<String,String> opts = Collections.emptyMap();
     
@@ -1172,7 +1169,7 @@ public class TableOperationsImpl extends
   public void exportTable(String tableName, String exportDir) throws TableNotFoundException, AccumuloException, AccumuloSecurityException {
     ArgumentChecker.notNull(tableName, exportDir);
     
-    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes(utf8)), ByteBuffer.wrap(exportDir.getBytes(utf8)));
+    List<ByteBuffer> args = Arrays.asList(ByteBuffer.wrap(tableName.getBytes()), ByteBuffer.wrap(exportDir.getBytes()));
     
     Map<String,String> opts = Collections.emptyMap();
     

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ConnectorImpl.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ConnectorImpl.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ConnectorImpl.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ConnectorImpl.java Mon Jan 14 22:03:24 2013
@@ -51,18 +51,19 @@ public class ConnectorImpl extends Conne
   private TableOperations tableops = null;
   private InstanceOperations instanceops = null;
   
-/**
-     * 
-     * Use {@link Instance#getConnector(String, byte[])}
-     * 
-     * @param instance
-     * @param user
-     * @param password
-     * @throws AccumuloException
-     * @throws AccumuloSecurityException
-     * @see Instance#getConnector(String user, byte[] password)
-     * @deprecated Not for client use
-     */
+  /**
+   * 
+   * Use {@link Instance#getConnector(String, byte[])}
+   * 
+   * @param instance
+   * @param user
+   * @param password
+   * @throws AccumuloException
+   * @throws AccumuloSecurityException
+   * @see Instance#getConnector(String user, byte[] password)
+   * @deprecated Not for client use
+   */
+  @Deprecated
   public ConnectorImpl(Instance instance, String user, byte[] password) throws AccumuloException, AccumuloSecurityException {
     ArgumentChecker.notNull(instance, user, password);
     this.instance = instance;
@@ -102,6 +103,7 @@ public class ConnectorImpl extends Conne
     return new TabletServerBatchReader(instance, credentials, getTableId(tableName), authorizations, numQueryThreads);
   }
   
+  @Deprecated
   @Override
   public BatchDeleter createBatchDeleter(String tableName, Authorizations authorizations, int numQueryThreads, long maxMemory, long maxLatency,
       int maxWriteThreads) throws TableNotFoundException {
@@ -117,6 +119,7 @@ public class ConnectorImpl extends Conne
     return new TabletServerBatchDeleter(instance, credentials, getTableId(tableName), authorizations, numQueryThreads, config);
   }
   
+  @Deprecated
   @Override
   public BatchWriter createBatchWriter(String tableName, long maxMemory, long maxLatency, int maxWriteThreads) throws TableNotFoundException {
     ArgumentChecker.notNull(tableName);
@@ -130,6 +133,7 @@ public class ConnectorImpl extends Conne
     return new BatchWriterImpl(instance, credentials, getTableId(tableName), config);
   }
   
+  @Deprecated
   @Override
   public MultiTableBatchWriter createMultiTableBatchWriter(long maxMemory, long maxLatency, int maxWriteThreads) {
     return new MultiTableBatchWriterImpl(instance, credentials, new BatchWriterConfig().setMaxMemory(maxMemory)

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/OfflineScanner.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/OfflineScanner.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/OfflineScanner.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/OfflineScanner.java Mon Jan 14 22:03:24 2013
@@ -104,7 +104,7 @@ class OfflineIterator implements Iterato
       return new MultiIterator(allIters, false);
     }
   }
-
+  
   private SortedKeyValueIterator<Key,Value> iter;
   private Range range;
   private KeyExtent currentExtent;
@@ -114,7 +114,7 @@ class OfflineIterator implements Iterato
   private Instance instance;
   private ScannerOptions options;
   private ArrayList<SortedKeyValueIterator<Key,Value>> readers;
-
+  
   /**
    * @param offlineScanner
    * @param instance
@@ -130,7 +130,7 @@ class OfflineIterator implements Iterato
     if (this.options.fetchedColumns.size() > 0) {
       this.range = range.bound(this.options.fetchedColumns.first(), this.options.fetchedColumns.last());
     }
-
+    
     this.tableId = table.toString();
     this.authorizations = authorizations;
     this.readers = new ArrayList<SortedKeyValueIterator<Key,Value>>();
@@ -141,12 +141,12 @@ class OfflineIterator implements Iterato
       
       while (iter != null && !iter.hasTop())
         nextTablet();
-
+      
     } catch (Exception e) {
       throw new RuntimeException(e);
     }
   }
-
+  
   @Override
   public boolean hasNext() {
     return iter != null && iter.hasTop();
@@ -158,7 +158,7 @@ class OfflineIterator implements Iterato
       byte[] v = iter.getTopValue().get();
       // copy just like tablet server does, do this before calling next
       KeyValue ret = new KeyValue(new Key(iter.getTopKey()), Arrays.copyOf(v, v.length));
-
+      
       iter.next();
       
       while (iter != null && !iter.hasTop())
@@ -195,19 +195,19 @@ class OfflineIterator implements Iterato
         iter = null;
         return;
       }
-
+      
       if (range.afterEndKey(new Key(currentExtent.getEndRow()).followingKey(PartialKey.ROW))) {
         iter = null;
         return;
       }
-
+      
       nextRange = new Range(currentExtent.getMetadataEntry(), false, null, false);
     }
-
+    
     List<String> relFiles = new ArrayList<String>();
     
     Pair<KeyExtent,String> eloc = getTabletFiles(nextRange, relFiles);
-
+    
     while (eloc.getSecond() != null) {
       if (Tables.getTableState(instance, tableId) != TableState.OFFLINE) {
         Tables.clearCache(instance);
@@ -226,7 +226,7 @@ class OfflineIterator implements Iterato
     if (!extent.getTableId().toString().equals(tableId)) {
       throw new AccumuloException(" did not find tablets for table " + tableId + " " + extent);
     }
-
+    
     if (currentExtent != null && !extent.isPreviousExtent(currentExtent))
       throw new AccumuloException(" " + currentExtent + " is not previous extent " + extent);
     
@@ -259,7 +259,7 @@ class OfflineIterator implements Iterato
     while (row.hasNext()) {
       Entry<Key,Value> entry = row.next();
       Key key = entry.getKey();
-
+      
       if (key.getColumnFamily().equals(Constants.METADATA_DATAFILE_COLUMN_FAMILY)) {
         relFiles.add(key.getColumnQualifier().toString());
       }
@@ -272,11 +272,11 @@ class OfflineIterator implements Iterato
       if (Constants.METADATA_PREV_ROW_COLUMN.hasColumns(key)) {
         extent = new KeyExtent(key.getRow(), entry.getValue());
       }
-
+      
     }
     return new Pair<KeyExtent,String>(extent, location);
   }
-
+  
   /**
    * @param absFiles
    * @return
@@ -299,7 +299,7 @@ class OfflineIterator implements Iterato
     }
     
     readers.clear();
-
+    
     // TODO need to close files
     for (String file : absFiles) {
       FileSKVIterator reader = FileOperations.getInstance().openReader(file, false, fs, conf, acuTableConf, null, null);
@@ -326,7 +326,7 @@ class OfflineIterator implements Iterato
     return iterEnv.getTopLevelIterator(IteratorUtil.loadIterators(IteratorScope.scan, visFilter, extent, acuTableConf, options.serverSideIteratorList,
         options.serverSideIteratorOptions, iterEnv, false));
   }
-
+  
   @Override
   public void remove() {
     throw new UnsupportedOperationException();
@@ -354,18 +354,20 @@ public class OfflineScanner extends Scan
     this.credentials = credentials;
     this.tableId = new Text(tableId);
     this.range = new Range((Key) null, (Key) null);
-
+    
     this.authorizations = authorizations;
     
     this.batchSize = Constants.SCAN_BATCH_SIZE;
     this.timeOut = Integer.MAX_VALUE;
   }
-
+  
+  @Deprecated
   @Override
   public void setTimeOut(int timeOut) {
     this.timeOut = timeOut;
   }
   
+  @Deprecated
   @Override
   public int getTimeOut() {
     return timeOut;
@@ -405,5 +407,5 @@ public class OfflineScanner extends Scan
   public Iterator<Entry<Key,Value>> iterator() {
     return new OfflineIterator(this, instance, credentials, authorizations, tableId, range);
   }
-
+  
 }

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ScannerImpl.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ScannerImpl.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ScannerImpl.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ScannerImpl.java Mon Jan 14 22:03:24 2013
@@ -98,6 +98,7 @@ public class ScannerImpl extends Scanner
    * Returns an iterator over an accumulo table. This iterator uses the options that are currently set on the scanner for its lifetime. So setting options on a
    * Scanner object will have no effect on existing iterators.
    */
+  @Override
   public synchronized Iterator<Entry<Key,Value>> iterator() {
     return new ScannerIterator(instance, credentials, table, authorizations, range, size, getTimeOut(), this, isolated);
   }
@@ -112,6 +113,7 @@ public class ScannerImpl extends Scanner
     this.isolated = false;
   }
   
+  @Deprecated
   @Override
   public void setTimeOut(int timeOut) {
     if (timeOut == Integer.MAX_VALUE)
@@ -120,6 +122,7 @@ public class ScannerImpl extends Scanner
       setTimeout(timeOut, TimeUnit.SECONDS);
   }
   
+  @Deprecated
   @Override
   public int getTimeOut() {
     long timeout = getTimeout(TimeUnit.SECONDS);

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ServerClient.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ServerClient.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ServerClient.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ServerClient.java Mon Jan 14 22:03:24 2013
@@ -26,6 +26,7 @@ import org.apache.accumulo.core.client.A
 import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.impl.thrift.ClientService;
 import org.apache.accumulo.core.client.impl.thrift.ClientService.Client;
+import org.apache.accumulo.core.conf.AccumuloConfiguration;
 import org.apache.accumulo.core.conf.Property;
 import org.apache.accumulo.core.security.thrift.ThriftSecurityException;
 import org.apache.accumulo.core.util.ArgumentChecker;
@@ -123,6 +124,11 @@ public class ServerClient {
   }
   
   public static Pair<String,ClientService.Client> getConnection(Instance instance, boolean preferCachedConnections) throws TTransportException {
+    AccumuloConfiguration conf = instance.getConfiguration();
+    return getConnection(instance, preferCachedConnections, conf.getTimeInMillis(Property.GENERAL_RPC_TIMEOUT));
+  }
+  
+  public static Pair<String,ClientService.Client> getConnection(Instance instance, boolean preferCachedConnections, long rpcTimeout) throws TTransportException {
     ArgumentChecker.notNull(instance);
     // create list of servers
     ArrayList<ThriftTransportKey> servers = new ArrayList<ThriftTransportKey>();
@@ -130,13 +136,15 @@ public class ServerClient {
     // add tservers
     
     ZooCache zc = getZooCache(instance);
-    
+    AccumuloConfiguration conf = instance.getConfiguration();
     for (String tserver : zc.getChildren(ZooUtil.getRoot(instance) + Constants.ZTSERVERS)) {
       String path = ZooUtil.getRoot(instance) + Constants.ZTSERVERS + "/" + tserver;
       byte[] data = ZooUtil.getLockData(zc, path);
       if (data != null && !new String(data).equals("master"))
-        servers.add(new ThriftTransportKey(new ServerServices(new String(data)).getAddressString(Service.TSERV_CLIENT), instance.getConfiguration().getPort(
-            Property.TSERV_CLIENTPORT), instance.getConfiguration().getTimeInMillis(Property.GENERAL_RPC_TIMEOUT)));
+        servers.add(new ThriftTransportKey(
+            new ServerServices(new String(data)).getAddressString(Service.TSERV_CLIENT), 
+            conf.getPort(Property.TSERV_CLIENTPORT), 
+            rpcTimeout));
     }
     
     boolean opened = false;

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletLocator.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletLocator.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletLocator.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletLocator.java Mon Jan 14 22:03:24 2013
@@ -146,7 +146,7 @@ public abstract class TabletLocator {
   }
 
   public static class TabletLocation implements Comparable<TabletLocation> {
-    private static WeakHashMap<String,WeakReference<String>> tabletLocs = new WeakHashMap<String,WeakReference<String>>();
+    private static final WeakHashMap<String,WeakReference<String>> tabletLocs = new WeakHashMap<String,WeakReference<String>>();
     
     private static String dedupeLocation(String tabletLoc) {
       synchronized (tabletLocs) {

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletLocatorImpl.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletLocatorImpl.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletLocatorImpl.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletLocatorImpl.java Mon Jan 14 22:03:24 2013
@@ -87,8 +87,8 @@ public class TabletLocatorImpl extends T
   
   private TreeSet<KeyExtent> badExtents = new TreeSet<KeyExtent>();
   private ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock();
-  private Lock rLock = rwLock.readLock();
-  private Lock wLock = rwLock.writeLock();
+  private final Lock rLock = rwLock.readLock();
+  private final Lock wLock = rwLock.writeLock();
   
   public static interface TabletLocationObtainer {
     /**

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletServerBatchReaderIterator.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletServerBatchReaderIterator.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletServerBatchReaderIterator.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletServerBatchReaderIterator.java Mon Jan 14 22:03:24 2013
@@ -91,7 +91,7 @@ public class TabletServerBatchReaderIter
   private Iterator<Entry<Key,Value>> batchIterator;
   private List<Entry<Key,Value>> batch;
   private static final List<Entry<Key,Value>> LAST_BATCH = new ArrayList<Map.Entry<Key,Value>>();
-  private Object nextLock = new Object();
+  private final Object nextLock = new Object();
   
   private long failSleepTime = 100;
   
@@ -331,7 +331,7 @@ public class TabletServerBatchReaderIter
     private Map<KeyExtent,List<Range>> tabletsRanges;
     private ResultReceiver receiver;
     private Semaphore semaphore = null;
-    private Map<KeyExtent,List<Range>> failures;
+    private final Map<KeyExtent,List<Range>> failures;
     private List<Column> columns;
     private int semaphoreSize;
     

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletServerBatchWriter.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletServerBatchWriter.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletServerBatchWriter.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/TabletServerBatchWriter.java Mon Jan 14 22:03:24 2013
@@ -21,7 +21,6 @@ import java.lang.management.CompilationM
 import java.lang.management.GarbageCollectorMXBean;
 import java.lang.management.ManagementFactory;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -33,7 +32,6 @@ import java.util.Set;
 import java.util.Timer;
 import java.util.TimerTask;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -61,12 +59,13 @@ import org.apache.accumulo.core.data.thr
 import org.apache.accumulo.core.data.thrift.UpdateErrors;
 import org.apache.accumulo.core.master.state.tables.TableState;
 import org.apache.accumulo.core.security.thrift.AuthInfo;
+import org.apache.accumulo.core.security.thrift.SecurityErrorCode;
 import org.apache.accumulo.core.security.thrift.ThriftSecurityException;
 import org.apache.accumulo.core.tabletserver.thrift.ConstraintViolationException;
 import org.apache.accumulo.core.tabletserver.thrift.NoSuchScanIDException;
 import org.apache.accumulo.core.tabletserver.thrift.NotServingTabletException;
 import org.apache.accumulo.core.tabletserver.thrift.TabletClientService;
-import org.apache.accumulo.core.util.NamingThreadFactory;
+import org.apache.accumulo.core.util.SimpleThreadPool;
 import org.apache.accumulo.core.util.ThriftUtil;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Logger;
@@ -115,7 +114,7 @@ public class TabletServerBatchWriter {
   private AuthInfo credentials;
   
   private Violations violations;
-  private HashSet<KeyExtent> authorizationFailures;
+  private Map<KeyExtent,Set<SecurityErrorCode>> authorizationFailures;
   private HashSet<String> serverSideErrors;
   private int unknownErrors = 0;
   private boolean somethingFailed = false;
@@ -125,7 +124,7 @@ public class TabletServerBatchWriter {
   private long maxLatency;
   
   private long timeout;
-
+  
   private long lastProcessingStartTime;
   
   private long totalAdded = 0;
@@ -149,7 +148,7 @@ public class TabletServerBatchWriter {
   private Throwable lastUnknownError = null;
   
   private Map<String,TimeoutTracker> timeoutTrackers;
-
+  
   private static class TimeoutTracker {
     
     String server;
@@ -170,7 +169,7 @@ public class TabletServerBatchWriter {
       activityTime = System.currentTimeMillis();
       firstErrorTime = null;
     }
-
+    
     void wroteNothing() {
       if (firstErrorTime == null) {
         firstErrorTime = activityTime;
@@ -182,12 +181,12 @@ public class TabletServerBatchWriter {
     void errorOccured(Exception e) {
       wroteNothing();
     }
-
+    
     public long getTimeOut() {
       return timeOut;
     }
   }
-
+  
   public TabletServerBatchWriter(Instance instance, AuthInfo credentials, BatchWriterConfig config) {
     this.instance = instance;
     this.maxMem = config.getMaxMemory();
@@ -198,7 +197,7 @@ public class TabletServerBatchWriter {
     
     violations = new Violations();
     
-    authorizationFailures = new HashSet<KeyExtent>();
+    authorizationFailures = new HashMap<KeyExtent,Set<SecurityErrorCode>>();
     serverSideErrors = new HashSet<String>();
     
     lastProcessingStartTime = System.currentTimeMillis();
@@ -209,7 +208,7 @@ public class TabletServerBatchWriter {
     failedMutations = new FailedMutations();
     
     timeoutTrackers = Collections.synchronizedMap(new HashMap<String,TabletServerBatchWriter.TimeoutTracker>());
-
+    
     if (this.maxLatency != Long.MAX_VALUE) {
       jtimer.schedule(new TimerTask() {
         public void run() {
@@ -462,12 +461,20 @@ public class TabletServerBatchWriter {
     }
   }
   
-  private void updateAuthorizationFailures(Collection<KeyExtent> authorizationFailures) {
+  private void updateAuthorizationFailures(Set<KeyExtent> keySet, SecurityErrorCode code) {
+    HashMap<KeyExtent, SecurityErrorCode> map = new HashMap<KeyExtent, SecurityErrorCode>();
+    for (KeyExtent ke : keySet)
+      map.put(ke, code);
+    
+    updateAuthorizationFailures(map);
+  }
+  
+  private void updateAuthorizationFailures(Map<KeyExtent,SecurityErrorCode> authorizationFailures) {
     if (authorizationFailures.size() > 0) {
       
       // was a table deleted?
       HashSet<String> tableIds = new HashSet<String>();
-      for (KeyExtent ke : authorizationFailures)
+      for (KeyExtent ke : authorizationFailures.keySet())
         tableIds.add(ke.getTableId().toString());
       
       Tables.clearCache(instance);
@@ -477,12 +484,23 @@ public class TabletServerBatchWriter {
       
       synchronized (this) {
         somethingFailed = true;
-        this.authorizationFailures.addAll(authorizationFailures);
+        mergeAuthorizationFailures(this.authorizationFailures, authorizationFailures);
         this.notifyAll();
       }
     }
   }
   
+  private void mergeAuthorizationFailures(Map<KeyExtent,Set<SecurityErrorCode>> source, Map<KeyExtent,SecurityErrorCode> addition) {
+    for (Entry<KeyExtent,SecurityErrorCode> entry : addition.entrySet()) {
+      Set<SecurityErrorCode> secs = source.get(entry.getKey());
+      if (secs == null) {
+        secs = new HashSet<SecurityErrorCode>();
+        source.put(entry.getKey(), secs);
+      }
+      secs.add(entry.getValue());
+    }
+  }
+  
   private synchronized void updateServerErrors(String server, Exception e) {
     somethingFailed = true;
     this.serverSideErrors.add(server);
@@ -504,7 +522,7 @@ public class TabletServerBatchWriter {
   private void checkForFailures() throws MutationsRejectedException {
     if (somethingFailed) {
       List<ConstraintViolationSummary> cvsList = violations.asList();
-      throw new MutationsRejectedException(cvsList, new ArrayList<KeyExtent>(authorizationFailures), serverSideErrors, unknownErrors, lastUnknownError);
+      throw new MutationsRejectedException(cvsList, new HashMap<KeyExtent,Set<SecurityErrorCode>>(authorizationFailures), serverSideErrors, unknownErrors, lastUnknownError);
     }
   }
   
@@ -597,7 +615,7 @@ public class TabletServerBatchWriter {
     public MutationWriter(int numSendThreads) {
       serversMutations = new HashMap<String,TabletServerMutations>();
       queued = new HashSet<String>();
-      sendThreadPool = Executors.newFixedThreadPool(numSendThreads, new NamingThreadFactory(this.getClass().getName()));
+      sendThreadPool = new SimpleThreadPool(numSendThreads, this.getClass().getName());
       locators = new HashMap<String,TabletLocator>();
     }
     
@@ -611,7 +629,7 @@ public class TabletServerBatchWriter {
       
       return ret;
     }
-
+    
     private void binMutations(MutationSet mutationsToProcess, Map<String,TabletServerMutations> binnedMutations) {
       try {
         Set<Entry<String,List<Mutation>>> es = mutationsToProcess.getMutations().entrySet();
@@ -644,7 +662,7 @@ public class TabletServerBatchWriter {
         // assume an IOError communicating with !METADATA tablet
         failedMutations.add(mutationsToProcess);
       } catch (AccumuloSecurityException e) {
-        updateAuthorizationFailures(Collections.singletonList(new KeyExtent(new Text(Constants.METADATA_TABLE_ID), null, null)));
+        updateAuthorizationFailures(Collections.singletonMap(new KeyExtent(new Text(Constants.METADATA_TABLE_ID), null, null), e.getErrorCode()));
       } catch (TableDeletedException e) {
         updateUnknownErrors(e.getMessage(), e);
       } catch (TableOfflineException e) {
@@ -768,7 +786,7 @@ public class TabletServerBatchWriter {
               timeoutTracker = new TimeoutTracker(location, timeout);
               timeoutTrackers.put(location, timeoutTracker);
             }
-
+            
             long st1 = System.currentTimeMillis();
             failures = sendMutationsToTabletServer(location, mutationBatch, timeoutTracker);
             long st2 = System.currentTimeMillis();
@@ -813,8 +831,7 @@ public class TabletServerBatchWriter {
     }
     
     private MutationSet sendMutationsToTabletServer(String location, Map<KeyExtent,List<Mutation>> tabMuts, TimeoutTracker timeoutTracker) throws IOException,
-        AccumuloSecurityException,
-        AccumuloServerException {
+        AccumuloSecurityException, AccumuloServerException {
       if (tabMuts.size() == 0) {
         return new MutationSet();
       }
@@ -822,7 +839,7 @@ public class TabletServerBatchWriter {
       TTransport transport = null;
       
       timeoutTracker.startingWrite();
-
+      
       try {
         TabletClientService.Iface client;
         
@@ -830,7 +847,7 @@ public class TabletServerBatchWriter {
           client = ThriftUtil.getTServerClient(location, instance.getConfiguration(), timeoutTracker.getTimeOut());
         else
           client = ThriftUtil.getTServerClient(location, instance.getConfiguration());
-
+        
         try {
           MutationSet allFailures = new MutationSet();
           
@@ -868,13 +885,13 @@ public class TabletServerBatchWriter {
             }
             
             UpdateErrors updateErrors = client.closeUpdate(tinfo, usid);
-
+            
             Map<KeyExtent,Long> failures = Translator.translate(updateErrors.failedExtents, Translator.TKET);
             updatedConstraintViolations(Translator.translate(updateErrors.violationSummaries, Translator.TCVST));
             updateAuthorizationFailures(Translator.translate(updateErrors.authorizationFailures, Translator.TKET));
             
             long totalCommitted = 0;
-
+            
             for (Entry<KeyExtent,Long> entry : failures.entrySet()) {
               KeyExtent failedExtent = entry.getKey();
               int numCommitted = (int) (long) entry.getValue();
@@ -907,7 +924,7 @@ public class TabletServerBatchWriter {
         updateServerErrors(location, tae);
         throw new AccumuloServerException(location, tae);
       } catch (ThriftSecurityException e) {
-        updateAuthorizationFailures(tabMuts.keySet());
+        updateAuthorizationFailures(tabMuts.keySet(), e.code);
         throw new AccumuloSecurityException(e.user, e.code, e);
       } catch (NoSuchScanIDException e) {
         throw new IOException(e);
@@ -917,7 +934,6 @@ public class TabletServerBatchWriter {
         ThriftTransportPool.getInstance().returnTransport(transport);
       }
     }
-    
   }
   
   // END code for sending mutations to tablet servers using background threads

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ThriftScanner.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ThriftScanner.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ThriftScanner.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ThriftScanner.java Mon Jan 14 22:03:24 2013
@@ -74,7 +74,7 @@ public class ThriftScanner {
   private static final byte[] EMPTY_BYTES = new byte[0];
   private static final Logger log = Logger.getLogger(ThriftScanner.class);
   
-  public static Map<TabletType,Set<String>> serversWaitedForWrites = new EnumMap<TabletType,Set<String>>(TabletType.class);
+  public static final Map<TabletType,Set<String>> serversWaitedForWrites = new EnumMap<TabletType,Set<String>>(TabletType.class);
   
   static {
     for (TabletType ttype : TabletType.values()) {

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ThriftTransportPool.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ThriftTransportPool.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ThriftTransportPool.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/ThriftTransportPool.java Mon Jan 14 22:03:24 2013
@@ -79,7 +79,7 @@ public class ThriftTransportPool {
   }
   
   private static class Closer implements Runnable {
-    ThriftTransportPool pool;
+    final ThriftTransportPool pool;
     
     public Closer(ThriftTransportPool pool) {
       this.pool = pool;
@@ -417,7 +417,7 @@ public class ThriftTransportPool {
               if (!cachedConnection.isReserved()) {
                 cachedConnection.setReserved(true);
                 if (log.isTraceEnabled())
-                  log.trace("Using existing connection to " + ttk.getLocation() + ":" + ttk.getPort());
+                  log.trace("Using existing connection to " + ttk.getLocation() + ":" + ttk.getPort() + " timeout " + ttk.getTimeout());
                 return new Pair<String,TTransport>(ttk.getLocation() + ":" + ttk.getPort(), cachedConnection.transport);
               }
             }

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/Writer.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/Writer.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/Writer.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/Writer.java Mon Jan 14 22:03:24 2013
@@ -99,7 +99,7 @@ public class Writer {
         log.trace("Not serving tablet, server = " + tabLoc.tablet_location);
         TabletLocator.getInstance(instance, credentials, table).invalidateCache(tabLoc.tablet_extent);
       } catch (TException e) {
-        log.trace("server = " + tabLoc.tablet_location, e);
+        log.error("server = " + tabLoc.tablet_location, e);
         TabletLocator.getInstance(instance, credentials, table).invalidateCache(tabLoc.tablet_extent);
       } 
       

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ClientService.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ClientService.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ClientService.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ClientService.java Mon Jan 14 22:03:24 2013
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 /**
  * Autogenerated by Thrift Compiler (0.9.0)
  *

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ConfigurationType.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ConfigurationType.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ConfigurationType.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ConfigurationType.java Mon Jan 14 22:03:24 2013
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 /**
  * Autogenerated by Thrift Compiler (0.9.0)
  *

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/TableOperation.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/TableOperation.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/TableOperation.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/TableOperation.java Mon Jan 14 22:03:24 2013
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 /**
  * Autogenerated by Thrift Compiler (0.9.0)
  *

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/TableOperationExceptionType.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/TableOperationExceptionType.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/TableOperationExceptionType.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/TableOperationExceptionType.java Mon Jan 14 22:03:24 2013
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 /**
  * Autogenerated by Thrift Compiler (0.9.0)
  *

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ThriftTableOperationException.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ThriftTableOperationException.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ThriftTableOperationException.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ThriftTableOperationException.java Mon Jan 14 22:03:24 2013
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 /**
  * Autogenerated by Thrift Compiler (0.9.0)
  *

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ThriftTest.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ThriftTest.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ThriftTest.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/impl/thrift/ThriftTest.java Mon Jan 14 22:03:24 2013
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 /**
  * Autogenerated by Thrift Compiler (0.9.0)
  *

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormat.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormat.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormat.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormat.java Mon Jan 14 22:03:24 2013
@@ -17,88 +17,246 @@
 package org.apache.accumulo.core.client.mapreduce;
 
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.Map.Entry;
 
 import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.client.ZooKeeperInstance;
 import org.apache.accumulo.core.conf.AccumuloConfiguration;
+import org.apache.accumulo.core.conf.ConfigurationCopy;
 import org.apache.accumulo.core.conf.Property;
+import org.apache.accumulo.core.data.ArrayByteSequence;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.file.FileOperations;
 import org.apache.accumulo.core.file.FileSKVWriter;
-import org.apache.accumulo.core.file.rfile.RFile;
+import org.apache.accumulo.core.security.ColumnVisibility;
 import org.apache.accumulo.core.util.ArgumentChecker;
+import org.apache.commons.collections.map.LRUMap;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 
 /**
- * This class allows MapReduce jobs to use the Accumulo data file format for output of data
+ * This class allows MapReduce jobs to write output in the Accumulo data file format.<br />
+ * Care should be taken to write only sorted data (sorted by {@link Key}), as this is an important requirement of Accumulo data files.
  * 
- * The user must specify the output path that does not exist following via static method calls to this class:
- * 
- * AccumuloFileOutputFormat.setOutputPath(job, outputDirectory)
- * 
- * Other methods from FileOutputFormat to configure options are ignored Compression is using the DefaultCodec and is always on
+ * <p>
+ * The output path to be created must be specified via {@link AccumuloFileOutputFormat#setOutputPath(Job, Path)}. This is inherited from
+ * {@link FileOutputFormat#setOutputPath(Job, Path)}. Other methods from {@link FileOutputFormat} are not supported and may be ignored or cause failures. Using
+ * other Hadoop configuration options that affect the behavior of the underlying files directly in the Job's configuration may work, but are not directly
+ * supported at this time.
  */
 public class AccumuloFileOutputFormat extends FileOutputFormat<Key,Value> {
-  private static final String PREFIX = AccumuloOutputFormat.class.getSimpleName();
-  public static final String FILE_TYPE = PREFIX + ".file_type";
+  private static final String PREFIX = AccumuloOutputFormat.class.getSimpleName() + ".";
+  private static final String ACCUMULO_PROPERTY_PREFIX = PREFIX + "accumuloProperties.";
+  
+  /**
+   * This helper method provides an AccumuloConfiguration object constructed from the Accumulo defaults, and overridden with Accumulo properties that have been
+   * stored in the Job's configuration
+   * 
+   * @since 1.5.0
+   */
+  protected static AccumuloConfiguration getAccumuloConfiguration(JobContext context) {
+    ConfigurationCopy acuConf = new ConfigurationCopy(AccumuloConfiguration.getDefaultConfiguration());
+    for (Entry<String,String> entry : context.getConfiguration())
+      if (entry.getKey().startsWith(ACCUMULO_PROPERTY_PREFIX))
+        acuConf.set(Property.getPropertyByKey(entry.getKey().substring(ACCUMULO_PROPERTY_PREFIX.length())), entry.getValue());
+    return acuConf;
+  }
+  
+  /**
+   * The supported Accumulo properties we set in this OutputFormat, that change the behavior of the RecordWriter.<br />
+   * These properties correspond to the supported public static setter methods available to this class.
+   * 
+   * @since 1.5.0
+   */
+  protected static boolean isSupportedAccumuloProperty(Property property) {
+    switch (property) {
+      case TABLE_FILE_COMPRESSION_TYPE:
+      case TABLE_FILE_COMPRESSED_BLOCK_SIZE:
+      case TABLE_FILE_BLOCK_SIZE:
+      case TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX:
+      case TABLE_FILE_REPLICATION:
+        return true;
+      default:
+        return false;
+    }
+  }
   
-  private static final String INSTANCE_HAS_BEEN_SET = PREFIX + ".instanceConfigured";
-  private static final String INSTANCE_NAME = PREFIX + ".instanceName";
-  private static final String ZOOKEEPERS = PREFIX + ".zooKeepers";
+  /**
+   * Helper for transforming Accumulo configuration properties into something that can be stored safely inside the Hadoop Job configuration.
+   * 
+   * @since 1.5.0
+   */
+  protected static <T> void setAccumuloProperty(Job job, Property property, T value) {
+    if (isSupportedAccumuloProperty(property)) {
+      String val = String.valueOf(value);
+      if (property.getType().isValidFormat(val))
+        job.getConfiguration().set(ACCUMULO_PROPERTY_PREFIX + property.getKey(), val);
+      else
+        throw new IllegalArgumentException("Value is not appropriate for property type '" + property.getType() + "'");
+    } else
+      throw new IllegalArgumentException("Unsupported configuration property " + property.getKey());
+  }
+  
+  /**
+   * @param compressionType
+   *          The type of compression to use. One of "none", "gz", "lzo", or "snappy". Specifying a compression may require additional libraries to be available
+   *          to your Job.
+   * @since 1.5.0
+   */
+  public static void setCompressionType(Job job, String compressionType) {
+    setAccumuloProperty(job, Property.TABLE_FILE_COMPRESSION_TYPE, compressionType);
+  }
+  
+  /**
+   * Sets the size for data blocks within each file.<br />
+   * Data blocks are a span of key/value pairs stored in the file that are compressed and indexed as a group.
+   * 
+   * <p>
+   * Making this value smaller may increase seek performance, but at the cost of increasing the size of the indexes (which can also affect seek performance).
+   * 
+   * @since 1.5.0
+   */
+  public static void setDataBlockSize(Job job, long dataBlockSize) {
+    setAccumuloProperty(job, Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE, dataBlockSize);
+  }
+  
+  /**
+   * Sets the size for file blocks in the file system; file blocks are managed, and replicated, by the underlying file system
+   * 
+   * @since 1.5.0
+   */
+  public static void setFileBlockSize(Job job, long fileBlockSize) {
+    setAccumuloProperty(job, Property.TABLE_FILE_BLOCK_SIZE, fileBlockSize);
+  }
+  
+  /**
+   * Sets the size for index blocks within each file; smaller blocks means a deeper index hierarchy within the file, while larger blocks mean a more shallow
+   * index hierarchy within the file. This can affect the performance of queries.
+   * 
+   * @since 1.5.0
+   */
+  public static void setIndexBlockSize(Job job, long indexBlockSize) {
+    setAccumuloProperty(job, Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX, indexBlockSize);
+  }
+  
+  /**
+   * Sets the file system replication factor for the resulting file, overriding the file system default.
+   * 
+   * @since 1.5.0
+   */
+  public static void setReplication(Job job, int replication) {
+    setAccumuloProperty(job, Property.TABLE_FILE_REPLICATION, replication);
+  }
   
   @Override
-  public RecordWriter<Key,Value> getRecordWriter(TaskAttemptContext job) throws IOException, InterruptedException {
+  public RecordWriter<Key,Value> getRecordWriter(TaskAttemptContext context) throws IOException {
     // get the path of the temporary output file
-    final Configuration conf = job.getConfiguration();
+    final Configuration conf = context.getConfiguration();
+    final AccumuloConfiguration acuConf = getAccumuloConfiguration(context);
     
-    String extension = conf.get(FILE_TYPE);
-    if (extension == null || extension.isEmpty())
-      extension = RFile.EXTENSION;
-    
-    final Path file = this.getDefaultWorkFile(job, "." + extension);
+    final String extension = acuConf.get(Property.TABLE_FILE_TYPE);
+    final Path file = this.getDefaultWorkFile(context, "." + extension);
     
+    final LRUMap validVisibilities = new LRUMap(1000);
+
     return new RecordWriter<Key,Value>() {
       FileSKVWriter out = null;
       
       @Override
+      public void close(TaskAttemptContext context) throws IOException {
+        if (out != null)
+          out.close();
+      }
+      
+      @Override
       public void write(Key key, Value value) throws IOException {
+        
+        Boolean wasChecked = (Boolean) validVisibilities.get(key.getColumnVisibilityData());
+        if (wasChecked == null) {
+          byte[] cv = key.getColumnVisibilityData().toArray();
+          new ColumnVisibility(cv);
+          validVisibilities.put(new ArrayByteSequence(Arrays.copyOf(cv, cv.length)), Boolean.TRUE);
+        }
+
         if (out == null) {
-          out = FileOperations.getInstance().openWriter(file.toString(), file.getFileSystem(conf), conf, AccumuloConfiguration.getDefaultConfiguration());
+          out = FileOperations.getInstance().openWriter(file.toString(), file.getFileSystem(conf), conf, acuConf);
           out.startDefaultLocalityGroup();
         }
         out.append(key, value);
       }
-      
-      @Override
-      public void close(TaskAttemptContext context) throws IOException, InterruptedException {
-        if (out != null)
-          out.close();
-      }
     };
   }
   
-  public static void setFileType(Configuration conf, String type) {
-    conf.set(FILE_TYPE, type);
+  // ----------------------------------------------------------------------------------------------------
+  // Everything below this line is deprecated and should go away in future versions
+  // ----------------------------------------------------------------------------------------------------
+  
+  /**
+   * @deprecated since 1.5.0;
+   */
+  @SuppressWarnings("unused")
+  @Deprecated
+  private static final String FILE_TYPE = PREFIX + "file_type";
+  
+  /**
+   * @deprecated since 1.5.0;
+   */
+  @SuppressWarnings("unused")
+  @Deprecated
+  private static final String BLOCK_SIZE = PREFIX + "block_size";
+  
+  /**
+   * @deprecated since 1.5.0;
+   */
+  @Deprecated
+  private static final String INSTANCE_HAS_BEEN_SET = PREFIX + "instanceConfigured";
+  
+  /**
+   * @deprecated since 1.5.0;
+   */
+  @Deprecated
+  private static final String INSTANCE_NAME = PREFIX + "instanceName";
+  
+  /**
+   * @deprecated since 1.5.0;
+   */
+  @Deprecated
+  private static final String ZOOKEEPERS = PREFIX + "zooKeepers";
+  
+  /**
+   * @deprecated since 1.5.0; Retrieve the relevant block size from {@link #getAccumuloConfiguration(JobContext)}
+   */
+  @Deprecated
+  protected static void handleBlockSize(Configuration conf) {
+    conf.setInt("io.seqfile.compress.blocksize",
+        (int) AccumuloConfiguration.getDefaultConfiguration().getMemoryInBytes(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE));
   }
   
   /**
-   * @deprecated since 1.5, use {@link #setCompressedBlockSize(Configuration, long)} instead
+   * @deprecated since 1.5.0; This method does nothing. Only 'rf' type is supported.
+   */
+  @Deprecated
+  public static void setFileType(Configuration conf, String type) {}
+  
+  /**
+   * @deprecated since 1.5.0; Use {@link #setFileBlockSize(Job, long)}, {@link #setDataBlockSize(Job, long)}, or {@link #setIndexBlockSize(Job, long)} instead.
    */
+  @Deprecated
   public static void setBlockSize(Configuration conf, int blockSize) {
-    long bs = blockSize;
-    setCompressedBlockSize(conf, bs);
+    conf.set(ACCUMULO_PROPERTY_PREFIX + Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE.getKey(), String.valueOf(blockSize));
   }
   
   /**
-   * @param conf
-   * @param instanceName
-   * @param zooKeepers
+   * @deprecated since 1.5.0; This OutputFormat does not communicate with Accumulo. If this is needed, subclasses must implement their own configuration.
    */
+  @Deprecated
   public static void setZooKeeperInstance(Configuration conf, String instanceName, String zooKeepers) {
     if (conf.getBoolean(INSTANCE_HAS_BEEN_SET, false))
       throw new IllegalStateException("Instance info can only be set once per job");
@@ -110,30 +268,11 @@ public class AccumuloFileOutputFormat ex
   }
   
   /**
-   * @param conf
-   * @return The Accumulo instance.
+   * @deprecated since 1.5.0; This OutputFormat does not communicate with Accumulo. If this is needed, subclasses must implement their own configuration.
    */
+  @Deprecated
   protected static Instance getInstance(Configuration conf) {
     return new ZooKeeperInstance(conf.get(INSTANCE_NAME), conf.get(ZOOKEEPERS));
   }
   
-  public static void setReplication(Configuration conf, int replication) {
-    conf.setInt(Property.TABLE_FILE_REPLICATION.getKey(), replication);
-  }
-  
-  public static void setDFSBlockSize(Configuration conf, long blockSize) {
-    conf.setLong(Property.TABLE_FILE_BLOCK_SIZE.getKey(), blockSize);
-  }
-  
-  public static void setCompressedBlockSize(Configuration conf, long cblockSize) {
-    conf.setLong(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE.getKey(), cblockSize);
-  }
-  
-  public static void setCompressedBlockSizeIndex(Configuration conf, long cblockSizeIndex) {
-    conf.setLong(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX.getKey(), cblockSizeIndex);
-  }
-  
-  public static void setCompressionType(Configuration conf, String compression) {
-    conf.set(Property.TABLE_FILE_COMPRESSION_TYPE.getKey(), compression);
-  }
 }

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/AccumuloOutputFormat.java Mon Jan 14 22:03:24 2013
@@ -17,9 +17,10 @@
 package org.apache.accumulo.core.client.mapreduce;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Map.Entry;
+import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.accumulo.core.client.AccumuloException;
@@ -38,6 +39,7 @@ import org.apache.accumulo.core.data.Col
 import org.apache.accumulo.core.data.KeyExtent;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.security.ColumnVisibility;
+import org.apache.accumulo.core.security.thrift.SecurityErrorCode;
 import org.apache.accumulo.core.util.ArgumentChecker;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.conf.Configuration;
@@ -67,8 +69,6 @@ import org.apache.log4j.Logger;
 public class AccumuloOutputFormat extends OutputFormat<Text,Mutation> {
   private static final Logger log = Logger.getLogger(AccumuloOutputFormat.class);
   
-  private static final Charset utf8 = Charset.forName("UTF8");
-  
   private static final String PREFIX = AccumuloOutputFormat.class.getSimpleName();
   private static final String OUTPUT_INFO_HAS_BEEN_SET = PREFIX + ".configured";
   private static final String INSTANCE_HAS_BEEN_SET = PREFIX + ".instanceConfigured";
@@ -125,10 +125,10 @@ public class AccumuloOutputFormat extend
     if (conf.getBoolean(INSTANCE_HAS_BEEN_SET, false))
       throw new IllegalStateException("Instance info can only be set once per job");
     conf.setBoolean(INSTANCE_HAS_BEEN_SET, true);
-    
     ArgumentChecker.notNull(instanceName, zooKeepers);
     conf.set(INSTANCE_NAME, instanceName);
     conf.set(ZOOKEEPERS, zooKeepers);
+    System.out.println("instance set: " + conf.get(INSTANCE_HAS_BEEN_SET));
   }
   
   public static void setMockInstance(Configuration conf, String instanceName) {
@@ -140,7 +140,7 @@ public class AccumuloOutputFormat extend
   /**
    * see {@link BatchWriterConfig#setMaxMemory(long)}
    */
-
+  
   public static void setMaxMutationBufferSize(Configuration conf, long numberOfBytes) {
     conf.setLong(MAX_MUTATION_BUFFER_SIZE, numberOfBytes);
   }
@@ -148,7 +148,7 @@ public class AccumuloOutputFormat extend
   /**
    * see {@link BatchWriterConfig#setMaxLatency(long, TimeUnit)}
    */
-
+  
   public static void setMaxLatency(Configuration conf, int numberOfMilliseconds) {
     conf.setInt(MAX_LATENCY, numberOfMilliseconds);
   }
@@ -156,7 +156,7 @@ public class AccumuloOutputFormat extend
   /**
    * see {@link BatchWriterConfig#setMaxWriteThreads(int)}
    */
-
+  
   public static void setMaxWriteThreads(Configuration conf, int numberOfThreads) {
     conf.setInt(NUM_WRITE_THREADS, numberOfThreads);
   }
@@ -168,7 +168,7 @@ public class AccumuloOutputFormat extend
   public static void setTimeout(Configuration conf, long time, TimeUnit timeUnit) {
     conf.setLong(TIMEOUT, timeUnit.toMillis(time));
   }
-
+  
   public static void setLogLevel(Configuration conf, Level level) {
     ArgumentChecker.notNull(level);
     conf.setInt(LOGLEVEL, level.toInt());
@@ -187,7 +187,7 @@ public class AccumuloOutputFormat extend
    * string, and is not intended to be secure.
    */
   protected static byte[] getPassword(Configuration conf) {
-    return Base64.decodeBase64(conf.get(PASSWORD, "").getBytes(utf8));
+    return Base64.decodeBase64(conf.get(PASSWORD, "").getBytes());
   }
   
   protected static boolean canCreateTables(Configuration conf) {
@@ -219,7 +219,7 @@ public class AccumuloOutputFormat extend
   protected static long getTimeout(Configuration conf) {
     return conf.getLong(TIMEOUT, Long.MAX_VALUE);
   }
-
+  
   protected static Level getLogLevel(Configuration conf) {
     if (conf.get(LOGLEVEL) != null)
       return Level.toLevel(conf.getInt(LOGLEVEL, Level.INFO.toInt()));
@@ -369,9 +369,14 @@ public class AccumuloOutputFormat extend
         mtbw.close();
       } catch (MutationsRejectedException e) {
         if (e.getAuthorizationFailures().size() >= 0) {
-          HashSet<String> tables = new HashSet<String>();
-          for (KeyExtent ke : e.getAuthorizationFailures()) {
-            tables.add(ke.getTableId().toString());
+          HashMap<String,Set<SecurityErrorCode>> tables = new HashMap<String,Set<SecurityErrorCode>>();
+          for (Entry<KeyExtent,Set<SecurityErrorCode>> ke : e.getAuthorizationFailures().entrySet()) {
+            Set<SecurityErrorCode> secCodes = tables.get(ke.getKey().getTableId().toString());
+            if (secCodes == null) {
+              secCodes = new HashSet<SecurityErrorCode>();
+              tables.put(ke.getKey().getTableId().toString(), secCodes);
+            }
+            secCodes.addAll(ke.getValue());
           }
           
           log.error("Not authorized to write to tables : " + tables);

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java Mon Jan 14 22:03:24 2013
@@ -26,7 +26,6 @@ import java.io.IOException;
 import java.math.BigInteger;
 import java.net.InetAddress;
 import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -105,8 +104,6 @@ import org.apache.log4j.Logger;
 public abstract class InputFormatBase<K,V> extends InputFormat<K,V> {
   protected static final Logger log = Logger.getLogger(InputFormatBase.class);
   
-  private static final Charset utf8 = Charset.forName("UTF8");
-
   private static final String PREFIX = AccumuloInputFormat.class.getSimpleName();
   private static final String INPUT_INFO_HAS_BEEN_SET = PREFIX + ".configured";
   private static final String INSTANCE_HAS_BEEN_SET = PREFIX + ".instanceConfigured";
@@ -423,7 +420,7 @@ public abstract class InputFormatBase<K,
    * @see #setInputInfo(Configuration, String, byte[], String, Authorizations)
    */
   protected static byte[] getPassword(Configuration conf) {
-    return Base64.decodeBase64(conf.get(PASSWORD, "").getBytes(utf8));
+    return Base64.decodeBase64(conf.get(PASSWORD, "").getBytes());
   }
   
   /**
@@ -448,7 +445,7 @@ public abstract class InputFormatBase<K,
    */
   protected static Authorizations getAuthorizations(Configuration conf) {
     String authString = conf.get(AUTHORIZATIONS);
-    return authString == null ? Constants.NO_AUTHS : new Authorizations(authString.getBytes(utf8));
+    return authString == null ? Constants.NO_AUTHS : new Authorizations(authString.getBytes());
   }
   
   /**
@@ -499,7 +496,7 @@ public abstract class InputFormatBase<K,
   protected static List<Range> getRanges(Configuration conf) throws IOException {
     ArrayList<Range> ranges = new ArrayList<Range>();
     for (String rangeString : conf.getStringCollection(RANGES)) {
-      ByteArrayInputStream bais = new ByteArrayInputStream(Base64.decodeBase64(rangeString.getBytes(utf8)));
+      ByteArrayInputStream bais = new ByteArrayInputStream(Base64.decodeBase64(rangeString.getBytes()));
       Range range = new Range();
       range.readFields(new DataInputStream(bais));
       ranges.add(range);
@@ -519,8 +516,8 @@ public abstract class InputFormatBase<K,
     Set<Pair<Text,Text>> columns = new HashSet<Pair<Text,Text>>();
     for (String col : conf.getStringCollection(COLUMNS)) {
       int idx = col.indexOf(":");
-      Text cf = new Text(idx < 0 ? Base64.decodeBase64(col.getBytes(utf8)) : Base64.decodeBase64(col.substring(0, idx).getBytes(utf8)));
-      Text cq = idx < 0 ? null : new Text(Base64.decodeBase64(col.substring(idx + 1).getBytes(utf8)));
+      Text cf = new Text(idx < 0 ? Base64.decodeBase64(col.getBytes()) : Base64.decodeBase64(col.substring(0, idx).getBytes()));
+      Text cq = idx < 0 ? null : new Text(Base64.decodeBase64(col.substring(idx + 1).getBytes()));
       columns.add(new Pair<Text,Text>(cf, cq));
     }
     return columns;
@@ -627,7 +624,7 @@ public abstract class InputFormatBase<K,
     try {
       while (tokens.hasMoreTokens()) {
         String itstring = tokens.nextToken();
-        ByteArrayInputStream bais = new ByteArrayInputStream(Base64.decodeBase64(itstring.getBytes(utf8)));
+        ByteArrayInputStream bais = new ByteArrayInputStream(Base64.decodeBase64(itstring.getBytes()));
         list.add(new IteratorSetting(new DataInputStream(bais)));
         bais.close();
       }

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/KeyRangePartitioner.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/KeyRangePartitioner.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/KeyRangePartitioner.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/KeyRangePartitioner.java Mon Jan 14 22:03:24 2013
@@ -20,7 +20,7 @@ import org.apache.accumulo.core.data.Key
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Partitioner;
 
 /**
@@ -47,14 +47,14 @@ public class KeyRangePartitioner extends
   /**
    * Sets the hdfs file name to use, containing a newline separated list of Base64 encoded split points that represent ranges for partitioning
    */
-  public static void setSplitFile(JobContext job, String file) {
+  public static void setSplitFile(Job job, String file) {
     RangePartitioner.setSplitFile(job, file);
   }
   
   /**
    * Sets the number of random sub-bins per range
    */
-  public static void setNumSubBins(JobContext job, int num) {
+  public static void setNumSubBins(Job job, int num) {
     RangePartitioner.setNumSubBins(job, num);
   }
 }

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/RangePartitioner.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/RangePartitioner.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/RangePartitioner.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/partition/RangePartitioner.java Mon Jan 14 22:03:24 2013
@@ -21,7 +21,6 @@ import java.io.FileNotFoundException;
 import java.io.FileReader;
 import java.io.IOException;
 import java.net.URI;
-import java.nio.charset.Charset;
 import java.util.Arrays;
 import java.util.Scanner;
 import java.util.TreeSet;
@@ -33,7 +32,7 @@ import org.apache.hadoop.filecache.Distr
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Partitioner;
 
 /**
@@ -45,9 +44,8 @@ public class RangePartitioner extends Pa
   private static final String NUM_SUBBINS = PREFIX + ".subBins";
   
   private Configuration conf;
-
-  private static final Charset utf8 = Charset.forName("UTF8");
   
+  @Override
   public int getPartition(Text key, Writable value, int numPartitions) {
     try {
       return findPartition(key, getCutPoints(), getNumSubBins());
@@ -92,7 +90,7 @@ public class RangePartitioner extends Pa
             Scanner in = new Scanner(new BufferedReader(new FileReader(path.toString())));
             try {
               while (in.hasNextLine())
-                cutPoints.add(new Text(Base64.decodeBase64(in.nextLine().getBytes(utf8))));
+                cutPoints.add(new Text(Base64.decodeBase64(in.nextLine().getBytes())));
             } finally {
               in.close();
             }
@@ -120,7 +118,7 @@ public class RangePartitioner extends Pa
   /**
    * Sets the hdfs file name to use, containing a newline separated list of Base64 encoded split points that represent ranges for partitioning
    */
-  public static void setSplitFile(JobContext job, String file) {
+  public static void setSplitFile(Job job, String file) {
     URI uri = new Path(file).toUri();
     DistributedCache.addCacheFile(uri, job.getConfiguration());
     job.getConfiguration().set(CUTFILE_KEY, uri.getPath());
@@ -129,7 +127,7 @@ public class RangePartitioner extends Pa
   /**
    * Sets the number of random sub-bins per range
    */
-  public static void setNumSubBins(JobContext job, int num) {
+  public static void setNumSubBins(Job job, int num) {
     job.getConfiguration().setInt(NUM_SUBBINS, num);
   }
 }

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockConnector.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockConnector.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockConnector.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockConnector.java Mon Jan 14 22:03:24 2013
@@ -57,6 +57,7 @@ public class MockConnector extends Conne
     return acu.createBatchScanner(tableName, authorizations);
   }
   
+  @Deprecated
   @Override
   public BatchDeleter createBatchDeleter(String tableName, Authorizations authorizations, int numQueryThreads, long maxMemory, long maxLatency,
       int maxWriteThreads) throws TableNotFoundException {
@@ -72,6 +73,7 @@ public class MockConnector extends Conne
         config.getMaxWriteThreads());
   }
   
+  @Deprecated
   @Override
   public BatchWriter createBatchWriter(String tableName, long maxMemory, long maxLatency, int maxWriteThreads) throws TableNotFoundException {
     if (acu.tables.get(tableName) == null)
@@ -84,6 +86,7 @@ public class MockConnector extends Conne
     return createBatchWriter(tableName, config.getMaxMemory(), config.getMaxLatency(TimeUnit.MILLISECONDS), config.getMaxWriteThreads());
   }
   
+  @Deprecated
   @Override
   public MultiTableBatchWriter createMultiTableBatchWriter(long maxMemory, long maxLatency, int maxWriteThreads) {
     return new MockMultiTableBatchWriter(acu);

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockInstance.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockInstance.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockInstance.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockInstance.java Mon Jan 14 22:03:24 2013
@@ -18,6 +18,7 @@ package org.apache.accumulo.core.client.
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -28,14 +29,25 @@ import org.apache.accumulo.core.client.A
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.Instance;
 import org.apache.accumulo.core.conf.AccumuloConfiguration;
-import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.security.thrift.AuthInfo;
+import org.apache.accumulo.core.security.thrift.SecurityErrorCode;
 import org.apache.accumulo.core.util.ByteBufferUtil;
 import org.apache.accumulo.core.util.CachedConfiguration;
 import org.apache.accumulo.core.util.TextUtil;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.io.Text;
 
+/**
+ * Mock Accumulo provides an in memory implementation of the Accumulo client API. It is possible that the behavior of this implementation may differ subtly from
+ * the behavior of Accumulo. This could result in unit tests that pass on Mock Accumulo and fail on Accumulo or vice versa. Documenting the differences would be
+ * difficult and is not done.
+ * 
+ * <p>
+ * An alternative to Mock Accumulo called MiniAccumuloCluster was introduced in Accumulo 1.5. MiniAccumuloCluster spins up actual Accumulo server processes, can
+ * be used for unit testing, and its behavior should match Accumulo. The drawback of MiniAccumuloCluster is that it starts more slowly than Mock Accumulo.
+ * 
+ */
+
 public class MockInstance implements Instance {
   
   static final String genericAddress = "localhost:1234";
@@ -103,7 +115,10 @@ public class MockInstance implements Ins
   @Override
   public Connector getConnector(String user, byte[] pass) throws AccumuloException, AccumuloSecurityException {
     Connector conn = new MockConnector(user, acu, this);
-    conn.securityOperations().createUser(user, pass, new Authorizations());
+    if (!acu.users.containsKey(user))
+      conn.securityOperations().createUser(user, pass);
+    else if (!Arrays.equals(acu.users.get(user).password, pass))
+        throw new AccumuloSecurityException(user, SecurityErrorCode.BAD_CREDENTIALS);
     return conn;
   }
   

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockInstanceOperations.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockInstanceOperations.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockInstanceOperations.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockInstanceOperations.java Mon Jan 14 22:03:24 2013
@@ -24,7 +24,7 @@ import org.apache.accumulo.core.client.A
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.admin.ActiveScan;
 import org.apache.accumulo.core.client.admin.InstanceOperations;
-import org.apache.accumulo.start.classloader.AccumuloClassLoader;
+import org.apache.accumulo.start.classloader.vfs.AccumuloVFSClassLoader;
 
 /**
  * 
@@ -107,7 +107,7 @@ public class MockInstanceOperations impl
   @Override
   public boolean testClassLoad(String className, String asTypeName) throws AccumuloException, AccumuloSecurityException {
     try {
-      AccumuloClassLoader.loadClass(className, Class.forName(asTypeName));
+      AccumuloVFSClassLoader.loadClass(className, Class.forName(asTypeName));
     } catch (ClassNotFoundException e) {
       e.printStackTrace();
       return false;

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockScanner.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockScanner.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockScanner.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockScanner.java Mon Jan 14 22:03:24 2013
@@ -39,6 +39,7 @@ public class MockScanner extends MockSca
     super(table, auths);
   }
   
+  @Deprecated
   @Override
   public void setTimeOut(int timeOut) {
     if (timeOut == Integer.MAX_VALUE)
@@ -47,6 +48,7 @@ public class MockScanner extends MockSca
       setTimeout(timeOut, TimeUnit.SECONDS);
   }
   
+  @Deprecated
   @Override
   public int getTimeOut() {
     long timeout = getTimeout(TimeUnit.SECONDS);

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockSecurityOperations.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockSecurityOperations.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockSecurityOperations.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/client/mock/MockSecurityOperations.java Mon Jan 14 22:03:24 2013
@@ -36,10 +36,18 @@ public class MockSecurityOperations impl
     this.acu = acu;
   }
   
+  /**
+   * @deprecated Use {@link #createUser(String,byte[])} instead
+   */
   @Override
   public void createUser(String user, byte[] password, Authorizations authorizations) throws AccumuloException, AccumuloSecurityException {
     this.acu.users.put(user, new MockUser(user, password, authorizations));
   }
+
+  @Override
+  public void createUser(String user, byte[] password) throws AccumuloException, AccumuloSecurityException {
+    createUser(user, password, new Authorizations());
+  }
   
   @Override
   public void dropUser(String user) throws AccumuloException, AccumuloSecurityException {



Mime
View raw message