accumulo-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From cjno...@apache.org
Subject git commit: ACCUMULO-391 columns can be set per table
Date Thu, 12 Sep 2013 04:08:31 GMT
Updated Branches:
  refs/heads/ACCUMULO-391 8c76062a2 -> 48c2d30b8


ACCUMULO-391 columns can be set per table


Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/48c2d30b
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/48c2d30b
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/48c2d30b

Branch: refs/heads/ACCUMULO-391
Commit: 48c2d30b862baf11d2735e0d001f96e9207753e1
Parents: 8c76062
Author: Corey J. Nolet <cjnolet@gmail.com>
Authored: Thu Sep 12 00:07:19 2013 -0400
Committer: Corey J. Nolet <cjnolet@gmail.com>
Committed: Thu Sep 12 00:07:19 2013 -0400

----------------------------------------------------------------------
 .../core/client/mapred/InputFormatBase.java     |   4 +-
 .../core/client/mapreduce/InputFormatBase.java  |  51 +++++---
 .../mapreduce/lib/util/InputConfigurator.java   | 127 ++++++++++++++++---
 .../mapreduce/AccumuloRowInputFormatTest.java   |   6 +-
 .../mapreduce/MultiTableInputFormatTest.java    |  10 +-
 .../core/client/mapreduce/TokenFileTest.java    |  12 +-
 core/src/test/resources/log4j.properties        |   1 +
 7 files changed, 161 insertions(+), 50 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/accumulo/blob/48c2d30b/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
b/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
index 74373f6..6997277 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapred/InputFormatBase.java
@@ -368,9 +368,9 @@ public abstract class InputFormatBase<K,V> implements InputFormat<K,V>
{
    * @see #fetchColumns(JobConf, Collection)
    */
   protected static Set<Pair<Text,Text>> getFetchedColumns(JobConf job) {
-    return InputConfigurator.getFetchedColumns(CLASS, job);
+    return InputConfigurator.getFetchedColumns(CLASS, job, getInputTableName (job));
   }
-  
+
   /**
    * Encode an iterator on the input for this job.
    * 

http://git-wip-us.apache.org/repos/asf/accumulo/blob/48c2d30b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
index 34c524d..a67f85d 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/InputFormatBase.java
@@ -16,6 +16,8 @@
  */
 package org.apache.accumulo.core.client.mapreduce;
 
+import static org.apache.accumulo.core.client.security.tokens.AuthenticationToken.AuthenticationTokenSerializer.deserialize;
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
@@ -51,6 +53,7 @@ import org.apache.accumulo.core.client.impl.TabletLocator;
 import org.apache.accumulo.core.client.mapreduce.lib.util.InputConfigurator;
 import org.apache.accumulo.core.client.mock.MockInstance;
 import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
+import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.data.ByteSequence;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.KeyExtent;
@@ -76,8 +79,6 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 
-import static org.apache.accumulo.core.client.security.tokens.AuthenticationToken.AuthenticationTokenSerializer.deserialize;
-
 /**
  * This abstract {@link InputFormat} class allows MapReduce jobs to use Accumulo as the source
of K,V pairs.
  * <p>
@@ -403,8 +404,8 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V>
{
    * @since 1.5.0
    * @see #fetchColumns(Job, Collection)
    */
-  protected static Set<Pair<Text,Text>> getFetchedColumns(JobContext context)
{
-    return InputConfigurator.getFetchedColumns(CLASS, getConfiguration(context));
+  protected static Set<Pair<Text,Text>> getFetchedColumns(JobContext context,
String table) {
+    return InputConfigurator.getFetchedColumns(CLASS, getConfiguration(context), table);
   }
   
   /**
@@ -625,18 +626,20 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V>
{
      * @param scanner
      *          the scanner to configure
      */
-    protected void setupIterators(TaskAttemptContext context, Scanner scanner) {
+    protected void setupIterators(TaskAttemptContext context, Scanner scanner, String tableName)
{
       List<IteratorSetting> iterators = getIterators(context);
       for (IteratorSetting iterator : iterators) {
         scanner.addScanIterator(iterator);
       }
     }
-    
+
+
     /**
      * Initialize a scanner over the given input split using this task attempt configuration.
      */
     @Override
     public void initialize(InputSplit inSplit, TaskAttemptContext attempt) throws IOException
{
+
       Scanner scanner;
       split = (RangeInputSplit) inSplit;
       log.debug("Initializing input split: " + split.range);
@@ -644,18 +647,19 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V>
{
       String principal = getPrincipal(attempt);
       String tokenClass = getTokenClass(attempt);
       byte[] token = getToken(attempt);
-      Authorizations authorizations = getScanAuthorizations(attempt);
-      
+      Authorizations authorizations = getScanAuthorizations (attempt);
+
       try {
         log.debug("Creating connector with user: " + principal);
         Connector conn = instance.getConnector(principal, deserialize (tokenClass, token));
-        log.debug("Creating scanner for table: " + getDefaultInputTableName (attempt));
+        log.debug("Creating scanner for table: " + split.getTableName ());
         log.debug("Authorizations are: " + authorizations);
         if (isOfflineScan(attempt)) {
-          scanner = new OfflineScanner(instance, new Credentials(principal, deserialize (tokenClass,
token)), Tables.getTableId(
-              instance, getDefaultInputTableName (attempt)), authorizations);
+          // TODO: This used AuthInfo before- figure out a better equivalent
+          scanner = new OfflineScanner(instance, new Credentials(principal, deserialize (tokenClass,
token)), Tables.getTableId(instance,
+                  split.getTableName()), authorizations);
         } else {
-          scanner = conn.createScanner(getDefaultInputTableName (attempt), authorizations);
+          scanner = conn.createScanner(split.getTableName(), authorizations);
         }
         if (isIsolated(attempt)) {
           log.info("Creating isolated scanner");
@@ -665,13 +669,13 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V>
{
           log.info("Using local iterators");
           scanner = new ClientSideIteratorScanner(scanner);
         }
-        setupIterators(attempt, scanner);
+        setupIterators(attempt, scanner, split.getTableName());
       } catch (Exception e) {
         throw new IOException(e);
       }
-      
+
       // setup a scanner within the bounds of this split
-      for (Pair<Text,Text> c : getFetchedColumns(attempt)) {
+      for (Pair<Text,Text> c : getFetchedColumns(attempt, split.getTableName())) {
         if (c.getSecond() != null) {
           log.debug("Fetching column " + c.getFirst() + ":" + c.getSecond());
           scanner.fetchColumn(c.getFirst(), c.getSecond());
@@ -680,13 +684,14 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V>
{
           scanner.fetchColumnFamily(c.getFirst());
         }
       }
-      
-      scanner.setRange(split.range);
-      
+
+      scanner.setRange(split.getRange());
+
       numKeysRead = 0;
-      
+
       // do this last after setting all scanner options
       scannerIterator = scanner.iterator();
+
     }
     
     @Override
@@ -1016,11 +1021,13 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V>
{
     public RangeInputSplit() {
       range = new Range();
       locations = new String[0];
+      tableName = "";
     }
     
     public RangeInputSplit(RangeInputSplit split) throws IOException {
       this.setRange(split.getRange());
       this.setLocations(split.getLocations());
+      this.setTableName(split.getTableName ());
     }
     
     protected RangeInputSplit(String table, Range range, String[] locations) {
@@ -1040,6 +1047,10 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V>
{
     public String getTableName() {
       return this.tableName;
     }
+
+    public void setTableName(String tableName) {
+      this.tableName = tableName;
+    }
     
     private static byte[] extractBytes(ByteSequence seq, int numBytes) {
       byte[] bytes = new byte[numBytes + 1];
@@ -1115,6 +1126,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V>
{
     @Override
     public void readFields(DataInput in) throws IOException {
       range.readFields(in);
+      tableName = in.readUTF ();
       int numLocs = in.readInt();
       locations = new String[numLocs];
       for (int i = 0; i < numLocs; ++i)
@@ -1124,6 +1136,7 @@ public abstract class InputFormatBase<K,V> extends InputFormat<K,V>
{
     @Override
     public void write(DataOutput out) throws IOException {
       range.write(out);
+      out.writeUTF (tableName);
       out.writeInt(locations.length);
       for (int i = 0; i < locations.length; ++i)
         out.writeUTF(locations[i]);

http://git-wip-us.apache.org/repos/asf/accumulo/blob/48c2d30b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
index dc77bf8..17e9b4c 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
@@ -58,8 +58,16 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.StringUtils;
 
+import static java.lang.String.format;
+import static org.apache.accumulo.core.Constants.UTF8;
+import static org.apache.accumulo.core.Constants.ZTABLE_NAME;
+import static org.apache.accumulo.core.client.mapreduce.lib.util.InputConfigurator.ScanOpts.COLUMNS;
+import static org.apache.accumulo.core.client.mapreduce.lib.util.InputConfigurator.ScanOpts.ITERATORS;
 import static org.apache.accumulo.core.util.ArgumentChecker.notNull;
+import static org.apache.accumulo.core.util.TextUtil.getBytes;
+import static org.apache.commons.codec.binary.Base64.decodeBase64;
 import static org.apache.commons.codec.binary.Base64.encodeBase64;
+import static org.apache.hadoop.util.StringUtils.COMMA_STR;
 import static org.apache.hadoop.util.StringUtils.join;
 import static org.apache.hadoop.util.StringUtils.split;
 
@@ -218,7 +226,7 @@ public class InputConfigurator extends ConfiguratorBase {
    * @since 1.6.0
    */
   public static void setRanges(Class<?> implementingClass, Configuration conf, Map<String,
Collection<Range>> ranges) {
-    notNull(ranges);
+    notNull (ranges);
 
     ArrayList<String> rangeStrings = new ArrayList<String>(ranges.size());
     for (Map.Entry<String,Collection<Range>> pair : ranges.entrySet()) {
@@ -261,7 +269,7 @@ public class InputConfigurator extends ConfiguratorBase {
 
     // parse out the ranges and add them to table's bucket
     for (String rangeString : conf.getStringCollection(enumToConfKey(implementingClass, ScanOpts.RANGES)))
{
-      ByteArrayInputStream bais = new ByteArrayInputStream(Base64.decodeBase64(rangeString.getBytes()));
+      ByteArrayInputStream bais = new ByteArrayInputStream(decodeBase64 (rangeString.getBytes
()));
       TableRange range = new TableRange ();
       range.readFields(new DataInputStream(bais));
       ranges.get(range.tableName()).add(range.range());
@@ -271,7 +279,8 @@ public class InputConfigurator extends ConfiguratorBase {
   }
   
   /**
-   * Restricts the columns that will be mapped over for this job.
+   * Restricts the columns that will be mapped over for this job. This provides backwards
+   * compatibility when a single input table is used.
    * 
    * @param implementingClass
    *          the class whose name will be used as a prefix for the property configuration
key
@@ -289,14 +298,55 @@ public class InputConfigurator extends ConfiguratorBase {
       if (column.getFirst() == null)
         throw new IllegalArgumentException("Column family can not be null");
       
-      String col = new String(encodeBase64 (TextUtil.getBytes (column.getFirst ())), Constants.UTF8);
+      String col = new String(encodeBase64 (getBytes (column.getFirst ())), UTF8);
       if (column.getSecond() != null)
-        col += ":" + new String(encodeBase64 (TextUtil.getBytes (column.getSecond ())), Constants.UTF8);
+        col += ":" + new String(encodeBase64 (getBytes (column.getSecond ())), UTF8);
       columnStrings.add(col);
     }
-    conf.setStrings(enumToConfKey(implementingClass, ScanOpts.COLUMNS), columnStrings.toArray(new
String[0]));
+
+    String[] tables = getInputTableNames (implementingClass, conf);
+    if(tables.length > 0)
+      conf.setStrings(format ("%s.%s", enumToConfKey (implementingClass, COLUMNS), tables[0]),
+              columnStrings.toArray (new String[0]));
+    else
+      throw new IllegalStateException ("Input tables must be set before setting fetched columns");
   }
-  
+
+  /**
+   * Restricts the columns that will be mapped over for this job.
+   *
+   * @param implementingClass
+   *          the class whose name will be used as a prefix for the property configuration
key
+   * @param conf
+   *          the Hadoop configuration object to configure
+   * @param columnFamilyColumnQualifierPairs
+   *          a map keyed by the table name corresponding to a pair of {@link Text} objects
corresponding to column
+   *          family and column qualifier. If the column qualifier is null, the entire column
family is
selected. An empty set is the default and is equivalent to scanning all
columns.
+   * @since 1.5.0
+   */
+  public static void fetchColumns(Class<?> implementingClass, Configuration conf, Map<String,
Collection<Pair<Text,Text>>> columnFamilyColumnQualifierPairs) {
+    notNull (columnFamilyColumnQualifierPairs);
+    ArrayList<String> columnStrings = new ArrayList<String>(columnFamilyColumnQualifierPairs.size());
+    for (Map.Entry<String, Collection<Pair<Text,Text>>> tableColumns :
columnFamilyColumnQualifierPairs.entrySet()) {
+
+      String tableName = tableColumns.getKey();
+      for(Pair<Text,Text> column : tableColumns.getValue()) {
+
+        if (column.getFirst() == null)
+          throw new IllegalArgumentException("Column family can not be null");
+
+        String col = new String(encodeBase64 (getBytes (column.getFirst ())), UTF8);
+        if (column.getSecond() != null)
+          col += ":" + new String(encodeBase64 (getBytes (column.getSecond ())), UTF8);
+        columnStrings.add(col);
+      }
+      conf.setStrings (format ("%s.%s", enumToConfKey (implementingClass, COLUMNS), tableName),
+              columnStrings.toArray (new String[0]));
+    }
+  }
+
+
   /**
    * Gets the columns to be mapped over from this job.
    * 
@@ -308,19 +358,19 @@ public class InputConfigurator extends ConfiguratorBase {
    * @since 1.5.0
    * @see #fetchColumns(Class, Configuration, Collection)
    */
-  public static Set<Pair<Text,Text>> getFetchedColumns(Class<?> implementingClass,
Configuration conf) {
+  public static Set<Pair<Text,Text>> getFetchedColumns(Class<?> implementingClass,
Configuration conf, String table) {
     Set<Pair<Text,Text>> columns = new HashSet<Pair<Text,Text>>();
-    for (String col : conf.getStringCollection(enumToConfKey(implementingClass, ScanOpts.COLUMNS)))
{
+    for (String col : conf.getStringCollection(format ("%s.%s", enumToConfKey (implementingClass,
COLUMNS), table))) {
       int idx = col.indexOf(":");
-      Text cf = new Text(idx < 0 ? Base64.decodeBase64(col.getBytes(Constants.UTF8)) :
Base64.decodeBase64(col.substring(0, idx).getBytes(Constants.UTF8)));
-      Text cq = idx < 0 ? null : new Text(Base64.decodeBase64(col.substring(idx + 1).getBytes()));
+      Text cf = new Text(idx < 0 ? decodeBase64 (col.getBytes (UTF8)) : decodeBase64 (col.substring
(0, idx).getBytes (UTF8)));
+      Text cq = idx < 0 ? null : new Text(decodeBase64 (col.substring (idx + 1).getBytes
()));
       columns.add(new Pair<Text,Text>(cf, cq));
     }
     return columns;
   }
   
   /**
-   * Encode an iterator on the input for this job.
+   * Encode an iterator on the default input table for this job.
    * 
    * @param implementingClass
    *          the class whose name will be used as a prefix for the property configuration
key
@@ -329,30 +379,67 @@ public class InputConfigurator extends ConfiguratorBase {
    * @param cfg
    *          the configuration of the iterator
    * @since 1.5.0
+   * @deprecated since 1.6.0 in favor of addIterators().
    */
   public static void addIterator(Class<?> implementingClass, Configuration conf, IteratorSetting
cfg) {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     String newIter;
     try {
       cfg.write(new DataOutputStream(baos));
-      newIter = new String(encodeBase64 (baos.toByteArray ()), Constants.UTF8);
+      newIter = new String(encodeBase64 (baos.toByteArray ()), UTF8);
       baos.close();
     } catch (IOException e) {
       throw new IllegalArgumentException("unable to serialize IteratorSetting");
     }
     
-    String iterators = conf.get(enumToConfKey(implementingClass, ScanOpts.ITERATORS));
+    String iterators = conf.get(enumToConfKey(implementingClass, ITERATORS));
     // No iterators specified yet, create a new string
     if (iterators == null || iterators.isEmpty()) {
       iterators = newIter;
     } else {
       // append the next iterator & reset
-      iterators = iterators.concat(StringUtils.COMMA_STR + newIter);
+      iterators = iterators.concat(COMMA_STR + newIter);
     }
     // Store the iterators w/ the job
-    conf.set(enumToConfKey(implementingClass, ScanOpts.ITERATORS), iterators);
+    conf.set (enumToConfKey (implementingClass, ITERATORS), iterators);
   }
-  
+
+  /**
+   * Encode an iterator on the input for this job.
+   *
+   * @param implementingClass
+   *          the class whose name will be used as a prefix for the property configuration
key
+   * @param conf
+   *          the Hadoop configuration object to configure
+   * @param cfg
+   *          the configuration of the iterator
+   * @since 1.5.0
+   * @deprecated since 1.6.0 in favor of addIterators().
+   */
+  public static void addIterator(Class<?> implementingClass, Configuration conf, String
tableName, IteratorSetting cfg) {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    String newIter;
+    try {
+      cfg.write(new DataOutputStream(baos));
+      newIter = new String(encodeBase64 (baos.toByteArray ()), UTF8);
+      baos.close();
+    } catch (IOException e) {
+      throw new IllegalArgumentException("unable to serialize IteratorSetting");
+    }
+
+    String iterators = conf.get(enumToConfKey(implementingClass, ITERATORS));
+    // No iterators specified yet, create a new string
+    if (iterators == null || iterators.isEmpty()) {
+      iterators = newIter;
+    } else {
+      // append the next iterator & reset
+      iterators = iterators.concat(COMMA_STR + newIter);
+    }
+    // Store the iterators w/ the job
+    conf.set(enumToConfKey(implementingClass, ITERATORS), iterators);
+  }
+
+
   /**
    * Gets a list of the iterator settings (for iterators to apply to a scanner) from this
configuration.
    * 
@@ -365,19 +452,19 @@ public class InputConfigurator extends ConfiguratorBase {
    * @see #addIterator(Class, Configuration, IteratorSetting)
    */
   public static List<IteratorSetting> getIterators(Class<?> implementingClass,
Configuration conf) {
-    String iterators = conf.get(enumToConfKey(implementingClass, ScanOpts.ITERATORS));
+    String iterators = conf.get(enumToConfKey(implementingClass, ITERATORS));
     
     // If no iterators are present, return an empty list
     if (iterators == null || iterators.isEmpty())
       return new ArrayList<IteratorSetting>();
     
     // Compose the set of iterators encoded in the job configuration
-    StringTokenizer tokens = new StringTokenizer(iterators, StringUtils.COMMA_STR);
+    StringTokenizer tokens = new StringTokenizer(iterators, COMMA_STR);
     List<IteratorSetting> list = new ArrayList<IteratorSetting>();
     try {
       while (tokens.hasMoreTokens()) {
         String itstring = tokens.nextToken();
-        ByteArrayInputStream bais = new ByteArrayInputStream(Base64.decodeBase64(itstring.getBytes()));
+        ByteArrayInputStream bais = new ByteArrayInputStream(decodeBase64 (itstring.getBytes
()));
         list.add(new IteratorSetting(new DataInputStream(bais)));
         bais.close();
       }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/48c2d30b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloRowInputFormatTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloRowInputFormatTest.java
b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloRowInputFormatTest.java
index c581233..2c5f801 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloRowInputFormatTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloRowInputFormatTest.java
@@ -165,7 +165,11 @@ public class AccumuloRowInputFormatTest {
       job.setMapOutputKeyClass(Key.class);
       job.setMapOutputValueClass(Value.class);
       job.setOutputFormatClass(NullOutputFormat.class);
-      
+
+      for(Entry<String, String> entry : job.getConfiguration ()) {
+        System.out.println(entry);
+      }
+
       job.setNumReduceTasks(0);
       
       job.waitForCompletion(true);

http://git-wip-us.apache.org/repos/asf/accumulo/blob/48c2d30b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/MultiTableInputFormatTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/MultiTableInputFormatTest.java
b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/MultiTableInputFormatTest.java
index 777d362..b03db9b 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/MultiTableInputFormatTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/MultiTableInputFormatTest.java
@@ -1,8 +1,10 @@
 package org.apache.accumulo.core.client.mapreduce;
 
+import static org.apache.accumulo.core.client.mapreduce.InputFormatBase.getRanges;
 import static org.apache.accumulo.core.client.mapreduce.InputFormatBase.setConnectorInfo;
 import static org.apache.accumulo.core.client.mapreduce.InputFormatBase.setInputTableNames;
 import static org.apache.accumulo.core.client.mapreduce.InputFormatBase.setMockInstance;
+import static org.apache.accumulo.core.client.mapreduce.InputFormatBase.setRanges;
 import static org.apache.accumulo.core.client.mapreduce.InputFormatBase.setScanAuthorizations;
 import static org.apache.accumulo.core.client.mapreduce.multi.ContextFactory.createMapContext;
 import static org.apache.accumulo.core.client.mapreduce.multi.ContextFactory.createTaskAttemptContext;
@@ -93,7 +95,7 @@ public class MultiTableInputFormatTest {
     job.setInputFormatClass (AccumuloInputFormat.class);
     job.setMapperClass (TestMapper.class);
     job.setNumReduceTasks (0);
-    setConnectorInfo (job, "root", new PasswordToken ("".getBytes ()));
+    setConnectorInfo (job, "root", new PasswordToken (new byte[0]));
     setInputTableNames (job, tablesList);
     setScanAuthorizations (job, new Authorizations ());
     setMockInstance (job, "testmapinstance");
@@ -123,7 +125,7 @@ public class MultiTableInputFormatTest {
     job.setInputFormatClass(AccumuloInputFormat.class);
     job.setMapperClass (TestMapper.class);
     job.setNumReduceTasks (0);
-    setConnectorInfo (job, "root", new PasswordToken ("".getBytes ()));
+    setConnectorInfo (job, "root", new PasswordToken (new byte[0]));
     setInputTableNames (job, tables);
     setScanAuthorizations (job, new Authorizations ());
     setMockInstance (job, "testmapinstance");
@@ -137,8 +139,8 @@ public class MultiTableInputFormatTest {
       tblRanges.put(tbl, ranges);
     }
     
-    AccumuloInputFormat.setRanges (job, tblRanges);
-    Map<String, List<Range>> configuredRanges = AccumuloInputFormat.getRanges
(job);
+    setRanges (job, tblRanges);
+    Map<String, List<Range>> configuredRanges = getRanges (job);
     
     for(Entry<String, List<Range>> cfgRange : configuredRanges.entrySet()) {
       String tbl = cfgRange.getKey();

http://git-wip-us.apache.org/repos/asf/accumulo/blob/48c2d30b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/TokenFileTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/TokenFileTest.java
b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/TokenFileTest.java
index f0277fc..303b12e 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/TokenFileTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/TokenFileTest.java
@@ -16,6 +16,7 @@
  */
 package org.apache.accumulo.core.client.mapreduce;
 
+import static java.util.Collections.singleton;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
@@ -24,6 +25,7 @@ import static org.junit.Assert.assertTrue;
 import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.Map.Entry;
 
@@ -102,7 +104,7 @@ public class TokenFileTest {
       job.setInputFormatClass(AccumuloInputFormat.class);
       
       AccumuloInputFormat.setConnectorInfo(job, user, tokenFile);
-      AccumuloInputFormat.setInputTableName(job, table1);
+      AccumuloInputFormat.setInputTableNames (job, singleton (table1));
       AccumuloInputFormat.setMockInstance(job, INSTANCE_NAME);
       
       job.setMapperClass(TestMapper.class);
@@ -116,7 +118,9 @@ public class TokenFileTest {
       AccumuloOutputFormat.setCreateTables(job, false);
       AccumuloOutputFormat.setDefaultTableName(job, table2);
       AccumuloOutputFormat.setMockInstance(job, INSTANCE_NAME);
-      
+
+      System.out.println(job.getConfiguration ());
+
       job.setNumReduceTasks(0);
       
       job.waitForCompletion(true);
@@ -125,14 +129,14 @@ public class TokenFileTest {
     }
     
     public static void main(String[] args) throws Exception {
-      assertEquals(0, ToolRunner.run(CachedConfiguration.getInstance(), new MRTokenFileTester(),
args));
+        assertEquals(0, ToolRunner.run(CachedConfiguration.getInstance(), new MRTokenFileTester(),
args));
     }
   }
   
   @Test
   public void testMR() throws Exception {
     MockInstance mockInstance = new MockInstance(INSTANCE_NAME);
-    Connector c = mockInstance.getConnector("root", new PasswordToken(""));
+    Connector c = mockInstance.getConnector("root", new PasswordToken(new byte[0]));
     c.tableOperations().create(TEST_TABLE_1);
     c.tableOperations().create(TEST_TABLE_2);
     BatchWriter bw = c.createBatchWriter(TEST_TABLE_1, new BatchWriterConfig());

http://git-wip-us.apache.org/repos/asf/accumulo/blob/48c2d30b/core/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/core/src/test/resources/log4j.properties b/core/src/test/resources/log4j.properties
index cf5c2f0..b1c083a 100644
--- a/core/src/test/resources/log4j.properties
+++ b/core/src/test/resources/log4j.properties
@@ -25,3 +25,4 @@ log4j.logger.org.apache.commons.vfs2.impl.DefaultFileSystemManager=WARN
 log4j.logger.org.apache.hadoop.mapred=ERROR
 log4j.logger.org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter=ERROR
 log4j.logger.org.apache.hadoop.util.ProcessTree=ERROR
+


Mime
View raw message