accumulo-commits mailing list archives

From: vi...@apache.org
Subject: svn commit: r1433166 [5/20] - in /accumulo/branches/ACCUMULO-259: ./ assemble/ assemble/platform/ assemble/scripts/ assemble/scripts/init.d/ bin/ conf/examples/1GB/native-standalone/ conf/examples/1GB/standalone/ conf/examples/2GB/native-standalone/ co...
Date: Mon, 14 Jan 2013 22:03:34 GMT
Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/RFile.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/RFile.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/RFile.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/RFile.java Mon Jan 14 22:03:24 2013
@@ -22,7 +22,6 @@ import java.io.DataOutput;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
-import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -33,7 +32,6 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Random;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -52,21 +50,16 @@ import org.apache.accumulo.core.file.blo
 import org.apache.accumulo.core.file.blockfile.ABlockWriter;
 import org.apache.accumulo.core.file.blockfile.BlockFileReader;
 import org.apache.accumulo.core.file.blockfile.BlockFileWriter;
-import org.apache.accumulo.core.file.blockfile.impl.CachableBlockFile;
 import org.apache.accumulo.core.file.rfile.BlockIndex.BlockIndexEntry;
 import org.apache.accumulo.core.file.rfile.MultiLevelIndex.IndexEntry;
 import org.apache.accumulo.core.file.rfile.MultiLevelIndex.Reader.IndexIterator;
 import org.apache.accumulo.core.file.rfile.RelativeKey.MByteSequence;
+import org.apache.accumulo.core.file.rfile.RelativeKey.SkippR;
 import org.apache.accumulo.core.file.rfile.bcfile.MetaBlockDoesNotExist;
 import org.apache.accumulo.core.iterators.IterationInterruptedException;
 import org.apache.accumulo.core.iterators.IteratorEnvironment;
 import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
 import org.apache.accumulo.core.iterators.system.HeapIterator;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.log4j.Logger;
 
@@ -79,7 +72,9 @@ public class RFile {
   private RFile() {}
   
   private static final int RINDEX_MAGIC = 0x20637474;
+  static final int RINDEX_VER_7 = 7;
   static final int RINDEX_VER_6 = 6;
+  // static final int RINDEX_VER_5 = 5; // unreleased
   static final int RINDEX_VER_4 = 4;
   static final int RINDEX_VER_3 = 3;
   
@@ -328,6 +323,7 @@ public class RFile {
       previousColumnFamilies = new HashSet<ByteSequence>();
     }
     
+    @Override
     public synchronized void close() throws IOException {
       
       if (closed) {
@@ -339,7 +335,7 @@ public class RFile {
       ABlockWriter mba = fileWriter.prepareMetaBlock("RFile.index");
       
       mba.writeInt(RINDEX_MAGIC);
-      mba.writeInt(RINDEX_VER_6);
+      mba.writeInt(RINDEX_VER_7);
       
       if (currentLocalityGroup != null)
         localityGroups.add(currentLocalityGroup);
@@ -370,6 +366,7 @@ public class RFile {
       }
     }
     
+    @Override
     public void append(Key key, Value value) throws IOException {
       
       if (dataClosed) {
@@ -686,14 +683,12 @@ public class RFile {
           // and speed up others.
 
           MByteSequence valbs = new MByteSequence(new byte[64], 0, 0);
-          RelativeKey tmpRk = new RelativeKey();
-          Key pKey = new Key(getTopKey());
-          int fastSkipped = tmpRk.fastSkip(currBlock, startKey, valbs, pKey, getTopKey());
-          if (fastSkipped > 0) {
-            entriesLeft -= fastSkipped;
+          SkippR skippr = RelativeKey.fastSkip(currBlock, startKey, valbs, prevKey, getTopKey());
+          if (skippr.skipped > 0) {
+            entriesLeft -= skippr.skipped;
             val = new Value(valbs.toArray());
-            prevKey = pKey;
-            rk = tmpRk;
+            prevKey = skippr.prevKey;
+            rk = skippr.rk;
           }
           
           reseek = false;
@@ -706,8 +701,6 @@ public class RFile {
         }
       }
       
-      int fastSkipped = -1;
-      
       if (reseek) {
         iiter = index.lookup(startKey);
         
@@ -736,7 +729,6 @@ public class RFile {
           if (!checkRange)
             hasTop = true;
 
-          RelativeKey tmpRk = new RelativeKey();
           MByteSequence valbs = new MByteSequence(new byte[64], 0, 0);
 
           Key currKey = null;
@@ -748,7 +740,8 @@ public class RFile {
               if (bie != null) {
                 // we are seeked to the current position of the key in the index
                 // need to prime the read process and read this key from the block
-                tmpRk.setPrevKey(bie.getKey());
+                RelativeKey tmpRk = new RelativeKey();
+                tmpRk.setPrevKey(bie.getPrevKey());
                 tmpRk.readFields(currBlock);
                 val = new Value();
 
@@ -757,18 +750,19 @@ public class RFile {
                 
                 // just consumed one key from the input stream, so subtract one from entries left
                 entriesLeft = bie.getEntriesLeft() - 1;
-                prevKey = new Key(bie.getKey());
-                currKey = bie.getKey();
+                prevKey = new Key(bie.getPrevKey());
+                currKey = tmpRk.getKey();
               }
             }
           }
 
-          fastSkipped = tmpRk.fastSkip(currBlock, startKey, valbs, prevKey, currKey);
-          entriesLeft -= fastSkipped;
+          SkippR skippr = RelativeKey.fastSkip(currBlock, startKey, valbs, prevKey, currKey);
+          prevKey = skippr.prevKey;
+          entriesLeft -= skippr.skipped;
           val = new Value(valbs.toArray());
           // set rk when everything above is successful, if exception
           // occurs rk will not be set
-          rk = tmpRk;
+          rk = skippr.rk;
         }
       }
       
@@ -843,7 +837,7 @@ public class RFile {
       
       if (magic != RINDEX_MAGIC)
         throw new IOException("Did not see expected magic number, saw " + magic);
-      if (ver != RINDEX_VER_6 && ver != RINDEX_VER_4 && ver != RINDEX_VER_3)
+      if (ver != RINDEX_VER_7 && ver != RINDEX_VER_6 && ver != RINDEX_VER_4 && ver != RINDEX_VER_3)
         throw new IOException("Did not see expected version, saw " + ver);
       
       int size = mb.readInt();
@@ -1092,103 +1086,4 @@ public class RFile {
       }
     }
   }
-  
-  public static void main(String[] args) throws Exception {
-    Configuration conf = new Configuration();
-    FileSystem fs = FileSystem.get(conf);
-    
-    int max_row = 10000;
-    int max_cf = 10;
-    int max_cq = 10;
-
-    Charset utf8 = Charset.forName("UTF8");
-    
-    // FSDataOutputStream fsout = fs.create(new Path("/tmp/test.rf"));
-    
-    // RFile.Writer w = new RFile.Writer(fsout, 1000, "gz", conf);
-    CachableBlockFile.Writer _cbw = new CachableBlockFile.Writer(fs, new Path("/tmp/test.rf"), "gz", conf);
-    RFile.Writer w = new RFile.Writer(_cbw, 100000);
-    
-    w.startDefaultLocalityGroup();
-    
-    int c = 0;
-    
-    for (int i = 0; i < max_row; i++) {
-      Text row = new Text(String.format("R%06d", i));
-      for (int j = 0; j < max_cf; j++) {
-        Text cf = new Text(String.format("CF%06d", j));
-        for (int k = 0; k < max_cq; k++) {
-          Text cq = new Text(String.format("CQ%06d", k));
-          w.append(new Key(row, cf, cq), new Value((c++ + "").getBytes(utf8)));
-        }
-      }
-    }
-    
-    w.close();
-    // fsout.close();
-    
-    // Logger.getLogger("accumulo.core.file.rfile").setLevel(Level.DEBUG);
-    long t1 = System.currentTimeMillis();
-    FSDataInputStream fsin = fs.open(new Path("/tmp/test.rf"));
-    long t2 = System.currentTimeMillis();
-    CachableBlockFile.Reader _cbr = new CachableBlockFile.Reader(fs, new Path("/tmp/test.rf"), conf, null, null);
-    RFile.Reader r = new RFile.Reader(_cbr);
-    long t3 = System.currentTimeMillis();
-    
-    System.out.println("Open time " + (t2 - t1) + " " + (t3 - t2));
-    
-    SortedKeyValueIterator<Key,Value> rd = r.deepCopy(null);
-    SortedKeyValueIterator<Key,Value> rd2 = r.deepCopy(null);
-    
-    Random rand = new Random(10);
-    
-    seekRandomly(100, max_row, max_cf, max_cq, r, rand);
-    
-    rand = new Random(10);
-    seekRandomly(100, max_row, max_cf, max_cq, rd, rand);
-    
-    rand = new Random(10);
-    seekRandomly(100, max_row, max_cf, max_cq, rd2, rand);
-    
-    r.closeDeepCopies();
-    
-    seekRandomly(100, max_row, max_cf, max_cq, r, rand);
-    
-    rd = r.deepCopy(null);
-    
-    seekRandomly(100, max_row, max_cf, max_cq, rd, rand);
-    
-    r.close();
-    fsin.close();
-    
-    seekRandomly(100, max_row, max_cf, max_cq, r, rand);
-  }
-  
-  private static void seekRandomly(int num, int max_row, int max_cf, int max_cq, SortedKeyValueIterator<Key,Value> rd, Random rand) throws Exception {
-    long t1 = System.currentTimeMillis();
-    
-    for (int i = 0; i < num; i++) {
-      Text row = new Text(String.format("R%06d", rand.nextInt(max_row)));
-      Text cf = new Text(String.format("CF%06d", rand.nextInt(max_cf)));
-      Text cq = new Text(String.format("CQ%06d", rand.nextInt(max_cq)));
-      
-      Key sk = new Key(row, cf, cq);
-      rd.seek(new Range(sk, null), new ArrayList<ByteSequence>(), false);
-      if (!rd.hasTop() || !rd.getTopKey().equals(sk)) {
-        System.err.println(sk + " != " + rd.getTopKey());
-      }
-      
-    }
-    
-    long t2 = System.currentTimeMillis();
-    
-    double delta = ((t2 - t1) / 1000.0);
-    System.out.println("" + delta + " " + num / delta);
-    
-    System.gc();
-    System.gc();
-    System.gc();
-    
-    Thread.sleep(3000);
-  }
 }
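
The RFile.java hunks above bump the on-disk index format from RINDEX_VER_6 to RINDEX_VER_7, switch the seek path over to the new static RelativeKey.fastSkip that returns a SkippR holder (so rk, prevKey, and entriesLeft are only committed after the skip succeeds), and drop the old ad-hoc main() benchmark along with its now-unused imports. A minimal stand-alone sketch of the backward-compatible header check the reader keeps; the constants and version test mirror the diff, while the wrapper class itself is hypothetical:

// Minimal sketch (not Accumulo's actual reader class) of staying backward
// compatible after a version bump: accept every version the reader knows how
// to decode, fail fast on anything else.
import java.io.DataInputStream;
import java.io.IOException;

final class IndexHeaderCheck {
  static final int RINDEX_MAGIC = 0x20637474;
  static final int RINDEX_VER_7 = 7;
  static final int RINDEX_VER_6 = 6;
  static final int RINDEX_VER_4 = 4;
  static final int RINDEX_VER_3 = 3;

  static int readHeader(DataInputStream in) throws IOException {
    int magic = in.readInt();
    int ver = in.readInt();
    if (magic != RINDEX_MAGIC)
      throw new IOException("Did not see expected magic number, saw " + magic);
    if (ver != RINDEX_VER_7 && ver != RINDEX_VER_6 && ver != RINDEX_VER_4 && ver != RINDEX_VER_3)
      throw new IOException("Did not see expected version, saw " + ver);
    return ver; // callers branch on the version when decoding the rest of the index
  }
}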

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/RFileOperations.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/RFileOperations.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/RFileOperations.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/RFileOperations.java Mon Jan 14 22:03:24 2013
@@ -104,23 +104,22 @@ public class RFileOperations extends Fil
   @Override
   public FileSKVWriter openWriter(String file, FileSystem fs, Configuration conf, AccumuloConfiguration acuconf) throws IOException {
     int hrep = conf.getInt("dfs.replication", -1);
-    int trep = conf.getInt(Property.TABLE_FILE_REPLICATION.getKey(), acuconf.getCount(Property.TABLE_FILE_REPLICATION));
+    int trep = acuconf.getCount(Property.TABLE_FILE_REPLICATION);
     int rep = hrep;
     if (trep > 0 && trep != hrep) {
       rep = trep;
     }
     long hblock = conf.getLong("dfs.block.size", 1 << 26);
-    long tblock = conf.getLong(Property.TABLE_FILE_BLOCK_SIZE.getKey(), acuconf.getMemoryInBytes(Property.TABLE_FILE_BLOCK_SIZE));
+    long tblock = acuconf.getMemoryInBytes(Property.TABLE_FILE_BLOCK_SIZE);
     long block = hblock;
     if (tblock > 0)
       block = tblock;
     int bufferSize = conf.getInt("io.file.buffer.size", 4096);
     
-    long blockSize = conf.getLong(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE.getKey(), acuconf.getMemoryInBytes(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE));
-    long indexBlockSize = conf.getLong(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX.getKey(),
-        acuconf.getMemoryInBytes(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX));
+    long blockSize = acuconf.getMemoryInBytes(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE);
+    long indexBlockSize = acuconf.getMemoryInBytes(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX);
     
-    String compression = conf.get(Property.TABLE_FILE_COMPRESSION_TYPE.getKey(), acuconf.get(Property.TABLE_FILE_COMPRESSION_TYPE));
+    String compression = acuconf.get(Property.TABLE_FILE_COMPRESSION_TYPE);
     
     CachableBlockFile.Writer _cbw = new CachableBlockFile.Writer(fs.create(new Path(file), false, bufferSize, (short) rep, block), compression, conf);
     Writer writer = new RFile.Writer(_cbw, (int) blockSize, (int) indexBlockSize);
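
In RFileOperations.openWriter, table-level settings are no longer read through the Hadoop Configuration; replication, data/index block sizes, and compression type now come straight from the AccumuloConfiguration, while dfs.replication and dfs.block.size still supply the HDFS-side defaults. A rough sketch of the precedence that remains, using a hypothetical helper rather than Accumulo API:

// Sketch of the remaining precedence (assumed helper, not Accumulo API): HDFS
// supplies a default, and a positive table-level setting overrides it.
final class SettingResolution {
  static long resolve(long hdfsDefault, long tableSetting) {
    return tableSetting > 0 ? tableSetting : hdfsDefault;
  }

  public static void main(String[] args) {
    long hblock = 1L << 26; // dfs.block.size fallback used in openWriter
    long tblock = 0;        // table-level block size not set
    System.out.println(resolve(hblock, tblock));   // 67108864: HDFS default wins
    System.out.println(resolve(hblock, 1L << 27)); // table override wins
  }
}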

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/RelativeKey.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/RelativeKey.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/RelativeKey.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/RelativeKey.java Mon Jan 14 22:03:24 2013
@@ -16,94 +16,115 @@
  */
 package org.apache.accumulo.core.file.rfile;
 
-import java.io.ByteArrayOutputStream;
 import java.io.DataInput;
 import java.io.DataOutput;
-import java.io.DataOutputStream;
 import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.zip.GZIPOutputStream;
 
 import org.apache.accumulo.core.data.ArrayByteSequence;
 import org.apache.accumulo.core.data.ByteSequence;
 import org.apache.accumulo.core.data.Key;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 
-public class RelativeKey implements WritableComparable<RelativeKey> {
+public class RelativeKey implements Writable {
   
-  private Key key;
-  
-  private byte fieldsSame;
+  private static final byte BIT = 0x01;
   
+  private Key key;
   private Key prevKey;
   
-  private static final byte ROW_SAME = 0x01;
-  private static final byte CF_SAME = 0x02;
-  private static final byte CQ_SAME = 0x04;
-  private static final byte CV_SAME = 0x08;
-  private static final byte TS_SAME = 0x10;
-  private static final byte DELETED = 0x20;
-  
-  private static HashMap<Text,Integer> colFams = new HashMap<Text,Integer>();
-  
-  private static long bytesWritten = 0;
+  private byte fieldsSame;
+  private byte fieldsPrefixed;
   
-  public static void printStats() throws Exception {
-    System.out.println("colFams.size() : " + colFams.size());
-    Set<Entry<Text,Integer>> es = colFams.entrySet();
-    
-    int sum = 0;
-    
-    for (Entry<Text,Integer> entry : es) {
-      sum += entry.getKey().getLength();
-    }
-    
-    System.out.println("Total Column name bytes : " + sum);
-    
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    DataOutputStream dos = new DataOutputStream(new GZIPOutputStream(baos));
-    for (Entry<Text,Integer> entry : es) {
-      entry.getKey().write(dos);
-      dos.writeInt(entry.getValue());
-    }
-    
-    dos.close();
-    
-    System.out.println("Compressed column map size : " + baos.toByteArray().length);
-    System.out.printf("Bytes written : %,d%n", bytesWritten);
-    
-  }
+  // Exact match compression options (first byte) and flag for further
+  private static final byte ROW_SAME = BIT << 0;
+  private static final byte CF_SAME = BIT << 1;
+  private static final byte CQ_SAME = BIT << 2;
+  private static final byte CV_SAME = BIT << 3;
+  private static final byte TS_SAME = BIT << 4;
+  private static final byte DELETED = BIT << 5;
+  // private static final byte UNUSED_1_6 = BIT << 6;
+  private static final byte PREFIX_COMPRESSION_ENABLED = (byte) (BIT << 7);
+  
+  // Prefix compression (second byte)
+  private static final byte ROW_COMMON_PREFIX = BIT << 0;
+  private static final byte CF_COMMON_PREFIX = BIT << 1;
+  private static final byte CQ_COMMON_PREFIX = BIT << 2;
+  private static final byte CV_COMMON_PREFIX = BIT << 3;
+  private static final byte TS_DIFF = BIT << 4;
+  
+  // private static final byte UNUSED_2_5 = BIT << 5;
+  // private static final byte UNUSED_2_6 = BIT << 6;
+  // private static final byte UNUSED_2_7 = (byte) (BIT << 7);
+  
+  // Values for prefix compression
+  int rowCommonPrefixLen;
+  int cfCommonPrefixLen;
+  int cqCommonPrefixLen;
+  int cvCommonPrefixLen;
+  long tsDiff;
   
+  /**
+   * This constructor is used when one needs to read from an input stream
+   */
   public RelativeKey() {
     
   }
   
+  /**
+   * This constructor is used when constructing a key for writing to an output stream
+   */
   public RelativeKey(Key prevKey, Key key) {
     
     this.key = key;
     
     fieldsSame = 0;
+    fieldsPrefixed = 0;
+    
+    ByteSequence prevKeyScratch;
+    ByteSequence keyScratch;
     
     if (prevKey != null) {
-      if (prevKey.getRowData().equals(key.getRowData()))
+      
+      prevKeyScratch = prevKey.getRowData();
+      keyScratch = key.getRowData();
+      rowCommonPrefixLen = getCommonPrefix(prevKeyScratch, keyScratch);
+      if (rowCommonPrefixLen == -1)
         fieldsSame |= ROW_SAME;
+      else if (rowCommonPrefixLen > 1)
+        fieldsPrefixed |= ROW_COMMON_PREFIX;
       
-      if (prevKey.getColumnFamilyData().equals(key.getColumnFamilyData()))
+      prevKeyScratch = prevKey.getColumnFamilyData();
+      keyScratch = key.getColumnFamilyData();
+      cfCommonPrefixLen = getCommonPrefix(prevKeyScratch, keyScratch);
+      if (cfCommonPrefixLen == -1)
         fieldsSame |= CF_SAME;
+      else if (cfCommonPrefixLen > 1)
+        fieldsPrefixed |= CF_COMMON_PREFIX;
       
-      if (prevKey.getColumnQualifierData().equals(key.getColumnQualifierData()))
+      prevKeyScratch = prevKey.getColumnQualifierData();
+      keyScratch = key.getColumnQualifierData();
+      cqCommonPrefixLen = getCommonPrefix(prevKeyScratch, keyScratch);
+      if (cqCommonPrefixLen == -1)
         fieldsSame |= CQ_SAME;
+      else if (cqCommonPrefixLen > 1)
+        fieldsPrefixed |= CQ_COMMON_PREFIX;
       
-      if (prevKey.getColumnVisibilityData().equals(key.getColumnVisibilityData()))
+      prevKeyScratch = prevKey.getColumnVisibilityData();
+      keyScratch = key.getColumnVisibilityData();
+      cvCommonPrefixLen = getCommonPrefix(prevKeyScratch, keyScratch);
+      if (cvCommonPrefixLen == -1)
         fieldsSame |= CV_SAME;
+      else if (cvCommonPrefixLen > 1)
+        fieldsPrefixed |= CV_COMMON_PREFIX;
       
-      if (prevKey.getTimestamp() == key.getTimestamp())
+      tsDiff = key.getTimestamp() - prevKey.getTimestamp();
+      if (tsDiff == 0)
         fieldsSame |= TS_SAME;
+      else
+        fieldsPrefixed |= TS_DIFF;
       
+      fieldsSame |= fieldsPrefixed == 0 ? 0 : PREFIX_COMPRESSION_ENABLED;
     }
     
     // stored deleted information in bit vector instead of its own byte
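
With the new layout, the first byte (fieldsSame) records which fields are byte-for-byte identical to the previous key, plus the DELETED flag and a PREFIX_COMPRESSION_ENABLED marker; only when that marker is set does a second byte (fieldsPrefixed) follow, recording which fields are prefix-compressed and whether the timestamp is stored as a difference. A worked example with hypothetical key values: cf, cq, and cv identical to the previous key, rows sharing the 4-byte prefix "row0", timestamps 7 apart, key not deleted.

// Worked example (hypothetical demo class reusing the constants above) of the
// two flag bytes for prevKey = ("row005", "cf1", "q", "", 100) and
// key = ("row017", "cf1", "q", "", 107).
final class FlagBytesDemo {
  static final byte BIT = 0x01;
  // first byte
  static final byte CF_SAME = BIT << 1;
  static final byte CQ_SAME = BIT << 2;
  static final byte CV_SAME = BIT << 3;
  static final byte PREFIX_COMPRESSION_ENABLED = (byte) (BIT << 7);
  // second byte
  static final byte ROW_COMMON_PREFIX = BIT << 0;
  static final byte TS_DIFF = BIT << 4;

  public static void main(String[] args) {
    byte fieldsSame = (byte) (CF_SAME | CQ_SAME | CV_SAME | PREFIX_COMPRESSION_ENABLED);
    byte fieldsPrefixed = (byte) (ROW_COMMON_PREFIX | TS_DIFF);
    // Stream layout for this pair: fieldsSame, fieldsPrefixed, vint prefix
    // length 4, vint suffix length 2, the bytes "17", then the vlong diff 7.
    System.out.printf("fieldsSame=0x%02X fieldsPrefixed=0x%02X%n", fieldsSame, fieldsPrefixed);
    // prints fieldsSame=0x8E fieldsPrefixed=0x11
  }
}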
@@ -111,6 +132,31 @@ public class RelativeKey implements Writ
       fieldsSame |= DELETED;
   }
   
+  /**
+   * 
+   * @return -1 (exact match) or the number of bytes in common
+   */
+  static int getCommonPrefix(ByteSequence prev, ByteSequence cur) {
+    if (prev == cur)
+      return -1; // infinite... exact match
+      
+    int prevLen = prev.length();
+    int curLen = cur.length();
+    int maxChecks = Math.min(prevLen, curLen);
+    int common = 0;
+    while (common < maxChecks) {
+      int a = prev.byteAt(common) & 0xff;
+      int b = cur.byteAt(common) & 0xff;
+      if (a != b)
+        return common;
+      common++;
+    }
+    // no differences found
+    // either exact or matches the part checked, so if they are the same length, they are an exact match,
+    // and if not, then they have a common prefix over all the checks we've done
+    return prevLen == curLen ? -1 : maxChecks;
+  }
+  
   public void setPrevKey(Key pk) {
     this.prevKey = pk;
   }
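
getCommonPrefix returns -1 for an exact match and otherwise the number of leading bytes the two sequences share; the constructor above only turns on prefix compression for a field when that count is greater than one. A small stand-alone illustration of the same contract on plain byte arrays (an illustrative copy, not the Accumulo ByteSequence-based method):

// -1 means "exact match", otherwise the count of leading bytes in common.
import java.nio.charset.StandardCharsets;

final class CommonPrefixDemo {
  static int commonPrefix(byte[] prev, byte[] cur) {
    int max = Math.min(prev.length, cur.length);
    int common = 0;
    while (common < max && prev[common] == cur[common])
      common++;
    if (common == max && prev.length == cur.length)
      return -1; // identical contents
    return common;
  }

  public static void main(String[] args) {
    byte[] a = "row005".getBytes(StandardCharsets.UTF_8);
    byte[] b = "row017".getBytes(StandardCharsets.UTF_8);
    byte[] c = "r".getBytes(StandardCharsets.UTF_8);
    System.out.println(commonPrefix(a, a)); // -1: ROW_SAME, nothing written
    System.out.println(commonPrefix(a, b)); //  4: ROW_COMMON_PREFIX, only the suffix written
    System.out.println(commonPrefix(c, b)); //  1: too short to bother, row written in full
  }
}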
@@ -118,42 +164,56 @@ public class RelativeKey implements Writ
   @Override
   public void readFields(DataInput in) throws IOException {
     fieldsSame = in.readByte();
+    if ((fieldsSame & PREFIX_COMPRESSION_ENABLED) == PREFIX_COMPRESSION_ENABLED) {
+      fieldsPrefixed = in.readByte();
+    } else {
+      fieldsPrefixed = 0;
+    }
     
     byte[] row, cf, cq, cv;
     long ts;
     
-    if ((fieldsSame & ROW_SAME) == 0) {
-      row = read(in);
-    } else {
+    if ((fieldsSame & ROW_SAME) == ROW_SAME) {
       row = prevKey.getRowData().toArray();
+    } else if ((fieldsPrefixed & ROW_COMMON_PREFIX) == ROW_COMMON_PREFIX) {
+      row = readPrefix(in, prevKey.getRowData());
+    } else {
+      row = read(in);
     }
     
-    if ((fieldsSame & CF_SAME) == 0) {
-      cf = read(in);
-    } else {
+    if ((fieldsSame & CF_SAME) == CF_SAME) {
       cf = prevKey.getColumnFamilyData().toArray();
+    } else if ((fieldsPrefixed & CF_COMMON_PREFIX) == CF_COMMON_PREFIX) {
+      cf = readPrefix(in, prevKey.getColumnFamilyData());
+    } else {
+      cf = read(in);
     }
     
-    if ((fieldsSame & CQ_SAME) == 0) {
-      cq = read(in);
-    } else {
+    if ((fieldsSame & CQ_SAME) == CQ_SAME) {
       cq = prevKey.getColumnQualifierData().toArray();
+    } else if ((fieldsPrefixed & CQ_COMMON_PREFIX) == CQ_COMMON_PREFIX) {
+      cq = readPrefix(in, prevKey.getColumnQualifierData());
+    } else {
+      cq = read(in);
     }
     
-    if ((fieldsSame & CV_SAME) == 0) {
-      cv = read(in);
-    } else {
+    if ((fieldsSame & CV_SAME) == CV_SAME) {
       cv = prevKey.getColumnVisibilityData().toArray();
+    } else if ((fieldsPrefixed & CV_COMMON_PREFIX) == CV_COMMON_PREFIX) {
+      cv = readPrefix(in, prevKey.getColumnVisibilityData());
+    } else {
+      cv = read(in);
     }
     
-    if ((fieldsSame & TS_SAME) == 0) {
-      ts = WritableUtils.readVLong(in);
-    } else {
+    if ((fieldsSame & TS_SAME) == TS_SAME) {
       ts = prevKey.getTimestamp();
+    } else if ((fieldsPrefixed & TS_DIFF) == TS_DIFF) {
+      ts = WritableUtils.readVLong(in) + prevKey.getTimestamp();
+    } else {
+      ts = WritableUtils.readVLong(in);
     }
     
-    this.key = new Key(row, cf, cq, cv, ts, (fieldsSame & DELETED) != 0, false);
-    
+    this.key = new Key(row, cf, cq, cv, ts, (fieldsSame & DELETED) == DELETED, false);
     this.prevKey = this.key;
   }
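
readFields now decodes each field one of three ways: reuse the previous key's bytes when the SAME bit is set, rebuild from the previous key's prefix plus a stored suffix when the COMMON_PREFIX bit is set, or read the field in full. The timestamp instead uses TS_DIFF, a vlong delta added back onto the previous key's timestamp. A round-trip sketch of that timestamp path using Hadoop's WritableUtils directly (a stand-alone demo, not the Accumulo class):

// The writer stores key.getTimestamp() - prevKey.getTimestamp() as a vlong;
// the reader adds the previous timestamp back.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.WritableUtils;

final class TsDiffDemo {
  public static void main(String[] args) throws IOException {
    long prevTs = 1358200000000L;
    long ts = 1358200000007L;

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    WritableUtils.writeVLong(new DataOutputStream(baos), ts - prevTs); // write() when TS_DIFF is set

    DataInput in = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
    long decoded = WritableUtils.readVLong(in) + prevTs;               // readFields() for TS_DIFF
    System.out.println(decoded == ts); // true; small diffs like 7 encode in a single byte
  }
}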
   
@@ -182,10 +242,22 @@ public class RelativeKey implements Writ
     }
   }
   
-  int fastSkip(DataInput in, Key seekKey, MByteSequence value, Key pkey, Key currKey) throws IOException {
+  public static class SkippR {
+    RelativeKey rk;
+    int skipped;
+    Key prevKey;
+    
+    SkippR(RelativeKey rk, int skipped, Key prevKey) {
+      this.rk = rk;
+      this.skipped = skipped;
+      this.prevKey = prevKey;
+    }
+  }
+  
+  public static SkippR fastSkip(DataInput in, Key seekKey, MByteSequence value, Key prevKey, Key currKey) throws IOException {
     // this method assumes that fast skip is being called on a compressed block where the last key
-    // in the compressed block is >= seekKey... therefore this method should go passed the end of the
-    // compressed block... if it does, there is probably an error in the callers logic
+    // in the compressed block is >= seekKey... therefore this method shouldn't go past the end of the
+    // compressed block... if it does, there is probably an error in the caller's logic
     
     // this method mostly avoids object allocation and only does compares when the row changes
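
fastSkip is now a static method that hands everything it computed back in a SkippR: the new RelativeKey, the number of entries skipped, and the previous key to carry forward. The caller updates its own state only after the call returns, instead of having the method mutate a RelativeKey instance and an out-parameter key. A generic, runnable sketch of that result-holder shape under hypothetical names (SkipResult, skipTo), not the Accumulo API:

import java.util.Arrays;
import java.util.Iterator;

// Everything the skip computed comes back in one object.
final class SkipResult {
  final int skipped;    // entries consumed from the stream
  final String lastKey; // state the caller carries forward
  SkipResult(int skipped, String lastKey) {
    this.skipped = skipped;
    this.lastKey = lastKey;
  }
}

final class SkipDemo {
  static SkipResult skipTo(Iterator<String> in, String target) {
    int n = 0;
    String last = null;
    while (in.hasNext()) {
      last = in.next();
      n++;
      if (last.compareTo(target) >= 0)
        break;
    }
    return new SkipResult(n, last);
  }

  public static void main(String[] args) {
    Iterator<String> data = Arrays.asList("a", "b", "c", "d").iterator();
    SkipResult r = skipTo(data, "c");
    // The caller commits its state only after the call returns, mirroring how
    // RFile.Reader now assigns prevKey/entriesLeft/rk from the SkippR fields.
    System.out.println(r.skipped + " -> " + r.lastKey); // 3 -> c
  }
}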
     
@@ -204,11 +276,11 @@ public class RelativeKey implements Writ
     
     if (currKey != null) {
       
-      prow = new MByteSequence(pkey.getRowData());
-      pcf = new MByteSequence(pkey.getColumnFamilyData());
-      pcq = new MByteSequence(pkey.getColumnQualifierData());
-      pcv = new MByteSequence(pkey.getColumnVisibilityData());
-      pts = pkey.getTimestamp();
+      prow = new MByteSequence(currKey.getRowData());
+      pcf = new MByteSequence(currKey.getColumnFamilyData());
+      pcq = new MByteSequence(currKey.getColumnQualifierData());
+      pcv = new MByteSequence(currKey.getColumnVisibilityData());
+      pts = currKey.getTimestamp();
       
       row = new MByteSequence(currKey.getRowData());
       cf = new MByteSequence(currKey.getColumnFamilyData());
@@ -221,15 +293,24 @@ public class RelativeKey implements Writ
       cqCmp = cq.compareTo(stopCQ);
       
       if (rowCmp >= 0) {
-        if (rowCmp > 0)
-          return 0;
+        if (rowCmp > 0) {
+          RelativeKey rk = new RelativeKey();
+          rk.key = rk.prevKey = new Key(currKey);
+          return new SkippR(rk, 0, prevKey);
+        }
         
         if (cfCmp >= 0) {
-          if (cfCmp > 0)
-            return 0;
+          if (cfCmp > 0) {
+            RelativeKey rk = new RelativeKey();
+            rk.key = rk.prevKey = new Key(currKey);
+            return new SkippR(rk, 0, prevKey);
+          }
           
-          if (cqCmp >= 0)
-            return 0;
+          if (cqCmp >= 0) {
+            RelativeKey rk = new RelativeKey();
+            rk.key = rk.prevKey = new Key(currKey);
+            return new SkippR(rk, 0, prevKey);
+          }
         }
       }
       
@@ -246,22 +327,31 @@ public class RelativeKey implements Writ
     }
     
     byte fieldsSame = -1;
+    byte fieldsPrefixed = 0;
     int count = 0;
+    Key newPrevKey = null;
     
     while (true) {
       
-      pdel = (fieldsSame & DELETED) != 0;
+      pdel = (fieldsSame & DELETED) == DELETED;
       
       fieldsSame = in.readByte();
+      if ((fieldsSame & PREFIX_COMPRESSION_ENABLED) == PREFIX_COMPRESSION_ENABLED)
+        fieldsPrefixed = in.readByte();
+      else
+        fieldsPrefixed = 0;
       
       boolean changed = false;
       
-      if ((fieldsSame & ROW_SAME) == 0) {
+      if ((fieldsSame & ROW_SAME) != ROW_SAME) {
         
         MByteSequence tmp = prow;
         prow = row;
         row = tmp;
         
+        if ((fieldsPrefixed & ROW_COMMON_PREFIX) == ROW_COMMON_PREFIX)
+          readPrefix(in, row, prow);
+        else
         read(in, row);
         
         // read a new row, so need to compare...
@@ -269,41 +359,54 @@ public class RelativeKey implements Writ
         changed = true;
       }// else the row is the same as the last, so no need to compare
       
-      if ((fieldsSame & CF_SAME) == 0) {
+      if ((fieldsSame & CF_SAME) != CF_SAME) {
         
         MByteSequence tmp = pcf;
         pcf = cf;
         cf = tmp;
         
+        if ((fieldsPrefixed & CF_COMMON_PREFIX) == CF_COMMON_PREFIX)
+          readPrefix(in, cf, pcf);
+        else
         read(in, cf);
         
         cfCmp = cf.compareTo(stopCF);
         changed = true;
       }
       
-      if ((fieldsSame & CQ_SAME) == 0) {
+      if ((fieldsSame & CQ_SAME) != CQ_SAME) {
         
         MByteSequence tmp = pcq;
         pcq = cq;
         cq = tmp;
         
+        if ((fieldsPrefixed & CQ_COMMON_PREFIX) == CQ_COMMON_PREFIX)
+          readPrefix(in, cq, pcq);
+        else
         read(in, cq);
         
         cqCmp = cq.compareTo(stopCQ);
         changed = true;
       }
       
-      if ((fieldsSame & CV_SAME) == 0) {
+      if ((fieldsSame & CV_SAME) != CV_SAME) {
         
         MByteSequence tmp = pcv;
         pcv = cv;
         cv = tmp;
         
+        if ((fieldsPrefixed & CV_COMMON_PREFIX) == CV_COMMON_PREFIX)
+          readPrefix(in, cv, pcv);
+        else
         read(in, cv);
       }
       
-      if ((fieldsSame & TS_SAME) == 0) {
+      if ((fieldsSame & TS_SAME) != TS_SAME) {
         pts = ts;
+        
+        if ((fieldsPrefixed & TS_DIFF) == TS_DIFF)
+          ts = WritableUtils.readVLong(in) + pts;
+        else
         ts = WritableUtils.readVLong(in);
       }
       
@@ -332,33 +435,39 @@ public class RelativeKey implements Writ
       
       // when the current keys field is same as the last, then
       // set the prev keys field the same as the current key
-      trow = (fieldsSame & ROW_SAME) == 0 ? prow : row;
-      tcf = (fieldsSame & CF_SAME) == 0 ? pcf : cf;
-      tcq = (fieldsSame & CQ_SAME) == 0 ? pcq : cq;
-      tcv = (fieldsSame & CV_SAME) == 0 ? pcv : cv;
-      tts = (fieldsSame & TS_SAME) == 0 ? pts : ts;
+      trow = (fieldsSame & ROW_SAME) == ROW_SAME ? row : prow;
+      tcf = (fieldsSame & CF_SAME) == CF_SAME ? cf : pcf;
+      tcq = (fieldsSame & CQ_SAME) == CQ_SAME ? cq : pcq;
+      tcv = (fieldsSame & CV_SAME) == CV_SAME ? cv : pcv;
+      tts = (fieldsSame & TS_SAME) == TS_SAME ? ts : pts;
       
-      Key tmp = new Key(trow.getBackingArray(), trow.offset(), trow.length(), tcf.getBackingArray(), tcf.offset(), tcf.length(), tcq.getBackingArray(),
+      newPrevKey = new Key(trow.getBackingArray(), trow.offset(), trow.length(), tcf.getBackingArray(), tcf.offset(), tcf.length(), tcq.getBackingArray(),
           tcq.offset(), tcq.length(), tcv.getBackingArray(), tcv.offset(), tcv.length(), tts);
-      tmp.setDeleted(pdel);
-      pkey.set(tmp);
+      newPrevKey.setDeleted(pdel);
+    } else if (count == 1) {
+      if (currKey != null)
+        newPrevKey = currKey;
+      else
+        newPrevKey = prevKey;
+    } else {
+      throw new IllegalStateException();
     }
     
-    this.key = new Key(row.getBackingArray(), row.offset(), row.length(), cf.getBackingArray(), cf.offset(), cf.length(), cq.getBackingArray(), cq.offset(),
+    RelativeKey result = new RelativeKey();
+    result.key = new Key(row.getBackingArray(), row.offset(), row.length(), cf.getBackingArray(), cf.offset(), cf.length(), cq.getBackingArray(), cq.offset(),
         cq.length(), cv.getBackingArray(), cv.offset(), cv.length(), ts);
-    this.key.setDeleted((fieldsSame & DELETED) != 0);
+    result.key.setDeleted((fieldsSame & DELETED) != 0);
+    result.prevKey = result.key;
     
-    this.prevKey = this.key;
-    
-    return count;
+    return new SkippR(result, count, newPrevKey);
   }
   
-  private void read(DataInput in, MByteSequence mbseq) throws IOException {
+  private static void read(DataInput in, MByteSequence mbseq) throws IOException {
     int len = WritableUtils.readVInt(in);
     read(in, mbseq, len);
   }
   
-  private void readValue(DataInput in, MByteSequence mbseq) throws IOException {
+  private static void readValue(DataInput in, MByteSequence mbseq) throws IOException {
     int len = in.readInt();
     read(in, mbseq, len);
   }
@@ -391,16 +500,49 @@ public class RelativeKey implements Writ
     return ret;
   }
 
-  private void read(DataInput in, MByteSequence mbseq, int len) throws IOException {
-    if (mbseq.getBackingArray().length < len) {
-      mbseq.setArray(new byte[nextArraySize(len)]);
+  private static void read(DataInput in, MByteSequence mbseqDestination, int len) throws IOException {
+    if (mbseqDestination.getBackingArray().length < len) {
+      mbseqDestination.setArray(new byte[nextArraySize(len)]);
+    }
+    
+    in.readFully(mbseqDestination.getBackingArray(), 0, len);
+    mbseqDestination.setLength(len);
+  }
+  
+  private static byte[] readPrefix(DataInput in, ByteSequence prefixSource) throws IOException {
+    int prefixLen = WritableUtils.readVInt(in);
+    int remainingLen = WritableUtils.readVInt(in);
+    byte[] data = new byte[prefixLen + remainingLen];
+    if (prefixSource.isBackedByArray()) {
+      System.arraycopy(prefixSource.getBackingArray(), prefixSource.offset(), data, 0, prefixLen);
+    } else {
+      byte[] prefixArray = prefixSource.toArray();
+      System.arraycopy(prefixArray, 0, data, 0, prefixLen);
+    }
+    // read remaining
+    in.readFully(data, prefixLen, remainingLen);
+    return data;
     }
     
-    in.readFully(mbseq.getBackingArray(), 0, len);
-    mbseq.setLength(len);
+  private static void readPrefix(DataInput in, MByteSequence dest, ByteSequence prefixSource) throws IOException {
+    int prefixLen = WritableUtils.readVInt(in);
+    int remainingLen = WritableUtils.readVInt(in);
+    int len = prefixLen + remainingLen;
+    if (dest.getBackingArray().length < len) {
+      dest.setArray(new byte[nextArraySize(len)]);
+    }
+    if (prefixSource.isBackedByArray()) {
+      System.arraycopy(prefixSource.getBackingArray(), prefixSource.offset(), dest.getBackingArray(), 0, prefixLen);
+    } else {
+      byte[] prefixArray = prefixSource.toArray();
+      System.arraycopy(prefixArray, 0, dest.getBackingArray(), 0, prefixLen);
+    }
+    // read remaining
+    in.readFully(dest.getBackingArray(), prefixLen, remainingLen);
+    dest.setLength(len);
   }
   
-  private byte[] read(DataInput in) throws IOException {
+  private static byte[] read(DataInput in) throws IOException {
     int len = WritableUtils.readVInt(in);
     byte[] data = new byte[len];
     in.readFully(data);
@@ -411,52 +553,75 @@ public class RelativeKey implements Writ
     return key;
   }
   
-  private void write(DataOutput out, ByteSequence bs) throws IOException {
+  private static void write(DataOutput out, ByteSequence bs) throws IOException {
     WritableUtils.writeVInt(out, bs.length());
     out.write(bs.getBackingArray(), bs.offset(), bs.length());
   }
   
+  private static void writePrefix(DataOutput out, ByteSequence bs, int commonPrefixLength) throws IOException {
+    WritableUtils.writeVInt(out, commonPrefixLength);
+    WritableUtils.writeVInt(out, bs.length() - commonPrefixLength);
+    out.write(bs.getBackingArray(), bs.offset() + commonPrefixLength, bs.length() - commonPrefixLength);
+  }
+  
   @Override
   public void write(DataOutput out) throws IOException {
     
     out.writeByte(fieldsSame);
     
-    // System.out.printf("wrote fs %x%n", fieldsSame);
-    
-    bytesWritten += 1;
+    if ((fieldsSame & PREFIX_COMPRESSION_ENABLED) == PREFIX_COMPRESSION_ENABLED) {
+      out.write(fieldsPrefixed);
+    }
     
-    if ((fieldsSame & ROW_SAME) == 0) {
+    if ((fieldsSame & ROW_SAME) == ROW_SAME) {
+      // same, write nothing
+    } else if ((fieldsPrefixed & ROW_COMMON_PREFIX) == ROW_COMMON_PREFIX) {
+      // similar, write what's common
+      writePrefix(out, key.getRowData(), rowCommonPrefixLen);
+    } else {
+      // write it all
       write(out, key.getRowData());
     }
     
-    if ((fieldsSame & CF_SAME) == 0) {
+    if ((fieldsSame & CF_SAME) == CF_SAME) {
+      // same, write nothing
+    } else if ((fieldsPrefixed & CF_COMMON_PREFIX) == CF_COMMON_PREFIX) {
+      // similar, write what's common
+      writePrefix(out, key.getColumnFamilyData(), cfCommonPrefixLen);
+    } else {
+      // write it all
       write(out, key.getColumnFamilyData());
     }
     
-    if ((fieldsSame & CQ_SAME) == 0) {
-      
+    if ((fieldsSame & CQ_SAME) == CQ_SAME) {
+      // same, write nothing
+    } else if ((fieldsPrefixed & CQ_COMMON_PREFIX) == CQ_COMMON_PREFIX) {
+      // similar, write what's common
+      writePrefix(out, key.getColumnQualifierData(), cqCommonPrefixLen);
+    } else {
+      // write it all
       write(out, key.getColumnQualifierData());
-      
-      /*
-       * Integer id = colFams.get(key.getColumnQualifier()); if(id == null){ id = nextId++; colFams.put(key.getColumnQualifier(), id); }
-       * 
-       * WritableUtils.writeVInt(out, id); bytesWritten += 1;
-       */
-      
     }
     
-    if ((fieldsSame & CV_SAME) == 0) {
+    if ((fieldsSame & CV_SAME) == CV_SAME) {
+      // same, write nothing
+    } else if ((fieldsPrefixed & CV_COMMON_PREFIX) == CV_COMMON_PREFIX) {
+      // similar, write what's common
+      writePrefix(out, key.getColumnVisibilityData(), cvCommonPrefixLen);
+    } else {
+      // write it all
       write(out, key.getColumnVisibilityData());
     }
     
-    if ((fieldsSame & TS_SAME) == 0) {
+    if ((fieldsSame & TS_SAME) == TS_SAME) {
+      // same, write nothing
+    } else if ((fieldsPrefixed & TS_DIFF) == TS_DIFF) {
+      // similar, write what's common
+      WritableUtils.writeVLong(out, tsDiff);
+    } else {
+      // write it all
       WritableUtils.writeVLong(out, key.getTimestamp());
     }
   }
   
-  @Override
-  public int compareTo(RelativeKey o) {
-    throw new UnsupportedOperationException();
-  }
-  
 }
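
The write path mirrors the read path: for each field it writes nothing (SAME), a prefix length plus the remaining suffix (COMMON_PREFIX), or the whole field, and the timestamp is either omitted, written as the vlong tsDiff, or written in full. A stand-alone round trip of the writePrefix/readPrefix wire format (vint prefix length, vint suffix length, suffix bytes), on plain byte arrays rather than the Accumulo classes:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.WritableUtils;

final class PrefixRoundTrip {
  static void writePrefix(DataOutput out, byte[] cur, int commonPrefixLength) throws IOException {
    WritableUtils.writeVInt(out, commonPrefixLength);
    WritableUtils.writeVInt(out, cur.length - commonPrefixLength);
    out.write(cur, commonPrefixLength, cur.length - commonPrefixLength);
  }

  static byte[] readPrefix(DataInput in, byte[] prev) throws IOException {
    int prefixLen = WritableUtils.readVInt(in);
    int remainingLen = WritableUtils.readVInt(in);
    byte[] data = new byte[prefixLen + remainingLen];
    System.arraycopy(prev, 0, data, 0, prefixLen); // prefix comes from the previous key
    in.readFully(data, prefixLen, remainingLen);   // only the suffix is on the stream
    return data;
  }

  public static void main(String[] args) throws IOException {
    byte[] prev = "row005".getBytes("UTF-8");
    byte[] cur = "row017".getBytes("UTF-8");

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    writePrefix(new DataOutputStream(baos), cur, 4); // only "17" plus two 1-byte vints hit the stream

    byte[] decoded = readPrefix(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())), prev);
    System.out.println(new String(decoded, "UTF-8")); // row017
  }
}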

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/SplitLarge.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/SplitLarge.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/SplitLarge.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/SplitLarge.java Mon Jan 14 22:03:24 2013
@@ -17,7 +17,9 @@
 package org.apache.accumulo.core.file.rfile;
 
 import java.util.ArrayList;
+import java.util.List;
 
+import org.apache.accumulo.core.cli.Help;
 import org.apache.accumulo.core.conf.DefaultConfiguration;
 import org.apache.accumulo.core.conf.Property;
 import org.apache.accumulo.core.data.ByteSequence;
@@ -28,44 +30,43 @@ import org.apache.accumulo.core.file.blo
 import org.apache.accumulo.core.file.rfile.RFile.Reader;
 import org.apache.accumulo.core.file.rfile.RFile.Writer;
 import org.apache.accumulo.core.util.CachedConfiguration;
-import org.apache.commons.cli.BasicParser;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
+import com.beust.jcommander.Parameter;
+
 /**
  * Split an RFile into large and small key/value files.
  * 
  */
 public class SplitLarge {
   
+  static class Opts extends Help {
+    @Parameter(names="-m", description="the maximum size of the key/value pair to shunt to the small file")
+    long maxSize = 10 * 1024 * 1024;
+    @Parameter(description="<file.rf> { <file.rf> ... }")
+    List<String> files = new ArrayList<String>();
+  }
+  
+  
   public static void main(String[] args) throws Exception {
     Configuration conf = CachedConfiguration.getInstance();
     FileSystem fs = FileSystem.get(conf);
     long maxSize = 10 * 1024 * 1024;
+    Opts opts = new Opts();
+    opts.parseArgs(SplitLarge.class.getName(), args);
     
-    Options opts = new Options();
-    Option maxSizeOption = new Option("m", "", true, "the maximum size of the key/value pair to shunt to the small file");
-    opts.addOption(maxSizeOption);
-    
-    CommandLine commandLine = new BasicParser().parse(opts, args);
-    if (commandLine.hasOption(maxSizeOption.getOpt())) {
-      maxSize = Long.parseLong(commandLine.getOptionValue(maxSizeOption.getOpt()));
-    }
-    
-    for (String arg : commandLine.getArgs()) {
-      Path path = new Path(arg);
+    for (String file : opts.files) {
+      Path path = new Path(file);
       CachableBlockFile.Reader rdr = new CachableBlockFile.Reader(fs, path, conf, null, null);
       Reader iter = new RFile.Reader(rdr);
       
-      if (!arg.endsWith(".rf")) {
+      if (!file.endsWith(".rf")) {
         throw new IllegalArgumentException("File must end with .rf");
       }
-      String smallName = arg.substring(0, arg.length() - 3) + "_small.rf";
-      String largeName = arg.substring(0, arg.length() - 3) + "_large.rf";
+      String smallName = file.substring(0, file.length() - 3) + "_small.rf";
+      String largeName = file.substring(0, file.length() - 3) + "_large.rf";
       
       int blockSize = (int) DefaultConfiguration.getDefaultConfiguration().getMemoryInBytes(Property.TABLE_FILE_BLOCK_SIZE);
       Writer small = new RFile.Writer(new CachableBlockFile.Writer(fs, new Path(smallName), "gz", conf), blockSize);
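
SplitLarge drops commons-cli in favor of a jcommander-annotated Opts class; Accumulo's Help base class supplies parseArgs and the usage handling. A stand-alone sketch of the same annotation pattern using plain JCommander, since Help and parseArgs are Accumulo-specific wrappers:

import java.util.ArrayList;
import java.util.List;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;

final class SplitLargeOptsDemo {
  static class Opts {
    @Parameter(names = "-m", description = "the maximum size of the key/value pair to shunt to the small file")
    long maxSize = 10 * 1024 * 1024;

    @Parameter(description = "<file.rf> { <file.rf> ... }")
    List<String> files = new ArrayList<String>(); // unnamed args collect here
  }

  public static void main(String[] args) {
    Opts opts = new Opts();
    // Parse supplied args, or a sample command line if none were given.
    new JCommander(opts).parse(args.length == 0 ? new String[] {"-m", "1048576", "a.rf"} : args);
    System.out.println("maxSize=" + opts.maxSize + " files=" + opts.files);
  }
}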

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/bcfile/BoundedRangeFileInputStream.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/bcfile/BoundedRangeFileInputStream.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/bcfile/BoundedRangeFileInputStream.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/file/rfile/bcfile/BoundedRangeFileInputStream.java Mon Jan 14 22:03:24 2013
@@ -93,14 +93,15 @@ class BoundedRangeFileInputStream extend
     if (n == 0)
       return -1;
     Integer ret = 0;
-    synchronized (in) {
-      in.seek(pos);
+    final FSDataInputStream inLocal = in;
+    synchronized (inLocal) {
+    	inLocal.seek(pos);
       try {
         ret = AccessController.doPrivileged(new PrivilegedExceptionAction<Integer>() {
           @Override
           public Integer run() throws IOException {
             int ret = 0;
-            ret = in.read(b, off, n);
+            ret = inLocal.read(b, off, n);
             return ret;
           }
         });
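
The read method now copies the in field into a final local before synchronizing and reading, presumably so the lock and the privileged read both go through one stable reference even if the field is later reassigned or nulled by close. A minimal sketch of that pattern with hypothetical classes:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

final class LocalRefDemo {
  private volatile InputStream in = new ByteArrayInputStream(new byte[] {1, 2, 3});

  int readOne() throws IOException {
    final InputStream inLocal = in; // capture once
    synchronized (inLocal) {        // lock the captured object, not the mutable field
      return inLocal.read();
    }
  }

  public void close() throws IOException {
    InputStream inLocal = in;
    in = null;                      // the field may change; readers still hold their copy
    inLocal.close();
  }

  public static void main(String[] args) throws IOException {
    System.out.println(new LocalRefDemo().readOne()); // 1
  }
}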

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/gc/thrift/GCMonitorService.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/gc/thrift/GCMonitorService.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/gc/thrift/GCMonitorService.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/gc/thrift/GCMonitorService.java Mon Jan 14 22:03:24 2013
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 /**
  * Autogenerated by Thrift Compiler (0.9.0)
  *

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/gc/thrift/GCStatus.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/gc/thrift/GCStatus.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/gc/thrift/GCStatus.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/gc/thrift/GCStatus.java Mon Jan 14 22:03:24 2013
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 /**
  * Autogenerated by Thrift Compiler (0.9.0)
  *

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/gc/thrift/GcCycleStats.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/gc/thrift/GcCycleStats.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/gc/thrift/GcCycleStats.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/gc/thrift/GcCycleStats.java Mon Jan 14 22:03:24 2013
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 /**
  * Autogenerated by Thrift Compiler (0.9.0)
  *

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/AggregatingIterator.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/AggregatingIterator.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/AggregatingIterator.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/AggregatingIterator.java Mon Jan 14 22:03:24 2013
@@ -29,7 +29,7 @@ import org.apache.accumulo.core.data.Ran
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.iterators.aggregation.Aggregator;
 import org.apache.accumulo.core.iterators.conf.ColumnToClassMapping;
-import org.apache.accumulo.start.classloader.AccumuloClassLoader;
+import org.apache.accumulo.start.classloader.vfs.AccumuloVFSClassLoader;
 import org.apache.log4j.Logger;
 
 /**
@@ -193,7 +193,7 @@ public class AggregatingIterator impleme
         return false;
       Class<? extends Aggregator> clazz;
       try {
-        clazz = AccumuloClassLoader.loadClass(classname, Aggregator.class);
+        clazz = AccumuloVFSClassLoader.loadClass(classname, Aggregator.class);
         clazz.newInstance();
       } catch (ClassNotFoundException e) {
         throw new IllegalArgumentException("class not found: " + classname);

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/ColumnFamilyCounter.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/ColumnFamilyCounter.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/ColumnFamilyCounter.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/ColumnFamilyCounter.java Mon Jan 14 22:03:24 2013
@@ -17,7 +17,6 @@
 package org.apache.accumulo.core.iterators;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
 import java.util.Collection;
 import java.util.Map;
 
@@ -31,8 +30,6 @@ public class ColumnFamilyCounter impleme
   private SortedKeyValueIterator<Key,Value> source;
   private Key key;
   private Value value;
-
-  private static final Charset utf8 = Charset.forName("UTF8");
   
   @Override
   public void init(SortedKeyValueIterator<Key,Value> source, Map<String,String> options, IteratorEnvironment env) throws IOException {
@@ -61,7 +58,7 @@ public class ColumnFamilyCounter impleme
       }
       
       this.key = new Key(currentRow.toArray(), currentColf.toArray(), new byte[0], new byte[0], ts);
-      this.value = new Value(Integer.toString(count).getBytes(utf8));
+      this.value = new Value(Integer.toString(count).getBytes());
       
     } else {
       this.key = null;

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/Combiner.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/Combiner.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/Combiner.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/Combiner.java Mon Jan 14 22:03:24 2013
@@ -35,12 +35,13 @@ import org.apache.accumulo.core.iterator
 import org.apache.log4j.Logger;
 
 /**
- * A SortedKeyValueIterator that combines the Values for different versions of a Key into a single Value. Combiner will replace one or more versions of a Key
+ * A SortedKeyValueIterator that combines the Values for different versions (timestamps) of a Key into a single Value. Combiner will replace one or more versions of a Key
  * and their Values with the most recent Key and a Value which is the result of the reduce method.
  * 
  * Subclasses must implement a reduce method: {@code public Value reduce(Key key, Iterator<Value> iter)}.
  * 
  * This reduce method will be passed the most recent Key and an iterator over the Values for all non-deleted versions of that Key.
+ * A combiner will not combine keys that differ by more than the timestamp.
  */
 public abstract class Combiner extends WrappingIterator implements OptionDescriber {
   static final Logger log = Logger.getLogger(Combiner.class);
@@ -63,7 +64,7 @@ public abstract class Combiner extends W
      */
     public ValueIterator(SortedKeyValueIterator<Key,Value> source) {
       this.source = source;
-      topKey = source.getTopKey();
+      topKey = new Key(source.getTopKey());
       hasNext = _hasNext();
     }
     

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/IteratorUtil.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/IteratorUtil.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/IteratorUtil.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/IteratorUtil.java Mon Jan 14 22:03:24 2013
@@ -38,7 +38,7 @@ import org.apache.accumulo.core.iterator
 import org.apache.accumulo.core.iterators.user.VersioningIterator;
 import org.apache.accumulo.core.tabletserver.thrift.IteratorConfig;
 import org.apache.accumulo.core.tabletserver.thrift.TIteratorSetting;
-import org.apache.accumulo.start.classloader.AccumuloClassLoader;
+import org.apache.accumulo.start.classloader.vfs.AccumuloVFSClassLoader;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.log4j.Logger;
@@ -210,12 +210,13 @@ public class IteratorUtil {
       }
     }
     
-    return loadIterators(source, iters, allOptions, env, useAccumuloClassLoader);
+    return loadIterators(source, iters, allOptions, env, useAccumuloClassLoader, conf.get(Property.TABLE_CLASSPATH));
   }
   
   @SuppressWarnings("unchecked")
   public static <K extends WritableComparable<?>,V extends Writable> SortedKeyValueIterator<K,V> loadIterators(SortedKeyValueIterator<K,V> source,
-      Collection<IterInfo> iters, Map<String,Map<String,String>> iterOpts, IteratorEnvironment env, boolean useAccumuloClassLoader) throws IOException {
+      Collection<IterInfo> iters, Map<String,Map<String,String>> iterOpts, IteratorEnvironment env, boolean useAccumuloClassLoader, String context)
+      throws IOException {
     // wrap the source in a SynchronizedIterator in case any of the additional configured iterators want to use threading
     SortedKeyValueIterator<K,V> prev = new SynchronizedIterator<K,V>(source);
     
@@ -224,7 +225,11 @@ public class IteratorUtil {
        
         Class<? extends SortedKeyValueIterator<K,V>> clazz;
         if (useAccumuloClassLoader){
-          clazz = (Class<? extends SortedKeyValueIterator<K,V>>) AccumuloClassLoader.loadClass(iterInfo.className, SortedKeyValueIterator.class);
+          if (context != null && !context.equals(""))
+            clazz = (Class<? extends SortedKeyValueIterator<K,V>>) AccumuloVFSClassLoader.getContextManager().loadClass(context, iterInfo.className,
+                SortedKeyValueIterator.class);
+          else
+            clazz = (Class<? extends SortedKeyValueIterator<K,V>>) AccumuloVFSClassLoader.loadClass(iterInfo.className, SortedKeyValueIterator.class);
         }else{
           clazz = (Class<? extends SortedKeyValueIterator<K,V>>) Class.forName(iterInfo.className).asSubclass(SortedKeyValueIterator.class);
         }
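
loadIterators now receives the table's Property.TABLE_CLASSPATH value as a context argument: when it is non-empty, iterator classes load through AccumuloVFSClassLoader's context manager for that context, otherwise through the default VFS class loader. A rough stand-in for that dispatch using plain ClassLoaders; the VFS context API itself is only what the diff shows, so it is not reproduced here:

final class ContextLoadDemo {
  static Class<?> loadForTable(String context, String className,
      ClassLoader contextLoader, ClassLoader defaultLoader) throws ClassNotFoundException {
    if (context != null && !context.equals(""))
      return contextLoader.loadClass(className); // per-table context
    return defaultLoader.loadClass(className);   // shared default
  }

  public static void main(String[] args) throws ClassNotFoundException {
    ClassLoader cl = ContextLoadDemo.class.getClassLoader();
    // Empty context falls through to the default loader.
    System.out.println(loadForTable("", "java.lang.String", cl, cl).getName());
  }
}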

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/LongCombiner.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/LongCombiner.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/LongCombiner.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/LongCombiner.java Mon Jan 14 22:03:24 2013
@@ -21,7 +21,6 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
-import java.nio.charset.Charset;
 import java.util.Map;
 
 import org.apache.accumulo.core.client.IteratorSetting;
@@ -46,8 +45,6 @@ public abstract class LongCombiner exten
   
   protected static final String TYPE = "type";
   protected static final String CLASS_PREFIX = "class:";
-
-  private static final Charset utf8 = Charset.forName("UTF8");
   
   public static enum Type {
     /**
@@ -176,7 +173,7 @@ public abstract class LongCombiner exten
   public static class StringEncoder implements Encoder<Long> {
     @Override
     public byte[] encode(Long v) {
-      return Long.toString(v).getBytes(utf8);
+      return Long.toString(v).getBytes();
     }
     
     @Override

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/TypedValueCombiner.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/TypedValueCombiner.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/TypedValueCombiner.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/TypedValueCombiner.java Mon Jan 14 22:03:24 2013
@@ -24,7 +24,7 @@ import java.util.NoSuchElementException;
 import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.start.classloader.AccumuloClassLoader;
+import org.apache.accumulo.start.classloader.vfs.AccumuloVFSClassLoader;
 
 /**
  * A Combiner that decodes each Value to type V before reducing, then encodes the result of typedReduce back to Value.
@@ -132,7 +132,7 @@ public abstract class TypedValueCombiner
   protected void setEncoder(String encoderClass) {
     try {
       @SuppressWarnings("unchecked")
-      Class<? extends Encoder<V>> clazz = (Class<? extends Encoder<V>>) AccumuloClassLoader.loadClass(encoderClass, Encoder.class);
+      Class<? extends Encoder<V>> clazz = (Class<? extends Encoder<V>>) AccumuloVFSClassLoader.loadClass(encoderClass, Encoder.class);
       encoder = clazz.newInstance();
     } catch (ClassNotFoundException e) {
       throw new IllegalArgumentException(e);

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/aggregation/StringMax.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/aggregation/StringMax.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/aggregation/StringMax.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/aggregation/StringMax.java Mon Jan 14 22:03:24 2013
@@ -16,8 +16,6 @@
  */
 package org.apache.accumulo.core.iterators.aggregation;
 
-import java.nio.charset.Charset;
-
 import org.apache.accumulo.core.data.Value;
 
 /**
@@ -28,10 +26,8 @@ public class StringMax implements Aggreg
   
   long max = Long.MIN_VALUE;
   
-  private static final Charset utf8 = Charset.forName("UTF8");
-  
   public Value aggregate() {
-    return new Value(Long.toString(max).getBytes(utf8));
+    return new Value(Long.toString(max).getBytes());
   }
   
   public void collect(Value value) {

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/aggregation/StringMin.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/aggregation/StringMin.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/aggregation/StringMin.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/aggregation/StringMin.java Mon Jan 14 22:03:24 2013
@@ -16,8 +16,6 @@
  */
 package org.apache.accumulo.core.iterators.aggregation;
 
-import java.nio.charset.Charset;
-
 import org.apache.accumulo.core.data.Value;
 
 /**
@@ -28,10 +26,8 @@ public class StringMin implements Aggreg
   
   long min = Long.MAX_VALUE;
   
-  private static final Charset utf8 = Charset.forName("UTF8");
-  
   public Value aggregate() {
-    return new Value(Long.toString(min).getBytes(utf8));
+    return new Value(Long.toString(min).getBytes());
   }
   
   public void collect(Value value) {

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/aggregation/StringSummation.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/aggregation/StringSummation.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/aggregation/StringSummation.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/aggregation/StringSummation.java Mon Jan 14 22:03:24 2013
@@ -16,8 +16,6 @@
  */
 package org.apache.accumulo.core.iterators.aggregation;
 
-import java.nio.charset.Charset;
-
 import org.apache.accumulo.core.data.Value;
 
 /**
@@ -28,10 +26,8 @@ public class StringSummation implements 
   
   long sum = 0;
   
-  private static final Charset utf8 = Charset.forName("UTF8");
-  
   public Value aggregate() {
-    return new Value(Long.toString(sum).getBytes(utf8));
+    return new Value(Long.toString(sum).getBytes());
   }
   
   public void collect(Value value) {

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/conf/ColumnSet.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/conf/ColumnSet.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/conf/ColumnSet.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/conf/ColumnSet.java Mon Jan 14 22:03:24 2013
@@ -16,7 +16,6 @@
  */
 package org.apache.accumulo.core.iterators.conf;
 
-import java.nio.charset.Charset;
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.Set;
@@ -33,8 +32,6 @@ public class ColumnSet {
   
   private ColHashKey lookupCol = new ColHashKey();
   private ColFamHashKey lookupCF = new ColFamHashKey();
-
-  private static final Charset utf8 = Charset.forName("UTF8");
   
   public ColumnSet() {
     objectsCF = new HashSet<ColFamHashKey>();
@@ -95,7 +92,7 @@ public class ColumnSet {
     
     return sb.toString();
   }
-
+  
   static void encode(StringBuilder sb, Text t) {
     for (int i = 0; i < t.getLength(); i++) {
       int b = (0xff & t.getBytes()[i]);
@@ -138,7 +135,7 @@ public class ColumnSet {
   static Text decode(String s) {
     Text t = new Text();
     
-    byte[] sb = s.getBytes(utf8);
+    byte[] sb = s.getBytes();
     
     // very inefficient code
     for (int i = 0; i < sb.length; i++) {

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/conf/ColumnToClassMapping.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/conf/ColumnToClassMapping.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/conf/ColumnToClassMapping.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/conf/ColumnToClassMapping.java Mon Jan 14 22:03:24 2013
@@ -24,7 +24,7 @@ import org.apache.accumulo.core.data.Key
 import org.apache.accumulo.core.iterators.conf.ColumnUtil.ColFamHashKey;
 import org.apache.accumulo.core.iterators.conf.ColumnUtil.ColHashKey;
 import org.apache.accumulo.core.util.Pair;
-import org.apache.accumulo.start.classloader.AccumuloClassLoader;
+import org.apache.accumulo.start.classloader.vfs.AccumuloVFSClassLoader;
 import org.apache.hadoop.io.Text;
 
 public class ColumnToClassMapping<K> {
@@ -50,7 +50,7 @@ public class ColumnToClassMapping<K> {
       
       Pair<Text,Text> pcic = ColumnSet.decodeColumns(column);
       
-      Class<? extends K> clazz = AccumuloClassLoader.loadClass(className, c);
+      Class<? extends K> clazz = AccumuloVFSClassLoader.loadClass(className, c);
       
       if (pcic.getSecond() == null) {
         addObject(pcic.getFirst(), clazz.newInstance());

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/system/SourceSwitchingIterator.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/system/SourceSwitchingIterator.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/system/SourceSwitchingIterator.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/system/SourceSwitchingIterator.java Mon Jan 14 22:03:24 2013
@@ -57,7 +57,7 @@ public class SourceSwitchingIterator imp
   private boolean onlySwitchAfterRow;
   private AtomicBoolean iflag;
   
-  private List<SourceSwitchingIterator> copies;
+  private final List<SourceSwitchingIterator> copies;
   
   private SourceSwitchingIterator(DataSource source, boolean onlySwitchAfterRow, List<SourceSwitchingIterator> copies, AtomicBoolean iflag) {
     this.source = source;

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/system/VisibilityFilter.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/system/VisibilityFilter.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/system/VisibilityFilter.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/system/VisibilityFilter.java Mon Jan 14 22:03:24 2013
@@ -25,6 +25,7 @@ import org.apache.accumulo.core.security
 import org.apache.accumulo.core.security.ColumnVisibility;
 import org.apache.accumulo.core.security.VisibilityEvaluator;
 import org.apache.accumulo.core.security.VisibilityParseException;
+import org.apache.accumulo.core.util.BadArgumentException;
 import org.apache.accumulo.core.util.TextUtil;
 import org.apache.commons.collections.map.LRUMap;
 import org.apache.hadoop.io.Text;
@@ -73,6 +74,9 @@ public class VisibilityFilter extends Fi
     } catch (VisibilityParseException e) {
       log.error("Parse Error", e);
       return false;
+    } catch (BadArgumentException e) {
+      log.error("Parse Error", e);
+      return false;
     }
   }
 }
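
[Note: the new catch clause treats a malformed visibility expression the same way as a VisibilityParseException. A small sketch of the failure mode it guards against, assuming ColumnVisibility's byte[] constructor rejects malformed expressions with BadArgumentException as the catch block implies; the helper name is illustrative.]

  import org.apache.accumulo.core.security.ColumnVisibility;
  import org.apache.accumulo.core.util.BadArgumentException;

  public class VisibilityParseSketch {
    public static boolean isWellFormed(byte[] expression) {
      try {
        new ColumnVisibility(expression); // parses the expression, throwing on malformed input
        return true;
      } catch (BadArgumentException e) {
        return false; // the filter above now drops such entries instead of propagating the exception
      }
    }
  }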

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/GrepIterator.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/GrepIterator.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/GrepIterator.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/GrepIterator.java Mon Jan 14 22:03:24 2013
@@ -17,7 +17,6 @@
 package org.apache.accumulo.core.iterators.user;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
 import java.util.Arrays;
 import java.util.Map;
 
@@ -36,8 +35,6 @@ public class GrepIterator extends Filter
   
   private byte term[];
   
-  private static final Charset utf8 = Charset.forName("UTF8");
-    
   @Override
   public boolean accept(Key k, Value v) {
     return match(v.get()) || match(k.getRowData()) || match(k.getColumnFamilyData()) || match(k.getColumnQualifierData());
@@ -91,7 +88,7 @@ public class GrepIterator extends Filter
   @Override
   public void init(SortedKeyValueIterator<Key,Value> source, Map<String,String> options, IteratorEnvironment env) throws IOException {
     super.init(source, options, env);
-    term = options.get("term").getBytes(utf8);
+    term = options.get("term").getBytes();
   }
   
   /**

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/IntersectingIterator.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/IntersectingIterator.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/IntersectingIterator.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/IntersectingIterator.java Mon Jan 14 22:03:24 2013
@@ -17,7 +17,6 @@
 package org.apache.accumulo.core.iterators.user;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Map;
@@ -61,8 +60,6 @@ public class IntersectingIterator implem
   
   protected Text nullText = new Text();
   
-  private static final Charset utf8 = Charset.forName("UTF8");
-  
   protected Text getPartition(Key key) {
     return key.getRow();
   }
@@ -420,7 +417,7 @@ public class IntersectingIterator implem
     String[] columnStrings = columns.split("\n");
     Text[] columnTexts = new Text[columnStrings.length];
     for (int i = 0; i < columnStrings.length; i++) {
-      columnTexts[i] = new Text(Base64.decodeBase64(columnStrings[i].getBytes(utf8)));
+      columnTexts[i] = new Text(Base64.decodeBase64(columnStrings[i].getBytes()));
     }
     return columnTexts;
   }
@@ -434,7 +431,7 @@ public class IntersectingIterator implem
     if (flags == null)
       return null;
     
-    byte[] bytes = Base64.decodeBase64(flags.getBytes(utf8));
+    byte[] bytes = Base64.decodeBase64(flags.getBytes());
     boolean[] bFlags = new boolean[bytes.length];
     for (int i = 0; i < bytes.length; i++) {
       if (bytes[i] == 1)
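
[Note: decodeColumns() above splits a newline-joined option and Base64-decodes each term. For context, a minimal sketch of how a client typically populates that option through the public helper; it assumes the static setColumnFamilies(IteratorSetting, Text[]) method present in released Accumulo versions, and the priority, name, and terms are illustrative.]

  import org.apache.accumulo.core.client.IteratorSetting;
  import org.apache.accumulo.core.iterators.user.IntersectingIterator;
  import org.apache.hadoop.io.Text;

  public class IntersectingIteratorSketch {
    public static IteratorSetting buildIntersection() {
      IteratorSetting is = new IteratorSetting(20, "ii", IntersectingIterator.class);
      // the terms are Base64-encoded and newline-joined into a single option,
      // which decodeColumns() in the hunk above turns back into Text[]
      IntersectingIterator.setColumnFamilies(is, new Text[] {new Text("term1"), new Text("term2")});
      return is;
    }
  }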

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/LargeRowFilter.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/LargeRowFilter.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/LargeRowFilter.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/LargeRowFilter.java Mon Jan 14 22:03:24 2013
@@ -17,7 +17,6 @@
 package org.apache.accumulo.core.iterators.user;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -44,10 +43,8 @@ import org.apache.hadoop.io.Text;
  * This iterator works in a similar way to the RowDeletingIterator. See its javadoc about locality groups.
  */
 public class LargeRowFilter implements SortedKeyValueIterator<Key,Value>, OptionDescriber {
-
-  private static final Charset utf8 = Charset.forName("UTF8");
   
-  public static final Value SUPPRESS_ROW_VALUE = new Value("SUPPRESS_ROW".getBytes(utf8));
+  public static final Value SUPPRESS_ROW_VALUE = new Value("SUPPRESS_ROW".getBytes());
   
   private static final ByteSequence EMPTY = new ArrayByteSequence(new byte[] {});
   

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/RegExFilter.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/RegExFilter.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/RegExFilter.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/RegExFilter.java Mon Jan 14 22:03:24 2013
@@ -18,7 +18,6 @@ package org.apache.accumulo.core.iterato
 
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
-import java.nio.charset.Charset;
 import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -35,8 +34,6 @@ import org.apache.accumulo.core.iterator
  * A Filter that matches entries based on Java regular expressions.
  */
 public class RegExFilter extends Filter {
-
-  private static final Charset utf8 = Charset.forName("UTF8");  
   
   @Override
   public SortedKeyValueIterator<Key,Value> deepCopy(IteratorEnvironment env) {
@@ -196,7 +193,7 @@ public class RegExFilter extends Filter 
       try {
         this.encoding = options.get(ENCODING);
         @SuppressWarnings("unused")
-        String test = new String("test".getBytes(utf8), encoding);
+        String test = new String("test".getBytes(), encoding);
       } catch (UnsupportedEncodingException e) {
         e.printStackTrace();
         return false;

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/RowDeletingIterator.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/RowDeletingIterator.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/RowDeletingIterator.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/RowDeletingIterator.java Mon Jan 14 22:03:24 2013
@@ -17,7 +17,6 @@
 package org.apache.accumulo.core.iterators.user;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.Map;
@@ -49,9 +48,7 @@ import org.apache.accumulo.core.iterator
 
 public class RowDeletingIterator implements SortedKeyValueIterator<Key,Value> {
   
-  private static final Charset utf8 = Charset.forName("UTF8");
-	  
-  public static final Value DELETE_ROW_VALUE = new Value("DEL_ROW".getBytes(utf8));
+  public static final Value DELETE_ROW_VALUE = new Value("DEL_ROW".getBytes());
   private SortedKeyValueIterator<Key,Value> source;
   private boolean propogateDeletes;
   private ByteSequence currentRow;
@@ -61,7 +58,7 @@ public class RowDeletingIterator impleme
   private boolean dropEmptyColFams;
   
   private static final ByteSequence EMPTY = new ArrayByteSequence(new byte[] {});
-
+  
   private RowDeletingIterator(SortedKeyValueIterator<Key,Value> source, boolean propogateDeletes2) {
     this.source = source;
     this.propogateDeletes = propogateDeletes2;

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/SummingArrayCombiner.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/SummingArrayCombiner.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/SummingArrayCombiner.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/SummingArrayCombiner.java Mon Jan 14 22:03:24 2013
@@ -21,7 +21,6 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
-import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Iterator;
@@ -49,8 +48,6 @@ public class SummingArrayCombiner extend
   private static final String TYPE = "type";
   private static final String CLASS_PREFIX = "class:";
   
-  private static final Charset utf8 = Charset.forName("UTF8");
-  
   public static enum Type {
     /**
      * indicates a variable-length encoding of a list of Longs using {@link SummingArrayCombiner.VarLongArrayEncoder}
@@ -205,7 +202,7 @@ public class SummingArrayCombiner extend
         sb.append(",");
         sb.append(Long.toString(la.get(i)));
       }
-      return sb.toString().getBytes(utf8);
+      return sb.toString().getBytes();
     }
     
     @Override

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/SummingCombiner.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/SummingCombiner.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/SummingCombiner.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/SummingCombiner.java Mon Jan 14 22:03:24 2013
@@ -29,7 +29,8 @@ public class SummingCombiner extends Lon
   public Long typedReduce(Key key, Iterator<Long> iter) {
     long sum = 0;
     while (iter.hasNext()) {
-      sum = safeAdd(sum, iter.next());
+      Long next = iter.next();
+      sum = safeAdd(sum, next);
     }
     return sum;
   }
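
[Note: a minimal sketch of configuring a SummingCombiner with the string encoding whose StringEncoder is touched in the LongCombiner hunk above. It assumes the Combiner.setCombineAllColumns and LongCombiner.setEncodingType helpers present in released Accumulo versions; the priority and name are illustrative.]

  import org.apache.accumulo.core.client.IteratorSetting;
  import org.apache.accumulo.core.iterators.Combiner;
  import org.apache.accumulo.core.iterators.LongCombiner;
  import org.apache.accumulo.core.iterators.user.SummingCombiner;

  public class SummingCombinerSketch {
    public static IteratorSetting buildSummingCombiner() {
      IteratorSetting is = new IteratorSetting(10, "sum", SummingCombiner.class);
      Combiner.setCombineAllColumns(is, true);                    // apply the combiner to every column
      LongCombiner.setEncodingType(is, LongCombiner.Type.STRING); // values encoded/decoded by StringEncoder
      return is;
    }
  }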

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/TimestampFilter.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/TimestampFilter.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/TimestampFilter.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/iterators/user/TimestampFilter.java Mon Jan 14 22:03:24 2013
@@ -17,8 +17,8 @@
 package org.apache.accumulo.core.iterators.user;
 
 import java.io.IOException;
+import java.text.ParseException;
 import java.text.SimpleDateFormat;
-import java.util.Date;
 import java.util.Map;
 import java.util.TimeZone;
 
@@ -33,6 +33,7 @@ import org.apache.accumulo.core.iterator
  * A Filter that matches entries whose timestamps fall within a range.
  */
 public class TimestampFilter extends Filter {
+  private static final String LONG_PREFIX = "LONG";
   private final SimpleDateFormat dateParser = initDateParser();
   
   private static SimpleDateFormat initDateParser() {
@@ -86,10 +87,20 @@ public class TimestampFilter extends Fil
       throw new IllegalArgumentException("must have either start or end for " + TimestampFilter.class.getName());
     
     try {
-      if (hasStart)
-        start = dateParser.parse(options.get(START)).getTime();
-      if (hasEnd)
-        end = dateParser.parse(options.get(END)).getTime();
+      if (hasStart) {
+        String s = options.get(START);
+        if (s.startsWith(LONG_PREFIX))
+          start = Long.valueOf(s.substring(LONG_PREFIX.length()));
+        else
+          start = dateParser.parse(s).getTime();
+      }
+      if (hasEnd) {
+        String s = options.get(END);
+        if (s.startsWith(LONG_PREFIX))
+          end = Long.valueOf(s.substring(LONG_PREFIX.length()));
+        else
+          end = dateParser.parse(s).getTime();
+      }
     } catch (Exception e) {
       throw new IllegalArgumentException(e);
     }
@@ -116,8 +127,8 @@ public class TimestampFilter extends Fil
     IteratorOptions io = super.describeOptions();
     io.setName("tsfilter");
     io.setDescription("TimestampFilter displays entries with timestamps between specified values");
-    io.addNamedOption("start", "start timestamp (yyyyMMddHHmmssz)");
-    io.addNamedOption("end", "end timestamp (yyyyMMddHHmmssz)");
+    io.addNamedOption("start", "start timestamp (yyyyMMddHHmmssz or LONG<longstring>)");
+    io.addNamedOption("end", "end timestamp (yyyyMMddHHmmssz or LONG<longstring>)");
     io.addNamedOption("startInclusive", "true or false");
     io.addNamedOption("endInclusive", "true or false");
     return io;
@@ -126,11 +137,27 @@ public class TimestampFilter extends Fil
   @Override
   public boolean validateOptions(Map<String,String> options) {
     super.validateOptions(options);
+    boolean hasStart = false;
+    boolean hasEnd = false;
     try {
-      if (options.containsKey(START))
-        dateParser.parse(options.get(START));
-      if (options.containsKey(END))
-        dateParser.parse(options.get(END));
+      if (options.containsKey(START)) {
+        hasStart = true;
+        String s = options.get(START);
+        if (s.startsWith(LONG_PREFIX))
+          Long.valueOf(s.substring(LONG_PREFIX.length()));
+        else
+          dateParser.parse(s);
+      }
+      if (options.containsKey(END)) {
+        hasEnd = true;
+        String s = options.get(END);
+        if (s.startsWith(LONG_PREFIX))
+          Long.valueOf(s.substring(LONG_PREFIX.length()));
+        else
+          dateParser.parse(s);
+      }
+      if (!hasStart && !hasEnd)
+        return false;
       if (options.get(START_INCL) != null)
         Boolean.parseBoolean(options.get(START_INCL));
       if (options.get(END_INCL) != null)
@@ -185,8 +212,13 @@ public class TimestampFilter extends Fil
    *          boolean indicating whether the start is inclusive
    */
   public static void setStart(IteratorSetting is, String start, boolean startInclusive) {
-    is.addOption(START, start);
-    is.addOption(START_INCL, Boolean.toString(startInclusive));
+    SimpleDateFormat dateParser = initDateParser();
+    try {
+      long startTS = dateParser.parse(start).getTime();
+      setStart(is, startTS, startInclusive);
+    } catch (ParseException e) {
+      throw new IllegalArgumentException("couldn't parse " + start);
+    }
   }
   
   /**
@@ -200,8 +232,13 @@ public class TimestampFilter extends Fil
    *          boolean indicating whether the end is inclusive
    */
   public static void setEnd(IteratorSetting is, String end, boolean endInclusive) {
-    is.addOption(END, end);
-    is.addOption(END_INCL, Boolean.toString(endInclusive));
+    SimpleDateFormat dateParser = initDateParser();
+    try {
+      long endTS = dateParser.parse(end).getTime();
+      setEnd(is, endTS, endInclusive);
+    } catch (ParseException e) {
+      throw new IllegalArgumentException("couldn't parse " + end);
+    }
   }
   
   /**
@@ -248,8 +285,7 @@ public class TimestampFilter extends Fil
    *          boolean indicating whether the start is inclusive
    */
   public static void setStart(IteratorSetting is, long start, boolean startInclusive) {
-    SimpleDateFormat dateParser = initDateParser();
-    is.addOption(START, dateParser.format(new Date(start)));
+    is.addOption(START, LONG_PREFIX + Long.toString(start));
     is.addOption(START_INCL, Boolean.toString(startInclusive));
   }
   
@@ -264,8 +300,7 @@ public class TimestampFilter extends Fil
    *          boolean indicating whether the end is inclusive
    */
   public static void setEnd(IteratorSetting is, long end, boolean endInclusive) {
-    SimpleDateFormat dateParser = initDateParser();
-    is.addOption(END, dateParser.format(new Date(end)));
+    is.addOption(END, LONG_PREFIX + Long.toString(end));
     is.addOption(END_INCL, Boolean.toString(endInclusive));
   }
 }
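
[Note: with this hunk the long-valued setStart/setEnd overloads store the raw millisecond value behind the LONG prefix instead of re-formatting it as a date string, while the String overloads parse the yyyyMMddHHmmssz form and delegate to them; init() accepts either representation. A minimal usage sketch using only the methods shown above; the priority, name, and timestamp values are illustrative.]

  import org.apache.accumulo.core.client.IteratorSetting;
  import org.apache.accumulo.core.iterators.user.TimestampFilter;

  public class TimestampFilterSketch {
    public static IteratorSetting buildFilter() {
      IteratorSetting is = new IteratorSetting(30, "tsfilter", TimestampFilter.class);
      // stored as "LONG1356998400000"; init() detects the prefix and uses Long.valueOf
      TimestampFilter.setStart(is, 1356998400000L, true);
      // string form is parsed with SimpleDateFormat("yyyyMMddHHmmssz") and then
      // converted to the LONG form by the new String overload
      TimestampFilter.setEnd(is, "20130114220324GMT", true);
      return is;
    }
  }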

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/Compacting.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/Compacting.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/Compacting.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/Compacting.java Mon Jan 14 22:03:24 2013
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 /**
  * Autogenerated by Thrift Compiler (0.9.0)
  *

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/DeadServer.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/DeadServer.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/DeadServer.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/DeadServer.java Mon Jan 14 22:03:24 2013
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 /**
  * Autogenerated by Thrift Compiler (0.9.0)
  *

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterClientService.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterClientService.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterClientService.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterClientService.java Mon Jan 14 22:03:24 2013
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 /**
  * Autogenerated by Thrift Compiler (0.9.0)
  *

Modified: accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterGoalState.java
URL: http://svn.apache.org/viewvc/accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterGoalState.java?rev=1433166&r1=1433165&r2=1433166&view=diff
==============================================================================
--- accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterGoalState.java (original)
+++ accumulo/branches/ACCUMULO-259/core/src/main/java/org/apache/accumulo/core/master/thrift/MasterGoalState.java Mon Jan 14 22:03:24 2013
@@ -1,3 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 /**
  * Autogenerated by Thrift Compiler (0.9.0)
  *


