incubator-blur-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From amccu...@apache.org
Subject [42/92] [abbrv] [partial] Fixed BLUR-126.
Date Tue, 11 Jun 2013 02:41:28 GMT
http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-core/src/test/java/org/apache/blur/manager/results/PeekableIteratorTest.java
----------------------------------------------------------------------
diff --git a/blur-core/src/test/java/org/apache/blur/manager/results/PeekableIteratorTest.java b/blur-core/src/test/java/org/apache/blur/manager/results/PeekableIteratorTest.java
new file mode 100644
index 0000000..f590ef0
--- /dev/null
+++ b/blur-core/src/test/java/org/apache/blur/manager/results/PeekableIteratorTest.java
@@ -0,0 +1,54 @@
+package org.apache.blur.manager.results;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+
+import org.apache.blur.manager.results.PeekableIterator;
+import org.junit.Test;
+
+
+public class PeekableIteratorTest {
+
+  @Test
+  public void testPeekableIterator1() {
+    PeekableIterator<Integer> iterator = new PeekableIterator<Integer>(Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9).iterator());
+    while (iterator.hasNext()) {
+      for (int i = 0; i < 3; i++) {
+        System.out.println(iterator.peek());
+      }
+      System.out.println(iterator.next());
+    }
+  }
+
+  @Test
+  public void testPeekableIteratorEmpty() {
+    PeekableIterator<Integer> iterator = new PeekableIterator<Integer>(new ArrayList<Integer>().iterator());
+    for (int i = 0; i < 3; i++) {
+      System.out.println(iterator.peek());
+    }
+    while (iterator.hasNext()) {
+      for (int i = 0; i < 3; i++) {
+        System.out.println(iterator.peek());
+      }
+      System.out.println(iterator.next());
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-core/src/test/java/org/apache/blur/manager/writer/BlurIndexReaderTest.java
----------------------------------------------------------------------
diff --git a/blur-core/src/test/java/org/apache/blur/manager/writer/BlurIndexReaderTest.java b/blur-core/src/test/java/org/apache/blur/manager/writer/BlurIndexReaderTest.java
new file mode 100644
index 0000000..f225cfc
--- /dev/null
+++ b/blur-core/src/test/java/org/apache/blur/manager/writer/BlurIndexReaderTest.java
@@ -0,0 +1,144 @@
+package org.apache.blur.manager.writer;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Random;
+import java.util.concurrent.ExecutorService;
+
+import org.apache.blur.concurrent.Executors;
+import org.apache.blur.lucene.store.refcounter.DirectoryReferenceFileGC;
+import org.apache.blur.lucene.store.refcounter.IndexInputCloser;
+import org.apache.blur.server.ShardContext;
+import org.apache.blur.server.TableContext;
+import org.apache.blur.thrift.generated.AnalyzerDefinition;
+import org.apache.blur.thrift.generated.Column;
+import org.apache.blur.thrift.generated.Record;
+import org.apache.blur.thrift.generated.Row;
+import org.apache.blur.thrift.generated.TableDescriptor;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.store.FSDirectory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Verifies that a {@link BlurIndexReader} does not see rows written through a
+ * {@link BlurNRTIndex} writer on the same directory until the reader is
+ * explicitly refreshed.
+ */
+public class BlurIndexReaderTest {
+
+  private static final File TMPDIR = new File("./target/tmp");
+
+  private BlurNRTIndex writer;
+  private Random random = new Random();
+  private ExecutorService service;
+  private File base;
+  private Configuration configuration;
+
+  private DirectoryReferenceFileGC gc;
+  private IndexInputCloser closer;
+  private SharedMergeScheduler mergeScheduler;
+  private BlurIndexReader reader;
+
+  /**
+   * Wipes and recreates the working directory, then starts the shared
+   * services (merge scheduler, file GC, index-input closer, thread pool)
+   * needed by the writer and reader.
+   */
+  @Before
+  public void setup() throws IOException {
+    TableContext.clear();
+    base = new File(TMPDIR, "blur-index-reader-test");
+    rm(base);
+    base.mkdirs();
+
+    mergeScheduler = new SharedMergeScheduler();
+    gc = new DirectoryReferenceFileGC();
+    gc.init();
+    closer = new IndexInputCloser();
+    closer.init();
+
+    configuration = new Configuration();
+    service = Executors.newThreadPool("test", 1);
+    
+  }
+
+  /**
+   * Creates a table/shard context on disk and opens both the NRT writer and
+   * the reader under test against the same FSDirectory.
+   *
+   * @param refresh value for the "blur.shard.time.between.refreshs" table property
+   */
+  private void setupWriter(Configuration configuration, long refresh) throws IOException {
+    TableDescriptor tableDescriptor = new TableDescriptor();
+    tableDescriptor.setName("test-table");
+    tableDescriptor.setTableUri(new File(base, "table-store").toURI().toString());
+    tableDescriptor.setAnalyzerDefinition(new AnalyzerDefinition());
+    tableDescriptor.putToTableProperties("blur.shard.time.between.refreshs", Long.toString(refresh));
+    tableDescriptor.putToTableProperties("blur.shard.time.between.commits", Long.toString(1000));
+    
+    TableContext tableContext = TableContext.create(tableDescriptor);
+    FSDirectory directory = FSDirectory.open(new File(base, "index"));
+
+    ShardContext shardContext = ShardContext.create(tableContext, "test-shard");
+
+    writer = new BlurNRTIndex(shardContext, mergeScheduler, closer, directory, gc, service);
+    BlurIndexRefresher refresher = new BlurIndexRefresher();
+    BlurIndexCloser indexCloser = new BlurIndexCloser();
+    refresher.init();
+    indexCloser.init();
+    reader = new BlurIndexReader(shardContext, directory, refresher, indexCloser);
+  }
+
+  // Closes resources in reverse order of creation, then removes the working
+  // directory. NOTE(review): if setup/setupWriter failed partway, writer may
+  // be null and this would NPE — confirm whether that case matters.
+  @After
+  public void tearDown() throws IOException {
+    writer.close();
+    mergeScheduler.close();
+    closer.close();
+    gc.close();
+    service.shutdownNow();
+    rm(base);
+  }
+
+  // Recursively deletes a file or directory tree; no-op if it does not exist.
+  private void rm(File file) {
+    if (!file.exists()) {
+      return;
+    }
+    if (file.isDirectory()) {
+      for (File f : file.listFiles()) {
+        rm(f);
+      }
+    }
+    file.delete();
+  }
+
+  /**
+   * Writes one row, then asserts the pre-existing searcher still sees zero
+   * docs (reader isolation) and that an explicit refresh makes the row
+   * visible. NOTE(review): the fixed 1500 ms sleep makes this test timing
+   * dependent and potentially flaky on slow machines.
+   */
+  @Test
+  public void testBlurIndexWriter() throws IOException, InterruptedException {
+    setupWriter(configuration, 1);
+    IndexSearcher searcher = reader.getSearcher();
+    writer.replaceRow(true, true, genRow());
+    Thread.sleep(1500);
+    assertEquals(0,searcher.getIndexReader().numDocs());
+    reader.refresh();
+    assertEquals(1,reader.getSearcher().getIndexReader().numDocs());
+  }
+  
+  // Builds a single-record row with random ids and ten random columns.
+  private Row genRow() {
+    Row row = new Row();
+    row.setId(Long.toString(random.nextLong()));
+    Record record = new Record();
+    record.setFamily("testing");
+    record.setRecordId(Long.toString(random.nextLong()));
+    for (int i = 0; i < 10; i++) {
+      record.addToColumns(new Column("col" + i, Long.toString(random.nextLong())));
+    }
+    row.addToRecords(record);
+    return row;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-core/src/test/java/org/apache/blur/manager/writer/BlurNRTIndexTest.java
----------------------------------------------------------------------
diff --git a/blur-core/src/test/java/org/apache/blur/manager/writer/BlurNRTIndexTest.java b/blur-core/src/test/java/org/apache/blur/manager/writer/BlurNRTIndexTest.java
new file mode 100644
index 0000000..c207937
--- /dev/null
+++ b/blur-core/src/test/java/org/apache/blur/manager/writer/BlurNRTIndexTest.java
@@ -0,0 +1,185 @@
+package org.apache.blur.manager.writer;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Random;
+import java.util.UUID;
+import java.util.concurrent.ExecutorService;
+
+import org.apache.blur.concurrent.Executors;
+import org.apache.blur.lucene.store.refcounter.DirectoryReferenceFileGC;
+import org.apache.blur.lucene.store.refcounter.IndexInputCloser;
+import org.apache.blur.server.IndexSearcherClosable;
+import org.apache.blur.server.ShardContext;
+import org.apache.blur.server.TableContext;
+import org.apache.blur.thrift.generated.AnalyzerDefinition;
+import org.apache.blur.thrift.generated.Column;
+import org.apache.blur.thrift.generated.Record;
+import org.apache.blur.thrift.generated.Row;
+import org.apache.blur.thrift.generated.TableDescriptor;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.store.FSDirectory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class BlurNRTIndexTest {
+
+  private static final int TEST_NUMBER_WAIT_VISIBLE = 500;
+  private static final int TEST_NUMBER = 50000;
+
+  private static final File TMPDIR = new File("./target/tmp");
+
+  private BlurNRTIndex writer;
+  private Random random = new Random();
+  private ExecutorService service;
+  private File base;
+  private Configuration configuration;
+
+  private DirectoryReferenceFileGC gc;
+  private IndexInputCloser closer;
+  private SharedMergeScheduler mergeScheduler;
+
+  @Before
+  public void setup() throws IOException {
+    TableContext.clear();
+    base = new File(TMPDIR, "blur-index-writer-test");
+    rm(base);
+    base.mkdirs();
+
+    mergeScheduler = new SharedMergeScheduler();
+    gc = new DirectoryReferenceFileGC();
+    gc.init();
+    closer = new IndexInputCloser();
+    closer.init();
+
+    configuration = new Configuration();
+    service = Executors.newThreadPool("test", 10);
+  }
+
+  private void setupWriter(Configuration configuration, long refresh) throws IOException {
+    TableDescriptor tableDescriptor = new TableDescriptor();
+    tableDescriptor.setName("test-table");
+    String uuid = UUID.randomUUID().toString();
+    tableDescriptor.setTableUri(new File(base, "table-store-" + uuid).toURI().toString());
+    tableDescriptor.setAnalyzerDefinition(new AnalyzerDefinition());
+    tableDescriptor.putToTableProperties("blur.shard.time.between.refreshs", Long.toString(refresh));
+
+    TableContext tableContext = TableContext.create(tableDescriptor);
+    File path = new File(base, "index_" + uuid);
+    path.mkdirs();
+    FSDirectory directory = FSDirectory.open(path);
+    ShardContext shardContext = ShardContext.create(tableContext, "test-shard-" + uuid);
+    writer = new BlurNRTIndex(shardContext, mergeScheduler, closer, directory, gc, service);
+  }
+
+  @After
+  public void tearDown() throws IOException {
+    writer.close();
+    mergeScheduler.close();
+    closer.close();
+    gc.close();
+    service.shutdownNow();
+    rm(base);
+  }
+
+  private void rm(File file) {
+    if (!file.exists()) {
+      return;
+    }
+    if (file.isDirectory()) {
+      for (File f : file.listFiles()) {
+        rm(f);
+      }
+    }
+    file.delete();
+  }
+
+  @Test
+  public void testBlurIndexWriter() throws IOException {
+    setupWriter(configuration, 5);
+    long s = System.nanoTime();
+    int total = 0;
+    for (int i = 0; i < TEST_NUMBER_WAIT_VISIBLE; i++) {
+      writer.replaceRow(true, true, genRow());
+      IndexSearcherClosable searcher = writer.getIndexReader();
+      IndexReader reader = searcher.getIndexReader();
+      assertEquals(i + 1, reader.numDocs());
+      searcher.close();
+      total++;
+    }
+    long e = System.nanoTime();
+    double seconds = (e - s) / 1000000000.0;
+    double rate = total / seconds;
+    System.out.println("Rate " + rate);
+    IndexSearcherClosable searcher = writer.getIndexReader();
+    IndexReader reader = searcher.getIndexReader();
+    assertEquals(TEST_NUMBER_WAIT_VISIBLE, reader.numDocs());
+    searcher.close();
+  }
+
+  @Test
+  public void testBlurIndexWriterFaster() throws IOException, InterruptedException {
+    setupWriter(configuration, 100);
+    IndexSearcherClosable searcher1 = writer.getIndexReader();
+    IndexReader reader1 = searcher1.getIndexReader();
+    assertEquals(0, reader1.numDocs());
+    searcher1.close();
+    long s = System.nanoTime();
+    int total = 0;
+    for (int i = 0; i < TEST_NUMBER; i++) {
+      if (i == TEST_NUMBER - 1) {
+        writer.replaceRow(true, true, genRow());
+      } else {
+        writer.replaceRow(false, true, genRow());
+      }
+      total++;
+    }
+    long e = System.nanoTime();
+    double seconds = (e - s) / 1000000000.0;
+    double rate = total / seconds;
+    System.out.println("Rate " + rate);
+    // //wait one second for the data to become visible the test is set to
+    // refresh once every 25 ms
+    // Thread.sleep(1000);
+    writer.refresh();
+    IndexSearcherClosable searcher2 = writer.getIndexReader();
+    IndexReader reader2 = searcher2.getIndexReader();
+    assertEquals(TEST_NUMBER, reader2.numDocs());
+    searcher2.close();
+  }
+
+  private Row genRow() {
+    Row row = new Row();
+    row.setId(Long.toString(random.nextLong()));
+    Record record = new Record();
+    record.setFamily("testing");
+    record.setRecordId(Long.toString(random.nextLong()));
+    for (int i = 0; i < 10; i++) {
+      record.addToColumns(new Column("col" + i, Long.toString(random.nextLong())));
+    }
+    row.addToRecords(record);
+    return row;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-core/src/test/java/org/apache/blur/manager/writer/DirectoryReferenceCounterTest.java
----------------------------------------------------------------------
diff --git a/blur-core/src/test/java/org/apache/blur/manager/writer/DirectoryReferenceCounterTest.java b/blur-core/src/test/java/org/apache/blur/manager/writer/DirectoryReferenceCounterTest.java
new file mode 100644
index 0000000..7183bed
--- /dev/null
+++ b/blur-core/src/test/java/org/apache/blur/manager/writer/DirectoryReferenceCounterTest.java
@@ -0,0 +1,344 @@
+package org.apache.blur.manager.writer;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import static org.apache.blur.lucene.LuceneVersionConstant.LUCENE_VERSION;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Map;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import org.apache.blur.lucene.store.refcounter.DirectoryReferenceCounter;
+import org.apache.blur.lucene.store.refcounter.DirectoryReferenceFileGC;
+import org.apache.blur.lucene.store.refcounter.IndexInputCloser;
+import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.Lock;
+import org.apache.lucene.store.LockFactory;
+import org.apache.lucene.store.LockObtainFailedException;
+import org.apache.lucene.store.RAMDirectory;
+import org.junit.Test;
+
+/**
+ * Tests {@link DirectoryReferenceCounter} against a Directory wrapper that
+ * forcefully deletes files even while readers still hold them open (simulating
+ * HDFS-like behavior). The first test shows reads fail WITHOUT the counter;
+ * the second shows the counter plus {@link DirectoryReferenceFileGC} keeps
+ * files alive until every referencing reader is closed.
+ */
+public class DirectoryReferenceCounterTest {
+
+  // Without reference counting, forceMerge deletes segment files that the
+  // earlier open readers still need, so reading through them must fail.
+  // TODO(review): writer and readers are never closed in this test.
+  @Test
+  public void testDirectoryReferenceCounterTestError() throws CorruptIndexException, IOException {
+    Directory directory = wrap(new RAMDirectory());
+    IndexWriterConfig conf = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
+    IndexWriter writer = new IndexWriter(directory, conf);
+    int size = 100;
+    IndexReader[] readers = new IndexReader[size];
+    for (int i = 0; i < size; i++) {
+      writer.addDocument(getDoc());
+      readers[i] = DirectoryReader.open(writer, true);
+      writer.forceMerge(1);
+    }
+
+    try {
+      for (int i = 0; i < size; i++) {
+        checkReader(readers[i], i);
+      }
+      fail();
+    } catch (Exception e) {
+      // should error
+    }
+  }
+
+  // With the counter wrapped around the same deleting directory, every reader
+  // remains readable; deferred deletes are only applied by gc.run() once the
+  // referencing readers have been closed.
+  @Test
+  public void testDirectoryReferenceCounter() throws CorruptIndexException, LockObtainFailedException, IOException, InterruptedException {
+    Directory directory = wrap(new RAMDirectory());
+    DirectoryReferenceFileGC gc = new DirectoryReferenceFileGC();
+    gc.init();
+    IndexInputCloser closer = new IndexInputCloser();
+    closer.init();
+    DirectoryReferenceCounter counter = new DirectoryReferenceCounter(directory, gc, closer);
+    IndexWriterConfig conf = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
+    IndexWriter writer = new IndexWriter(counter, conf);
+    int size = 100;
+    IndexReader[] readers = new IndexReader[size];
+    for (int i = 0; i < size; i++) {
+      writer.addDocument(getDoc());
+      writer.forceMerge(1);
+      readers[i] = DirectoryReader.open(writer, true);
+    }
+
+    // every reader must still see its snapshot despite the later merges
+    for (int i = 0; i < size; i++) {
+      assertEquals(i + 1, readers[i].numDocs());
+      checkReader(readers[i], i);
+    }
+
+    String[] listAll = directory.listAll();
+
+    // release all readers except the last one
+    for (int i = 0; i < size - 1; i++) {
+      readers[i].close();
+    }
+
+    // drive the GC so deferred deletes of now-unreferenced files are applied
+    for (int i = 0; i < 1000; i++) {
+      gc.run();
+      Thread.sleep(1);
+    }
+
+    IndexReader last = readers[size - 1];
+
+    assertEquals(100, last.numDocs());
+
+    // the GC must have removed files, so the directory shrank
+    assertTrue(listAll.length > directory.listAll().length);
+
+    last.close();
+    writer.close();
+    gc.close();
+  }
+
+  // Builds a single stored, indexed, untokenized doc with id=value.
+  private Document getDoc() {
+    Document document = new Document();
+    FieldType type = new FieldType();
+    type.setIndexed(true);
+    type.setOmitNorms(true);
+    type.setTokenized(false);
+    type.setStored(true);
+    document.add(new Field("id", "value", type));
+    return document;
+  }
+
+  // Reads the first `size` docs and checks the stored "id" field round-trips.
+  private void checkReader(IndexReader indexReader, int size) throws CorruptIndexException, IOException {
+    for (int i = 0; i < size; i++) {
+      Document document = indexReader.document(i);
+      String value = document.get("id");
+      assertEquals("value", value);
+    }
+  }
+
+  // This class is use simulate what would happen with a directory that will
+  // forcefully delete files even if they are still in use. e.g. HDFSDirectory
+  // Every operation delegates to the RAMDirectory; deleteFile additionally
+  // records the name so opened IndexInputs can fail on later access.
+  public static Directory wrap(final RAMDirectory ramDirectory) {
+    return new Directory() {
+      private Directory d = ramDirectory;
+      private Collection<String> deletedFiles = new LinkedBlockingQueue<String>();
+
+      @Override
+      public void deleteFile(String name) throws IOException {
+        deletedFiles.add(name);
+        d.deleteFile(name);
+      }
+
+      @Override
+      public IndexOutput createOutput(String name, IOContext context) throws IOException {
+        return d.createOutput(name, context);
+      }
+
+      @Override
+      public void sync(Collection<String> names) throws IOException {
+        d.sync(names);
+      }
+
+      @Override
+      public IndexInput openInput(String name, IOContext context) throws IOException {
+        // inputs are wrapped so reads fail once the file has been deleted
+        return wrap(d.openInput(name, context), deletedFiles, name);
+      }
+
+      @Override
+      public void clearLock(String name) throws IOException {
+        d.clearLock(name);
+      }
+
+      @Override
+      public void close() throws IOException {
+        d.close();
+      }
+
+      @Override
+      public void setLockFactory(LockFactory lockFactory) throws IOException {
+        d.setLockFactory(lockFactory);
+      }
+
+      @Override
+      public String getLockID() {
+        return d.getLockID();
+      }
+
+      @Override
+      public boolean equals(Object arg0) {
+        return d.equals(arg0);
+      }
+
+      @Override
+      public boolean fileExists(String name) throws IOException {
+        return d.fileExists(name);
+      }
+
+      @Override
+      public long fileLength(String name) throws IOException {
+        return d.fileLength(name);
+      }
+
+      @Override
+      public LockFactory getLockFactory() {
+        return d.getLockFactory();
+      }
+
+      @Override
+      public int hashCode() {
+        return d.hashCode();
+      }
+
+      @Override
+      public String[] listAll() throws IOException {
+        return d.listAll();
+      }
+
+      @Override
+      public Lock makeLock(String name) {
+        return d.makeLock(name);
+      }
+
+      @Override
+      public String toString() {
+        return d.toString();
+      }
+    };
+  }
+
+  // Wraps an IndexInput so every read/seek/close first checks whether the
+  // backing file was deleted and, if so, throws IOException — mimicking a
+  // filesystem that yanks files out from under open handles.
+  public static IndexInput wrap(final IndexInput input, final Collection<String> deletedFiles, final String name) {
+    return new IndexInput(input.toString()) {
+      private IndexInput in = input;
+
+      private void checkForDeleted() throws IOException {
+        if (deletedFiles.contains(name)) {
+          throw new IOException("File [" + name + "] does not exist");
+        }
+      }
+
+      @Override
+      public void close() throws IOException {
+        checkForDeleted();
+        in.close();
+      }
+
+      @Override
+      public short readShort() throws IOException {
+        checkForDeleted();
+        return in.readShort();
+      }
+
+      @Override
+      public void seek(long pos) throws IOException {
+        checkForDeleted();
+        in.seek(pos);
+      }
+
+      @Override
+      public int readInt() throws IOException {
+        checkForDeleted();
+        return in.readInt();
+      }
+
+      @Override
+      public int readVInt() throws IOException {
+        checkForDeleted();
+        return in.readVInt();
+      }
+
+      @Override
+      public String toString() {
+        return in.toString();
+      }
+
+      @Override
+      public long readLong() throws IOException {
+        checkForDeleted();
+        return in.readLong();
+      }
+
+      @Override
+      public long readVLong() throws IOException {
+        checkForDeleted();
+        return in.readVLong();
+      }
+
+      @Override
+      public String readString() throws IOException {
+        checkForDeleted();
+        return in.readString();
+      }
+
+      @Override
+      public IndexInput clone() {
+        return super.clone();
+      }
+
+      @Override
+      public boolean equals(Object obj) {
+        return in.equals(obj);
+      }
+
+      @Override
+      public long getFilePointer() {
+        return in.getFilePointer();
+      }
+
+      @Override
+      public int hashCode() {
+        return in.hashCode();
+      }
+
+      @Override
+      public byte readByte() throws IOException {
+        checkForDeleted();
+        return in.readByte();
+      }
+
+      @Override
+      public void readBytes(byte[] b, int offset, int len) throws IOException {
+        checkForDeleted();
+        in.readBytes(b, offset, len);
+      }
+
+      @Override
+      public void readBytes(byte[] b, int offset, int len, boolean useBuffer) throws IOException {
+        checkForDeleted();
+        in.readBytes(b, offset, len, useBuffer);
+      }
+
+      @Override
+      public long length() {
+        return in.length();
+      }
+
+      @Override
+      public Map<String, String> readStringStringMap() throws IOException {
+        checkForDeleted();
+        return in.readStringStringMap();
+      }
+    };
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-core/src/test/java/org/apache/blur/manager/writer/IndexImporterTest.java
----------------------------------------------------------------------
diff --git a/blur-core/src/test/java/org/apache/blur/manager/writer/IndexImporterTest.java b/blur-core/src/test/java/org/apache/blur/manager/writer/IndexImporterTest.java
new file mode 100644
index 0000000..5d3466e
--- /dev/null
+++ b/blur-core/src/test/java/org/apache/blur/manager/writer/IndexImporterTest.java
@@ -0,0 +1,160 @@
+package org.apache.blur.manager.writer;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import static org.apache.blur.lucene.LuceneVersionConstant.LUCENE_VERSION;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Random;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import org.apache.blur.analysis.BlurAnalyzer;
+import org.apache.blur.server.ShardContext;
+import org.apache.blur.server.TableContext;
+import org.apache.blur.store.buffer.BufferStore;
+import org.apache.blur.store.hdfs.HdfsDirectory;
+import org.apache.blur.thrift.generated.AnalyzerDefinition;
+import org.apache.blur.thrift.generated.Column;
+import org.apache.blur.thrift.generated.Record;
+import org.apache.blur.thrift.generated.TableDescriptor;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.search.NRTManager.TrackingIndexWriter;
+import org.apache.lucene.store.Directory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class IndexImporterTest {
+
+  private static final Path TMPDIR = new Path("target/tmp");
+
+  private Path base;
+  private Configuration configuration;
+  private IndexWriter commitWriter;
+  private IndexImporter indexImporter;
+  private Random random = new Random();
+  private Path path;
+  private Path badRowIdsPath;
+  private IndexWriter mainWriter;
+  private FileSystem fileSystem;
+
+  @Before
+  public void setup() throws IOException {
+    TableContext.clear();
+    configuration = new Configuration();
+    base = new Path(TMPDIR, "blur-index-importer-test");
+    fileSystem = base.getFileSystem(configuration);
+    fileSystem.delete(base, true);
+    fileSystem.mkdirs(base);
+    setupWriter(configuration);
+  }
+
+  private void setupWriter(Configuration configuration) throws IOException {
+    TableDescriptor tableDescriptor = new TableDescriptor();
+    tableDescriptor.setName("test-table");
+    String uuid = UUID.randomUUID().toString();
+    
+    tableDescriptor.setTableUri(new Path(base, "table-table").toUri().toString());
+    tableDescriptor.setAnalyzerDefinition(new AnalyzerDefinition());
+    tableDescriptor.setShardCount(2);
+    
+    TableContext tableContext = TableContext.create(tableDescriptor);
+    ShardContext shardContext = ShardContext.create(tableContext, "shard-00000000");
+    Path tablePath = new Path(base, "table-table");
+    Path shardPath = new Path(tablePath, "shard-00000000");
+    String indexDirName = "index_" + uuid;
+    path = new Path(shardPath, indexDirName + ".commit");
+    fileSystem.mkdirs(path);
+    badRowIdsPath = new Path(shardPath, indexDirName + ".bad_rowids");
+    Directory commitDirectory = new HdfsDirectory(configuration, path);
+    Directory mainDirectory = new HdfsDirectory(configuration, shardPath);
+    IndexWriterConfig conf = new IndexWriterConfig(LUCENE_VERSION, tableContext.getAnalyzer());
+    commitWriter = new IndexWriter(commitDirectory, conf);
+    
+    mainWriter = new IndexWriter(mainDirectory, conf);
+    BufferStore.init(128, 128);
+    
+    indexImporter = new IndexImporter(new TrackingIndexWriter(mainWriter), new ReentrantReadWriteLock(), shardContext,
+        TimeUnit.MINUTES, 10);
+  }
+
+  @After
+  public void tearDown() throws IOException {
+    mainWriter.close();
+    indexImporter.close();
+    base.getFileSystem(configuration).delete(base, true);
+  }
+
+
+  @Test
+  public void testIndexImporterWithCorrectRowIdShardCombination() throws IOException {
+    
+    Document document = TransactionRecorder.convert("1", genRecord("1"), new StringBuilder(), new BlurAnalyzer());
+    commitWriter.addDocument(document);
+    commitWriter.commit();
+    commitWriter.close();
+    indexImporter.run();
+    assertFalse(fileSystem.exists(path));
+    assertFalse(fileSystem.exists(badRowIdsPath));
+  }
+
+//  private void debug(Path file) throws IOException {
+//    if (!fileSystem.exists(file)) {
+//      return;
+//    }
+//    System.out.println(file);
+//    if (!fileSystem.isFile(file)) {
+//      FileStatus[] listStatus = fileSystem.listStatus(file);
+//      for (FileStatus f : listStatus) {
+//        debug(f.getPath());
+//      }
+//    }
+//  }
+
+  @Test
+  public void testIndexImporterWithWrongRowIdShardCombination() throws IOException {
+    setupWriter(configuration);
+    Document document = TransactionRecorder.convert("2", genRecord("1"), new StringBuilder(), new BlurAnalyzer());
+    commitWriter.addDocument(document);
+    commitWriter.commit();
+    commitWriter.close();
+    indexImporter.run();
+    assertFalse(fileSystem.exists(path));
+    assertTrue(fileSystem.exists(badRowIdsPath));
+  }
+
+  private Record genRecord(String recordId) {
+    Record record = new Record();
+    record.setFamily("testing");
+    record.setRecordId(recordId);
+    for (int i = 0; i < 10; i++) {
+      record.addToColumns(new Column("col" + i, Long.toString(random.nextLong())));
+    }
+    return record;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-core/src/test/java/org/apache/blur/manager/writer/TransactionRecorderTest.java
----------------------------------------------------------------------
diff --git a/blur-core/src/test/java/org/apache/blur/manager/writer/TransactionRecorderTest.java b/blur-core/src/test/java/org/apache/blur/manager/writer/TransactionRecorderTest.java
new file mode 100644
index 0000000..b3eb93e
--- /dev/null
+++ b/blur-core/src/test/java/org/apache/blur/manager/writer/TransactionRecorderTest.java
@@ -0,0 +1,184 @@
+package org.apache.blur.manager.writer;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import static org.apache.blur.lucene.LuceneVersionConstant.LUCENE_VERSION;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.URI;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.blur.MiniCluster;
+import org.apache.blur.analysis.BlurAnalyzer;
+import org.apache.blur.index.IndexWriter;
+import org.apache.blur.log.Log;
+import org.apache.blur.log.LogFactory;
+import org.apache.blur.server.ShardContext;
+import org.apache.blur.server.TableContext;
+import org.apache.blur.thrift.generated.AnalyzerDefinition;
+import org.apache.blur.thrift.generated.Column;
+import org.apache.blur.thrift.generated.Record;
+import org.apache.blur.thrift.generated.Row;
+import org.apache.blur.thrift.generated.TableDescriptor;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.store.RAMDirectory;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Tests {@link TransactionRecorder}: write-ahead-log replay after a failed
+ * mutation, and the static row/record to Lucene document conversion including
+ * family/column name validation.
+ */
+public class TransactionRecorderTest {
+
+  private static final Log LOG = LogFactory.getLog(TransactionRecorderTest.class);
+
+  @BeforeClass
+  public static void setup() {
+    MiniCluster.startDfs("target/transaction-recorder-test");
+  }
+
+  @AfterClass
+  public static void teardown() throws IOException {
+    MiniCluster.shutdownDfs();
+  }
+
+  // Resources registered by a test; closed best-effort in after().
+  private Collection<Closeable> closeThis = new HashSet<Closeable>();
+
+  @After
+  public void after() {
+    for (Closeable closeable : closeThis) {
+      IOUtils.cleanup(LOG, closeable);
+    }
+    // Reset so the next test does not re-close this test's resources.
+    closeThis.clear();
+  }
+
+  @Test
+  public void testReplaySimpleTest() throws IOException, InterruptedException {
+    TableContext.clear();
+    Configuration configuration = new Configuration(false);
+    URI fileSystemUri = MiniCluster.getFileSystemUri();
+    Path path = new Path(fileSystemUri.toString() + "/transaction-recorder-test");
+    FileSystem fileSystem = path.getFileSystem(configuration);
+    fileSystem.delete(path, true);
+
+    KeywordAnalyzer analyzer = new KeywordAnalyzer();
+
+    TableDescriptor tableDescriptor = new TableDescriptor();
+    tableDescriptor.setName("table");
+    String tableUri = new Path(path, "tableuri").toUri().toString();
+
+    System.out.println("tableUri=" + tableUri);
+    tableDescriptor.setTableUri(tableUri);
+    tableDescriptor.setAnalyzerDefinition(new AnalyzerDefinition());
+
+    TableContext tableContext = TableContext.create(tableDescriptor);
+    ShardContext shardContext = ShardContext.create(tableContext, "shard-1");
+    TransactionRecorder transactionRecorder = new TransactionRecorder(shardContext);
+    closeThis.add(transactionRecorder);
+    transactionRecorder.open();
+
+    // The null writer forces an NPE after the mutation has reached the WAL;
+    // the row should still be recoverable via replay below.
+    try {
+      transactionRecorder.replaceRow(true, genRow(), null);
+      fail("Should NPE");
+    } catch (NullPointerException e) {
+      // Expected.
+    }
+
+    // Give the WAL time to sync before replaying it.
+    Thread.sleep(TimeUnit.SECONDS.toMillis(2));
+
+    RAMDirectory directory = new RAMDirectory();
+    IndexWriterConfig conf = new IndexWriterConfig(LUCENE_VERSION, analyzer);
+    IndexWriter writer = new IndexWriter(directory, conf);
+
+    TransactionRecorder replayTransactionRecorder = new TransactionRecorder(shardContext);
+    closeThis.add(replayTransactionRecorder);
+    System.out.println("REPLAY");
+    replayTransactionRecorder.replay(writer);
+    System.out.println("REPLAY COMPLETE");
+    // Close the writer so the replayed document is committed and visible.
+    writer.close();
+    IndexReader reader = DirectoryReader.open(directory);
+    System.out.println("assert");
+    assertEquals(1, reader.numDocs());
+    reader.close();
+  }
+
+  @Test
+  public void testConvertShouldPass() {
+    String rowId = "RowId_123-1";
+    Record record = new Record();
+    record.setRecordId("RecordId_123-1");
+    record.setFamily("Family_123-1");
+
+    Column column = new Column();
+    column.setName("columnName_123-1");
+    record.setColumns(Arrays.asList(column));
+
+    // Valid family and column names: convert must complete without throwing.
+    TransactionRecorder.convert(rowId, record, new StringBuilder(), new BlurAnalyzer());
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testConvertWithBadFamilyNameShouldFail() {
+    String rowId = "RowId_123-1";
+    Record record = new Record();
+    record.setRecordId("RecordId_123-1");
+    // "Family_123.1" contains '.', which should make convert throw.
+    record.setFamily("Family_123.1");
+
+    Column column = new Column();
+    column.setName("columnName_123-1");
+    record.setColumns(Arrays.asList(column));
+
+    TransactionRecorder.convert(rowId, record, new StringBuilder(), new BlurAnalyzer());
+    fail();
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testConvertWithBadColumnNameShouldFail() {
+    String rowId = "RowId_123-1";
+    Record record = new Record();
+    record.setRecordId("RecordId_123-1");
+    record.setFamily("Family_123-1");
+
+    Column column = new Column();
+    // "columnName_123.1" contains '.', which should make convert throw.
+    column.setName("columnName_123.1");
+    record.setColumns(Arrays.asList(column));
+
+    TransactionRecorder.convert(rowId, record, new StringBuilder(), new BlurAnalyzer());
+    fail();
+  }
+
+  // Builds a one-record row ("1"/"1"/"test") used by the replay test.
+  private Row genRow() {
+    Row row = new Row();
+    row.id = "1";
+    Record record = new Record();
+    record.recordId = "1";
+    record.family = "test";
+    record.addToColumns(new Column("name", "value"));
+    row.addToRecords(record);
+    return row;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-core/src/test/java/org/apache/blur/thrift/BlurClusterTest.java
----------------------------------------------------------------------
diff --git a/blur-core/src/test/java/org/apache/blur/thrift/BlurClusterTest.java b/blur-core/src/test/java/org/apache/blur/thrift/BlurClusterTest.java
new file mode 100644
index 0000000..38475a8
--- /dev/null
+++ b/blur-core/src/test/java/org/apache/blur/thrift/BlurClusterTest.java
@@ -0,0 +1,206 @@
+package org.apache.blur.thrift;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.blur.MiniCluster;
+import org.apache.blur.thirdparty.thrift_0_9_0.TException;
+import org.apache.blur.thrift.generated.Blur;
+import org.apache.blur.thrift.generated.Blur.Iface;
+import org.apache.blur.thrift.generated.BlurException;
+import org.apache.blur.thrift.generated.BlurQuery;
+import org.apache.blur.thrift.generated.BlurResult;
+import org.apache.blur.thrift.generated.BlurResults;
+import org.apache.blur.thrift.generated.RecordMutation;
+import org.apache.blur.thrift.generated.RowMutation;
+import org.apache.blur.thrift.generated.SimpleQuery;
+import org.apache.blur.thrift.generated.TableDescriptor;
+import org.apache.blur.thrift.util.BlurThriftHelper;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.zookeeper.KeeperException;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * End-to-end tests running against an embedded Blur mini cluster (2
+ * controllers, 3 shard servers) started once for the whole class.
+ *
+ * NOTE(review): several tests appear to depend on execution order --
+ * testLoadTable needs the "test" table created by testCreateTable, and
+ * testTestShardFailover expects the 100 rows loaded by testLoadTable. JUnit
+ * does not guarantee method order by default; confirm ordering is enforced.
+ */
+public class BlurClusterTest {
+
+  // Root for test data; override with -Dblur.tmp.dir.
+  private static final File TMPDIR = new File(System.getProperty("blur.tmp.dir", "/tmp"));
+
+  @BeforeClass
+  public static void startCluster() throws IOException {
+    LocalFileSystem localFS = FileSystem.getLocal(new Configuration());
+    File testDirectory = new File(TMPDIR, "blur-cluster-test");
+    testDirectory.mkdirs();
+
+    // Mirror the local test directory's permissions into the property the
+    // mini DFS datanode checks, encoded as three octal digits (e.g. "755").
+    Path directory = new Path(testDirectory.getPath());
+    FsPermission dirPermissions = localFS.getFileStatus(directory).getPermission();
+    FsAction userAction = dirPermissions.getUserAction();
+    FsAction groupAction = dirPermissions.getGroupAction();
+    FsAction otherAction = dirPermissions.getOtherAction();
+
+    StringBuilder builder = new StringBuilder();
+    builder.append(userAction.ordinal());
+    builder.append(groupAction.ordinal());
+    builder.append(otherAction.ordinal());
+    String dirPermissionNum = builder.toString();
+    System.setProperty("dfs.datanode.data.dir.perm", dirPermissionNum);
+    testDirectory.delete();
+
+    MiniCluster.startBlurCluster("target/cluster", 2, 3);
+  }
+
+  @AfterClass
+  public static void shutdownCluster() {
+    MiniCluster.shutdownBlurCluster();
+  }
+
+  // Returns a thrift client connected to the mini cluster's controllers.
+  private Iface getClient() {
+    return BlurClient.getClient(MiniCluster.getControllerConnectionStr());
+  }
+
+  @Test
+  public void testCreateTable() throws BlurException, TException, IOException {
+    Blur.Iface client = getClient();
+    TableDescriptor tableDescriptor = new TableDescriptor();
+    tableDescriptor.setName("test");
+    tableDescriptor.setShardCount(5);
+    tableDescriptor.setTableUri(MiniCluster.getFileSystemUri().toString() + "/blur/test");
+    client.createTable(tableDescriptor);
+    // NOTE(review): asserts the table list is exactly ["test"]; this breaks if
+    // any other test leaves a table behind.
+    List<String> tableList = client.tableList();
+    assertEquals(Arrays.asList("test"), tableList);
+  }
+
+  @Test
+  public void testLoadTable() throws BlurException, TException, InterruptedException {
+    Iface client = getClient();
+    int length = 100;
+    List<RowMutation> mutations = new ArrayList<RowMutation>();
+    for (int i = 0; i < length; i++) {
+      String rowId = UUID.randomUUID().toString();
+      RecordMutation mutation = BlurThriftHelper.newRecordMutation("test", rowId,
+          BlurThriftHelper.newColumn("test", "value"));
+      RowMutation rowMutation = BlurThriftHelper.newRowMutation("test", rowId, mutation);
+      rowMutation.setWaitToBeVisible(true);
+      mutations.add(rowMutation);
+    }
+    long s = System.nanoTime();
+    client.mutateBatch(mutations);
+    long e = System.nanoTime();
+    System.out.println("mutateBatch took [" + (e - s) / 1000000.0 + "]");
+    // Row-level (super) query: every row loaded above should match.
+    BlurQuery blurQueryRow = new BlurQuery();
+    SimpleQuery simpleQueryRow = new SimpleQuery();
+    simpleQueryRow.setQueryStr("test.test:value");
+    blurQueryRow.setSimpleQuery(simpleQueryRow);
+    BlurResults resultsRow = client.query("test", blurQueryRow);
+    assertRowResults(resultsRow);
+    assertEquals(length, resultsRow.getTotalResults());
+
+    // Record-level query (superQueryOn = false) over the same data.
+    BlurQuery blurQueryRecord = new BlurQuery();
+    SimpleQuery simpleQueryRecord = new SimpleQuery();
+    simpleQueryRecord.superQueryOn = false;
+    simpleQueryRecord.setQueryStr("test.test:value");
+    blurQueryRecord.setSimpleQuery(simpleQueryRecord);
+    BlurResults resultsRecord = client.query("test", blurQueryRecord);
+    assertRecordResults(resultsRecord);
+    assertEquals(length, resultsRecord.getTotalResults());
+  }
+
+  @Test
+  public void testTestShardFailover() throws BlurException, TException, InterruptedException, IOException,
+      KeeperException {
+    Iface client = getClient();
+    // NOTE(review): 100 matches the row count loaded by testLoadTable; this
+    // test only passes if that test has already run.
+    int length = 100;
+    BlurQuery blurQuery = new BlurQuery();
+    blurQuery.setUseCacheIfPresent(false);
+    SimpleQuery simpleQuery = new SimpleQuery();
+    simpleQuery.setQueryStr("test.test:value");
+    blurQuery.setSimpleQuery(simpleQuery);
+    BlurResults results1 = client.query("test", blurQuery);
+    assertEquals(length, results1.getTotalResults());
+    assertRowResults(results1);
+
+    MiniCluster.killShardServer(1);
+
+    // make sure the WAL syncs
+    Thread.sleep(TimeUnit.SECONDS.toMillis(1));
+
+    // This should block until shards have failed over
+    client.shardServerLayout("test");
+
+    // No results should be lost after the fail-over.
+    assertEquals(length, client.query("test", blurQuery).getTotalResults());
+
+  }
+
+  // Asserts results came from a row-level query: row data present, no record data.
+  private void assertRowResults(BlurResults results) {
+    for (BlurResult result : results.getResults()) {
+      assertNull(result.locationId);
+      assertNull(result.fetchResult.recordResult);
+      assertNull(result.fetchResult.rowResult.row.records);
+      assertNotNull(result.fetchResult.rowResult.row.id);
+    }
+  }
+
+  // Asserts results came from a record-level query: record data present, no row data.
+  private void assertRecordResults(BlurResults results) {
+    for (BlurResult result : results.getResults()) {
+      assertNull(result.locationId);
+      assertNotNull(result.fetchResult.recordResult);
+      assertNotNull(result.fetchResult.recordResult.rowid);
+      assertNotNull(result.fetchResult.recordResult.record.recordId);
+      assertNotNull(result.fetchResult.recordResult.record.family);
+      assertNull("Not null [" + result.fetchResult.recordResult.record.columns + "]",
+          result.fetchResult.recordResult.record.columns);
+      assertNull(result.fetchResult.rowResult);
+    }
+  }
+
+  @Test
+  public void testCreateDisableAndRemoveTable() throws IOException, BlurException, TException {
+    Iface client = getClient();
+    String tableName = UUID.randomUUID().toString();
+    TableDescriptor tableDescriptor = new TableDescriptor();
+    tableDescriptor.setName(tableName);
+    tableDescriptor.setShardCount(5);
+    tableDescriptor.setTableUri(MiniCluster.getFileSystemUri().toString() + "/blur/" + tableName);
+
+    // Exercise the full create/disable/remove cycle repeatedly to catch
+    // state left behind by an earlier iteration.
+    for (int i = 0; i < 3; i++) {
+      client.createTable(tableDescriptor);
+      client.disableTable(tableName);
+      client.removeTable(tableName, true);
+    }
+
+    assertFalse(client.tableList().contains(tableName));
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-core/src/test/java/org/apache/blur/utils/BlurUtilsTest.java
----------------------------------------------------------------------
diff --git a/blur-core/src/test/java/org/apache/blur/utils/BlurUtilsTest.java b/blur-core/src/test/java/org/apache/blur/utils/BlurUtilsTest.java
new file mode 100644
index 0000000..4923862
--- /dev/null
+++ b/blur-core/src/test/java/org/apache/blur/utils/BlurUtilsTest.java
@@ -0,0 +1,219 @@
+package org.apache.blur.utils;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import static org.apache.blur.lucene.LuceneVersionConstant.LUCENE_VERSION;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.blur.thrift.generated.Selector;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Index;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.store.LockObtainFailedException;
+import org.apache.lucene.store.RAMDirectory;
+import org.junit.Test;
+
+public class BlurUtilsTest {
+  private static final File TMPDIR = new File(System.getProperty("blur.tmp.dir", "/tmp"));
+
+  @Test
+  public void testHumanizeTime1() {
+    long time = TimeUnit.HOURS.toMillis(2) + TimeUnit.MINUTES.toMillis(42) + TimeUnit.SECONDS.toMillis(37) + TimeUnit.MILLISECONDS.toMillis(124);
+    String humanizeTime = BlurUtil.humanizeTime(time, TimeUnit.MILLISECONDS);
+    assertEquals("2 hours 42 minutes 37 seconds", humanizeTime);
+  }
+
+  @Test
+  public void testHumanizeTime2() {
+    long time = TimeUnit.HOURS.toMillis(0) + TimeUnit.MINUTES.toMillis(42) + TimeUnit.SECONDS.toMillis(37) + TimeUnit.MILLISECONDS.toMillis(124);
+    String humanizeTime = BlurUtil.humanizeTime(time, TimeUnit.MILLISECONDS);
+    assertEquals("42 minutes 37 seconds", humanizeTime);
+  }
+
+  @Test
+  public void testHumanizeTime3() {
+    long time = TimeUnit.HOURS.toMillis(2) + TimeUnit.MINUTES.toMillis(0) + TimeUnit.SECONDS.toMillis(37) + TimeUnit.MILLISECONDS.toMillis(124);
+    String humanizeTime = BlurUtil.humanizeTime(time, TimeUnit.MILLISECONDS);
+    assertEquals("2 hours 0 minutes 37 seconds", humanizeTime);
+  }
+
+  @Test
+  public void testHumanizeTime4() {
+    long time = TimeUnit.HOURS.toMillis(2) + TimeUnit.MINUTES.toMillis(0) + TimeUnit.SECONDS.toMillis(0) + TimeUnit.MILLISECONDS.toMillis(124);
+    String humanizeTime = BlurUtil.humanizeTime(time, TimeUnit.MILLISECONDS);
+    assertEquals("2 hours 0 minutes 0 seconds", humanizeTime);
+  }
+
+  @Test
+  public void testHumanizeTime5() {
+    long time = TimeUnit.HOURS.toMillis(0) + TimeUnit.MINUTES.toMillis(0) + TimeUnit.SECONDS.toMillis(37) + TimeUnit.MILLISECONDS.toMillis(124);
+    String humanizeTime = BlurUtil.humanizeTime(time, TimeUnit.MILLISECONDS);
+    assertEquals("37 seconds", humanizeTime);
+  }
+
+  @Test
+  public void testHumanizeTime6() {
+    long time = TimeUnit.HOURS.toMillis(0) + TimeUnit.MINUTES.toMillis(0) + TimeUnit.SECONDS.toMillis(0) + TimeUnit.MILLISECONDS.toMillis(124);
+    String humanizeTime = BlurUtil.humanizeTime(time, TimeUnit.MILLISECONDS);
+    assertEquals("0 seconds", humanizeTime);
+  }
+
+  @Test
+  public void testMemoryUsage() throws CorruptIndexException, LockObtainFailedException, IOException {
+    IndexReader reader = getReader();
+    long memoryUsage = BlurUtil.getMemoryUsage(reader);
+    assertTrue(memoryUsage > 0);
+  }
+
+  @Test
+  public void testValidateShardCount() throws IOException {
+    File file = new File(TMPDIR, "ValidateShardCount-test");
+    rm(file);
+    Path path = new Path(file.toURI());
+    Configuration conf = new Configuration();
+    FileSystem fileSystem = path.getFileSystem(conf);
+    fileSystem.mkdirs(path);
+    int shardCount = 10;
+    createShardDirs(shardCount, fileSystem, path);
+    BlurUtil.validateShardCount(shardCount, fileSystem, path);
+  }
+
+  @Test
+  public void testValidateShardCountExtraDir() throws IOException {
+    File file = new File(TMPDIR, "ValidateShardCount-test");
+    rm(file);
+    Path path = new Path(file.toURI());
+    Configuration conf = new Configuration();
+    FileSystem fileSystem = path.getFileSystem(conf);
+    fileSystem.mkdirs(path);
+    int shardCount = 10;
+    createShardDirs(shardCount, fileSystem, path);
+    fileSystem.mkdirs(new Path(path, "logs"));
+    BlurUtil.validateShardCount(shardCount, fileSystem, path);
+  }
+
+  @Test
+  public void testValidateShardCountTooFew() throws IOException {
+    File file = new File(TMPDIR, "ValidateShardCount-test");
+    rm(file);
+    Path path = new Path(file.toURI());
+    Configuration conf = new Configuration();
+    FileSystem fileSystem = path.getFileSystem(conf);
+    fileSystem.mkdirs(path);
+    int shardCount = 10;
+    createShardDirs(shardCount - 1, fileSystem, path);
+    try {
+      BlurUtil.validateShardCount(shardCount, fileSystem, path);
+      fail();
+    } catch (Exception e) {
+      // Should throw exception
+    }
+  }
+  
+  @Test
+  public void testValidateShardCountTooMany() throws IOException {
+    File file = new File(TMPDIR, "ValidateShardCount-test");
+    rm(file);
+    Path path = new Path(file.toURI());
+    Configuration conf = new Configuration();
+    FileSystem fileSystem = path.getFileSystem(conf);
+    fileSystem.mkdirs(path);
+    int shardCount = 10;
+    createShardDirs(shardCount + 1, fileSystem, path);
+    try {
+      BlurUtil.validateShardCount(shardCount, fileSystem, path);
+      fail();
+    } catch (Exception e) {
+      // Should throw exception
+    }
+  }
+  
+  @Test
+  public void testFetchDocuments() throws CorruptIndexException, LockObtainFailedException, IOException{
+	  Selector selector = new Selector();
+	  HashSet<String> columnFamiliesToFetch = new HashSet<String>();
+	  columnFamiliesToFetch.add("f1");
+	  columnFamiliesToFetch.add("f2");
+	  selector.setColumnFamiliesToFetch(columnFamiliesToFetch);
+	  
+	  ResetableDocumentStoredFieldVisitor resetableDocumentStoredFieldVisitor = new ResetableDocumentStoredFieldVisitor();
+	  List<Document> docs = BlurUtil.fetchDocuments(getReader(), new Term("a","b"), resetableDocumentStoredFieldVisitor, selector);
+	  assertEquals(docs.size(),1);
+  }
+  
+  @Test
+  public void testFetchDocumentsWithoutFamily() throws CorruptIndexException, LockObtainFailedException, IOException{
+	  Selector selector = new Selector();
+	  ResetableDocumentStoredFieldVisitor resetableDocumentStoredFieldVisitor = new ResetableDocumentStoredFieldVisitor();
+	  List<Document> docs = BlurUtil.fetchDocuments(getReader(), new Term("a","b"), resetableDocumentStoredFieldVisitor, selector);
+	  assertEquals(docs.size(),2);
+  }
+
+  private void rm(File file) {
+    if (!file.exists()) {
+      return;
+    }
+    if (file.isDirectory()) {
+      for (File f : file.listFiles()) {
+        rm(f);
+      }
+    }
+    file.delete();
+  }
+
+  private void createShardDirs(int shardCount, FileSystem fileSystem, Path path) throws IOException {
+    for (int i = 0; i < shardCount; i++) {
+      fileSystem.mkdirs(new Path(path, BlurUtil.getShardName(BlurConstants.SHARD_PREFIX, i)));
+    }
+  }
+
+  private IndexReader getReader() throws CorruptIndexException, LockObtainFailedException, IOException {
+    RAMDirectory directory = new RAMDirectory();
+    IndexWriterConfig conf = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
+    IndexWriter writer = new IndexWriter(directory, conf);
+    Document doc = new Document();
+    doc.add(new Field("a", "b", Store.YES, Index.NOT_ANALYZED_NO_NORMS));
+    doc.add(new Field("family", "f1", Store.YES, Index.NOT_ANALYZED_NO_NORMS));
+    
+    Document doc1 = new Document();
+    doc1.add(new Field("a", "b", Store.YES, Index.NOT_ANALYZED_NO_NORMS));
+    writer.addDocument(doc);
+    writer.addDocument(doc1);
+    writer.close();
+    return IndexReader.open(directory);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-core/src/test/java/org/apache/blur/utils/TableShardCountCollapserTest.java
----------------------------------------------------------------------
diff --git a/blur-core/src/test/java/org/apache/blur/utils/TableShardCountCollapserTest.java b/blur-core/src/test/java/org/apache/blur/utils/TableShardCountCollapserTest.java
new file mode 100644
index 0000000..79361d8
--- /dev/null
+++ b/blur-core/src/test/java/org/apache/blur/utils/TableShardCountCollapserTest.java
@@ -0,0 +1,117 @@
+package org.apache.blur.utils;
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+
+import org.apache.blur.store.buffer.BufferStore;
+import org.apache.blur.store.hdfs.HdfsDirectory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.mapreduce.Partitioner;
+import org.apache.hadoop.mapreduce.lib.partition.HashPartitioner;
+import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.IntField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.TieredMergePolicy;
+import org.apache.lucene.util.Version;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Verifies that {@link TableShardCountCollapser} can merge 128 single-document
+ * shard indexes down to a smaller shard count while keeping every document in
+ * the shard its id partitions to.
+ */
+public class TableShardCountCollapserTest {
+
+  private static final int NUMBER_OF_BASE_SHARDS = 128;
+  private Configuration configuration;
+  private Path path;
+
+  @Before
+  public void setup() throws IOException {
+    BufferStore.init(128, 128);
+    configuration = new Configuration();
+    path = new Path("./target/tmp-shards-for-testing");
+    FileSystem fileSystem = path.getFileSystem(configuration);
+    // Wipe any shards left over from a previous run.
+    fileSystem.delete(path, true);
+    createShards(NUMBER_OF_BASE_SHARDS);
+  }
+
+  private void createShards(int shardCount) throws IOException {
+    for (int i = 0; i < shardCount; i++) {
+      String shardName = BlurUtil.getShardName(i);
+      createShard(configuration, i, new Path(path, shardName), shardCount);
+    }
+  }
+
+  @Test
+  public void testShardCountReducer() throws IOException {
+    assertData(NUMBER_OF_BASE_SHARDS);
+    TableShardCountCollapser t = new TableShardCountCollapser();
+    t.setConf(configuration);
+    t.setTablePath(path);
+    int totalShardCount = 4;
+    t.collapseShardsTo(totalShardCount);
+    assertData(totalShardCount);
+  }
+
+  /**
+   * Asserts every document in every shard partitions (via the same
+   * HashPartitioner used at write time) to the shard it lives in.
+   */
+  private void assertData(int totalShardCount) throws IOException {
+    Partitioner<IntWritable, IntWritable> partitioner = new HashPartitioner<IntWritable, IntWritable>();
+    for (int i = 0; i < totalShardCount; i++) {
+      HdfsDirectory directory = new HdfsDirectory(configuration, new Path(path, BlurUtil.getShardName(i)));
+      DirectoryReader reader = DirectoryReader.open(directory);
+      int numDocs = reader.numDocs();
+      for (int d = 0; d < numDocs; d++) {
+        Document document = reader.document(d);
+        IndexableField field = document.getField("id");
+        Integer id = (Integer) field.numericValue();
+        int partition = partitioner.getPartition(new IntWritable(id), null, totalShardCount);
+        assertEquals(i, partition);
+      }
+      reader.close();
+      // Closing the reader does not close the underlying directory.
+      directory.close();
+    }
+  }
+
+  private static void createShard(Configuration configuration, int i, Path path, int totalShardCount)
+      throws IOException {
+    HdfsDirectory hdfsDirectory = new HdfsDirectory(configuration, path);
+    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_42, new KeywordAnalyzer());
+    // NOTE(review): compound files are disabled -- presumably to match the
+    // on-disk layout the collapser expects; confirm.
+    TieredMergePolicy mergePolicy = (TieredMergePolicy) conf.getMergePolicy();
+    mergePolicy.setUseCompoundFile(false);
+    IndexWriter indexWriter = new IndexWriter(hdfsDirectory, conf);
+
+    // Sanity check: shard i must be the partition its own id hashes to.
+    Partitioner<IntWritable, IntWritable> partitioner = new HashPartitioner<IntWritable, IntWritable>();
+    int partition = partitioner.getPartition(new IntWritable(i), null, totalShardCount);
+    assertEquals(i, partition);
+
+    Document doc = getDoc(i);
+    indexWriter.addDocument(doc);
+    indexWriter.close();
+    // Closing the writer does not close the underlying directory.
+    hdfsDirectory.close();
+  }
+
+  private static Document getDoc(int i) {
+    Document document = new Document();
+    document.add(new IntField("id", i, Store.YES));
+    return document;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-core/src/test/java/org/apache/blur/utils/TermDocIterableTest.java
----------------------------------------------------------------------
diff --git a/blur-core/src/test/java/org/apache/blur/utils/TermDocIterableTest.java b/blur-core/src/test/java/org/apache/blur/utils/TermDocIterableTest.java
new file mode 100644
index 0000000..3f5a412
--- /dev/null
+++ b/blur-core/src/test/java/org/apache/blur/utils/TermDocIterableTest.java
@@ -0,0 +1,104 @@
+package org.apache.blur.utils;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import static org.apache.blur.lucene.LuceneVersionConstant.LUCENE_VERSION;
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.index.AtomicReader;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.DocsEnum;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.SlowCompositeReaderWrapper;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.store.RAMDirectory;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TermDocIterableTest {
+
+  private static final int BLOCKS = 10;
+  private static final int COUNT_PER_BLOCK = 100;
+  private AtomicReader reader;
+
+  @Before
+  public void setup() throws IOException {
+    reader = createIndexReader();
+  }
+
+  @Test
+  public void testTermDocIterable() throws IOException {
+    for (int pass = 0; pass < 1; pass++) {
+      for (int id = 0; id < BLOCKS; id++) {
+        DocsEnum termDocs = reader.termDocsEnum(new Term("id", Integer.toString(id)));
+        TermDocIterable iterable = new TermDocIterable(termDocs, reader);
+        int count = 0;
+        int i = 0;
+        long s = System.nanoTime();
+        for (Document document : iterable) {
+          count++;
+          assertEquals(i, Integer.parseInt(document.get("field")));
+          i++;
+        }
+        long time = System.nanoTime() - s;
+        System.out.println(time / 1000000.0 + " " + id + " " + pass);
+        assertEquals(COUNT_PER_BLOCK, count);
+      }
+    }
+  }
+
+  private AtomicReader createIndexReader() throws IOException {
+    RAMDirectory directory = new RAMDirectory();
+    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(LUCENE_VERSION, new StandardAnalyzer(LUCENE_VERSION)));
+    for (int i = 0; i < BLOCKS; i++) {
+      addDocumentBlock(i, COUNT_PER_BLOCK, writer);
+    }
+    writer.close();
+    return SlowCompositeReaderWrapper.wrap(DirectoryReader.open(directory));
+  }
+
+  private void addDocumentBlock(int id, int count, IndexWriter writer) throws IOException {
+    FieldType fieldType = new FieldType();
+    fieldType.setIndexed(true);
+    fieldType.setOmitNorms(true);
+    fieldType.setTokenized(false);
+    fieldType.setStored(true);
+
+    FieldType fieldTypeNoIndex = new FieldType();
+    fieldTypeNoIndex.setStored(true);
+    fieldTypeNoIndex.setIndexed(false);
+
+    for (int i = 0; i < count; i++) {
+      Document document = new Document();
+      document.add(new Field("id", Integer.toString(id), fieldType));
+      document.add(new Field("field", Integer.toString(i), fieldType));
+      for (int j = 0; j < 100; j++) {
+        document.add(new Field("field" + j, "testing here testing here testing here testing here testing here testing here testing here", fieldTypeNoIndex));
+      }
+      writer.addDocument(document);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-core/src/test/resources/log4j.xml
----------------------------------------------------------------------
diff --git a/blur-core/src/test/resources/log4j.xml b/blur-core/src/test/resources/log4j.xml
new file mode 100644
index 0000000..ed16114
--- /dev/null
+++ b/blur-core/src/test/resources/log4j.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
+	<appender name="console" class="org.apache.log4j.ConsoleAppender">
+		<param name="Target" value="System.out" />
+		<layout class="org.apache.log4j.PatternLayout">
+			<param name="ConversionPattern" value="%-5p %d{yyyyMMdd_HH:mm:ss:sss_z} [%t] %c{2}: %m%n" />
+		</layout>
+	</appender>
+	<logger name="org.apache.hadoop">
+    	<level value="ERROR" />
+	    <appender-ref ref="console"/>
+	</logger>
+	<root>
+		<priority value="info" />
+		<appender-ref ref="console" />
+	</root>
+</log4j:configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-gui/pom.xml
----------------------------------------------------------------------
diff --git a/blur-gui/pom.xml b/blur-gui/pom.xml
new file mode 100644
index 0000000..8e2b3ce
--- /dev/null
+++ b/blur-gui/pom.xml
@@ -0,0 +1,125 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor 
+	license agreements. See the NOTICE file distributed with this work for additional 
+	information regarding copyright ownership. The ASF licenses this file to 
+	you under the Apache License, Version 2.0 (the "License"); you may not use 
+	this file except in compliance with the License. You may obtain a copy of 
+	the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required 
+	by applicable law or agreed to in writing, software distributed under the 
+	License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS 
+	OF ANY KIND, either express or implied. See the License for the specific 
+	language governing permissions and limitations under the License. -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>org.apache.blur</groupId>
+		<artifactId>blur</artifactId>
+		<version>0.1.5</version>
+		<relativePath>../pom.xml</relativePath>
+	</parent>
+	<groupId>org.apache.blur</groupId>
+	<artifactId>blur-gui</artifactId>
+	<packaging>war</packaging>
+	<name>Blur GUI</name>
+
+	<properties>
+		<enableAssertions>false</enableAssertions>
+	</properties>
+
+	<dependencies>
+		<dependency>
+			<groupId>org.apache.blur</groupId>
+			<artifactId>blur-thrift</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.blur</groupId>
+			<artifactId>blur-store</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.blur</groupId>
+			<artifactId>blur-util</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>com.yammer.metrics</groupId>
+			<artifactId>metrics-servlet</artifactId>
+			<version>${metrics.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>log4j</groupId>
+			<artifactId>log4j</artifactId>
+			<version>${log4j.version}</version>
+			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+					<groupId>javax.mail</groupId>
+					<artifactId>mail</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>javax.jms</groupId>
+					<artifactId>jms</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.sun.jdmk</groupId>
+					<artifactId>jmxtools</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.sun.jmx</groupId>
+					<artifactId>jmxri</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+	</dependencies>
+
+	<repositories>
+		<repository>
+			<id>libdir</id>
+			<url>file://${basedir}/../lib</url>
+		</repository>
+	</repositories>
+
+	<build>
+		<pluginManagement>
+			<plugins>
+				<plugin>
+					<artifactId>maven-war-plugin</artifactId>
+					<configuration>
+						<archiveClasses>true</archiveClasses>
+					</configuration>
+				</plugin>
+				<plugin>
+					<groupId>org.apache.maven.plugins</groupId>
+					<artifactId>maven-compiler-plugin</artifactId>
+					<configuration>
+						<source>1.6</source>
+						<target>1.6</target>
+					</configuration>
+				</plugin>
+				<plugin>
+					<groupId>org.apache.maven.plugins</groupId>
+					<artifactId>maven-install-plugin</artifactId>
+					<executions>
+						<execution>
+							<phase>install</phase>
+							<goals>
+								<goal>install-file</goal>
+							</goals>
+							<configuration>
+								<packaging>jar</packaging>
+								<artifactId>${project.artifactId}</artifactId>
+								<groupId>${project.groupId}</groupId>
+								<version>${project.version}</version>
+								<file>
+									${project.build.directory}/${project.artifactId}-${project.version}/WEB-INF/lib/${project.artifactId}-${project.version}.jar
+								</file>
+							</configuration>
+						</execution>
+					</executions>
+				</plugin>
+			</plugins>
+		</pluginManagement>
+	</build>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-gui/src/main/java/org/apache/blur/gui/HttpJettyServer.java
----------------------------------------------------------------------
diff --git a/blur-gui/src/main/java/org/apache/blur/gui/HttpJettyServer.java b/blur-gui/src/main/java/org/apache/blur/gui/HttpJettyServer.java
new file mode 100644
index 0000000..cb8b16f
--- /dev/null
+++ b/blur-gui/src/main/java/org/apache/blur/gui/HttpJettyServer.java
@@ -0,0 +1,152 @@
+package org.apache.blur.gui;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.io.File;
+import java.io.IOException;
+import java.util.Properties;
+
+import org.apache.blur.log.Log;
+import org.apache.blur.log.LogFactory;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.servlet.ServletHolder;
+import org.mortbay.jetty.webapp.WebAppContext;
+
+import com.yammer.metrics.reporting.MetricsServlet;
+
+/**
+ * Starts up a Jetty server to run the utility gui.
+ * 
+ */
+public class HttpJettyServer {
+
+  private static final Log LOG = LogFactory.getLog(HttpJettyServer.class);
+
+  private Server server = null;
+
+  private WebAppContext context;
+
+  /**
+   * @param bindPort
+   *          port of the process that the gui is wrapping
+   * @param port
+   *          port to run gui on
+   * @param baseControllerPort
+   *          ports that service runs on
+   * @param baseShardPort
+   * @param baseGuiShardPort
+   *          port to run gui on
+   * @param baseGuiControllerPort
+   *          port to run gui on
+   * @param base
+   *          location of webapp to serve
+   * @param bm
+   *          metrics object for using.
+   * @throws IOException
+   */
+  public HttpJettyServer(int bindPort, int port, int baseControllerPort, int baseShardPort, int baseGuiControllerPort,
+      int baseGuiShardPort, String base) throws IOException {
+    server = new Server(port);
+
+    String logDir = System.getProperty("blur.logs.dir");
+    String logFile = System.getProperty("blur.log.file");
+    String blurLogFile = logDir + "/" + logFile;
+    System.setProperty("blur.gui.servicing.port", bindPort + "");
+    System.setProperty("blur.base.shard.port", baseShardPort + "");
+    System.setProperty("blur.base.controller.port", baseControllerPort + "");
+    System.setProperty("baseGuiShardPort", baseGuiShardPort + "");
+    System.setProperty("baseGuiControllerPort", baseGuiControllerPort + "");
+    System.setProperty("blur.gui.mode", base);
+    LOG.info("System props:" + System.getProperties().toString());
+
+    context = new WebAppContext();
+    String warPath = getWarFolder();
+    context.setWar(warPath);
+    context.setContextPath("/");
+    context.setParentLoaderPriority(true);
+    // context.addServlet(new ServletHolder(new LiveMetricsServlet()),
+    // "/livemetrics");
+    context.addServlet(new ServletHolder(new MetricsServlet()), "/metrics");
+    context.addServlet(new ServletHolder(new LogServlet(blurLogFile)), "/logs");
+
+    LOG.info("WEB GUI coming up for resource: " + base);
+    LOG.info("WEB GUI thinks its at: " + warPath);
+    LOG.info("WEB GUI log file being exposed: " + logDir == null ? "STDOUT" : blurLogFile);
+
+    server.setHandler(context);
+
+    try {
+      server.start();
+    } catch (Exception e) {
+      try {
+        server.stop();
+      } catch (Exception ex) {
+        LOG.error("Unknown error while trying to stop server during error on startup.", ex);
+      }
+      throw new IOException("cannot start Http server for " + base, e);
+    }
+    LOG.info("WEB GUI up on port: " + port);
+  }
+
+  public WebAppContext getContext() {
+    return context;
+  }
+
+  private static String findBlurGuiInClassPath() {
+    Properties properties = System.getProperties();
+    String cp = (String) properties.get("java.class.path");
+    String[] split = cp.split(":");
+    for (String s : split) {
+      if (s.endsWith(".war")) {
+        return s;
+      }
+    }
+    return null;
+  }
+
+  private String getWarFolder() {
+    String findBlurGuiInClassPath = findBlurGuiInClassPath();
+    if (findBlurGuiInClassPath != null) {
+      return findBlurGuiInClassPath;
+    }
+    String name = getClass().getName().replace('.', '/');
+    String classResource = "/" + name + ".class";
+    String pathToClassResource = getClass().getResource(classResource).toString();
+    pathToClassResource = pathToClassResource.replace('/', File.separatorChar);
+    int indexOfJar = pathToClassResource.indexOf(".jar");
+    if (indexOfJar < 0) {
+      int index = pathToClassResource.indexOf(name);
+      String pathToClasses = pathToClassResource.substring(0, index);
+      int indexOfProjectName = pathToClasses.indexOf("/blur-gui/");
+      return pathToClasses.substring(0, indexOfProjectName) + "/blur-gui/src/main/webapp";
+    }
+    return null;
+  }
+
+  public void close() {
+    if (server != null) {
+      try {
+        LOG.info("stopping web server");
+        server.stop();
+        LOG.info("stopped web server");
+      } catch (Exception e) {
+        e.printStackTrace();
+      }
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-gui/src/main/java/org/apache/blur/gui/LiveMetricsServlet.java
----------------------------------------------------------------------
diff --git a/blur-gui/src/main/java/org/apache/blur/gui/LiveMetricsServlet.java b/blur-gui/src/main/java/org/apache/blur/gui/LiveMetricsServlet.java
new file mode 100644
index 0000000..c91fbad
--- /dev/null
+++ b/blur-gui/src/main/java/org/apache/blur/gui/LiveMetricsServlet.java
@@ -0,0 +1,60 @@
+package org.apache.blur.gui;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.io.IOException;
+import java.io.PrintWriter;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+
+public class LiveMetricsServlet extends HttpServlet {
+
+  private static final long serialVersionUID = 1L;
+
+  
+  public LiveMetricsServlet() {}
+
+  protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+    /*
+     * The live metrics page is currently not being used
+     */
+     
+	/* 
+	response.setContentType("application/json");
+    PrintWriter out = response.getWriter();
+    out.write("{");
+    out.write("\"jvm\":{\"xLabel\":\"Time\",\"yLabel\":\"Heap (GB)\",\"lines\":");
+    heapMetrics.writeJson(out);
+    out.write("}");
+    out.write(",\"blur_calls\":{\"xLabel\":\"Time\",\"yLabel\":\"Rates\",\"lines\":");
+    queryMetrics.writeGraph1Json(out);
+    out.write("}");
+    out.write(",\"blur_recordRates\":{\"xLabel\":\"Time\",\"yLabel\":\"Rates\",\"lines\":");
+    queryMetrics.writeGraph2Json(out);
+    out.write("}");
+    out.write(",\"system\":{\"xLabel\":\"Time\",\"yLabel\":\"Load\",\"lines\":");
+    systemLoadMetrics.writeJson(out);
+    out.write("}");
+    out.write("}");
+    */
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-gui/src/main/java/org/apache/blur/gui/LogServlet.java
----------------------------------------------------------------------
diff --git a/blur-gui/src/main/java/org/apache/blur/gui/LogServlet.java b/blur-gui/src/main/java/org/apache/blur/gui/LogServlet.java
new file mode 100644
index 0000000..47681ae
--- /dev/null
+++ b/blur-gui/src/main/java/org/apache/blur/gui/LogServlet.java
@@ -0,0 +1,95 @@
+package org.apache.blur.gui;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.RandomAccessFile;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class LogServlet extends HttpServlet {
+
+  private static final long serialVersionUID = 1L;
+  private String filePath = null;
+  private int buffLen = 8192;
+
+  public LogServlet(String filePath) {
+    this.filePath = filePath;
+  }
+
+  protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+
+    response.setContentType("text/html");
+    PrintWriter out = response.getWriter();
+
+    File f = new File(filePath);
+    RandomAccessFile ram = new RandomAccessFile(f, "r");
+
+    String offsetStr = request.getParameter("offset");
+    long offset = -1;
+    if (offsetStr != null)
+      offset = Long.parseLong(offsetStr);
+
+    long start = 0;
+    long length = ram.length();
+    // figure out buffer
+    if (length < buffLen)
+      buffLen = new Long(length).intValue();
+
+    // use offset if passed in
+    if (offset >= 0)
+      start = offset;
+    else
+      start = length - buffLen;
+
+    // calc new offset
+    offset = start - buffLen;
+    if (offset < 0)
+      offset = 0;
+
+    // buffer
+    byte[] buff = new byte[buffLen];
+
+    ram.seek(start);
+    ram.read(buff);
+
+    String returnStr = new String(buff, "UTF-8").replaceAll("\n", "\n<br>");
+
+    out.write("<html><link href='style.css' rel='stylesheet' type='text/css' /><body>");
+    out.write("<a href='index.html'>home</a><br/>");
+    out.write("<p>File:<b> " + f.toString() + "</b> (" + start + "/" + length + ")</p>");
+    if (start != 0) {
+      out.write("<a href='logs?offset=" + 0 + "'>start</a>");
+      out.write(" <a href='logs?offset=" + offset + "'>prev</a>");
+    }
+    if (start + buffLen < length) {
+      out.write(" <a href='logs?offset=" + (start + buffLen) + "'>next</a>");
+      out.write(" <a href='logs?offset=" + ((length - buffLen > 0) ? (length - buffLen) : 0) + "'>end</a>");
+    }
+    out.write("<br/>");
+    out.write(returnStr);
+    out.write("</body></html>");
+
+    ram.close();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/b0e26648/blur-gui/src/main/webapp/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/blur-gui/src/main/webapp/WEB-INF/web.xml b/blur-gui/src/main/webapp/WEB-INF/web.xml
new file mode 100644
index 0000000..a35ac56
--- /dev/null
+++ b/blur-gui/src/main/webapp/WEB-INF/web.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<web-app xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
+	version="2.5">
+
+	<display-name>Blur GUI</display-name>
+</web-app>
\ No newline at end of file


Mime
View raw message