incubator-blur-commits mailing list archives

From: amccu...@apache.org
Subject: [6/8] Blur store and pom files have been updated, everything else is still broken.
Date: Tue, 16 Oct 2012 00:57:06 GMT
http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/39539e56/src/blur-store/src/test/java/org/apache/blur/store/BenchmarkDirectoryNrt.java
----------------------------------------------------------------------
diff --git a/src/blur-store/src/test/java/org/apache/blur/store/BenchmarkDirectoryNrt.java b/src/blur-store/src/test/java/org/apache/blur/store/BenchmarkDirectoryNrt.java
deleted file mode 100644
index 4f1ccdd..0000000
--- a/src/blur-store/src/test/java/org/apache/blur/store/BenchmarkDirectoryNrt.java
+++ /dev/null
@@ -1,160 +0,0 @@
-package org.apache.blur.store;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import java.io.IOException;
-import java.lang.management.ManagementFactory;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.UUID;
-import java.util.concurrent.ArrayBlockingQueue;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import org.apache.blur.metrics.BlurMetrics;
-import org.apache.blur.store.blockcache.BlockCache;
-import org.apache.blur.store.blockcache.BlockDirectory;
-import org.apache.blur.store.blockcache.BlockDirectoryCache;
-import org.apache.blur.store.hdfs.HdfsDirectory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.Field.Index;
-import org.apache.lucene.document.Field.Store;
-import org.apache.lucene.index.CorruptIndexException;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.TieredMergePolicy;
-import org.apache.lucene.store.NoLockFactory;
-
-
-import static org.apache.blur.lucene.LuceneConstant.LUCENE_VERSION;
-
-public class BenchmarkDirectoryNrt {
-
-  public static void main(String[] args) throws IOException, InterruptedException {
-    int blockSize = BlockDirectory.BLOCK_SIZE;
-    long totalMemory = BlockCache._128M * 2;
-    int slabSize = (int) (totalMemory / 2);
-
-    BlockCache blockCache = new BlockCache(new BlurMetrics(new Configuration()), true, totalMemory, slabSize, blockSize);
-    BlurMetrics metrics = new BlurMetrics(new Configuration());
-    BlockDirectoryCache cache = new BlockDirectoryCache(blockCache, metrics);
-
-    Path p = new Path("hdfs://localhost:9000/bench");
-    FileSystem fs = FileSystem.get(p.toUri(), new Configuration());
-    fs.delete(p, true);
-
-    final HdfsDirectory dir = new HdfsDirectory(p);
-    dir.setLockFactory(NoLockFactory.getNoLockFactory());
-
-    BlockDirectory directory = new BlockDirectory("test", dir, cache);
-
-    while (true) {
-      IndexWriterConfig conf = new IndexWriterConfig(LUCENE_VERSION, new StandardAnalyzer(LUCENE_VERSION));
-      TieredMergePolicy mergePolicy = (TieredMergePolicy) conf.getMergePolicy();
-      mergePolicy.setUseCompoundFile(false);
-      int count = 0;
-      int max = 10000;
-      long s = System.currentTimeMillis();
-      IndexWriter writer = new IndexWriter(directory, conf);
-      long as = System.currentTimeMillis();
-      BlockingQueue<Collection<Document>> queue = new ArrayBlockingQueue<Collection<Document>>(1024);
-      Indexer indexer = new Indexer(queue, writer);
-      new Thread(indexer).start();
-      for (int i = 0; i < 1000000; i++) {
-        if (count >= max) {
-          double aseconds = (System.currentTimeMillis() - as) / 1000.0;
-          double arate = i / aseconds;
-          double seconds = (System.currentTimeMillis() - s) / 1000.0;
-          double rate = count / seconds;
-          System.out.println("Total [" + i + "] Rate [" + rate + "] AvgRate [" + arate + "] Doc count [" + indexer.getReader().numDocs() + "]");
-          count = 0;
-          s = System.currentTimeMillis();
-        }
-        queue.put(Arrays.asList(getDoc()));
-        count++;
-      }
-      writer.close();
-    }
-  }
-
-  private static class Indexer implements Runnable {
-
-    private BlockingQueue<Collection<Document>> _queue;
-    private AtomicBoolean _running = new AtomicBoolean(true);
-    private IndexWriter _writer;
-    private IndexReader _reader;
-
-    public Indexer(BlockingQueue<Collection<Document>> queue, IndexWriter writer) throws CorruptIndexException, IOException {
-      _queue = queue;
-      _writer = writer;
-      _reader = IndexReader.open(_writer, true);
-    }
-
-    public IndexReader getReader() {
-      return _reader;
-    }
-
-    @Override
-    public void run() {
-      long cycleTime = 50000000;
-      long start = System.nanoTime();
-      while (_running.get()) {
-        try {
-          Collection<Document> docs = _queue.take();
-          _writer.addDocuments(docs);
-          if (start + cycleTime < System.nanoTime()) {
-            IndexReader newReader = IndexReader.open(_writer, true);
-            _reader.close();
-            _reader = newReader;
-            start = System.nanoTime();
-          }
-        } catch (InterruptedException e) {
-          return;
-        } catch (CorruptIndexException e) {
-          e.printStackTrace();
-          return;
-        } catch (IOException e) {
-          e.printStackTrace();
-          return;
-        }
-      }
-    }
-  }
-
-  private static Document getDoc() {
-    Document document = new Document();
-    document.add(new Field("name", UUID.randomUUID().toString(), Store.YES, Index.ANALYZED_NO_NORMS));
-    return document;
-  }
-
-  public static int getNumberOfSlabs(float heapPercentage, int numberOfBlocksPerSlab, int blockSize) {
-    long max = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax();
-    long targetBytes = (long) (max * heapPercentage);
-    int slabSize = numberOfBlocksPerSlab * blockSize;
-    int slabs = (int) (targetBytes / slabSize);
-    if (slabs == 0) {
-      throw new RuntimeException("Minimum heap size is 512m!");
-    }
-    return slabs;
-  }
-}

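The deleted benchmark was written against Lucene 3.x's near-real-time (NRT) API: IndexReader.open(writer, true) and the reopen-on-a-timer loop in Indexer.run() have no direct equivalent after the Lucene upgrade this commit series targets. A minimal sketch of the same 50ms reopen cycle in Lucene 4.x terms (the class and field names here are illustrative, not part of the commit):

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;

// Sketch only: Lucene 4.x replaces IndexReader.open(writer, true) with
// DirectoryReader.open(writer, applyAllDeletes); reopening goes through
// DirectoryReader.openIfChanged, which returns null when nothing changed.
class NrtReader {
  private static final long CYCLE_NANOS = 50000000L; // 50ms, as in the deleted benchmark
  private DirectoryReader reader;
  private long last = System.nanoTime();

  NrtReader(IndexWriter writer) throws IOException {
    reader = DirectoryReader.open(writer, true);
  }

  DirectoryReader maybeReopen(IndexWriter writer) throws IOException {
    if (last + CYCLE_NANOS < System.nanoTime()) {
      DirectoryReader newReader = DirectoryReader.openIfChanged(reader, writer, true);
      if (newReader != null) {
        reader.close();
        reader = newReader;
      }
      last = System.nanoTime();
    }
    return reader;
  }
}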
http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/39539e56/src/blur-store/src/test/java/org/apache/blur/store/HdfsDirectoryTest.java
----------------------------------------------------------------------
diff --git a/src/blur-store/src/test/java/org/apache/blur/store/HdfsDirectoryTest.java b/src/blur-store/src/test/java/org/apache/blur/store/HdfsDirectoryTest.java
index ef9e4cb..1ed257a 100644
--- a/src/blur-store/src/test/java/org/apache/blur/store/HdfsDirectoryTest.java
+++ b/src/blur-store/src/test/java/org/apache/blur/store/HdfsDirectoryTest.java
@@ -29,15 +29,16 @@ import java.util.Random;
 import java.util.Set;
 
 import org.apache.blur.store.hdfs.HdfsDirectory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.RAMDirectory;
 import org.junit.Before;
 import org.junit.Test;
 
-
 public class HdfsDirectoryTest {
 
   private static final int MAX_NUMBER_OF_WRITES = 10000;
@@ -57,21 +58,21 @@ public class HdfsDirectoryTest {
     rm(file);
     URI uri = new File(file, "hdfs").toURI();
     Path hdfsDirPath = new Path(uri.toString());
-    directory = new HdfsDirectory(hdfsDirPath);
+    Configuration conf = new Configuration();
+    directory = new HdfsDirectory(conf, hdfsDirPath);
     seed = new Random().nextLong();
-    // seed = 7392202912208392081L;
     random = new Random(seed);
   }
 
   @Test
   public void testWritingAndReadingAFile() throws IOException {
 
-    IndexOutput output = directory.createOutput("testing.test");
+    IndexOutput output = directory.createOutput("testing.test", IOContext.DEFAULT);
     output.writeInt(12345);
     output.flush();
     output.close();
 
-    IndexInput input = directory.openInput("testing.test");
+    IndexInput input = directory.openInput("testing.test", IOContext.DEFAULT);
     assertEquals(12345, input.readInt());
     input.close();
 
@@ -81,7 +82,7 @@ public class HdfsDirectoryTest {
 
     assertEquals(4, directory.fileLength("testing.test"));
 
-    IndexInput input1 = directory.openInput("testing.test");
+    IndexInput input1 = directory.openInput("testing.test", IOContext.DEFAULT);
 
     IndexInput input2 = (IndexInput) input1.clone();
     assertEquals(12345, input2.readInt());
@@ -109,7 +110,7 @@ public class HdfsDirectoryTest {
   }
 
   private void testEof(String name, Directory directory, long length) throws IOException {
-    IndexInput input = directory.openInput(name);
+    IndexInput input = directory.openInput(name, IOContext.DEFAULT);
     input.seek(length);
     try {
       input.readByte();
@@ -119,7 +120,7 @@ public class HdfsDirectoryTest {
   }
 
   @Test
-  public void testRandomAccessWrites() throws IOException {
+  public void testWrites() throws IOException {
     int i = 0;
     try {
       Set<String> names = new HashSet<String>();
@@ -140,9 +141,8 @@ public class HdfsDirectoryTest {
 
   private void assertInputsEquals(String name, Directory fsDir, HdfsDirectory hdfs) throws IOException {
     int reads = random.nextInt(MAX_NUMBER_OF_READS);
-    int buffer = random.nextInt(MAX_BUFFER_SIZE - MIN_BUFFER_SIZE) + MIN_BUFFER_SIZE;
-    IndexInput fsInput = fsDir.openInput(name, buffer);
-    IndexInput hdfsInput = hdfs.openInput(name, buffer);
+    IndexInput fsInput = fsDir.openInput(name, IOContext.DEFAULT);
+    IndexInput hdfsInput = hdfs.openInput(name, IOContext.DEFAULT);
     assertEquals(fsInput.length(), hdfsInput.length());
     int fileLength = (int) fsInput.length();
     for (int i = 0; i < reads; i++) {
@@ -168,9 +168,9 @@ public class HdfsDirectoryTest {
   private void createFile(String name, Directory fsDir, HdfsDirectory hdfs) throws IOException {
     int writes = random.nextInt(MAX_NUMBER_OF_WRITES);
     int fileLength = random.nextInt(MAX_FILE_SIZE - MIN_FILE_SIZE) + MIN_FILE_SIZE;
-    IndexOutput fsOutput = fsDir.createOutput(name);
+    IndexOutput fsOutput = fsDir.createOutput(name, IOContext.DEFAULT);
     fsOutput.setLength(fileLength);
-    IndexOutput hdfsOutput = hdfs.createOutput(name);
+    IndexOutput hdfsOutput = hdfs.createOutput(name, IOContext.DEFAULT);
     hdfsOutput.setLength(fileLength);
     for (int i = 0; i < writes; i++) {
       byte[] buf = new byte[random.nextInt(Math.min(MAX_BUFFER_SIZE - MIN_BUFFER_SIZE, fileLength)) + MIN_BUFFER_SIZE];

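The change running through this file is the Lucene 4.x Directory API: createOutput and openInput now take an IOContext, and the buffer-size overload of openInput is gone (hence the rewritten assertInputsEquals). A self-contained round trip in the new style, mirroring what testWritingAndReadingAFile now does, here against a RAMDirectory (a sketch, not part of the commit):

import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;

public class IOContextRoundTrip {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();

    // Lucene 4.x: every createOutput/openInput call carries an IOContext hint.
    IndexOutput out = dir.createOutput("testing.test", IOContext.DEFAULT);
    out.writeInt(12345);
    out.close();

    IndexInput in = dir.openInput("testing.test", IOContext.DEFAULT);
    if (in.readInt() != 12345) {
      throw new AssertionError("round trip failed");
    }
    in.close();
    dir.close();
  }
}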
http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/39539e56/src/blur-store/src/test/java/org/apache/blur/store/UsingHdfsDir.java
----------------------------------------------------------------------
diff --git a/src/blur-store/src/test/java/org/apache/blur/store/UsingHdfsDir.java b/src/blur-store/src/test/java/org/apache/blur/store/UsingHdfsDir.java
deleted file mode 100644
index 302a7a1..0000000
--- a/src/blur-store/src/test/java/org/apache/blur/store/UsingHdfsDir.java
+++ /dev/null
@@ -1,112 +0,0 @@
-package org.apache.blur.store;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import static org.apache.blur.lucene.LuceneConstant.LUCENE_VERSION;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import java.util.UUID;
-
-import org.apache.blur.store.hdfs.HdfsDirectory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.Field.Index;
-import org.apache.lucene.document.Field.Store;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.TermEnum;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.store.NoLockFactory;
-
-
-public class UsingHdfsDir {
-
-  public static void main(String[] args) throws IOException {
-
-    // FileSystem fs = FileSystem.getLocal(new Configuration());
-    // Path p = new Path("file:///tmp/testdir");
-
-    Path p = new Path("hdfs://localhost:9000/test-dir");
-    FileSystem fs = FileSystem.get(p.toUri(), new Configuration());
-    fs.delete(p, true);
-
-    final HdfsDirectory directory = new HdfsDirectory(p);
-    directory.setLockFactory(NoLockFactory.getNoLockFactory());
-
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(LUCENE_VERSION, new StandardAnalyzer(LUCENE_VERSION)));
-    for (int i = 0; i < 100000; i++) {
-      writer.addDocument(getDoc());
-    }
-    writer.close();
-
-    IndexReader reader = IndexReader.open(directory);
-    TermEnum terms = reader.terms();
-    while (terms.next()) {
-      System.out.println(terms.term());
-    }
-    terms.close();
-
-    IndexSearcher searcher = new IndexSearcher(reader);
-    TopDocs topDocs = searcher.search(new TermQuery(new Term("name", "ffff")), 10);
-    System.out.println(topDocs.totalHits);
-
-    reader.close();
-
-    List<String> files = new ArrayList<String>(Arrays.asList(directory.listAll()));
-    Collections.sort(files, new Comparator<String>() {
-      @Override
-      public int compare(String o1, String o2) {
-        try {
-          long fileLength1 = directory.fileLength(o1);
-          long fileLength2 = directory.fileLength(o2);
-          if (fileLength1 == fileLength2) {
-            return o1.compareTo(o2);
-          }
-          return (int) (fileLength2 - fileLength1);
-        } catch (IOException e) {
-          throw new RuntimeException(e);
-        }
-      }
-    });
-
-    for (String file : files) {
-      System.out.println(file + " " + directory.fileLength(file));
-    }
-
-    directory.close();
-  }
-
-  private static Document getDoc() {
-    Document document = new Document();
-    document.add(new Field("name", UUID.randomUUID().toString(), Store.YES, Index.ANALYZED_NO_NORMS));
-    return document;
-  }
-
-}

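UsingHdfsDir leaned on two more 3.x-only APIs: IndexReader.open(Directory) and the TermEnum loop used to dump terms. Should the utility be revived, the 4.x flavor of that term dump would look roughly like this (a sketch assuming a "name" field, as the deleted code indexed):

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;

public class DumpTerms {
  // Lucene 4.x replacement for reader.terms()/TermEnum: per-field Terms/TermsEnum.
  static void dump(Directory directory) throws Exception {
    DirectoryReader reader = DirectoryReader.open(directory);
    Terms terms = MultiFields.getTerms(reader, "name");
    if (terms != null) {
      TermsEnum it = terms.iterator(null);
      BytesRef term;
      while ((term = it.next()) != null) {
        System.out.println(term.utf8ToString());
      }
    }
    reader.close();
  }
}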
http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/39539e56/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockCacheTest.java
----------------------------------------------------------------------
diff --git a/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockCacheTest.java b/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockCacheTest.java
index 86087b0..7bfb9ec 100644
--- a/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockCacheTest.java
+++ b/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockCacheTest.java
@@ -22,25 +22,20 @@ import java.util.Arrays;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicLong;
 
-import org.apache.blur.metrics.BlurMetrics;
-import org.apache.blur.store.blockcache.BlockCache;
-import org.apache.blur.store.blockcache.BlockCacheKey;
-import org.apache.hadoop.conf.Configuration;
 import org.junit.Test;
 
-
 public class BlockCacheTest {
   @Test
   public void testBlockCache() {
     int blocksInTest = 2000000;
-    int blockSize = 1024;
-
-    int slabSize = blockSize * 4096;
+    int blockSize = BlockCache._8K;
+    int slabSize = blockSize * 1024;
     long totalMemory = 2 * slabSize;
 
-    BlockCache blockCache = new BlockCache(new BlurMetrics(new Configuration()), true, totalMemory, slabSize, blockSize);
-    byte[] buffer = new byte[1024];
+    BlockCache blockCache = new BlockCache(true, totalMemory, slabSize);
+    byte[] buffer = new byte[blockSize];
     Random random = new Random();
+
     byte[] newData = new byte[blockSize];
     AtomicLong hitsInCache = new AtomicLong();
     AtomicLong missesInCache = new AtomicLong();

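The hunk above shows BlockCache's narrowed constructor: the BlurMetrics argument and the explicit block size are gone, leaving the three-argument form new BlockCache(true, totalMemory, slabSize) with the block size fixed at BlockCache._8K. One way to size those arguments off the JVM heap, reusing the getNumberOfSlabs arithmetic from the benchmark deleted earlier in this patch (the 10% figure is illustrative):

import java.lang.management.ManagementFactory;

public class CacheSizing {
  public static void main(String[] args) {
    // Same arithmetic as the deleted BenchmarkDirectoryNrt.getNumberOfSlabs.
    long maxHeap = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax();
    long targetBytes = (long) (maxHeap * 0.1f); // cache ~10% of heap (illustrative)
    int blockSize = 8192;                       // BlockCache._8K in the updated test
    int slabSize = 1024 * blockSize;            // 1024 blocks per slab, as in the hunk
    int slabs = (int) (targetBytes / slabSize);
    if (slabs == 0) {
      throw new RuntimeException("Minimum heap size is 512m!");
    }
    long totalMemory = (long) slabs * slabSize;
    System.out.println("slabs=" + slabs + " totalMemory=" + totalMemory);
    // Hypothetical wiring, matching the constructor shown in this diff:
    // BlockCache blockCache = new BlockCache(true, totalMemory, slabSize);
  }
}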
http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/39539e56/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockDirectoryTest.java
----------------------------------------------------------------------
diff --git a/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockDirectoryTest.java b/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockDirectoryTest.java
index adc998b..7e229f6 100644
--- a/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockDirectoryTest.java
+++ b/src/blur-store/src/test/java/org/apache/blur/store/blockcache/BlockDirectoryTest.java
@@ -28,6 +28,7 @@ import org.apache.blur.store.blockcache.BlockDirectory;
 import org.apache.blur.store.blockcache.Cache;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.junit.Before;
@@ -110,7 +111,7 @@ public class BlockDirectoryTest {
   }
 
   private void testEof(String name, Directory directory, long length) throws IOException {
-    IndexInput input = directory.openInput(name);
+    IndexInput input = directory.openInput(name, IOContext.DEFAULT);
     input.seek(length);
     try {
       input.readByte();
@@ -137,9 +138,8 @@ public class BlockDirectoryTest {
 
   private void assertInputsEquals(String name, Directory fsDir, Directory hdfs) throws IOException {
     int reads = random.nextInt(MAX_NUMBER_OF_READS);
-    int buffer = random.nextInt(MAX_BUFFER_SIZE - MIN_BUFFER_SIZE) + MIN_BUFFER_SIZE;
-    IndexInput fsInput = fsDir.openInput(name, buffer);
-    IndexInput hdfsInput = hdfs.openInput(name, buffer);
+    IndexInput fsInput = fsDir.openInput(name, IOContext.DEFAULT);
+    IndexInput hdfsInput = hdfs.openInput(name, IOContext.DEFAULT);
     assertEquals(fsInput.length(), hdfsInput.length());
     int fileLength = (int) fsInput.length();
     for (int i = 0; i < reads; i++) {
@@ -165,9 +165,9 @@ public class BlockDirectoryTest {
   private void createFile(String name, Directory fsDir, Directory hdfs) throws IOException {
     int writes = random.nextInt(MAX_NUMBER_OF_WRITES);
     int fileLength = random.nextInt(MAX_FILE_SIZE - MIN_FILE_SIZE) + MIN_FILE_SIZE;
-    IndexOutput fsOutput = fsDir.createOutput(name);
+    IndexOutput fsOutput = fsDir.createOutput(name, IOContext.DEFAULT);
     fsOutput.setLength(fileLength);
-    IndexOutput hdfsOutput = hdfs.createOutput(name);
+    IndexOutput hdfsOutput = hdfs.createOutput(name, IOContext.DEFAULT);
     hdfsOutput.setLength(fileLength);
     for (int i = 0; i < writes; i++) {
       byte[] buf = new byte[random.nextInt(Math.min(MAX_BUFFER_SIZE - MIN_BUFFER_SIZE, fileLength)) + MIN_BUFFER_SIZE];

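Beyond the same IOContext threading seen in HdfsDirectoryTest, this file keeps its testEof helper, whose contract the truncated hunk obscures: seek to the file's length, and the next readByte() must fail. Spelled out standalone (a sketch against RAMDirectory; the real test runs the check against both the FS and block directories):

import java.io.IOException;

import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;

public class EofContract {
  public static void main(String[] args) throws IOException {
    RAMDirectory dir = new RAMDirectory();
    IndexOutput out = dir.createOutput("f", IOContext.DEFAULT);
    out.writeInt(12345); // four bytes
    out.close();

    IndexInput in = dir.openInput("f", IOContext.DEFAULT);
    in.seek(dir.fileLength("f")); // position exactly at EOF
    try {
      in.readByte();
      throw new AssertionError("read past EOF should have failed");
    } catch (IOException e) {
      // expected: reading past the end raises an IOException (an EOFException in practice)
    } finally {
      in.close();
      dir.close();
    }
  }
}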
http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/39539e56/src/blur-store/src/test/java/org/apache/blur/store/compressed/CompressedFieldDataDirectoryTest.java
----------------------------------------------------------------------
diff --git a/src/blur-store/src/test/java/org/apache/blur/store/compressed/CompressedFieldDataDirectoryTest.java b/src/blur-store/src/test/java/org/apache/blur/store/compressed/CompressedFieldDataDirectoryTest.java
deleted file mode 100644
index f9072b7..0000000
--- a/src/blur-store/src/test/java/org/apache/blur/store/compressed/CompressedFieldDataDirectoryTest.java
+++ /dev/null
@@ -1,143 +0,0 @@
-package org.apache.blur.store.compressed;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import static org.apache.blur.lucene.LuceneConstant.LUCENE_VERSION;
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-
-import org.apache.blur.store.compressed.CompressedFieldDataDirectory;
-import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.lucene.analysis.KeywordAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.Field.Index;
-import org.apache.lucene.document.Field.Store;
-import org.apache.lucene.index.CorruptIndexException;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.TieredMergePolicy;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.IndexInput;
-import org.apache.lucene.store.LockObtainFailedException;
-import org.apache.lucene.store.RAMDirectory;
-import org.junit.Test;
-
-
-public class CompressedFieldDataDirectoryTest {
-
-  private static final CompressionCodec COMPRESSION_CODEC = CompressedFieldDataDirectory.DEFAULT_COMPRESSION;
-
-  @Test
-  public void testCompressedFieldDataDirectoryBasic() throws CorruptIndexException, IOException {
-    RAMDirectory dir = new RAMDirectory();
-    CompressedFieldDataDirectory directory = new CompressedFieldDataDirectory(dir, COMPRESSION_CODEC);
-    IndexWriterConfig config = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
-    TieredMergePolicy mergePolicy = (TieredMergePolicy) config.getMergePolicy();
-    mergePolicy.setUseCompoundFile(false);
-    IndexWriter writer = new IndexWriter(directory, config);
-    addDocs(writer, 0, 10);
-    writer.close();
-    testFetches(directory);
-  }
-
-  @Test
-  public void testCompressedFieldDataDirectoryTransition() throws CorruptIndexException, LockObtainFailedException, IOException {
-    RAMDirectory dir = new RAMDirectory();
-
-    IndexWriterConfig config = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
-    TieredMergePolicy mergePolicy = (TieredMergePolicy) config.getMergePolicy();
-    mergePolicy.setUseCompoundFile(false);
-    IndexWriter writer = new IndexWriter(dir, config);
-
-    addDocs(writer, 0, 5);
-    writer.close();
-
-    CompressedFieldDataDirectory directory = new CompressedFieldDataDirectory(dir, COMPRESSION_CODEC);
-    config = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
-    mergePolicy = (TieredMergePolicy) config.getMergePolicy();
-    mergePolicy.setUseCompoundFile(false);
-    writer = new IndexWriter(directory, config);
-    addDocs(writer, 5, 5);
-    writer.close();
-    testFetches(directory);
-  }
-
-  @Test
-  public void testCompressedFieldDataDirectoryMixedBlockSize() throws CorruptIndexException, LockObtainFailedException, IOException {
-    RAMDirectory dir = new RAMDirectory();
-    IndexWriterConfig config = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
-    TieredMergePolicy mergePolicy = (TieredMergePolicy) config.getMergePolicy();
-    mergePolicy.setUseCompoundFile(false);
-    IndexWriter writer = new IndexWriter(dir, config);
-    addDocs(writer, 0, 5);
-    writer.close();
-
-    CompressedFieldDataDirectory directory1 = new CompressedFieldDataDirectory(dir, COMPRESSION_CODEC, 2);
-    config = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
-    mergePolicy = (TieredMergePolicy) config.getMergePolicy();
-    mergePolicy.setUseCompoundFile(false);
-    writer = new IndexWriter(directory1, config);
-    addDocs(writer, 5, 2);
-    writer.close();
-
-    CompressedFieldDataDirectory directory2 = new CompressedFieldDataDirectory(dir, COMPRESSION_CODEC, 4);
-    config = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
-    mergePolicy = (TieredMergePolicy) config.getMergePolicy();
-    mergePolicy.setUseCompoundFile(false);
-    writer = new IndexWriter(directory2, config);
-    addDocs(writer, 7, 3);
-    writer.close();
-    testFetches(directory2);
-    testFileLengths(directory2);
-  }
-
-  private void testFileLengths(Directory dir) throws IOException {
-    String[] listAll = dir.listAll();
-    for (String name : listAll) {
-      IndexInput input = dir.openInput(name);
-      assertEquals(input.length(), dir.fileLength(name));
-      input.close();
-    }
-
-  }
-
-  private void testFetches(Directory directory) throws CorruptIndexException, IOException {
-    IndexReader reader = IndexReader.open(directory);
-    for (int i = 0; i < reader.maxDoc(); i++) {
-      String id = Integer.toString(i);
-      Document document = reader.document(i);
-      assertEquals(id, document.get("id"));
-    }
-  }
-
-  private void addDocs(IndexWriter writer, int starting, int amount) throws CorruptIndexException, IOException {
-    for (int i = 0; i < amount; i++) {
-      int index = starting + i;
-      writer.addDocument(getDoc(index));
-    }
-  }
-
-  private Document getDoc(int index) {
-    Document document = new Document();
-    document.add(new Field("id", Integer.toString(index), Store.YES, Index.NOT_ANALYZED_NO_NORMS));
-    return document;
-  }
-
-}

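This deleted test is 3.x-bound end to end: IndexReader.open(Directory), the Field(String, String, Store, Index) constructor, and Field.Index itself were all dropped in the Lucene 4.x line. If the test returns along with the compressed directory, the stored-id round trip it verified would look roughly like this in 4.x terms (a sketch; StringField stands in for Index.NOT_ANALYZED_NO_NORMS, and KeywordAnalyzer now lives in the analyzers-common module):

import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

public class StoredIdRoundTrip {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir,
        new IndexWriterConfig(Version.LUCENE_40, new KeywordAnalyzer()));
    for (int i = 0; i < 10; i++) {
      Document doc = new Document();
      // 4.x: StringField indexes the whole value as one token, unanalyzed.
      doc.add(new StringField("id", Integer.toString(i), Store.YES));
      writer.addDocument(doc);
    }
    writer.close();

    DirectoryReader reader = DirectoryReader.open(dir);
    for (int i = 0; i < reader.maxDoc(); i++) {
      // Stored-field fetch, as the deleted testFetches did.
      if (!Integer.toString(i).equals(reader.document(i).get("id"))) {
        throw new AssertionError("doc " + i + " mismatch");
      }
    }
    reader.close();
  }
}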
http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/39539e56/src/blur-testsuite/pom.xml
----------------------------------------------------------------------
diff --git a/src/blur-testsuite/pom.xml b/src/blur-testsuite/pom.xml
index f899c8d..bb2fa4e 100644
--- a/src/blur-testsuite/pom.xml
+++ b/src/blur-testsuite/pom.xml
@@ -22,7 +22,7 @@ under the License.
 	<parent>
 		<groupId>org.apache.blur</groupId>
 		<artifactId>blur</artifactId>
-		<version>0.1.3</version>
+		<version>0.2.0</version>
 	</parent>
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>org.apache.blur</groupId>
@@ -34,17 +34,17 @@ under the License.
 		<dependency>
 			<groupId>org.apache.blur</groupId>
 			<artifactId>blur-core</artifactId>
-			<version>0.1.3</version>
+			<version>0.2.0</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.blur</groupId>
 			<artifactId>blur-mapred</artifactId>
-			<version>0.1.3</version>
+			<version>0.2.0</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.blur</groupId>
 			<artifactId>blur-thrift</artifactId>
-			<version>0.1.3</version>
+			<version>0.2.0</version>
 		</dependency>
 		<dependency>
 			<groupId>junit</groupId>
@@ -55,7 +55,7 @@ under the License.
 		<dependency>
 			<groupId>org.apache.zookeeper</groupId>
 			<artifactId>zookeeper</artifactId>
-			<version>3.3.4</version>
+			<version>3.4.4</version>
 			<scope>provided</scope>
 		</dependency>
 		<dependency>
@@ -89,10 +89,6 @@ under the License.
 			<id>libdir</id>
 			<url>file://${basedir}/../lib</url>
 		</repository>
-		<repository>
-			<id>cloudera</id>
-			<url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
-		</repository>
 	</repositories>
 
 	<build>

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/39539e56/src/blur-thrift/pom.xml
----------------------------------------------------------------------
diff --git a/src/blur-thrift/pom.xml b/src/blur-thrift/pom.xml
index aa52fe8..c55fe03 100644
--- a/src/blur-thrift/pom.xml
+++ b/src/blur-thrift/pom.xml
@@ -22,7 +22,7 @@ under the License.
 	<parent>
 		<groupId>org.apache.blur</groupId>
 		<artifactId>blur</artifactId>
-		<version>0.1.3</version>
+		<version>0.2.0</version>
 	</parent>
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>org.apache.blur</groupId>
@@ -34,12 +34,12 @@ under the License.
 		<dependency>
 			<groupId>org.apache.blur</groupId>
 			<artifactId>blur-util</artifactId>
-			<version>0.1.3</version>
+			<version>0.2.0</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.thrift</groupId>
 			<artifactId>libthrift</artifactId>
-			<version>0.7.0</version>
+			<version>0.8.0</version>
 		</dependency>
 		<dependency>
 			<groupId>log4j</groupId>

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/39539e56/src/blur-util/pom.xml
----------------------------------------------------------------------
diff --git a/src/blur-util/pom.xml b/src/blur-util/pom.xml
index 3417276..2a4900d 100644
--- a/src/blur-util/pom.xml
+++ b/src/blur-util/pom.xml
@@ -22,7 +22,7 @@ under the License.
 	<parent>
 		<groupId>org.apache.blur</groupId>
 		<artifactId>blur</artifactId>
-		<version>0.1.3</version>
+		<version>0.2.0</version>
 	</parent>
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>org.apache.blur</groupId>
@@ -34,7 +34,7 @@ under the License.
 		<dependency>
 			<groupId>org.apache.zookeeper</groupId>
 			<artifactId>zookeeper</artifactId>
-			<version>3.3.4</version>
+			<version>3.4.4</version>
 			<scope>provided</scope>
 		</dependency>
 		<dependency>
@@ -46,7 +46,7 @@ under the License.
 		<dependency>
 			<groupId>org.apache.hadoop</groupId>
 			<artifactId>hadoop-core</artifactId>
-			<version>0.20.2-cdh3u5</version>
+			<version>1.0.3</version>
 			<scope>compile</scope>
 		</dependency>
 		<dependency>
@@ -92,10 +92,6 @@ under the License.
 			<id>libdir</id>
 			<url>file://${basedir}/../lib</url>
 		</repository>
-		<repository>
-			<id>cloudera</id>
-			<url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
-		</repository>
 	</repositories>
 
 	<build>

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/39539e56/src/pom.xml
----------------------------------------------------------------------
diff --git a/src/pom.xml b/src/pom.xml
index e1a32d2..add96da 100644
--- a/src/pom.xml
+++ b/src/pom.xml
@@ -23,7 +23,7 @@ under the License.
 	<groupId>org.apache.blur</groupId>
 	<artifactId>blur</artifactId>
 	<packaging>pom</packaging>
-	<version>0.1.3</version>
+	<version>0.2.0</version>
 	<name>Blur</name>
 	
 	<modules>

