lucene-java-commits mailing list archives

From: mikemcc...@apache.org
Subject: svn commit: r921532 [3/8] - in /lucene/java/trunk: ./ contrib/analyzers/common/src/test/org/apache/lucene/analysis/query/ contrib/analyzers/common/src/test/org/apache/lucene/analysis/shingle/ contrib/ant/src/java/org/apache/lucene/ant/ contrib/benchmar...
Date: Wed, 10 Mar 2010 19:45:36 GMT
Modified: lucene/java/trunk/src/test/org/apache/lucene/TestMergeSchedulerExternal.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/TestMergeSchedulerExternal.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/TestMergeSchedulerExternal.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/TestMergeSchedulerExternal.java Wed Mar 10 19:45:31 2010
@@ -18,9 +18,9 @@ package org.apache.lucene;
  */
 import java.io.IOException;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.MergePolicy;
 import org.apache.lucene.index.ConcurrentMergeScheduler;
 import org.apache.lucene.document.Document;
@@ -86,14 +86,15 @@ public class TestMergeSchedulerExternal 
     Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
     doc.add(idField);
     
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setMergeScheduler(new MyMergeScheduler())
-        .setMaxBufferedDocs(2).setRAMBufferSizeMB(
-            IndexWriterConfig.DISABLE_AUTO_FLUSH));
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    MyMergeScheduler ms = new MyMergeScheduler();
+    writer.setMergeScheduler(ms);
+    writer.setMaxBufferedDocs(2);
+    writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
     for(int i=0;i<20;i++)
       writer.addDocument(doc);
 
-    ((MyMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
+    ms.sync();
     writer.close();
     
     assertTrue(mergeThreadCreated);

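[Editor's note] For readers tracking the API change, here is a minimal, self-contained sketch (an illustration, not part of the commit) of the pre-IndexWriterConfig style that the hunk above reverts to: the analyzer, create flag, and field-length limit go straight into the IndexWriter constructor, and flush tuning goes through setters on the writer. Version.LUCENE_CURRENT stands in for the test framework's TEST_VERSION_CURRENT constant.

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

public class LegacyWriterSetup {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();

    // Analyzer, create flag and field-length limit are constructor arguments...
    IndexWriter writer = new IndexWriter(dir,
        new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
        true, IndexWriter.MaxFieldLength.LIMITED);

    // ...and tuning happens through setters on the writer itself.
    writer.setMergeScheduler(new ConcurrentMergeScheduler());
    writer.setMaxBufferedDocs(2);
    writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);

    Document doc = new Document();
    doc.add(new Field("id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
    writer.addDocument(doc);
    writer.close();
  }
}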
Modified: lucene/java/trunk/src/test/org/apache/lucene/TestSearch.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/TestSearch.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/TestSearch.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/TestSearch.java Wed Mar 10 19:45:31 2010
@@ -70,14 +70,14 @@ public class TestSearch extends LuceneTe
 
 
     private void doTestSearch(PrintWriter out, boolean useCompoundFile)
-    throws Exception {
+    throws Exception
+    {
       Directory directory = new RAMDirectory();
       Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
-      IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(analyzer));
-      LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setUseCompoundDocStore(useCompoundFile);
+      IndexWriter writer = new IndexWriter(directory, analyzer, true, 
+                                           IndexWriter.MaxFieldLength.LIMITED);
+
+      writer.setUseCompoundFile(useCompoundFile);
 
       String[] docs = {
         "a b c d e",

Modified: lucene/java/trunk/src/test/org/apache/lucene/TestSearchForDuplicates.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/TestSearchForDuplicates.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/TestSearchForDuplicates.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/TestSearchForDuplicates.java Wed Mar 10 19:45:31 2010
@@ -78,11 +78,10 @@ public class TestSearchForDuplicates ext
   private void doTest(PrintWriter out, boolean useCompoundFiles) throws Exception {
       Directory directory = new RAMDirectory();
       Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
-      IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(analyzer));
-      LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFiles);
-      lmp.setUseCompoundDocStore(useCompoundFiles);
+      IndexWriter writer = new IndexWriter(directory, analyzer, true,
+                                           IndexWriter.MaxFieldLength.LIMITED);
+
+      writer.setUseCompoundFile(useCompoundFiles);
 
       final int MAX_DOCS = 225;
 

Modified: lucene/java/trunk/src/test/org/apache/lucene/TestSnapshotDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/TestSnapshotDeletionPolicy.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/TestSnapshotDeletionPolicy.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/TestSnapshotDeletionPolicy.java Wed Mar 10 19:45:31 2010
@@ -31,7 +31,6 @@ import org.apache.lucene.store.IndexInpu
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.index.IndexCommit;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.TestIndexWriter;
@@ -68,10 +67,9 @@ public class TestSnapshotDeletionPolicy 
     Directory dir = new MockRAMDirectory();
 
     SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(
-        new StandardAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(dp)
-        .setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
+    // Force frequent flushes
+    writer.setMaxBufferedDocs(2);
     Document doc = new Document();
     doc.add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
     for(int i=0;i<7;i++) {
@@ -85,9 +83,7 @@ public class TestSnapshotDeletionPolicy 
     writer.close();
     copyFiles(dir, cp);
     
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-        .setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT))
-        .setIndexDeletionPolicy(dp));
+    writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
     copyFiles(dir, cp);
     for(int i=0;i<7;i++) {
       writer.addDocument(doc);
@@ -99,9 +95,7 @@ public class TestSnapshotDeletionPolicy 
     writer.close();
     copyFiles(dir, cp);
     dp.release();
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-        .setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT))
-        .setIndexDeletionPolicy(dp));
+    writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.close();
     try {
       copyFiles(dir, cp);
@@ -117,10 +111,10 @@ public class TestSnapshotDeletionPolicy 
     final long stopTime = System.currentTimeMillis() + 1000;
 
     SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
-    final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(
-        new StandardAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(dp)
-        .setMaxBufferedDocs(2));
+    final IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
+
+    // Force frequent flushes
+    writer.setMaxBufferedDocs(2);
 
     final Thread t = new Thread() {
         @Override

Modified: lucene/java/trunk/src/test/org/apache/lucene/analysis/TestCachingTokenFilter.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/analysis/TestCachingTokenFilter.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/analysis/TestCachingTokenFilter.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/analysis/TestCachingTokenFilter.java Wed Mar 10 19:45:31 2010
@@ -27,7 +27,6 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermPositions;
 import org.apache.lucene.store.Directory;
@@ -38,7 +37,7 @@ public class TestCachingTokenFilter exte
   
   public void testCaching() throws IOException {
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     TokenStream stream = new TokenStream() {
       private int index = 0;

Modified: lucene/java/trunk/src/test/org/apache/lucene/analysis/TestKeywordAnalyzer.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/analysis/TestKeywordAnalyzer.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/analysis/TestKeywordAnalyzer.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/analysis/TestKeywordAnalyzer.java Wed Mar 10 19:45:31 2010
@@ -24,7 +24,6 @@ import org.apache.lucene.document.Docume
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermDocs;
 import org.apache.lucene.queryParser.QueryParser;
@@ -42,9 +41,9 @@ public class TestKeywordAnalyzer extends
   protected void setUp() throws Exception {
     super.setUp();
     directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new SimpleAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter writer = new IndexWriter(directory,
+                                         new SimpleAnalyzer(TEST_VERSION_CURRENT),
+                                         true, IndexWriter.MaxFieldLength.LIMITED);
 
     Document doc = new Document();
     doc.add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.NOT_ANALYZED));
@@ -71,7 +70,7 @@ public class TestKeywordAnalyzer extends
 
   public void testMutipleDocument() throws Exception {
     RAMDirectory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new KeywordAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir,new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.ANALYZED));
     writer.addDocument(doc);

Modified: lucene/java/trunk/src/test/org/apache/lucene/collation/CollationTestBase.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/collation/CollationTestBase.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/collation/CollationTestBase.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/collation/CollationTestBase.java Wed Mar 10 19:45:31 2010
@@ -23,7 +23,6 @@ import org.apache.lucene.analysis.PerFie
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.IndexSearcher;
@@ -70,7 +69,8 @@ public class CollationTestBase extends L
                                             String firstEnd, String secondBeg,
                                             String secondEnd) throws Exception {
     RAMDirectory ramDir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter
+      (ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("content", "\u0633\u0627\u0628", 
                       Field.Store.YES, Field.Index.ANALYZED));
@@ -101,7 +101,8 @@ public class CollationTestBase extends L
                                             String firstEnd, String secondBeg,
                                             String secondEnd) throws Exception {
     RAMDirectory ramDir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter
+      (ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
 
     // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
@@ -124,11 +125,13 @@ public class CollationTestBase extends L
     searcher.close();
   }
 
-  public void testFarsiTermRangeQuery(Analyzer analyzer, String firstBeg,
-      String firstEnd, String secondBeg, String secondEnd) throws Exception {
+  public void testFarsiTermRangeQuery
+    (Analyzer analyzer, String firstBeg, String firstEnd, 
+     String secondBeg, String secondEnd) throws Exception {
 
     RAMDirectory farsiIndex = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(farsiIndex, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter
+      (farsiIndex, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("content", "\u0633\u0627\u0628", 
                       Field.Store.YES, Field.Index.ANALYZED));
@@ -175,7 +178,8 @@ public class CollationTestBase extends L
     analyzer.addAnalyzer("France", franceAnalyzer);
     analyzer.addAnalyzer("Sweden", swedenAnalyzer);
     analyzer.addAnalyzer("Denmark", denmarkAnalyzer);
-    IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter 
+      (indexStore, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 
     // document data:
     // the tracer field is used to determine which document was hit

Modified: lucene/java/trunk/src/test/org/apache/lucene/document/TestBinaryDocument.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/document/TestBinaryDocument.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/document/TestBinaryDocument.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/document/TestBinaryDocument.java Wed Mar 10 19:45:31 2010
@@ -2,9 +2,9 @@ package org.apache.lucene.document;
 
 import org.apache.lucene.util.LuceneTestCase;
 
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.store.MockRAMDirectory;
 
 /**
@@ -27,7 +27,8 @@ import org.apache.lucene.store.MockRAMDi
 /**
  * Tests {@link Document} class.
  */
-public class TestBinaryDocument extends LuceneTestCase {
+public class TestBinaryDocument extends LuceneTestCase
+{
 
   String binaryValStored = "this text will be stored as a byte array in the index";
   String binaryValCompressed = "this text will be also stored and compressed as a byte array in the index";
@@ -57,7 +58,7 @@ public class TestBinaryDocument extends 
     
     /** add the doc to a ram index */
     MockRAMDirectory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(doc);
     writer.close();
     
@@ -82,7 +83,9 @@ public class TestBinaryDocument extends 
     dir.close();
   }
   
-  public void testCompressionTools() throws Exception {
+  public void testCompressionTools()
+    throws Exception
+  {
     Fieldable binaryFldCompressed = new Field("binaryCompressed", CompressionTools.compress(binaryValCompressed.getBytes()));
     Fieldable stringFldCompressed = new Field("stringCompressed", CompressionTools.compressString(binaryValCompressed));
     
@@ -93,7 +96,7 @@ public class TestBinaryDocument extends 
     
     /** add the doc to a ram index */
     MockRAMDirectory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(doc);
     writer.close();
     

Modified: lucene/java/trunk/src/test/org/apache/lucene/document/TestDocument.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/document/TestDocument.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/document/TestDocument.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/document/TestDocument.java Wed Mar 10 19:45:31 2010
@@ -2,7 +2,6 @@ package org.apache.lucene.document;
 
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
@@ -152,11 +151,10 @@ public class TestDocument extends Lucene
      *
      * @throws Exception on error
      */
-    public void testGetValuesForIndexedDocument() throws Exception {
+    public void testGetValuesForIndexedDocument() throws Exception
+    {
         RAMDirectory dir = new RAMDirectory();
-        IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
-        TEST_VERSION_CURRENT)));
+        IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
         writer.addDocument(makeDocumentWithFields());
         writer.close();
 
@@ -227,9 +225,7 @@ public class TestDocument extends Lucene
       doc.add(new Field("keyword", "test", Field.Store.YES, Field.Index.NOT_ANALYZED));
 
       RAMDirectory dir = new RAMDirectory();
-      IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
-        TEST_VERSION_CURRENT)));
+      IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
       writer.addDocument(doc);
       field.setValue("id2");
       writer.addDocument(doc);

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/DocHelper.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/DocHelper.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/DocHelper.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/DocHelper.java Wed Mar 10 19:45:31 2010
@@ -232,9 +232,10 @@ class DocHelper {
    * @param doc
    * @throws IOException
    */ 
-  public static SegmentInfo writeDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc) throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(analyzer).setSimilarity(similarity));
+  public static SegmentInfo writeDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc) throws IOException
+  {
+    IndexWriter writer = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
+    writer.setSimilarity(similarity);
     //writer.setUseCompoundFile(false);
     writer.addDocument(doc);
     writer.commit();

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java Wed Mar 10 19:45:31 2010
@@ -20,9 +20,9 @@ package org.apache.lucene.index;
 import java.io.IOException;
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.MockRAMDirectory;
@@ -39,28 +39,27 @@ public class TestAddIndexesNoOptimize ex
 
     IndexWriter writer = null;
 
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
+    writer = newWriter(dir, true);
     // add 100 documents
     addDocs(writer, 100);
     assertEquals(100, writer.maxDoc());
     writer.close();
 
-    writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
+    writer = newWriter(aux, true);
+    writer.setUseCompoundFile(false); // use one without a compound file
     // add 40 documents in separate files
     addDocs(writer, 40);
     assertEquals(40, writer.maxDoc());
     writer.close();
 
-    writer = newWriter(aux2, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
+    writer = newWriter(aux2, true);
     // add 40 documents in compound files
     addDocs2(writer, 50);
     assertEquals(50, writer.maxDoc());
     writer.close();
 
     // test doc count before segments are merged
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, false);
     assertEquals(100, writer.maxDoc());
     writer.addIndexesNoOptimize(new Directory[] { aux, aux2 });
     assertEquals(190, writer.maxDoc());
@@ -74,14 +73,14 @@ public class TestAddIndexesNoOptimize ex
 
     // now add another set in.
     Directory aux3 = new RAMDirectory();
-    writer = newWriter(aux3, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    writer = newWriter(aux3, true);
     // add 40 documents
     addDocs(writer, 40);
     assertEquals(40, writer.maxDoc());
     writer.close();
 
     // test doc count before segments are merged/index is optimized
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, false);
     assertEquals(190, writer.maxDoc());
     writer.addIndexesNoOptimize(new Directory[] { aux3 });
     assertEquals(230, writer.maxDoc());
@@ -95,7 +94,7 @@ public class TestAddIndexesNoOptimize ex
     verifyTermDocs(dir, new Term("content", "bbb"), 50);
 
     // now optimize it.
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, false);
     writer.optimize();
     writer.close();
 
@@ -108,11 +107,11 @@ public class TestAddIndexesNoOptimize ex
 
     // now add a single document
     Directory aux4 = new RAMDirectory();
-    writer = newWriter(aux4, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    writer = newWriter(aux4, true);
     addDocs2(writer, 1);
     writer.close();
 
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, false);
     assertEquals(230, writer.maxDoc());
     writer.addIndexesNoOptimize(new Directory[] { aux4 });
     assertEquals(231, writer.maxDoc());
@@ -130,7 +129,7 @@ public class TestAddIndexesNoOptimize ex
     Directory aux = new RAMDirectory();
 
     setUpDirs(dir, aux);
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = newWriter(dir, false);
     writer.addIndexesNoOptimize(new Directory[] {aux});
 
     // Adds 10 docs, then replaces them with another 10
@@ -167,7 +166,7 @@ public class TestAddIndexesNoOptimize ex
     Directory aux = new RAMDirectory();
 
     setUpDirs(dir, aux);
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = newWriter(dir, false);
 
     // Adds 10 docs, then replaces them with another 10
     // docs, so 10 pending deletes:
@@ -206,7 +205,7 @@ public class TestAddIndexesNoOptimize ex
     Directory aux = new RAMDirectory();
 
     setUpDirs(dir, aux);
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = newWriter(dir, false);
 
     // Adds 10 docs, then replaces them with another 10
     // docs, so 10 pending deletes:
@@ -247,25 +246,25 @@ public class TestAddIndexesNoOptimize ex
 
     IndexWriter writer = null;
 
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    writer = newWriter(dir, true);
     // add 100 documents
     addDocs(writer, 100);
     assertEquals(100, writer.maxDoc());
     writer.close();
 
-    writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
+    writer = newWriter(aux, true);
+    writer.setUseCompoundFile(false); // use one without a compound file
+    writer.setMaxBufferedDocs(1000);
     // add 140 documents in separate files
     addDocs(writer, 40);
     writer.close();
-    writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
+    writer = newWriter(aux, true);
+    writer.setUseCompoundFile(false); // use one without a compound file
+    writer.setMaxBufferedDocs(1000);
     addDocs(writer, 100);
     writer.close();
 
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, false);
     try {
       // cannot add self
       writer.addIndexesNoOptimize(new Directory[] { aux, dir });
@@ -291,10 +290,9 @@ public class TestAddIndexesNoOptimize ex
 
     setUpDirs(dir, aux);
 
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(
-        10));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
+    IndexWriter writer = newWriter(dir, false);
+    writer.setMaxBufferedDocs(10);
+    writer.setMergeFactor(4);
     addDocs(writer, 10);
 
     writer.addIndexesNoOptimize(new Directory[] { aux });
@@ -316,8 +314,9 @@ public class TestAddIndexesNoOptimize ex
 
     setUpDirs(dir, aux);
 
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(9));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
+    IndexWriter writer = newWriter(dir, false);
+    writer.setMaxBufferedDocs(9);
+    writer.setMergeFactor(4);
     addDocs(writer, 2);
 
     writer.addIndexesNoOptimize(new Directory[] { aux });
@@ -339,10 +338,9 @@ public class TestAddIndexesNoOptimize ex
 
     setUpDirs(dir, aux);
 
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(
-        10));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
+    IndexWriter writer = newWriter(dir, false);
+    writer.setMaxBufferedDocs(10);
+    writer.setMergeFactor(4);
 
     writer.addIndexesNoOptimize(new Directory[] { aux, new RAMDirectory(aux) });
     assertEquals(1060, writer.maxDoc());
@@ -369,10 +367,9 @@ public class TestAddIndexesNoOptimize ex
     assertEquals(10, reader.numDocs());
     reader.close();
 
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
-        .setMaxBufferedDocs(4));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
+    IndexWriter writer = newWriter(dir, false);
+    writer.setMaxBufferedDocs(4);
+    writer.setMergeFactor(4);
 
     writer.addIndexesNoOptimize(new Directory[] { aux, new RAMDirectory(aux) });
     assertEquals(1020, writer.maxDoc());
@@ -393,10 +390,9 @@ public class TestAddIndexesNoOptimize ex
 
     setUpDirs(dir, aux);
 
-    IndexWriter writer = newWriter(aux2, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(
-        100));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
+    IndexWriter writer = newWriter(aux2, true);
+    writer.setMaxBufferedDocs(100);
+    writer.setMergeFactor(10);
     writer.addIndexesNoOptimize(new Directory[] { aux });
     assertEquals(30, writer.maxDoc());
     assertEquals(3, writer.getSegmentCount());
@@ -416,9 +412,9 @@ public class TestAddIndexesNoOptimize ex
     assertEquals(22, reader.numDocs());
     reader.close();
 
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-        .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(6));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
+    writer = newWriter(dir, false);
+    writer.setMaxBufferedDocs(6);
+    writer.setMergeFactor(4);
 
     writer.addIndexesNoOptimize(new Directory[] { aux, aux2 });
     assertEquals(1025, writer.maxDoc());
@@ -429,9 +425,9 @@ public class TestAddIndexesNoOptimize ex
     verifyNumDocs(dir, 1025);
   }
 
-  private IndexWriter newWriter(Directory dir, IndexWriterConfig conf)
+  private IndexWriter newWriter(Directory dir, boolean create)
       throws IOException {
-    final IndexWriter writer = new IndexWriter(dir, conf);
+    final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), create, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMergePolicy(new LogDocMergePolicy(writer));
     return writer;
   }
@@ -475,25 +471,26 @@ public class TestAddIndexesNoOptimize ex
   private void setUpDirs(Directory dir, Directory aux) throws IOException {
     IndexWriter writer = null;
 
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
+    writer = newWriter(dir, true);
+    writer.setMaxBufferedDocs(1000);
     // add 1000 documents in 1 segment
     addDocs(writer, 1000);
     assertEquals(1000, writer.maxDoc());
     assertEquals(1, writer.getSegmentCount());
     writer.close();
 
-    writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(100));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
+    writer = newWriter(aux, true);
+    writer.setUseCompoundFile(false); // use one without a compound file
+    writer.setMaxBufferedDocs(100);
+    writer.setMergeFactor(10);
     // add 30 documents in 3 segments
     for (int i = 0; i < 3; i++) {
       addDocs(writer, 10);
       writer.close();
-      writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(100));
-      ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
-      ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
-      ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
+      writer = newWriter(aux, false);
+      writer.setUseCompoundFile(false); // use one without a compound file
+      writer.setMaxBufferedDocs(100);
+      writer.setMergeFactor(10);
     }
     assertEquals(30, writer.maxDoc());
     assertEquals(3, writer.getSegmentCount());
@@ -504,19 +501,18 @@ public class TestAddIndexesNoOptimize ex
   public void testHangOnClose() throws IOException {
 
     Directory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(5));
-    LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(writer);
-    lmp.setUseCompoundFile(false);
-    lmp.setUseCompoundDocStore(false);
-    lmp.setMergeFactor(100);
-    writer.setMergePolicy(lmp);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    writer.setMergePolicy(new LogByteSizeMergePolicy(writer));
+    writer.setMaxBufferedDocs(5);
+    writer.setUseCompoundFile(false);
+    writer.setMergeFactor(100);
 
     Document doc = new Document();
     doc.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
                       Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
     for(int i=0;i<60;i++)
       writer.addDocument(doc);
-
+    writer.setMaxBufferedDocs(200);
     Document doc2 = new Document();
     doc2.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
                       Field.Index.NO));
@@ -531,13 +527,13 @@ public class TestAddIndexesNoOptimize ex
     writer.close();
 
     Directory dir2 = new MockRAMDirectory();
-    writer = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT).setMergeScheduler(new SerialMergeScheduler()));
-    lmp = new LogByteSizeMergePolicy(writer);
+    writer = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(writer);
     lmp.setMinMergeMB(0.0001);
-    lmp.setUseCompoundFile(false);
-    lmp.setUseCompoundDocStore(false);
-    lmp.setMergeFactor(4);
     writer.setMergePolicy(lmp);
+    writer.setMergeFactor(4);
+    writer.setUseCompoundFile(false);
+    writer.setMergeScheduler(new SerialMergeScheduler());
     writer.addIndexesNoOptimize(new Directory[] {dir});
     writer.close();
     dir.close();
@@ -548,16 +544,14 @@ public class TestAddIndexesNoOptimize ex
   // is respected when copying tail segments
   public void testTargetCFS() throws IOException {
     Directory dir = new RAMDirectory();
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
+    IndexWriter writer = newWriter(dir, true);
+    writer.setUseCompoundFile(false);
     addDocs(writer, 1);
     writer.close();
 
     Directory other = new RAMDirectory();
-    writer = newWriter(other, new IndexWriterConfig(TEST_VERSION_CURRENT));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(true);
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(true);
+    writer = newWriter(other, true);
+    writer.setUseCompoundFile(true);
     writer.addIndexesNoOptimize(new Directory[] {dir});
     assertTrue(writer.newestSegment().getUseCompoundFile());
     writer.close();

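[Editor's note] The same delegation recurs throughout the TestAddIndexesNoOptimize hunks above: compound-file usage, buffered-doc count, and merge factor are configured on the writer itself rather than on a LogMergePolicy obtained from an IndexWriterConfig. A hedged sketch of that older tuning style, again substituting Version.LUCENE_CURRENT for TEST_VERSION_CURRENT:

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LogDocMergePolicy;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

public class LegacyMergeTuning {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir,
        new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
        true, IndexWriter.MaxFieldLength.UNLIMITED);

    // The merge policy is still set explicitly; compound-file and
    // merge-factor tuning is then delegated through the writer's setters.
    writer.setMergePolicy(new LogDocMergePolicy(writer));
    writer.setUseCompoundFile(false);
    writer.setMaxBufferedDocs(100);
    writer.setMergeFactor(10);

    writer.close();
  }
}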
Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestAtomicUpdate.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestAtomicUpdate.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestAtomicUpdate.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestAtomicUpdate.java Wed Mar 10 19:45:31 2010
@@ -19,19 +19,20 @@ package org.apache.lucene.index;
 import org.apache.lucene.util.*;
 import org.apache.lucene.store.*;
 import org.apache.lucene.document.*;
+import org.apache.lucene.analysis.*;
 
 import java.util.Random;
 import java.io.File;
 import java.io.IOException;
 
 public class TestAtomicUpdate extends LuceneTestCase {
-  
-  private static final class MockIndexWriter extends IndexWriter {
+  private static final Analyzer ANALYZER = new SimpleAnalyzer(TEST_VERSION_CURRENT);
+  private Random RANDOM;
 
-    static Random RANDOM;
+  public class MockIndexWriter extends IndexWriter {
 
-    public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
-      super(dir, conf);
+    public MockIndexWriter(Directory dir, Analyzer a, boolean create, IndexWriter.MaxFieldLength mfl) throws IOException {
+      super(dir, a, create, mfl);
     }
 
     @Override
@@ -125,8 +126,9 @@ public class TestAtomicUpdate extends Lu
 
     TimedThread[] threads = new TimedThread[4];
 
-    IndexWriter writer = new MockIndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(7));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(3);
+    IndexWriter writer = new MockIndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer.setMaxBufferedDocs(7);
+    writer.setMergeFactor(3);
 
     // Establish a base index of 100 docs:
     for(int i=0;i<100;i++) {
@@ -181,7 +183,7 @@ public class TestAtomicUpdate extends Lu
     FSDirectory.
   */
   public void testAtomicUpdates() throws Exception {
-    MockIndexWriter.RANDOM = newRandom();
+    RANDOM = newRandom();
     Directory directory;
 
     // First in a RAM directory:

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java Wed Mar 10 19:45:31 2010
@@ -32,12 +32,12 @@ import java.util.ArrayList;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipFile;
 
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.document.FieldSelector;
 import org.apache.lucene.document.FieldSelectorResult;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
@@ -52,7 +52,8 @@ import org.apache.lucene.util._TestUtil;
   against it, and add documents to it.
 */
 
-public class TestBackwardsCompatibility extends LuceneTestCase {
+public class TestBackwardsCompatibility extends LuceneTestCase
+{
 
   // Uncomment these cases & run them on an older Lucene
   // version, to generate an index to test backwards
@@ -214,7 +215,7 @@ public class TestBackwardsCompatibility 
         hasTested29++;
       }
 
-      IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+      IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
       w.optimize();
       w.close();
 
@@ -354,7 +355,7 @@ public class TestBackwardsCompatibility 
     Directory dir = FSDirectory.open(new File(dirName));
 
     // open writer
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
 
     // add 10 docs
     for(int i=0;i<10;i++) {
@@ -398,7 +399,7 @@ public class TestBackwardsCompatibility 
     searcher.close();
 
     // optimize
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.optimize();
     writer.close();
 
@@ -448,7 +449,7 @@ public class TestBackwardsCompatibility 
     searcher.close();
 
     // optimize
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.optimize();
     writer.close();
 
@@ -470,9 +471,9 @@ public class TestBackwardsCompatibility 
     dirName = fullDir(dirName);
 
     Directory dir = FSDirectory.open(new File(dirName));
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(doCFS);
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(doCFS);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    writer.setUseCompoundFile(doCFS);
+    writer.setMaxBufferedDocs(10);
     
     for(int i=0;i<35;i++) {
       addDoc(writer, i);
@@ -481,9 +482,9 @@ public class TestBackwardsCompatibility 
     writer.close();
 
     // open fresh writer so we get no prx file in the added segment
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(doCFS);
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(doCFS);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+    writer.setUseCompoundFile(doCFS);
+    writer.setMaxBufferedDocs(10);
     addNoProxDoc(writer);
     writer.close();
 
@@ -508,7 +509,8 @@ public class TestBackwardsCompatibility 
     try {
       Directory dir = FSDirectory.open(new File(fullDir(outputDir)));
 
-      IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setRAMBufferSizeMB(16.0);
       for(int i=0;i<35;i++) {
         addDoc(writer, i);
       }

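[Editor's note] The TestBackwardsCompatibility hunks above also exercise the legacy create/append distinction that IndexWriterConfig.OpenMode replaced. A small sketch of the three constructor forms seen in those hunks (an illustration, with Version.LUCENE_CURRENT substituted for TEST_VERSION_CURRENT):

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

public class LegacyOpenModes {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();
    WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);

    // create=true replaces whatever is in the directory (OpenMode.CREATE in the new API).
    IndexWriter writer = new IndexWriter(dir, analyzer, true,
        IndexWriter.MaxFieldLength.UNLIMITED);
    writer.close();

    // create=false appends to an existing index (OpenMode.APPEND).
    writer = new IndexWriter(dir, analyzer, false,
        IndexWriter.MaxFieldLength.UNLIMITED);
    writer.close();

    // The three-argument form opens an existing index, creating it if
    // absent (OpenMode.CREATE_OR_APPEND).
    writer = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
    writer.close();
  }
}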
Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestCheckIndex.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestCheckIndex.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestCheckIndex.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestCheckIndex.java Wed Mar 10 19:45:31 2010
@@ -25,6 +25,7 @@ import java.util.ArrayList;
 
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.store.MockRAMDirectory;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.util.Constants;
@@ -33,7 +34,9 @@ public class TestCheckIndex extends Luce
 
   public void testDeletedDocs() throws IOException {
     MockRAMDirectory dir = new MockRAMDirectory();
-    IndexWriter writer  = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
+    IndexWriter writer  = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, 
+                                          IndexWriter.MaxFieldLength.LIMITED);      
+    writer.setMaxBufferedDocs(2);
     Document doc = new Document();
     doc.add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
     for(int i=0;i<19;i++) {

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java Wed Mar 10 19:45:31 2010
@@ -17,17 +17,20 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 
 import org.apache.lucene.util.LuceneTestCase;
 import java.io.IOException;
 
 public class TestConcurrentMergeScheduler extends LuceneTestCase {
   
+  private static final Analyzer ANALYZER = new SimpleAnalyzer(TEST_VERSION_CURRENT);
+
   private static class FailOnlyOnFlush extends MockRAMDirectory.Failure {
     boolean doFail;
     boolean hitExc;
@@ -65,7 +68,10 @@ public class TestConcurrentMergeSchedule
     FailOnlyOnFlush failure = new FailOnlyOnFlush();
     directory.failOn(failure);
 
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
+    ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+    writer.setMergeScheduler(cms);
+    writer.setMaxBufferedDocs(2);
     Document doc = new Document();
     Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
     doc.add(idField);
@@ -109,7 +115,9 @@ public class TestConcurrentMergeSchedule
 
     RAMDirectory directory = new MockRAMDirectory();
 
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
+    ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+    writer.setMergeScheduler(cms);
 
     LogDocMergePolicy mp = new LogDocMergePolicy(writer);
     writer.setMergePolicy(mp);
@@ -149,10 +157,12 @@ public class TestConcurrentMergeSchedule
 
     RAMDirectory directory = new MockRAMDirectory();
 
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
 
     for(int iter=0;iter<7;iter++) {
+      ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+      writer.setMergeScheduler(cms);
+      writer.setMaxBufferedDocs(2);
 
       for(int j=0;j<21;j++) {
         Document doc = new Document();
@@ -164,9 +174,7 @@ public class TestConcurrentMergeSchedule
       TestIndexWriter.assertNoUnreferencedFiles(directory, "testNoExtraFiles");
 
       // Reopen
-      writer = new IndexWriter(directory, new IndexWriterConfig(
-          TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
-          .setMaxBufferedDocs(2));
+      writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);
     }
 
     writer.close();
@@ -181,10 +189,13 @@ public class TestConcurrentMergeSchedule
     Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
     doc.add(idField);
 
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
+    IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
 
     for(int iter=0;iter<10;iter++) {
+      ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+      writer.setMergeScheduler(cms);
+      writer.setMaxBufferedDocs(2);
+      writer.setMergeFactor(100);
 
       for(int j=0;j<201;j++) {
         idField.setValue(Integer.toString(iter*201+j));
@@ -199,7 +210,7 @@ public class TestConcurrentMergeSchedule
 
       // Force a bunch of merge threads to kick off so we
       // stress out aborting them on close:
-      ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(3);
+      writer.setMergeFactor(3);
       writer.addDocument(doc);
       writer.commit();
 
@@ -210,8 +221,7 @@ public class TestConcurrentMergeSchedule
       reader.close();
 
       // Reopen
-      writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
-      ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
+      writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);
     }
     writer.close();
 

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestCrash.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestCrash.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestCrash.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestCrash.java Wed Mar 10 19:45:31 2010
@@ -20,6 +20,7 @@ package org.apache.lucene.index;
 import java.io.IOException;
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.NoLockFactory;
 import org.apache.lucene.document.Document;
@@ -34,8 +35,10 @@ public class TestCrash extends LuceneTes
   private IndexWriter initIndex(MockRAMDirectory dir) throws IOException {
     dir.setLockFactory(NoLockFactory.getNoLockFactory());
 
-    IndexWriter writer  = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
-    ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
+    IndexWriter writer  = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+    //writer.setMaxBufferedDocs(2);
+    writer.setMaxBufferedDocs(10);
+    ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
 
     Document doc = new Document();
     doc.add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));
@@ -48,7 +51,7 @@ public class TestCrash extends LuceneTes
 
   private void crash(final IndexWriter writer) throws IOException {
     final MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
-    ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler();
+    ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) writer.getMergeScheduler();
     dir.crash();
     cms.sync();
     dir.clearCrash();

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestDeletionPolicy.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestDeletionPolicy.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestDeletionPolicy.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestDeletionPolicy.java Wed Mar 10 19:45:31 2010
@@ -23,9 +23,9 @@ import java.util.List;
 import java.util.Set;
 import java.util.Collection;
 
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;
@@ -40,8 +40,8 @@ import org.apache.lucene.util.LuceneTest
   against it, and add documents to it.
 */
 
-public class TestDeletionPolicy extends LuceneTestCase {
-  
+public class TestDeletionPolicy extends LuceneTestCase
+{
   private void verifyCommitOrder(List<? extends IndexCommit> commits) throws IOException {
     final IndexCommit firstCommit =  commits.get(0);
     long last = SegmentInfos.generationFromSegmentsFileName(firstCommit.getSegmentsFileName());
@@ -201,10 +201,8 @@ public class TestDeletionPolicy extends 
 
     Directory dir = new RAMDirectory();
     ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS);
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy));
-    LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-    lmp.setUseCompoundFile(useCompoundFile);
-    lmp.setUseCompoundDocStore(useCompoundFile);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer.setUseCompoundFile(useCompoundFile);
     writer.close();
 
     long lastDeleteTime = 0;
@@ -212,11 +210,8 @@ public class TestDeletionPolicy extends 
       // Record last time when writer performed deletes of
       // past commits
       lastDeleteTime = System.currentTimeMillis();
-      writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-          .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy));
-      lmp = (LogMergePolicy) writer.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setUseCompoundDocStore(useCompoundFile);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setUseCompoundFile(useCompoundFile);
       for(int j=0;j<17;j++) {
         addDoc(writer);
       }
@@ -276,22 +271,17 @@ public class TestDeletionPolicy extends 
       Directory dir = new RAMDirectory();
       policy.dir = dir;
 
-      IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-          TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy)
-          .setMaxBufferedDocs(10).setMergeScheduler(new SerialMergeScheduler()));
-      LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setUseCompoundDocStore(useCompoundFile);
+      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setMaxBufferedDocs(10);
+      writer.setUseCompoundFile(useCompoundFile);
+      writer.setMergeScheduler(new SerialMergeScheduler());
       for(int i=0;i<107;i++) {
         addDoc(writer);
       }
       writer.close();
 
-      writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-          .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy));
-      lmp = (LogMergePolicy) writer.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setUseCompoundDocStore(useCompoundFile);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setUseCompoundFile(useCompoundFile);
       writer.optimize();
       writer.close();
 
@@ -328,9 +318,7 @@ public class TestDeletionPolicy extends 
           // Open & close a writer and assert that it
           // actually removed something:
           int preCount = dir.listAll().length;
-          writer = new IndexWriter(dir, new IndexWriterConfig(
-              TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
-              .setIndexDeletionPolicy(policy));
+          writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.LIMITED);
           writer.close();
           int postCount = dir.listAll().length;
           assertTrue(postCount < preCount);
@@ -352,9 +340,8 @@ public class TestDeletionPolicy extends 
     Directory dir = new MockRAMDirectory();
     policy.dir = dir;
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy)
-        .setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
+    writer.setMaxBufferedDocs(2);
     for(int i=0;i<10;i++) {
       addDoc(writer);
       if ((1+i)%2 == 0)
@@ -372,7 +359,7 @@ public class TestDeletionPolicy extends 
     assertTrue(lastCommit != null);
 
     // Now add 1 doc and optimize
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy));
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
     addDoc(writer);
     assertEquals(11, writer.numDocs());
     writer.optimize();
@@ -381,8 +368,7 @@ public class TestDeletionPolicy extends 
     assertEquals(7, IndexReader.listCommits(dir).size());
 
     // Now open writer on the commit just before optimize:
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-        .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
     assertEquals(10, writer.numDocs());
 
     // Should undo our rollback:
@@ -394,8 +380,7 @@ public class TestDeletionPolicy extends 
     assertEquals(11, r.numDocs());
     r.close();
 
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-        .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
     assertEquals(10, writer.numDocs());
     // Commits the rollback:
     writer.close();
@@ -411,7 +396,7 @@ public class TestDeletionPolicy extends 
     r.close();
 
     // Reoptimize
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy));
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
     writer.optimize();
     writer.close();
 
@@ -422,7 +407,7 @@ public class TestDeletionPolicy extends 
 
     // Now open writer on the commit just before optimize,
     // but this time keeping only the last commit:
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setIndexCommit(lastCommit));
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), new KeepOnlyLastCommitDeletionPolicy(), IndexWriter.MaxFieldLength.LIMITED, lastCommit);
     assertEquals(10, writer.numDocs());
     
     // Reader still sees optimized index, because writer
@@ -458,22 +443,16 @@ public class TestDeletionPolicy extends 
 
       Directory dir = new RAMDirectory();
 
-      IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-          TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
-          .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10));
-      LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setUseCompoundDocStore(useCompoundFile);
+      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setMaxBufferedDocs(10);
+      writer.setUseCompoundFile(useCompoundFile);
       for(int i=0;i<107;i++) {
         addDoc(writer);
       }
       writer.close();
 
-      writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-          .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy));
-      lmp = (LogMergePolicy) writer.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setUseCompoundDocStore(useCompoundFile);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setUseCompoundFile(useCompoundFile);
       writer.optimize();
       writer.close();
 
@@ -507,12 +486,9 @@ public class TestDeletionPolicy extends 
       KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
 
       for(int j=0;j<N+1;j++) {
-        IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-            TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
-            .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10));
-        LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-        lmp.setUseCompoundFile(useCompoundFile);
-        lmp.setUseCompoundDocStore(useCompoundFile);
+        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+        writer.setMaxBufferedDocs(10);
+        writer.setUseCompoundFile(useCompoundFile);
         for(int i=0;i<17;i++) {
           addDoc(writer);
         }
@@ -565,23 +541,15 @@ public class TestDeletionPolicy extends 
       KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
 
       Directory dir = new RAMDirectory();
-      IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-          TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
-          .setIndexDeletionPolicy(policy));
-      LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setUseCompoundDocStore(useCompoundFile);
+      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setUseCompoundFile(useCompoundFile);
       writer.close();
       Term searchTerm = new Term("content", "aaa");        
       Query query = new TermQuery(searchTerm);
 
       for(int i=0;i<N+1;i++) {
-        writer = new IndexWriter(dir, new IndexWriterConfig(
-            TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
-            .setIndexDeletionPolicy(policy));
-        lmp = (LogMergePolicy) writer.getMergePolicy();
-        lmp.setUseCompoundFile(useCompoundFile);
-        lmp.setUseCompoundDocStore(useCompoundFile);
+        writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+        writer.setUseCompoundFile(useCompoundFile);
         for(int j=0;j<17;j++) {
           addDoc(writer);
         }
@@ -597,11 +565,8 @@ public class TestDeletionPolicy extends 
         reader.close();
         searcher.close();
       }
-      writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-          .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy));
-      lmp = (LogMergePolicy) writer.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setUseCompoundDocStore(useCompoundFile);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setUseCompoundFile(useCompoundFile);
       writer.optimize();
       // this is a commit
       writer.close();
@@ -671,24 +636,18 @@ public class TestDeletionPolicy extends 
       KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
 
       Directory dir = new RAMDirectory();
-      IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-          TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
-          .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10));
-      LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setUseCompoundDocStore(useCompoundFile);
+      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setMaxBufferedDocs(10);
+      writer.setUseCompoundFile(useCompoundFile);
       writer.close();
       Term searchTerm = new Term("content", "aaa");        
       Query query = new TermQuery(searchTerm);
 
       for(int i=0;i<N+1;i++) {
 
-        writer = new IndexWriter(dir, new IndexWriterConfig(
-            TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
-            .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10));
-        lmp = (LogMergePolicy) writer.getMergePolicy();
-        lmp.setUseCompoundFile(useCompoundFile);
-        lmp.setUseCompoundDocStore(useCompoundFile);
+        writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+        writer.setMaxBufferedDocs(10);
+        writer.setUseCompoundFile(useCompoundFile);
         for(int j=0;j<17;j++) {
           addDoc(writer);
         }
@@ -704,9 +663,7 @@ public class TestDeletionPolicy extends 
         reader.close();
         searcher.close();
 
-        writer = new IndexWriter(dir, new IndexWriterConfig(
-            TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
-            .setIndexDeletionPolicy(policy));
+        writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
         // This will not commit: there are no changes
         // pending because we opened for "create":
         writer.close();
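
For context, the TestDeletionPolicy hunks above repeatedly swap the IndexWriterConfig builder for the older constructor that takes the analyzer, a create flag, and the deletion policy directly, with per-writer tuning done through setters, and they reopen a writer on an earlier IndexCommit to roll the index back. Below is a minimal standalone sketch of that shape; it is illustrative only (the class name, KeepOnlyLastCommitDeletionPolicy as the stand-in policy, and Version.LUCENE_CURRENT in place of the tests' TEST_VERSION_CURRENT are assumptions of this sketch, not code from the patch):

    import java.io.IOException;
    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.index.IndexCommit;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class DeletionPolicySketch {
      public static void main(String[] args) throws IOException {
        Directory dir = new RAMDirectory();
        // Stand-in for the tests' custom policies (assumption, not from the patch).
        KeepOnlyLastCommitDeletionPolicy policy = new KeepOnlyLastCommitDeletionPolicy();

        // Create a new index: the deletion policy rides along in the constructor,
        // and buffering / compound-file settings are plain setters on the writer.
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
                                             true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
        writer.setMaxBufferedDocs(10);
        writer.setUseCompoundFile(false);
        writer.close();

        // Expert pattern from the hunks above: reopen the writer on an earlier
        // IndexCommit to roll the index back to that point.
        IndexCommit commit = IndexReader.listCommits(dir).iterator().next();
        writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
                                 policy, IndexWriter.MaxFieldLength.LIMITED, commit);
        writer.close();  // closing commits the rollback
      }
    }

Closing the writer that was opened on the earlier commit is what makes the rollback durable, which is what the numDocs() assertions in the hunks above are checking.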

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestDirectoryReader.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestDirectoryReader.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestDirectoryReader.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestDirectoryReader.java Wed Mar 10 19:45:31 2010
@@ -22,7 +22,6 @@ import org.apache.lucene.util.LuceneTest
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 
@@ -188,10 +187,7 @@ public class TestDirectoryReader extends
   }
 
   private void addDoc(RAMDirectory ramDir1, String s, boolean create) throws IOException {
-    IndexWriter iw = new IndexWriter(ramDir1, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(
-        new StandardAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(
-        create ? OpenMode.CREATE : OpenMode.APPEND));
+    IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(TEST_VERSION_CURRENT), create, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("body", s, Field.Store.YES, Field.Index.ANALYZED));
     iw.addDocument(doc);

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestDoc.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestDoc.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestDoc.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestDoc.java Wed Mar 10 19:45:31 2010
@@ -29,9 +29,9 @@ import java.util.List;
 import junit.framework.TestSuite;
 import junit.textui.TestRunner;
 
+import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.LuceneTestCase;
@@ -109,7 +109,7 @@ public class TestDoc extends LuceneTestC
       PrintWriter out = new PrintWriter(sw, true);
 
       Directory directory = FSDirectory.open(indexDir);
-      IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
+      IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
       SegmentInfo si1 = indexDoc(writer, "test.txt");
       printSegment(out, si1);
@@ -137,8 +137,7 @@ public class TestDoc extends LuceneTestC
       out = new PrintWriter(sw, true);
 
       directory = FSDirectory.open(indexDir);
-      writer = new IndexWriter(directory, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
+      writer = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
       si1 = indexDoc(writer, "test.txt");
       printSegment(out, si1);

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestDocumentWriter.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestDocumentWriter.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestDocumentWriter.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestDocumentWriter.java Wed Mar 10 19:45:31 2010
@@ -24,6 +24,7 @@ import org.apache.lucene.analysis.Analyz
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.analysis.WhitespaceTokenizer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
@@ -60,7 +61,8 @@ public class TestDocumentWriter extends 
   public void testAddDocument() throws Exception {
     Document testDoc = new Document();
     DocHelper.setupDoc(testDoc);
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    Analyzer analyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
+    IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(testDoc);
     writer.commit();
     SegmentInfo info = writer.newestSegment();
@@ -117,7 +119,7 @@ public class TestDocumentWriter extends 
       }
     };
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 
     Document doc = new Document();
     doc.add(new Field("repeated", "repeated one", Field.Store.YES, Field.Index.ANALYZED));
@@ -180,7 +182,7 @@ public class TestDocumentWriter extends 
       }
     };
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 
     Document doc = new Document();
     doc.add(new Field("f1", "a 5 a a", Field.Store.YES, Field.Index.ANALYZED));
@@ -205,9 +207,7 @@ public class TestDocumentWriter extends 
 
 
   public void testPreAnalyzedField() throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new SimpleAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     
     doc.add(new Field("preanalyzed", new TokenStream() {
@@ -266,9 +266,7 @@ public class TestDocumentWriter extends 
     doc.add(new Field("f2", "v1", Store.YES, Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
     doc.add(new Field("f2", "v2", Store.YES, Index.NOT_ANALYZED, TermVector.NO));
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(doc);
     writer.close();
 
@@ -301,9 +299,7 @@ public class TestDocumentWriter extends 
     doc.add(f);
     doc.add(new Field("f2", "v2", Store.YES, Index.NO));
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(doc);
     writer.optimize(); // be sure to have a single segment
     writer.close();

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestFieldsReader.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestFieldsReader.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestFieldsReader.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestFieldsReader.java Wed Mar 10 19:45:31 2010
@@ -17,31 +17,22 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
-import java.io.File;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldSelector;
-import org.apache.lucene.document.FieldSelectorResult;
-import org.apache.lucene.document.Fieldable;
-import org.apache.lucene.document.LoadFirstFieldSelector;
-import org.apache.lucene.document.SetBasedFieldSelector;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
-import org.apache.lucene.store.AlreadyClosedException;
-import org.apache.lucene.store.BufferedIndexInput;
-import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
+import org.apache.lucene.document.*;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.BufferedIndexInput;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.util._TestUtil;
 
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+
 public class TestFieldsReader extends LuceneTestCase {
   private RAMDirectory dir = new RAMDirectory();
   private Document testDoc = new Document();
@@ -59,9 +50,8 @@ public class TestFieldsReader extends Lu
     fieldInfos = new FieldInfos();
     DocHelper.setupDoc(testDoc);
     fieldInfos.add(testDoc);
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    writer.setUseCompoundFile(false);
     writer.addDocument(testDoc);
     writer.close();
   }
@@ -217,8 +207,8 @@ public class TestFieldsReader extends Lu
     FSDirectory tmpDir = FSDirectory.open(file);
     assertTrue(tmpDir != null);
 
-    IndexWriter writer = new IndexWriter(tmpDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
+    IndexWriter writer = new IndexWriter(tmpDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    writer.setUseCompoundFile(false);
     writer.addDocument(testDoc);
     writer.close();
 
@@ -397,8 +387,7 @@ public class TestFieldsReader extends Lu
 
     try {
       Directory dir = new FaultyFSDirectory(indexDir);
-      IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-          TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
+      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
       for(int i=0;i<2;i++)
         writer.addDocument(testDoc);
       writer.optimize();

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestFilterIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestFilterIndexReader.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestFilterIndexReader.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestFilterIndexReader.java Wed Mar 10 19:45:31 2010
@@ -24,6 +24,7 @@ import junit.textui.TestRunner;
 
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.MockRAMDirectory;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 
@@ -96,7 +97,8 @@ public class TestFilterIndexReader exten
    */
   public void testFilterIndexReader() throws Exception {
     RAMDirectory directory = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
+                                         IndexWriter.MaxFieldLength.LIMITED);
 
     Document d1 = new Document();
     d1.add(new Field("default","one two", Field.Store.YES, Field.Index.ANALYZED));

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexFileDeleter.java?rev=921532&r1=921531&r2=921532&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexFileDeleter.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexFileDeleter.java Wed Mar 10 19:45:31 2010
@@ -18,14 +18,13 @@ package org.apache.lucene.index;
  */
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
-
 import java.io.*;
 import java.util.*;
 
@@ -34,19 +33,19 @@ import java.util.*;
   against it, and add documents to it.
 */
 
-public class TestIndexFileDeleter extends LuceneTestCase {
-  
+public class TestIndexFileDeleter extends LuceneTestCase
+{
   public void testDeleteLeftoverFiles() throws IOException {
 
     Directory dir = new RAMDirectory();
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    writer.setMaxBufferedDocs(10);
     int i;
     for(i=0;i<35;i++) {
       addDoc(writer, i);
     }
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
+    writer.setUseCompoundFile(false);
     for(;i<45;i++) {
       addDoc(writer, i);
     }
@@ -145,7 +144,7 @@ public class TestIndexFileDeleter extend
 
     // Open & close a writer: it should delete the above 4
     // files and nothing more:
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
     writer.close();
 
     String[] files2 = dir.listAll();
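
The same pattern applies when appending: the boolean create flag stands in for OpenMode, and leftover-file cleanup happens simply by opening and closing a writer on the existing index, as the TestIndexFileDeleter hunks above rely on. A rough sketch of that shape, again with illustrative names and a made-up field rather than code from the patch:

    import java.io.IOException;
    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class CreateThenAppendSketch {
      public static void main(String[] args) throws IOException {
        Directory dir = new RAMDirectory();

        // create=true: build a fresh index, tuning through setters as in the patch.
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
                                             true, IndexWriter.MaxFieldLength.LIMITED);
        writer.setMaxBufferedDocs(10);
        writer.setUseCompoundFile(false);
        Document doc = new Document();
        doc.add(new Field("content", "aaa bbb", Field.Store.NO, Field.Index.ANALYZED));
        writer.addDocument(doc);
        writer.close();

        // create=false: reopen the existing index in append mode (the stand-in
        // for OpenMode.APPEND); opening and closing is enough for the writer to
        // delete any leftover files it does not recognize, as the test asserts.
        writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
                                 false, IndexWriter.MaxFieldLength.LIMITED);
        writer.close();
      }
    }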


