lucene-java-commits mailing list archives

From: busc...@apache.org
Subject: svn commit: r825288 - in /lucene/java/trunk: ./ contrib/instantiated/src/java/org/apache/lucene/store/instantiated/ contrib/instantiated/src/test/org/apache/lucene/store/instantiated/ contrib/misc/src/java/org/apache/lucene/index/ contrib/misc/src/java...
Date: Wed, 14 Oct 2009 21:21:07 GMT
Author: buschmi
Date: Wed Oct 14 21:21:05 2009
New Revision: 825288

URL: http://svn.apache.org/viewvc?rev=825288&view=rev
Log:
LUCENE-1979: remove more deprecations in the index package.

Modified:
    lucene/java/trunk/CHANGES.txt
    lucene/java/trunk/contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedTermEnum.java
    lucene/java/trunk/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java
    lucene/java/trunk/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java
    lucene/java/trunk/contrib/misc/src/java/org/apache/lucene/index/TermVectorAccessor.java
    lucene/java/trunk/contrib/misc/src/java/org/apache/lucene/misc/IndexMergeTool.java
    lucene/java/trunk/src/java/org/apache/lucene/index/CheckIndex.java
    lucene/java/trunk/src/java/org/apache/lucene/index/DocInverterPerField.java
    lucene/java/trunk/src/java/org/apache/lucene/index/DocumentsWriter.java
    lucene/java/trunk/src/java/org/apache/lucene/index/DocumentsWriterThreadState.java
    lucene/java/trunk/src/java/org/apache/lucene/index/FilterIndexReader.java
    lucene/java/trunk/src/java/org/apache/lucene/index/IndexReader.java
    lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java
    lucene/java/trunk/src/java/org/apache/lucene/index/MergePolicy.java
    lucene/java/trunk/src/java/org/apache/lucene/index/MultiReader.java
    lucene/java/trunk/src/java/org/apache/lucene/index/ParallelReader.java
    lucene/java/trunk/src/java/org/apache/lucene/index/SegmentReader.java
    lucene/java/trunk/src/java/org/apache/lucene/index/TermEnum.java
    lucene/java/trunk/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java
    lucene/java/trunk/src/test/org/apache/lucene/index/TestCrash.java
    lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexReader.java
    lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java
    lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriter.java
    lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java
    lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterMerging.java
    lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java
    lucene/java/trunk/src/test/org/apache/lucene/index/TestNorms.java
    lucene/java/trunk/src/test/org/apache/lucene/index/TestThreadedOptimize.java
    lucene/java/trunk/src/test/org/apache/lucene/search/TestPositionIncrement.java
    lucene/java/trunk/src/test/org/apache/lucene/store/TestRAMDirectory.java

Modified: lucene/java/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/java/trunk/CHANGES.txt?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/CHANGES.txt (original)
+++ lucene/java/trunk/CHANGES.txt Wed Oct 14 21:21:05 2009
@@ -73,7 +73,7 @@
 * LUCENE-944: Remove deprecated methods in BooleanQuery. (Michael Busch)
 
 * LUCENE-1979: Remove remaining deprecations from indexer package.
-  (Michael Busch)
+  (Uwe Schindler, Michael Busch)
 
 Bug fixes
 

Modified: lucene/java/trunk/contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedTermEnum.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedTermEnum.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedTermEnum.java (original)
+++ lucene/java/trunk/contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedTermEnum.java Wed Oct 14 21:21:05 2009
@@ -77,36 +77,6 @@
   public void close() {
   }
 
-
-  public boolean skipTo(Term target) throws IOException {
-
-    // this method is not known to be used by anything
-    // in lucene for many years now, so there is
-    // very to gain by optimizing this method more,
-
-    if (reader.getIndex().getOrderedTerms().length == 0) {
-      return false;
-    }
-
-    InstantiatedTerm term = reader.getIndex().findTerm(target);
-    if (term != null) {
-      this.term = term;
-      nextTermIndex = term.getTermIndex() + 1;
-      return true;
-    } else {
-      int pos = Arrays.binarySearch(reader.getIndex().getOrderedTerms(), target, InstantiatedTerm.termComparator);
-      if (pos < 0) {
-        pos = -1 - pos;
-      }
-
-      if (pos > reader.getIndex().getOrderedTerms().length) {
-        return false;
-      }
-      this.term = reader.getIndex().getOrderedTerms()[pos];
-      nextTermIndex = pos + 1;
-      return true;
-    }
-  }
 }
 
 

Modified: lucene/java/trunk/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java (original)
+++ lucene/java/trunk/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java Wed Oct 14 21:21:05 2009
@@ -105,7 +105,6 @@
 
     assertNull(terms.term());
     assertFalse(terms.next());
-    assertFalse(terms.skipTo(new Term("foo", "bar")));
 
   }
 

Modified: lucene/java/trunk/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java (original)
+++ lucene/java/trunk/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java Wed Oct 14 21:21:05 2009
@@ -391,42 +391,6 @@
       }
     }
 
-    // compare term enumeration seeking
-
-    aprioriTermEnum = aprioriReader.terms();
-
-    TermEnum aprioriTermEnumSeeker = aprioriReader.terms();
-    TermEnum testTermEnumSeeker = testReader.terms();
-
-    while (aprioriTermEnum.next()) {
-      if (aprioriTermEnumSeeker.skipTo(aprioriTermEnum.term())) {
-        assertTrue(testTermEnumSeeker.skipTo(aprioriTermEnum.term()));
-        assertEquals(aprioriTermEnumSeeker.term(), testTermEnumSeeker.term());
-      } else {
-        assertFalse(testTermEnumSeeker.skipTo(aprioriTermEnum.term()));
-      }
-    }
-
-    aprioriTermEnum.close();
-    aprioriTermEnumSeeker.close();
-    testTermEnumSeeker.close();
-
-    // skip to non existing terms
-
-    aprioriTermEnumSeeker = aprioriReader.terms();
-    testTermEnumSeeker = testReader.terms();
-
-    aprioriTermEnum = aprioriReader.terms();
-    aprioriTermEnum.next();
-    Term nonExistingTerm = new Term(aprioriTermEnum.term().field(), "bzzzzoo993djdj380sdf");
-    aprioriTermEnum.close();
-
-    assertEquals(aprioriTermEnumSeeker.skipTo(nonExistingTerm), testTermEnumSeeker.skipTo(nonExistingTerm));
-    assertEquals(aprioriTermEnumSeeker.term(), testTermEnumSeeker.term());
-
-    aprioriTermEnumSeeker.close();
-    testTermEnumSeeker.close();
-
     // compare term vectors and position vectors
 
     for (int documentNumber = 0; documentNumber < aprioriReader.numDocs(); documentNumber++) {

Modified: lucene/java/trunk/contrib/misc/src/java/org/apache/lucene/index/TermVectorAccessor.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/misc/src/java/org/apache/lucene/index/TermVectorAccessor.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/contrib/misc/src/java/org/apache/lucene/index/TermVectorAccessor.java (original)
+++ lucene/java/trunk/contrib/misc/src/java/org/apache/lucene/index/TermVectorAccessor.java Wed Oct 14 21:21:05 2009
@@ -99,17 +99,16 @@
       positions.clear();
     }
 
-    TermEnum termEnum = indexReader.terms();
-    if (termEnum.skipTo(new Term(field, ""))) {
-
+    TermEnum termEnum = indexReader.terms(new Term(field, ""));
+    if (termEnum.term() != null) {
       while (termEnum.term().field() == field) {
         TermPositions termPositions = indexReader.termPositions(termEnum.term());
         if (termPositions.skipTo(documentNumber)) {
-
+  
           frequencies.add(Integer.valueOf(termPositions.freq()));
           tokens.add(termEnum.term().text());
-
-
+  
+  
           if (!mapper.isIgnoringPositions()) {
             int[] positions = new int[termPositions.freq()];
             for (int i = 0; i < positions.length; i++) {
@@ -125,13 +124,11 @@
           break;
         }
       }
-
       mapper.setDocumentNumber(documentNumber);
       mapper.setExpectations(field, tokens.size(), false, !mapper.isIgnoringPositions());
       for (int i = 0; i < tokens.size(); i++) {
         mapper.map((String) tokens.get(i), ((Integer) frequencies.get(i)).intValue(), (TermVectorOffsetInfo[]) null, (int[]) positions.get(i));
       }
-
     }
     termEnum.close();
 

Modified: lucene/java/trunk/contrib/misc/src/java/org/apache/lucene/misc/IndexMergeTool.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/contrib/misc/src/java/org/apache/lucene/misc/IndexMergeTool.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/contrib/misc/src/java/org/apache/lucene/misc/IndexMergeTool.java (original)
+++ lucene/java/trunk/contrib/misc/src/java/org/apache/lucene/misc/IndexMergeTool.java Wed Oct 14 21:21:05 2009
@@ -45,7 +45,7 @@
     }
 
     System.out.println("Merging...");
-    writer.addIndexes(indexes);
+    writer.addIndexesNoOptimize(indexes);
 
     System.out.println("Optimizing...");
     writer.optimize();
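
Migration sketch (not part of this commit): code that still calls the removed addIndexes(Directory[]) can follow the same pattern IndexMergeTool uses above, addIndexesNoOptimize() followed by an explicit optimize(). The class name, analyzer and already-opened Directory arguments below are illustrative assumptions.

    import java.io.IOException;
    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.store.Directory;

    public class MergeSketch {
      /** Merges the source indexes into 'target', matching the old
       *  addIndexes(Directory[]) behavior of merging and then fully optimizing. */
      public static void merge(Directory target, Directory... sources) throws IOException {
        IndexWriter writer = new IndexWriter(target, new StandardAnalyzer(), true,
                                             IndexWriter.MaxFieldLength.UNLIMITED);
        writer.addIndexesNoOptimize(sources); // replaces the removed addIndexes(Directory[])
        writer.optimize();                    // the removed method optimized implicitly
        writer.close();
      }
    }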

Modified: lucene/java/trunk/src/java/org/apache/lucene/index/CheckIndex.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/CheckIndex.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/CheckIndex.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/CheckIndex.java Wed Oct 14 21:21:05 2009
@@ -48,11 +48,6 @@
  */
 public class CheckIndex {
 
-  /** Default PrintStream for all CheckIndex instances.
-   *  @deprecated Use {@link #setInfoStream} per instance,
-   *  instead. */
-  public static PrintStream out = null;
-
   private PrintStream infoStream;
   private Directory dir;
 
@@ -257,7 +252,7 @@
   /** Create a new CheckIndex on the directory. */
   public CheckIndex(Directory dir) {
     this.dir = dir;
-    infoStream = out;
+    infoStream = null;
   }
 
   /** Set infoStream where messages should go.  If null, no

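Migration sketch (not part of this commit): with the static CheckIndex.out field removed, callers that want progress output now set the stream per instance via setInfoStream(). The main() wrapper and directory handling below are illustrative assumptions.

    import java.io.File;
    import org.apache.lucene.index.CheckIndex;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;

    public class CheckIndexSketch {
      public static void main(String[] args) throws Exception {
        Directory dir = FSDirectory.open(new File(args[0])); // index path from the caller
        CheckIndex checker = new CheckIndex(dir);
        checker.setInfoStream(System.out);   // replaces assigning the removed static 'out'
        CheckIndex.Status status = checker.checkIndex();
        System.out.println(status.clean ? "index is clean" : "index has problems");
      }
    }
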
Modified: lucene/java/trunk/src/java/org/apache/lucene/index/DocInverterPerField.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/DocInverterPerField.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/DocInverterPerField.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/DocInverterPerField.java Wed Oct 14 21:21:05 2009
@@ -129,9 +129,6 @@
 
           final int startLength = fieldState.length;
           
-          // deprecated
-          final boolean allowMinus1Position = docState.allowMinus1Position;
-
           try {
             int offsetEnd = fieldState.offset-1;
             
@@ -157,7 +154,7 @@
               
               final int posIncr = posIncrAttribute.getPositionIncrement();
               fieldState.position += posIncr;
-              if (allowMinus1Position || fieldState.position > 0) {
+              if (fieldState.position > 0) {
                 fieldState.position--;
               }
 

Modified: lucene/java/trunk/src/java/org/apache/lucene/index/DocumentsWriter.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/DocumentsWriter.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/DocumentsWriter.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/DocumentsWriter.java Wed Oct 14 21:21:05 2009
@@ -150,9 +150,6 @@
     Document doc;
     String maxTermPrefix;
 
-    // deprecated
-    boolean allowMinus1Position;
-
     // Only called by asserts
     public boolean testPoint(String name) {
       return docWriter.writer.testPoint(name);
@@ -299,11 +296,6 @@
       threadStates[i].docState.similarity = similarity;
   }
 
-  synchronized void setAllowMinus1Position() {
-    for(int i=0;i<threadStates.length;i++)
-      threadStates[i].docState.allowMinus1Position = true;
-  }
-
   /** Set how much RAM we can use before flushing. */
   synchronized void setRAMBufferSizeMB(double mb) {
     if (mb == IndexWriter.DISABLE_AUTO_FLUSH) {

Modified: lucene/java/trunk/src/java/org/apache/lucene/index/DocumentsWriterThreadState.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/DocumentsWriterThreadState.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/DocumentsWriterThreadState.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/DocumentsWriterThreadState.java Wed Oct 14 21:21:05 2009
@@ -40,7 +40,6 @@
     docState.infoStream = docWriter.infoStream;
     docState.similarity = docWriter.similarity;
     docState.docWriter = docWriter;
-    docState.allowMinus1Position = docWriter.writer.getAllowMinus1Position();
     consumer = docWriter.consumer.addThread(this);
   }
 

Modified: lucene/java/trunk/src/java/org/apache/lucene/index/FilterIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/FilterIndexReader.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/FilterIndexReader.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/FilterIndexReader.java Wed Oct 14 21:21:05 2009
@@ -211,9 +211,6 @@
 
   protected void doDelete(int n) throws  CorruptIndexException, IOException { in.deleteDocument(n); }
   
-  /** @deprecated */
-  protected void doCommit() throws IOException { doCommit(null); }
-  
   protected void doCommit(Map commitUserData) throws IOException { in.commit(commitUserData); }
   
   protected void doClose() throws IOException { in.close(); }

Modified: lucene/java/trunk/src/java/org/apache/lucene/index/IndexReader.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/IndexReader.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/IndexReader.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/IndexReader.java Wed Oct 14 21:21:05 2009
@@ -487,37 +487,6 @@
     throw new UnsupportedOperationException("This reader does not support this method.");
   }
 
-  /**<p>For IndexReader implementations that use
-   * TermInfosReader to read terms, this sets the
-   * indexDivisor to subsample the number of indexed terms
-   * loaded into memory.  This has the same effect as {@link
-   * IndexWriter#setTermIndexInterval} except that setting
-   * must be done at indexing time while this setting can be
-   * set per reader.  When set to N, then one in every
-   * N*termIndexInterval terms in the index is loaded into
-   * memory.  By setting this to a value > 1 you can reduce
-   * memory usage, at the expense of higher latency when
-   * loading a TermInfo.  The default value is 1.</p>
-   *
-   * <b>NOTE:</b> you must call this before the term
-   * index is loaded.  If the index is already loaded, 
-   * an IllegalStateException is thrown.
-   * @throws IllegalStateException if the term index has already been loaded into memory
-   * @deprecated Please use {@link IndexReader#open(Directory, IndexDeletionPolicy, boolean, int)} to specify the required TermInfos index divisor instead.
-   */
-  public void setTermInfosIndexDivisor(int indexDivisor) throws IllegalStateException {
-    throw new UnsupportedOperationException("Please pass termInfosIndexDivisor up-front when opening IndexReader");
-  }
-
-  /** <p>For IndexReader implementations that use
-   *  TermInfosReader to read terms, this returns the
-   *  current indexDivisor as specified when the reader was
-   *  opened.
-   */
-  public int getTermInfosIndexDivisor() {
-    throw new UnsupportedOperationException("This reader does not support this method.");
-  }
-
   /**
    * Check whether this IndexReader is still using the
    * current (i.e., most recently committed) version of the

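Migration sketch (not part of this commit): the deprecation note removed above already points at the replacement, passing the TermInfos index divisor when the reader is opened. The helper below is illustrative; the null deletion policy (meaning the default policy), the read-only flag and the divisor value 2 are example arguments.

    import java.io.IOException;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.store.Directory;

    public class ReaderOpenSketch {
      /** Opens 'dir' read-only, loading only one of every 2 * termIndexInterval
       *  indexed terms into memory (the divisor can no longer be set afterwards). */
      public static IndexReader openWithDivisor(Directory dir) throws IOException {
        return IndexReader.open(dir, null, true, 2);
      }
    }
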
Modified: lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/IndexWriter.java Wed Oct 14 21:21:05 2009
@@ -181,12 +181,6 @@
   public static final String WRITE_LOCK_NAME = "write.lock";
 
   /**
-   * @deprecated
-   * @see LogMergePolicy#DEFAULT_MERGE_FACTOR
-   */
-  public final static int DEFAULT_MERGE_FACTOR = LogMergePolicy.DEFAULT_MERGE_FACTOR;
-
-  /**
    * Value to denote a flush trigger is disabled
    */
   public final static int DISABLE_AUTO_FLUSH = -1;
@@ -210,12 +204,6 @@
   public final static int DEFAULT_MAX_BUFFERED_DELETE_TERMS = DISABLE_AUTO_FLUSH;
 
   /**
-   * @deprecated
-   * @see LogDocMergePolicy#DEFAULT_MAX_MERGE_DOCS
-   */
-  public final static int DEFAULT_MAX_MERGE_DOCS = LogDocMergePolicy.DEFAULT_MAX_MERGE_DOCS;
-
-  /**
    * Default value is 10,000. Change using {@link #setMaxFieldLength(int)}.
    */
   public final static int DEFAULT_MAX_FIELD_LENGTH = 10000;
@@ -1790,16 +1778,6 @@
     return analyzer;
   }
 
-  /** Returns the number of documents currently in this
-   *  index, not counting deletions.
-   * @deprecated Please use {@link #maxDoc()} (same as this
-   * method) or {@link #numDocs()} (also takes deletions
-   * into account), instead. */
-  public synchronized int docCount() {
-    ensureOpen();
-    return maxDoc();
-  }
-
   /** Returns total number of docs in this index, including
    *  docs not yet flushed (still in the RAM buffer),
    *  not counting deletions.
@@ -1994,14 +1972,14 @@
    * @throws CorruptIndexException if the index is corrupt
    * @throws IOException if there is a low-level IO error
    */
-  public void deleteDocuments(Term[] terms) throws CorruptIndexException, IOException {
+  public void deleteDocuments(Term... terms) throws CorruptIndexException, IOException {
     ensureOpen();
     try {
       boolean doFlush = docWriter.bufferDeleteTerms(terms);
       if (doFlush)
         flush(true, false, false);
     } catch (OutOfMemoryError oom) {
-      handleOOM(oom, "deleteDocuments(Term[])");
+      handleOOM(oom, "deleteDocuments(Term..)");
     }
   }
 
@@ -2036,7 +2014,7 @@
    * @throws CorruptIndexException if the index is corrupt
    * @throws IOException if there is a low-level IO error
    */
-  public void deleteDocuments(Query[] queries) throws CorruptIndexException, IOException {
+  public void deleteDocuments(Query... queries) throws CorruptIndexException, IOException {
     ensureOpen();
     boolean doFlush = docWriter.bufferDeleteQueries(queries);
     if (doFlush)
@@ -2693,13 +2671,6 @@
   }
 
   /**
-   * @deprecated Please use {@link #rollback} instead.
-   */
-  public void abort() throws IOException {
-    rollback();
-  }
-
-  /**
    * Close the <code>IndexWriter</code> without committing
    * any changes that have occurred since the last commit
    * (or since it was opened, if commit hasn't been called).
@@ -2946,84 +2917,12 @@
     releaseRead();
   }
 
-  /** Merges all segments from an array of indexes into this index.
-   *
-   * <p><b>NOTE</b>: if this method hits an OutOfMemoryError
-   * you should immediately close the writer.  See <a
-   * href="#OOME">above</a> for details.</p>
-   *
-   * @deprecated Use {@link #addIndexesNoOptimize} instead,
-   * then separately call {@link #optimize} afterwards if
-   * you need to.
-   *
-   * @throws CorruptIndexException if the index is corrupt
-   * @throws IOException if there is a low-level IO error
-   */
-  public void addIndexes(Directory[] dirs)
-    throws CorruptIndexException, IOException {
-
-    ensureOpen();
-    
-    noDupDirs(dirs);
-
-    // Do not allow add docs or deletes while we are running:
-    docWriter.pauseAllThreads();
-
-    try {
-
-      if (infoStream != null)
-        message("flush at addIndexes");
-      flush(true, false, true);
-
-      boolean success = false;
-
-      startTransaction(false);
-
-      try {
-
-        int docCount = 0;
-        synchronized(this) {
-          ensureOpen();
-          for (int i = 0; i < dirs.length; i++) {
-            SegmentInfos sis = new SegmentInfos();	  // read infos from dir
-            sis.read(dirs[i]);
-            for (int j = 0; j < sis.size(); j++) {
-              final SegmentInfo info = sis.info(j);
-              docCount += info.docCount;
-              assert !segmentInfos.contains(info);
-              segmentInfos.add(info);	  // add each info
-            }
-          }
-        }
-
-        // Notify DocumentsWriter that the flushed count just increased
-        docWriter.updateFlushedDocCount(docCount);
-
-        optimize();
-
-        success = true;
-      } finally {
-        if (success) {
-          commitTransaction();
-        } else {
-          rollbackTransaction();
-        }
-      }
-    } catch (OutOfMemoryError oom) {
-      handleOOM(oom, "addIndexes(Directory[])");
-    } finally {
-      if (docWriter != null) {
-        docWriter.resumeAllThreads();
-      }
-    }
-  }
-
   private synchronized void resetMergeExceptions() {
     mergeExceptions = new ArrayList();
     mergeGen++;
   }
 
-  private void noDupDirs(Directory[] dirs) {
+  private void noDupDirs(Directory... dirs) {
     HashSet dups = new HashSet();
     for(int i=0;i<dirs.length;i++) {
       if (dups.contains(dirs[i]))
@@ -3084,7 +2983,7 @@
    * @throws CorruptIndexException if the index is corrupt
    * @throws IOException if there is a low-level IO error
    */
-  public void addIndexesNoOptimize(Directory[] dirs)
+  public void addIndexesNoOptimize(Directory... dirs)
       throws CorruptIndexException, IOException {
 
     ensureOpen();
@@ -3247,7 +3146,7 @@
    * add or delete documents (with another thread) will be
    * paused until this method completes.
    *
-   * <p>See {@link #addIndexesNoOptimize(Directory[])} for
+   * <p>See {@link #addIndexesNoOptimize} for
    * details on transactional semantics, temporary free
    * space required in the Directory, and non-CFS segments
    * on an Exception.</p>
@@ -3259,7 +3158,7 @@
    * @throws CorruptIndexException if the index is corrupt
    * @throws IOException if there is a low-level IO error
    */
-  public void addIndexes(IndexReader[] readers)
+  public void addIndexes(IndexReader... readers)
     throws CorruptIndexException, IOException {
 
     ensureOpen();
@@ -3326,7 +3225,7 @@
             segmentInfos.clear();                      // pop old infos & add new
             info = new SegmentInfo(mergedName, docCount, directory, false, true,
                                    -1, null, false, merger.hasProx());
-            setDiagnostics(info, "addIndexes(IndexReader[])");
+            setDiagnostics(info, "addIndexes(IndexReader...)");
             segmentInfos.add(info);
           }
 
@@ -3395,7 +3294,7 @@
         }
       }
     } catch (OutOfMemoryError oom) {
-      handleOOM(oom, "addIndexes(IndexReader[])");
+      handleOOM(oom, "addIndexes(IndexReader...)");
     } finally {
       if (docWriter != null) {
         docWriter.resumeAllThreads();
@@ -4930,22 +4829,6 @@
     throw oom;
   }
 
-  // deprecated
-  private boolean allowMinus1Position;
-
-  /** Deprecated: emulates IndexWriter's buggy behavior when
-   *  first token(s) have positionIncrement==0 (ie, prior to
-   *  fixing LUCENE-1542) */
-  public void setAllowMinus1Position() {
-    allowMinus1Position = true;
-    docWriter.setAllowMinus1Position();
-  }
-
-  // deprecated
-  boolean getAllowMinus1Position() {
-    return allowMinus1Position;
-  }
-
   // Used only by assert for testing.  Current points:
   //   startDoFlush
   //   startCommitMerge

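Migration sketch (not part of this commit): the IndexWriter changes above mostly affect call sites rather than behavior: deleteDocuments() now takes varargs, docCount() is gone in favor of maxDoc()/numDocs(), and abort() is gone in favor of rollback(). The helper name and the field/term values below are illustrative assumptions.

    import java.io.IOException;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.Term;

    public class WriterCallSiteSketch {
      /** Exercises the surviving replacements for the removed IndexWriter methods. */
      public static void update(IndexWriter writer) throws IOException {
        // deleteDocuments(Term...) replaces deleteDocuments(Term[]); existing
        // array-based call sites still compile against the varargs signature.
        writer.deleteDocuments(new Term("id", "1"), new Term("id", "2"));

        int includingDeletes = writer.maxDoc();  // replaces the removed docCount()
        int excludingDeletes = writer.numDocs(); // same, with deletions subtracted
        System.out.println(includingDeletes + " / " + excludingDeletes);

        writer.rollback(); // replaces the removed abort(); closes the writer and
                           // discards all changes since the last commit
      }
    }
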
Modified: lucene/java/trunk/src/java/org/apache/lucene/index/MergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/MergePolicy.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/MergePolicy.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/MergePolicy.java Wed Oct 14 21:21:05 2009
@@ -172,20 +172,12 @@
    *  executing a merge. */
   public static class MergeException extends RuntimeException {
     private Directory dir;
-    /** @deprecated
-     *  Use {@link #MergePolicy.MergeException(String,Directory)} instead */
-    public MergeException(String message) {
-      super(message);
-    }
+
     public MergeException(String message, Directory dir) {
       super(message);
       this.dir = dir;
     }
-    /** @deprecated
-     *  Use {@link #MergePolicy.MergeException(Throwable,Directory)} instead */
-    public MergeException(Throwable exc) {
-      super(exc);
-    }
+
     public MergeException(Throwable exc, Directory dir) {
       super(exc);
       this.dir = dir;

Modified: lucene/java/trunk/src/java/org/apache/lucene/index/MultiReader.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/MultiReader.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/MultiReader.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/MultiReader.java Wed Oct 14 21:21:05 2009
@@ -49,7 +49,7 @@
   * @param subReaders set of (sub)readers
   * @throws IOException
   */
-  public MultiReader(IndexReader[] subReaders) {
+  public MultiReader(IndexReader... subReaders) {
     initialize(subReaders, true);
   }
 
@@ -352,11 +352,6 @@
     return new MultiTermPositions(this, subReaders, starts);
   }
 
-  /** @deprecated */
-  protected void doCommit() throws IOException {
-    doCommit(null);
-  }
-  
   protected void doCommit(Map commitUserData) throws IOException {
     for (int i = 0; i < subReaders.length; i++)
       subReaders[i].commit(commitUserData);

Modified: lucene/java/trunk/src/java/org/apache/lucene/index/ParallelReader.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/ParallelReader.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/ParallelReader.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/ParallelReader.java Wed Oct 14 21:21:05 2009
@@ -435,11 +435,6 @@
     return (IndexReader[]) readers.toArray(new IndexReader[readers.size()]);
   }
 
-  /** @deprecated */
-  protected void doCommit() throws IOException {
-    doCommit(null);
-  }
-
   protected void doCommit(Map commitUserData) throws IOException {
     for (int i = 0; i < readers.size(); i++)
       ((IndexReader)readers.get(i)).commit(commitUserData);

Modified: lucene/java/trunk/src/java/org/apache/lucene/index/SegmentReader.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/SegmentReader.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/SegmentReader.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/SegmentReader.java Wed Oct 14 21:21:05 2009
@@ -792,11 +792,6 @@
     return clone;
   }
 
-  /** @deprecated  */
-  protected void doCommit() throws IOException {
-    doCommit(null);
-  }
-
   protected void doCommit(Map commitUserData) throws IOException {
     if (hasChanges) {
       if (deletedDocsDirty) {               // re-write deleted

Modified: lucene/java/trunk/src/java/org/apache/lucene/index/TermEnum.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/java/org/apache/lucene/index/TermEnum.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/java/org/apache/lucene/index/TermEnum.java (original)
+++ lucene/java/trunk/src/java/org/apache/lucene/index/TermEnum.java Wed Oct 14 21:21:05 2009
@@ -36,29 +36,4 @@
 
   /** Closes the enumeration to further activity, freeing resources. */
   public abstract void close() throws IOException;
-  
-  /** Skips terms to the first beyond the current whose value is
-   * greater or equal to <i>target</i>. <p>Returns true iff there is such
-   * an entry.  <p>Behaves as if written: <pre>
-   *   public boolean skipTo(Term target) {
-   *     do {
-   *       if (!next())
-   * 	     return false;
-   *     } while (target > term());
-   *     return true;
-   *   }
-   * </pre>
-   * Some implementations *could* be considerably more efficient than a linear scan.
-   * Check the implementation to be sure.
-   * @deprecated This method is not performant and will be removed in Lucene 3.0.
-   * Use {@link IndexReader#terms(Term)} to create a new TermEnum positioned at a
-   * given term.
-   */
-  public boolean skipTo(Term target) throws IOException {
-     do {
-        if (!next())
-  	        return false;
-     } while (target.compareTo(term()) > 0);
-     return true;
-  }
 }
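
Migration sketch (not part of this commit): the removed skipTo() javadoc and the TermVectorAccessor change above show the replacement, asking IndexReader.terms(Term) for an enumeration already positioned at the first term on or after the target. The helper below is illustrative; the reader and field name are assumed to come from the caller.

    import java.io.IOException;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.index.TermEnum;

    public class TermWalkSketch {
      /** Prints every term of 'field', using terms(Term) instead of skipTo(). */
      public static void dumpField(IndexReader reader, String field) throws IOException {
        TermEnum termEnum = reader.terms(new Term(field, "")); // already positioned; no initial next()
        try {
          while (termEnum.term() != null && termEnum.term().field().equals(field)) {
            System.out.println(termEnum.term().text());
            if (!termEnum.next()) break;
          }
        } finally {
          termEnum.close();
        }
      }
    }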

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java Wed Oct 14 21:21:05 2009
@@ -43,27 +43,27 @@
     writer = newWriter(dir, true);
     // add 100 documents
     addDocs(writer, 100);
-    assertEquals(100, writer.docCount());
+    assertEquals(100, writer.maxDoc());
     writer.close();
 
     writer = newWriter(aux, true);
     writer.setUseCompoundFile(false); // use one without a compound file
     // add 40 documents in separate files
     addDocs(writer, 40);
-    assertEquals(40, writer.docCount());
+    assertEquals(40, writer.maxDoc());
     writer.close();
 
     writer = newWriter(aux2, true);
     // add 40 documents in compound files
     addDocs2(writer, 50);
-    assertEquals(50, writer.docCount());
+    assertEquals(50, writer.maxDoc());
     writer.close();
 
     // test doc count before segments are merged
     writer = newWriter(dir, false);
-    assertEquals(100, writer.docCount());
+    assertEquals(100, writer.maxDoc());
     writer.addIndexesNoOptimize(new Directory[] { aux, aux2 });
-    assertEquals(190, writer.docCount());
+    assertEquals(190, writer.maxDoc());
     writer.close();
 
     // make sure the old index is correct
@@ -77,14 +77,14 @@
     writer = newWriter(aux3, true);
     // add 40 documents
     addDocs(writer, 40);
-    assertEquals(40, writer.docCount());
+    assertEquals(40, writer.maxDoc());
     writer.close();
 
     // test doc count before segments are merged/index is optimized
     writer = newWriter(dir, false);
-    assertEquals(190, writer.docCount());
+    assertEquals(190, writer.maxDoc());
     writer.addIndexesNoOptimize(new Directory[] { aux3 });
-    assertEquals(230, writer.docCount());
+    assertEquals(230, writer.maxDoc());
     writer.close();
 
     // make sure the new index is correct
@@ -113,9 +113,9 @@
     writer.close();
 
     writer = newWriter(dir, false);
-    assertEquals(230, writer.docCount());
+    assertEquals(230, writer.maxDoc());
     writer.addIndexesNoOptimize(new Directory[] { aux4 });
-    assertEquals(231, writer.docCount());
+    assertEquals(231, writer.maxDoc());
     writer.close();
 
     verifyNumDocs(dir, 231);
@@ -250,7 +250,7 @@
     writer = newWriter(dir, true);
     // add 100 documents
     addDocs(writer, 100);
-    assertEquals(100, writer.docCount());
+    assertEquals(100, writer.maxDoc());
     writer.close();
 
     writer = newWriter(aux, true);
@@ -272,7 +272,7 @@
       assertTrue(false);
     }
     catch (IllegalArgumentException e) {
-      assertEquals(100, writer.docCount());
+      assertEquals(100, writer.maxDoc());
     }
     writer.close();
 
@@ -297,7 +297,7 @@
     addDocs(writer, 10);
 
     writer.addIndexesNoOptimize(new Directory[] { aux });
-    assertEquals(1040, writer.docCount());
+    assertEquals(1040, writer.maxDoc());
     assertEquals(2, writer.getSegmentCount());
     assertEquals(1000, writer.getDocCount(0));
     writer.close();
@@ -321,7 +321,7 @@
     addDocs(writer, 2);
 
     writer.addIndexesNoOptimize(new Directory[] { aux });
-    assertEquals(1032, writer.docCount());
+    assertEquals(1032, writer.maxDoc());
     assertEquals(2, writer.getSegmentCount());
     assertEquals(1000, writer.getDocCount(0));
     writer.close();
@@ -344,7 +344,7 @@
     writer.setMergeFactor(4);
 
     writer.addIndexesNoOptimize(new Directory[] { aux, new RAMDirectory(aux) });
-    assertEquals(1060, writer.docCount());
+    assertEquals(1060, writer.maxDoc());
     assertEquals(1000, writer.getDocCount(0));
     writer.close();
 
@@ -373,7 +373,7 @@
     writer.setMergeFactor(4);
 
     writer.addIndexesNoOptimize(new Directory[] { aux, new RAMDirectory(aux) });
-    assertEquals(1020, writer.docCount());
+    assertEquals(1020, writer.maxDoc());
     assertEquals(1000, writer.getDocCount(0));
     writer.close();
 
@@ -395,7 +395,7 @@
     writer.setMaxBufferedDocs(100);
     writer.setMergeFactor(10);
     writer.addIndexesNoOptimize(new Directory[] { aux });
-    assertEquals(30, writer.docCount());
+    assertEquals(30, writer.maxDoc());
     assertEquals(3, writer.getSegmentCount());
     writer.close();
 
@@ -418,7 +418,7 @@
     writer.setMergeFactor(4);
 
     writer.addIndexesNoOptimize(new Directory[] { aux, aux2 });
-    assertEquals(1025, writer.docCount());
+    assertEquals(1025, writer.maxDoc());
     assertEquals(1000, writer.getDocCount(0));
     writer.close();
 
@@ -476,7 +476,7 @@
     writer.setMaxBufferedDocs(1000);
     // add 1000 documents in 1 segment
     addDocs(writer, 1000);
-    assertEquals(1000, writer.docCount());
+    assertEquals(1000, writer.maxDoc());
     assertEquals(1, writer.getSegmentCount());
     writer.close();
 
@@ -493,7 +493,7 @@
       writer.setMaxBufferedDocs(100);
       writer.setMergeFactor(10);
     }
-    assertEquals(30, writer.docCount());
+    assertEquals(30, writer.maxDoc());
     assertEquals(3, writer.getSegmentCount());
     writer.close();
   }

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestCrash.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestCrash.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestCrash.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestCrash.java Wed Oct 14 21:21:05 2009
@@ -82,7 +82,7 @@
     MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
     writer.close();
     writer = initIndex(dir);
-    assertEquals(314, writer.docCount());
+    assertEquals(314, writer.maxDoc());
     crash(writer);
 
     /*

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexReader.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexReader.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexReader.java Wed Oct 14 21:21:05 2009
@@ -1762,7 +1762,7 @@
     }
     assertFalse(((SegmentReader) r.getSequentialSubReaders()[0]).termsIndexLoaded());
 
-    assertEquals(-1, r.getTermInfosIndexDivisor());
+    assertEquals(-1, ((SegmentReader) r.getSequentialSubReaders()[0]).getTermInfosIndexDivisor());
     writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
     writer.addDocument(doc);
     writer.close();

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java Wed Oct 14 21:21:05 2009
@@ -119,7 +119,8 @@
         IndexWriter.MaxFieldLength.LIMITED);
     iw.setMaxBufferedDocs(5);
     iw.setMergeFactor(3);
-    iw.addIndexes(new Directory[] { dir1, dir2 });
+    iw.addIndexesNoOptimize(new Directory[] { dir1, dir2 });
+    iw.optimize();
     iw.close();
 
     norms1.addAll(norms);

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriter.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriter.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriter.java Wed Oct 14 21:21:05 2009
@@ -96,7 +96,7 @@
         for (i = 0; i < 100; i++) {
             addDoc(writer);
         }
-        assertEquals(100, writer.docCount());
+        assertEquals(100, writer.maxDoc());
         writer.close();
 
         // delete 40 documents
@@ -108,7 +108,7 @@
 
         // test doc count before segments are merged/index is optimized
         writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
-        assertEquals(100, writer.docCount());
+        assertEquals(100, writer.maxDoc());
         writer.close();
 
         reader = IndexReader.open(dir, true);
@@ -156,7 +156,7 @@
 
     /*
       Test: make sure when we run out of disk space or hit
-      random IOExceptions in any of the addIndexes(*) calls
+      random IOExceptions in any of the addIndexesNoOptimize(*) calls
       that 1) index is not corrupt (searcher can open/search
       it) and 2) transactional semantics are followed:
       either all or none of the incoming documents were in
@@ -171,7 +171,7 @@
       boolean debug = false;
 
       // Build up a bunch of dirs that have indexes which we
-      // will then merge together by calling addIndexes(*):
+      // will then merge together by calling addIndexesNoOptimize(*):
       Directory[] dirs = new Directory[NUM_DIR];
       long inputDiskUsage = 0;
       for(int i=0;i<NUM_DIR;i++) {
@@ -188,7 +188,7 @@
       }
 
       // Now, build a starting index that has START_COUNT docs.  We
-      // will then try to addIndexes into a copy of this:
+      // will then try to addIndexesNoOptimize into a copy of this:
       RAMDirectory startDir = new RAMDirectory();
       IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
       for(int j=0;j<START_COUNT;j++) {
@@ -209,12 +209,12 @@
 
       // Iterate with larger and larger amounts of free
       // disk space.  With little free disk space,
-      // addIndexes will certainly run out of space &
+      // addIndexesNoOptimize will certainly run out of space &
       // fail.  Verify that when this happens, index is
       // not corrupt and index in fact has added no
       // documents.  Then, we increase disk space by 2000
       // bytes each iteration.  At some point there is
-      // enough free disk space and addIndexes should
+      // enough free disk space and addIndexesNoOptimize should
       // succeed and index should show all documents were
       // added.
 
@@ -242,7 +242,7 @@
 
         String methodName;
         if (0 == method) {
-          methodName = "addIndexes(Directory[])";
+          methodName = "addIndexes(Directory[]) + optimize()";
         } else if (1 == method) {
           methodName = "addIndexes(IndexReader[])";
         } else {
@@ -306,7 +306,8 @@
             try {
 
               if (0 == method) {
-                writer.addIndexes(dirs);
+                writer.addIndexesNoOptimize(dirs);
+                writer.optimize();
               } else if (1 == method) {
                 IndexReader readers[] = new IndexReader[dirs.length];
                 for(int i=0;i<dirs.length;i++) {
@@ -488,7 +489,7 @@
 
           if (hitError) {
             if (doAbort) {
-              writer.abort();
+              writer.rollback();
             } else {
               try {
                 writer.close();
@@ -739,7 +740,7 @@
 
           // now open index for create:
           writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-          assertEquals("should be zero documents", writer.docCount(), 0);
+          assertEquals("should be zero documents", writer.maxDoc(), 0);
           addDoc(writer);
           writer.close();
 
@@ -999,9 +1000,9 @@
       searcher.close();
 
       // Now, close the writer:
-      writer.abort();
+      writer.rollback();
 
-      assertNoUnreferencedFiles(dir, "unreferenced files remain after abort()");
+      assertNoUnreferencedFiles(dir, "unreferenced files remain after rollback()");
 
       searcher = new IndexSearcher(dir, false);
       hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
@@ -1083,7 +1084,7 @@
 
     /*
      * Verify that calling optimize when writer is open for
-     * "commit on close" works correctly both for abort()
+     * "commit on close" works correctly both for rollback()
      * and close().
      */
     public void testCommitOnCloseOptimize() throws IOException {
@@ -1107,7 +1108,7 @@
       reader.close();
 
       // Abort the writer:
-      writer.abort();
+      writer.rollback();
       assertNoUnreferencedFiles(dir, "aborted writer after optimize");
 
       // Open a reader after aborting writer:
@@ -2473,25 +2474,25 @@
   }
 
   // LUCENE-1130: make sure initial IOException, and then 2nd
-  // IOException during abort(), is OK:
+  // IOException during rollback(), is OK:
   public void testIOExceptionDuringAbort() throws IOException {
     _testSingleThreadFailure(new FailOnlyOnAbortOrFlush(false));
   }
 
   // LUCENE-1130: make sure initial IOException, and then 2nd
-  // IOException during abort(), is OK:
+  // IOException during rollback(), is OK:
   public void testIOExceptionDuringAbortOnlyOnce() throws IOException {
     _testSingleThreadFailure(new FailOnlyOnAbortOrFlush(true));
   }
 
   // LUCENE-1130: make sure initial IOException, and then 2nd
-  // IOException during abort(), with multiple threads, is OK:
+  // IOException during rollback(), with multiple threads, is OK:
   public void testIOExceptionDuringAbortWithThreads() throws Exception {
     _testMultipleThreadsFailure(new FailOnlyOnAbortOrFlush(false));
   }
 
   // LUCENE-1130: make sure initial IOException, and then 2nd
-  // IOException during abort(), with multiple threads, is OK:
+  // IOException during rollback(), with multiple threads, is OK:
   public void testIOExceptionDuringAbortWithThreadsOnlyOnce() throws Exception {
     _testMultipleThreadsFailure(new FailOnlyOnAbortOrFlush(true));
   }
@@ -2771,7 +2772,8 @@
       writer.setMergePolicy(new LogDocMergePolicy(writer));
 
       Directory[] indexDirs = {new MockRAMDirectory(dir)};
-      writer.addIndexes(indexDirs);
+      writer.addIndexesNoOptimize(indexDirs);
+      writer.optimize();
       writer.close();
     }
     dir.close();
@@ -3224,7 +3226,7 @@
       // Expected
     }
     assertTrue(failure.fail1 && failure.fail2);
-    w.abort();
+    w.rollback();
     dir.close();
   }
   
@@ -3713,7 +3715,8 @@
     void doBody(int j, Directory[] dirs) throws Throwable {
       switch(j%4) {
       case 0:
-        writer2.addIndexes(dirs);
+        writer2.addIndexesNoOptimize(dirs);
+        writer2.optimize();
         break;
       case 1:
         writer2.addIndexesNoOptimize(dirs);
@@ -3799,7 +3802,8 @@
     void doBody(int j, Directory[] dirs) throws Throwable {
       switch(j%5) {
       case 0:
-        writer2.addIndexes(dirs);
+        writer2.addIndexesNoOptimize(dirs);
+        writer2.optimize();
         break;
       case 1:
         writer2.addIndexesNoOptimize(dirs);

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java Wed Oct 14 21:21:05 2009
@@ -198,7 +198,7 @@
     ((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();
     writer.commit();
     checkInvariants(writer);
-    assertEquals(10, writer.docCount());
+    assertEquals(10, writer.maxDoc());
 
     writer.close();
   }

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterMerging.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterMerging.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterMerging.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterMerging.java Wed Oct 14 21:21:05 2009
@@ -29,7 +29,7 @@
 {
 
   /**
-   * Tests that index merging (specifically addIndexes()) doesn't
+   * Tests that index merging (specifically addIndexesNoOptimize()) doesn't
    * change the index order of documents.
    */
   public void testLucene() throws IOException
@@ -59,7 +59,8 @@
     IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMergeFactor(2);
 
-    writer.addIndexes(new Directory[]{indexA, indexB});
+    writer.addIndexesNoOptimize(new Directory[]{indexA, indexB});
+    writer.optimize();
     writer.close();
 
     fail = verifyIndex(merged, 0);

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestIndexWriterReader.java Wed Oct 14 21:21:05 2009
@@ -428,7 +428,8 @@
     void doBody(int j, Directory[] dirs) throws Throwable {
       switch (j % 4) {
         case 0:
-          mainWriter.addIndexes(dirs);
+          mainWriter.addIndexesNoOptimize(dirs);
+          mainWriter.optimize();
           break;
         case 1:
           mainWriter.addIndexesNoOptimize(dirs);

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestNorms.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestNorms.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestNorms.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestNorms.java Wed Oct 14 21:21:05 2009
@@ -36,7 +36,7 @@
 
 /**
  * Test that norms info is preserved during index life - including
- * separate norms, addDocument, addIndexes, optimize.
+ * separate norms, addDocument, addIndexesNoOptimize, optimize.
  */
 public class TestNorms extends LuceneTestCase {
 
@@ -112,7 +112,8 @@
     IndexWriter iw = new IndexWriter(dir3,anlzr,false, IndexWriter.MaxFieldLength.LIMITED);
     iw.setMaxBufferedDocs(5);
     iw.setMergeFactor(3);
-    iw.addIndexes(new Directory[]{dir1,dir2});
+    iw.addIndexesNoOptimize(new Directory[]{dir1,dir2});
+    iw.optimize();
     iw.close();
     
     norms1.addAll(norms);

Modified: lucene/java/trunk/src/test/org/apache/lucene/index/TestThreadedOptimize.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/index/TestThreadedOptimize.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/index/TestThreadedOptimize.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/index/TestThreadedOptimize.java Wed Oct 14 21:21:05 2009
@@ -114,7 +114,7 @@
 
       // System.out.println("TEST: now index=" + writer.segString());
 
-      assertEquals(expectedDocCount, writer.docCount());
+      assertEquals(expectedDocCount, writer.maxDoc());
 
       writer.close();
       writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);

Modified: lucene/java/trunk/src/test/org/apache/lucene/search/TestPositionIncrement.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/search/TestPositionIncrement.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/search/TestPositionIncrement.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/search/TestPositionIncrement.java Wed Oct 14 21:21:05 2009
@@ -231,96 +231,83 @@
   }
   
   public void testPayloadsPos0() throws Exception {
-    for(int x=0;x<2;x++) {
-      Directory dir = new MockRAMDirectory();
-      IndexWriter writer = new IndexWriter(dir,
-                                           new TestPayloadAnalyzer(), true,
-                                           IndexWriter.MaxFieldLength.LIMITED);
-      if (x == 1) {
-        writer.setAllowMinus1Position();
-      }
-      Document doc = new Document();
-      doc.add(new Field("content",
-                        new StringReader("a a b c d e a f g h i j a b k k")));
-      writer.addDocument(doc);
-
-      IndexReader r = writer.getReader();
-
-      TermPositions tp = r.termPositions(new Term("content", "a"));
-      int count = 0;
-      assertTrue(tp.next());
-      // "a" occurs 4 times
-      assertEquals(4, tp.freq());
-      int expected;
-      if (x == 1) {
-        expected = Integer.MAX_VALUE;
-      } else {
-        expected = 0;
-      }
-      assertEquals(expected, tp.nextPosition());
-      if (x == 1) {
-        continue;
-      }
-      assertEquals(1, tp.nextPosition());
-      assertEquals(3, tp.nextPosition());
-      assertEquals(6, tp.nextPosition());
-
-      // only one doc has "a"
-      assertFalse(tp.next());
-
-      IndexSearcher is = new IndexSearcher(r);
-    
-      SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a"));
-      SpanTermQuery stq2 = new SpanTermQuery(new Term("content", "k"));
-      SpanQuery[] sqs = { stq1, stq2 };
-      SpanNearQuery snq = new SpanNearQuery(sqs, 30, false);
-
-      count = 0;
-      boolean sawZero = false;
-      //System.out.println("\ngetPayloadSpans test");
-      Spans pspans = snq.getSpans(is.getIndexReader());
-      while (pspans.next()) {
-        //System.out.println(pspans.doc() + " - " + pspans.start() + " - "+ pspans.end());
-        Collection payloads = pspans.getPayload();
-        sawZero |= pspans.start() == 0;
-        for (Iterator it = payloads.iterator(); it.hasNext();) {
-          count++;
-          it.next();
-          //System.out.println(new String((byte[]) it.next()));
-        }
-      }
-      assertEquals(5, count);
-      assertTrue(sawZero);
+    Directory dir = new MockRAMDirectory();
+    IndexWriter writer = new IndexWriter(dir,
+                                         new TestPayloadAnalyzer(), true,
+                                         IndexWriter.MaxFieldLength.LIMITED);
+    Document doc = new Document();
+    doc.add(new Field("content",
+                      new StringReader("a a b c d e a f g h i j a b k k")));
+    writer.addDocument(doc);
+
+    IndexReader r = writer.getReader();
+
+    TermPositions tp = r.termPositions(new Term("content", "a"));
+    int count = 0;
+    assertTrue(tp.next());
+    // "a" occurs 4 times
+    assertEquals(4, tp.freq());
+    int expected = 0;
+    assertEquals(expected, tp.nextPosition());
+    assertEquals(1, tp.nextPosition());
+    assertEquals(3, tp.nextPosition());
+    assertEquals(6, tp.nextPosition());
+
+    // only one doc has "a"
+    assertFalse(tp.next());
 
-      //System.out.println("\ngetSpans test");
-      Spans spans = snq.getSpans(is.getIndexReader());
-      count = 0;
-      sawZero = false;
-      while (spans.next()) {
+    IndexSearcher is = new IndexSearcher(r);
+  
+    SpanTermQuery stq1 = new SpanTermQuery(new Term("content", "a"));
+    SpanTermQuery stq2 = new SpanTermQuery(new Term("content", "k"));
+    SpanQuery[] sqs = { stq1, stq2 };
+    SpanNearQuery snq = new SpanNearQuery(sqs, 30, false);
+
+    count = 0;
+    boolean sawZero = false;
+    //System.out.println("\ngetPayloadSpans test");
+    Spans pspans = snq.getSpans(is.getIndexReader());
+    while (pspans.next()) {
+      //System.out.println(pspans.doc() + " - " + pspans.start() + " - "+ pspans.end());
+      Collection payloads = pspans.getPayload();
+      sawZero |= pspans.start() == 0;
+      for (Iterator it = payloads.iterator(); it.hasNext();) {
         count++;
-        sawZero |= spans.start() == 0;
-        //System.out.println(spans.doc() + " - " + spans.start() + " - " + spans.end());
+        it.next();
+        //System.out.println(new String((byte[]) it.next()));
       }
-      assertEquals(4, count);
-      assertTrue(sawZero);
-		
-      //System.out.println("\nPayloadSpanUtil test");
-
-      sawZero = false;
-      PayloadSpanUtil psu = new PayloadSpanUtil(is.getIndexReader());
-      Collection pls = psu.getPayloadsForQuery(snq);
-      count = pls.size();
-      for (Iterator it = pls.iterator(); it.hasNext();) {
-        String s = new String((byte[]) it.next());
-        //System.out.println(s);
-        sawZero |= s.equals("pos: 0");
-      }
-      assertEquals(5, count);
-      assertTrue(sawZero);
-      writer.close();
-      is.getIndexReader().close();
-      dir.close();
     }
+    assertEquals(5, count);
+    assertTrue(sawZero);
+
+    //System.out.println("\ngetSpans test");
+    Spans spans = snq.getSpans(is.getIndexReader());
+    count = 0;
+    sawZero = false;
+    while (spans.next()) {
+      count++;
+      sawZero |= spans.start() == 0;
+      //System.out.println(spans.doc() + " - " + spans.start() + " - " + spans.end());
+    }
+    assertEquals(4, count);
+    assertTrue(sawZero);
+  
+    //System.out.println("\nPayloadSpanUtil test");
+
+    sawZero = false;
+    PayloadSpanUtil psu = new PayloadSpanUtil(is.getIndexReader());
+    Collection pls = psu.getPayloadsForQuery(snq);
+    count = pls.size();
+    for (Iterator it = pls.iterator(); it.hasNext();) {
+      String s = new String((byte[]) it.next());
+      //System.out.println(s);
+      sawZero |= s.equals("pos: 0");
+    }
+    assertEquals(5, count);
+    assertTrue(sawZero);
+    writer.close();
+    is.getIndexReader().close();
+    dir.close();
   }
 }
 

Modified: lucene/java/trunk/src/test/org/apache/lucene/store/TestRAMDirectory.java
URL: http://svn.apache.org/viewvc/lucene/java/trunk/src/test/org/apache/lucene/store/TestRAMDirectory.java?rev=825288&r1=825287&r2=825288&view=diff
==============================================================================
--- lucene/java/trunk/src/test/org/apache/lucene/store/TestRAMDirectory.java (original)
+++ lucene/java/trunk/src/test/org/apache/lucene/store/TestRAMDirectory.java Wed Oct 14 21:21:05 2009
@@ -62,7 +62,7 @@
       doc.add(new Field("content", English.intToEnglish(i).trim(), Field.Store.YES, Field.Index.NOT_ANALYZED));
       writer.addDocument(doc);
     }
-    assertEquals(docsToAdd, writer.docCount());
+    assertEquals(docsToAdd, writer.maxDoc());
     writer.close();
     dir.close();
   }


