lucene-java-commits mailing list archives

From mikemcc...@apache.org
Subject svn commit: r824918 [3/11] - in /lucene/java/branches/flex_1458: contrib/analyzers/common/src/java/org/apache/lucene/analysis/query/ contrib/benchmark/src/java/org/apache/lucene/benchmark/quality/utils/ contrib/benchmark/src/test/org/apache/lucene/benc...
Date Tue, 13 Oct 2009 20:44:59 GMT
Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/IndexReader.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/IndexReader.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/IndexReader.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/IndexReader.java Tue Oct 13 20:44:51 2009
@@ -20,7 +20,10 @@
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.FieldSelector;
 import org.apache.lucene.search.Similarity;
+import org.apache.lucene.index.codecs.Codecs;
+import org.apache.lucene.index.codecs.Codec;
 import org.apache.lucene.store.*;
+import org.apache.lucene.util.Bits;
 
 import java.io.File;
 import java.io.FileOutputStream;
@@ -180,7 +183,7 @@
       throw new AlreadyClosedException("this IndexReader is closed");
     }
   }
-  
+
   /** Returns an IndexReader reading the index in the given
    *  Directory.  You should pass readOnly=true, since it
    *  gives much better concurrent performance, unless you
@@ -192,7 +195,7 @@
    * @throws IOException if there is a low-level IO error
    */
   public static IndexReader open(final Directory directory, boolean readOnly) throws CorruptIndexException, IOException {
-    return open(directory, null, null, readOnly, DEFAULT_TERMS_INDEX_DIVISOR);
+    return open(directory, null, null, readOnly, DEFAULT_TERMS_INDEX_DIVISOR, null);
   }
 
   /** Expert: returns an IndexReader reading the index in the given
@@ -206,7 +209,23 @@
    * @throws IOException if there is a low-level IO error
    */
   public static IndexReader open(final IndexCommit commit, boolean readOnly) throws CorruptIndexException, IOException {
-    return open(commit.getDirectory(), null, commit, readOnly, DEFAULT_TERMS_INDEX_DIVISOR);
+    return open(commit.getDirectory(), null, commit, readOnly, DEFAULT_TERMS_INDEX_DIVISOR, null);
+  }
+
+  /** Expert: returns a read/write IndexReader reading the index in the given
+   *  Directory, with a custom {@link IndexDeletionPolicy}.
+   * @param directory the index directory
+   * @param deletionPolicy a custom deletion policy (only used
+   *  if you use this reader to perform deletes or to set
+   *  norms); see {@link IndexWriter} for details.
+   * @deprecated Use {@link #open(Directory, IndexDeletionPolicy, boolean)} instead.
+   *             This method will be removed in the 3.0 release.
+   * 
+   * @throws CorruptIndexException if the index is corrupt
+   * @throws IOException if there is a low-level IO error
+   */
+  public static IndexReader open(final Directory directory, IndexDeletionPolicy deletionPolicy) throws CorruptIndexException, IOException {
+    return open(directory, deletionPolicy, null, false, DEFAULT_TERMS_INDEX_DIVISOR, null);
   }
 
   /** Expert: returns an IndexReader reading the index in
@@ -224,7 +243,7 @@
    * @throws IOException if there is a low-level IO error
    */
   public static IndexReader open(final Directory directory, IndexDeletionPolicy deletionPolicy, boolean readOnly) throws CorruptIndexException, IOException {
-    return open(directory, deletionPolicy, null, readOnly, DEFAULT_TERMS_INDEX_DIVISOR);
+    return open(directory, deletionPolicy, null, readOnly, DEFAULT_TERMS_INDEX_DIVISOR, null);
   }
 
   /** Expert: returns an IndexReader reading the index in
@@ -252,7 +271,26 @@
    * @throws IOException if there is a low-level IO error
    */
   public static IndexReader open(final Directory directory, IndexDeletionPolicy deletionPolicy, boolean readOnly, int termInfosIndexDivisor) throws CorruptIndexException, IOException {
-    return open(directory, deletionPolicy, null, readOnly, termInfosIndexDivisor);
+    return open(directory, deletionPolicy, null, readOnly, termInfosIndexDivisor, null);
+  }
+
+  /** Expert: returns a read/write IndexReader reading the index in the given
+   * Directory, using a specific commit and with a custom
+   * {@link IndexDeletionPolicy}.
+   * @param commit the specific {@link IndexCommit} to open;
+   * see {@link IndexReader#listCommits} to list all commits
+   * in a directory
+   * @param deletionPolicy a custom deletion policy (only used
+   *  if you use this reader to perform deletes or to set
+   *  norms); see {@link IndexWriter} for details.
+   * @deprecated Use {@link #open(IndexCommit, IndexDeletionPolicy, boolean)} instead.
+   *             This method will be removed in the 3.0 release.
+   * 
+   * @throws CorruptIndexException if the index is corrupt
+   * @throws IOException if there is a low-level IO error
+   */
+  public static IndexReader open(final IndexCommit commit, IndexDeletionPolicy deletionPolicy) throws CorruptIndexException, IOException {
+    return open(commit.getDirectory(), deletionPolicy, commit, false, DEFAULT_TERMS_INDEX_DIVISOR, null);
   }
 
   /** Expert: returns an IndexReader reading the index in
@@ -272,7 +310,7 @@
    * @throws IOException if there is a low-level IO error
    */
   public static IndexReader open(final IndexCommit commit, IndexDeletionPolicy deletionPolicy, boolean readOnly) throws CorruptIndexException, IOException {
-    return open(commit.getDirectory(), deletionPolicy, commit, readOnly, DEFAULT_TERMS_INDEX_DIVISOR);
+    return open(commit.getDirectory(), deletionPolicy, commit, readOnly, DEFAULT_TERMS_INDEX_DIVISOR, null);
   }
 
   /** Expert: returns an IndexReader reading the index in
@@ -302,11 +340,15 @@
    * @throws IOException if there is a low-level IO error
    */
   public static IndexReader open(final IndexCommit commit, IndexDeletionPolicy deletionPolicy, boolean readOnly, int termInfosIndexDivisor) throws CorruptIndexException, IOException {
-    return open(commit.getDirectory(), deletionPolicy, commit, readOnly, termInfosIndexDivisor);
+    return open(commit.getDirectory(), deletionPolicy, commit, readOnly, termInfosIndexDivisor, null);
   }
 
-  private static IndexReader open(final Directory directory, final IndexDeletionPolicy deletionPolicy, final IndexCommit commit, final boolean readOnly, int termInfosIndexDivisor) throws CorruptIndexException, IOException {
-    return DirectoryReader.open(directory, deletionPolicy, commit, readOnly, termInfosIndexDivisor);
+  private static IndexReader open(final Directory directory, final IndexDeletionPolicy deletionPolicy, final IndexCommit commit, final boolean readOnly, int termInfosIndexDivisor,
+      Codecs codecs) throws CorruptIndexException, IOException {
+    if (codecs == null) {
+      codecs = Codecs.getDefault();
+    }
+    return DirectoryReader.open(directory, deletionPolicy, commit, readOnly, termInfosIndexDivisor, codecs);
   }
 
   /**
@@ -423,6 +465,38 @@
   }
 
   /**
+   * Returns the time the index in the named directory was last modified.
+   * Do not use this to check whether the reader is still up-to-date, use
+   * {@link #isCurrent()} instead. 
+   * @throws CorruptIndexException if the index is corrupt
+   * @throws IOException if there is a low-level IO error
+   * @deprecated Use {@link #lastModified(Directory)} instead.
+   *             This method will be removed in the 3.0 release.
+   */
+  public static long lastModified(String directory) throws CorruptIndexException, IOException {
+    return lastModified(new File(directory));
+  }
+
+  /**
+   * Returns the time the index in the named directory was last modified. 
+   * Do not use this to check whether the reader is still up-to-date, use
+   * {@link #isCurrent()} instead. 
+   * @throws CorruptIndexException if the index is corrupt
+   * @throws IOException if there is a low-level IO error
+   * @deprecated Use {@link #lastModified(Directory)} instead.
+   *             This method will be removed in the 3.0 release.
+   * 
+   */
+  public static long lastModified(File fileDirectory) throws CorruptIndexException, IOException {
+    Directory dir = FSDirectory.open(fileDirectory); // use new static method here
+    try {
+      return lastModified(dir);
+    } finally {
+      dir.close();
+    }
+  }
+
+  /**
    * Returns the time the index in the named directory was last modified. 
    * Do not use this to check whether the reader is still up-to-date, use
    * {@link #isCurrent()} instead. 
@@ -448,7 +522,7 @@
    * @throws IOException if there is a low-level IO error
    */
   public static long getCurrentVersion(Directory directory) throws CorruptIndexException, IOException {
-    return SegmentInfos.readCurrentVersion(directory);
+    return SegmentInfos.readCurrentVersion(directory, Codecs.getDefault());
   }
 
   /**
@@ -466,7 +540,7 @@
    * @see #getCommitUserData()
    */
   public static Map getCommitUserData(Directory directory) throws CorruptIndexException, IOException {
-    return SegmentInfos.readCurrentUserData(directory);
+    return SegmentInfos.readCurrentUserData(directory, Codecs.getDefault());
   }
 
   /**
@@ -768,24 +842,45 @@
    * calling terms(), {@link TermEnum#next()} must be called
    * on the resulting enumeration before calling other methods such as
    * {@link TermEnum#term()}.
+   * @deprecated Use the new flex API ({@link #fields()}) instead.
    * @throws IOException if there is a low-level IO error
    */
   public abstract TermEnum terms() throws IOException;
 
+  // Default impl emulates new API using old one
+  public Fields fields() throws IOException {
+    return new LegacyFields(this);
+  }
+  
   /** Returns an enumeration of all terms starting at a given term. If
    * the given term does not exist, the enumeration is positioned at the
    * first term greater than the supplied term. The enumeration is
    * ordered by Term.compareTo(). Each term is greater than all that
    * precede it in the enumeration.
+   * @deprecated Use the new flex API ({@link #fields()}) instead.
    * @throws IOException if there is a low-level IO error
    */
   public abstract TermEnum terms(Term t) throws IOException;
 
   /** Returns the number of documents containing the term <code>t</code>.
    * @throws IOException if there is a low-level IO error
+   * @deprecated Use {@link #docFreq(String,TermRef)} instead.
    */
   public abstract int docFreq(Term t) throws IOException;
 
+  /** Returns the number of documents containing the term
+   * <code>t</code>.  This method does not take into
+   * account deleted documents that have not yet been
+   * merged away. */
+  public int docFreq(String field, TermRef term) throws IOException {
+    final Terms terms = fields().terms(field);
+    if (terms != null) {
+      return terms.docFreq(term);
+    } else {
+      return 0;
+    }
+  }
+
   /** Returns an enumeration of all the documents which contain
    * <code>term</code>. For each document, the document number, the frequency of
    * the term in that document is also provided, for use in
@@ -797,6 +892,7 @@
    * </ul>
    * <p>The enumeration is ordered by document number.  Each document number
    * is greater than all that precede it in the enumeration.
+   * @deprecated Use the new flex API ({@link #termDocsEnum()}) instead.
    * @throws IOException if there is a low-level IO error
    */
   public TermDocs termDocs(Term term) throws IOException {
@@ -806,7 +902,53 @@
     return termDocs;
   }
 
+  private static class NullDocsEnum extends DocsEnum {
+    public int advance(int target) {
+      return NO_MORE_DOCS;
+    }
+    public int next() {
+      return NO_MORE_DOCS;
+    }
+    public int freq() {
+      return 1;
+    }
+    public int read(int[] docs, int[] freqs) {
+      return 0;
+    }
+    public PositionsEnum positions() {
+      return null;
+    }
+  }
+  private static final NullDocsEnum nullDocsEnum = new NullDocsEnum();
+
+  // nocommit -- should we return null or NullDocsEnum?
+  /** Returns DocsEnum for the specified field & term. */
+  public DocsEnum termDocsEnum(Bits skipDocs, String field, TermRef term) throws IOException {
+
+    assert field != null;
+    assert term != null;
+
+    final Terms terms = fields().terms(field);
+    if (terms != null) {
+      if (Codec.DEBUG) {
+        System.out.println("ir.termDocsEnum field=" + field + " terms=" + terms + " this=" + this);
+      }
+      final DocsEnum docs = terms.docs(skipDocs, term);
+      if (Codec.DEBUG) {
+        System.out.println("ir.termDocsEnum field=" + field + " docs=" +docs);
+      }
+      if (docs != null) {
+        return docs;
+      } else {
+        return nullDocsEnum;
+      }
+    } else {
+      return nullDocsEnum;
+    }
+  }
+
   /** Returns an unpositioned {@link TermDocs} enumerator.
+   * @deprecated Use the new flex API ({@link #fields()}) instead.
    * @throws IOException if there is a low-level IO error
    */
   public abstract TermDocs termDocs() throws IOException;
@@ -826,6 +968,8 @@
    * <p> This positional information facilitates phrase and proximity searching.
    * <p>The enumeration is ordered by document number.  Each document number is
    * greater than all that precede it in the enumeration.
+   * @deprecated Please switch to the flex API ({@link
+   * #termDocsEnum()}) instead
    * @throws IOException if there is a low-level IO error
    */
   public TermPositions termPositions(Term term) throws IOException {
@@ -836,6 +980,8 @@
   }
 
   /** Returns an unpositioned {@link TermPositions} enumerator.
+   * @deprecated Please switch to the flex API ({@link
+   * #termDocsEnum()}) instead
    * @throws IOException if there is a low-level IO error
    */
   public abstract TermPositions termPositions() throws IOException;
@@ -843,7 +989,7 @@
 
 
   /** Deletes the document numbered <code>docNum</code>.  Once a document is
-   * deleted it will not appear in TermDocs or TermPostitions enumerations.
+   * deleted it will not appear in TermDocs or TermPositions enumerations.
    * Attempts to read its field with the {@link #document}
    * method will result in an error.  The presence of this document may still be
    * reflected in the {@link #docFreq} statistic, though
@@ -1019,6 +1165,31 @@
    */
   public abstract Collection getFieldNames(FieldOption fldOption);
 
+  private final class DeletedDocsBits implements Bits {
+    public boolean get(int docID) {
+      return isDeleted(docID);
+    }
+  }
+
+  public Bits getDeletedDocs() throws IOException {
+    return new DeletedDocsBits();
+  }
+
+
+  /**
+   * Forcibly unlocks the index in the named directory.
+   * <P>
+   * Caution: this should only be used by failure recovery code,
+   * when it is known that no other process nor thread is in fact
+   * currently accessing this index.
+   * @deprecated Please use {@link IndexWriter#unlock(Directory)} instead.
+   *             This method will be removed in the 3.0 release.
+   * 
+   */
+  public static void unlock(Directory directory) throws IOException {
+    directory.makeLock(IndexWriter.WRITE_LOCK_NAME).release();
+  }
+
   /**
    * Expert: return the IndexCommit that this reader has
    * opened.  This method is only implemented by those
@@ -1164,7 +1335,16 @@
    *  #getSequentialSubReaders} and ask each sub reader for
    *  its unique term count. */
   public long getUniqueTermCount() throws IOException {
-    throw new UnsupportedOperationException("this reader does not implement getUniqueTermCount()");
+    long numTerms = 0;
+    FieldsEnum it = fields().iterator();
+    while(true) {
+      String field = it.next();
+      if (field == null) {
+        break;
+      }
+      numTerms += fields().terms(field).getUniqueTermCount();
+    }
+    return numTerms;
   }
 
   /** Expert: Return the state of the flag that disables fakes norms in favor of representing the absence of field norms with null.

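For context on the flex API surface added to IndexReader above, here is a minimal, hypothetical usage sketch (not part of this commit). The names fields(), FieldsEnum, TermsEnum, TermRef, DocsEnum and getDeletedDocs() follow the shapes visible in this diff; NO_MORE_DOCS is assumed to be the sentinel DocsEnum uses, as in NullDocsEnum above.

    // Hypothetical sketch: walk every field, term and posting via the flex API.
    import org.apache.lucene.index.DocsEnum;
    import org.apache.lucene.index.FieldsEnum;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.TermRef;
    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.util.Bits;

    public class FlexEnumerationSketch {
      static void dumpPostings(IndexReader reader) throws Exception {
        Bits skipDocs = reader.getDeletedDocs();          // deleted docs to skip
        FieldsEnum fieldsEnum = reader.fields().iterator();
        String field;
        while ((field = fieldsEnum.next()) != null) {     // null => no more fields
          TermsEnum termsEnum = fieldsEnum.terms();
          TermRef term;
          while ((term = termsEnum.next()) != null) {     // null => no more terms
            DocsEnum docs = termsEnum.docs(skipDocs);
            int doc;
            while ((doc = docs.next()) != DocsEnum.NO_MORE_DOCS) {
              System.out.println(field + ":" + term + " doc=" + doc + " freq=" + docs.freq());
            }
          }
        }
      }
    }

The same reader could answer reader.docFreq("body", new TermRef("lucene")) through the new default docFreq(String, TermRef) shown above, which delegates to fields().terms(field).
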
Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/IndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/IndexWriter.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/IndexWriter.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/IndexWriter.java Tue Oct 13 20:44:51 2009
@@ -28,6 +28,7 @@
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.BufferedIndexInput;
 import org.apache.lucene.util.Constants;
+import org.apache.lucene.index.codecs.Codecs;
 
 import java.io.IOException;
 import java.io.PrintStream;
@@ -321,7 +322,7 @@
    *
    * <p>Note that this is functionally equivalent to calling
    * {#commit} and then using {@link IndexReader#open} to
-   * open a new reader.  But the turarnound time of this
+   * open a new reader.  But the turnaround time of this
    * method should be faster since it avoids the potentially
    * costly {@link #commit}.<p>
    *
@@ -401,7 +402,7 @@
     // reader; in theory we could do similar retry logic,
     // just like we do when loading segments_N
     synchronized(this) {
-      return new ReadOnlyDirectoryReader(this, segmentInfos, termInfosIndexDivisor);
+      return new ReadOnlyDirectoryReader(this, segmentInfos, termInfosIndexDivisor, codecs);
     }
   }
 
@@ -617,14 +618,14 @@
         if (doOpenStores) {
           sr.openDocStores();
         }
-        if (termsIndexDivisor != -1 && !sr.termsIndexLoaded()) {
+        if (termsIndexDivisor != -1) {
           // If this reader was originally opened because we
           // needed to merge it, we didn't load the terms
           // index.  But now, if the caller wants the terms
           // index (eg because it's doing deletes, or an NRT
           // reader is being opened) we ask the reader to
           // load its terms index.
-          sr.loadTermsIndex(termsIndexDivisor);
+          sr.loadTermsIndex();
         }
       }
 
@@ -870,7 +871,7 @@
    */
   public IndexWriter(Directory d, Analyzer a, boolean create, MaxFieldLength mfl)
        throws CorruptIndexException, LockObtainFailedException, IOException {
-    init(d, a, create, null, mfl.getLimit(), null, null);
+    init(d, a, create, null, mfl.getLimit(), null, null, null);
   }
 
   /**
@@ -945,7 +946,7 @@
    */
   public IndexWriter(Directory d, Analyzer a, boolean create, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl)
        throws CorruptIndexException, LockObtainFailedException, IOException {
-    init(d, a, create, deletionPolicy, mfl.getLimit(), null, null);
+    init(d, a, create, deletionPolicy, mfl.getLimit(), null, null, null);
   }
   
   /**
@@ -976,9 +977,10 @@
    *  <code>false</code> or if there is any other low-level
    *  IO error
    */
-  IndexWriter(Directory d, Analyzer a, boolean create, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl, IndexingChain indexingChain, IndexCommit commit)
+  // nocommit -- need IW.Config!!
+  public IndexWriter(Directory d, Analyzer a, boolean create, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl, IndexingChain indexingChain, IndexCommit commit, Codecs codecs)
        throws CorruptIndexException, LockObtainFailedException, IOException {
-    init(d, a, create, deletionPolicy, mfl.getLimit(), indexingChain, commit);
+    init(d, a, create, deletionPolicy, mfl.getLimit(), indexingChain, commit, codecs);
   }
   
   /**
@@ -1015,24 +1017,32 @@
    */
   public IndexWriter(Directory d, Analyzer a, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl, IndexCommit commit)
        throws CorruptIndexException, LockObtainFailedException, IOException {
-    init(d, a, false, deletionPolicy, mfl.getLimit(), null, commit);
+    init(d, a, false, deletionPolicy, mfl.getLimit(), null, commit, null);
   }
+  
+  Codecs codecs;
 
   private void init(Directory d, Analyzer a, IndexDeletionPolicy deletionPolicy, 
                     int maxFieldLength, IndexingChain indexingChain, IndexCommit commit)
     throws CorruptIndexException, LockObtainFailedException, IOException {
     if (IndexReader.indexExists(d)) {
-      init(d, a, false, deletionPolicy, maxFieldLength, indexingChain, commit);
+      init(d, a, false, deletionPolicy, maxFieldLength, indexingChain, commit, null);
     } else {
-      init(d, a, true, deletionPolicy, maxFieldLength, indexingChain, commit);
+      init(d, a, true, deletionPolicy, maxFieldLength, indexingChain, commit, null);
     }
   }
 
-  private void init(Directory d, Analyzer a, final boolean create,  
+  private void init(Directory d, Analyzer a, final boolean create, 
                     IndexDeletionPolicy deletionPolicy, int maxFieldLength,
-                    IndexingChain indexingChain, IndexCommit commit)
+                    IndexingChain indexingChain, IndexCommit commit, Codecs codecsIn)
     throws CorruptIndexException, LockObtainFailedException, IOException {
 
+    if (codecsIn == null) {
+      codecs = Codecs.getDefault();
+    } else {
+      codecs = codecsIn;
+    }
+
     directory = d;
     analyzer = a;
     setMessageID(defaultInfoStream);
@@ -1059,7 +1069,7 @@
         // segments_N file with no segments:
         boolean doCommit;
         try {
-          segmentInfos.read(directory);
+          segmentInfos.read(directory, codecs);
           segmentInfos.clear();
           doCommit = false;
         } catch (IOException e) {
@@ -1078,7 +1088,7 @@
           changeCount++;
         }
       } else {
-        segmentInfos.read(directory);
+        segmentInfos.read(directory, codecs);
 
         if (commit != null) {
           // Swap out all segments, but, keep metadata in
@@ -1089,7 +1099,7 @@
           if (commit.getDirectory() != directory)
             throw new IllegalArgumentException("IndexCommit's directory doesn't match my directory");
           SegmentInfos oldInfos = new SegmentInfos();
-          oldInfos.read(directory, commit.getSegmentsFileName());
+          oldInfos.read(directory, commit.getSegmentsFileName(), codecs);
           segmentInfos.replace(oldInfos);
           changeCount++;
           if (infoStream != null)
@@ -1111,7 +1121,7 @@
       // KeepOnlyLastCommitDeleter:
       deleter = new IndexFileDeleter(directory,
                                      deletionPolicy == null ? new KeepOnlyLastCommitDeletionPolicy() : deletionPolicy,
-                                     segmentInfos, infoStream, docWriter);
+                                     segmentInfos, infoStream, docWriter, this.codecs);
 
       if (deleter.startingCommitDeleted)
         // Deletion policy deleted the "head" commit point.
@@ -2986,7 +2996,7 @@
           ensureOpen();
           for (int i = 0; i < dirs.length; i++) {
             SegmentInfos sis = new SegmentInfos();	  // read infos from dir
-            sis.read(dirs[i]);
+            sis.read(dirs[i], codecs);
             for (int j = 0; j < sis.size(); j++) {
               final SegmentInfo info = sis.info(j);
               docCount += info.docCount;
@@ -3116,7 +3126,7 @@
             }
 
             SegmentInfos sis = new SegmentInfos(); // read infos from dir
-            sis.read(dirs[i]);
+            sis.read(dirs[i], codecs);
             for (int j = 0; j < sis.size(); j++) {
               SegmentInfo info = sis.info(j);
               assert !segmentInfos.contains(info): "dup info dir=" + info.dir + " name=" + info.name;
@@ -3299,10 +3309,11 @@
       // call hits an exception it will release the write
       // lock:
       startTransaction(true);
-
+      success = false;
+      
       try {
         mergedName = newSegmentName();
-        merger = new SegmentMerger(this, mergedName, null);
+        merger = new SegmentMerger(this, mergedName, null, codecs);
 
         SegmentReader sReader = null;
         synchronized(this) {
@@ -3325,7 +3336,7 @@
           synchronized(this) {
             segmentInfos.clear();                      // pop old infos & add new
             info = new SegmentInfo(mergedName, docCount, directory, false, true,
-                                   -1, null, false, merger.hasProx());
+                                   -1, null, false, merger.hasProx(), merger.getCodec());
             setDiagnostics(info, "addIndexes(IndexReader[])");
             segmentInfos.add(info);
           }
@@ -3372,7 +3383,7 @@
           startTransaction(false);
 
           try {
-            merger.createCompoundFile(mergedName + ".cfs");
+            merger.createCompoundFile(mergedName + ".cfs", info);
             synchronized(this) {
               info.setUseCompoundFile(true);
             }
@@ -3725,7 +3736,9 @@
                                      directory, false, true,
                                      docStoreOffset, docStoreSegment,
                                      docStoreIsCompoundFile,    
-                                     docWriter.hasProx());
+                                     docWriter.hasProx(),
+                                     docWriter.getCodec());
+
         setDiagnostics(newSegment, "flush");
       }
 
@@ -3941,7 +3954,8 @@
       }
     }
 
-    merge.info.setHasProx(merger.hasProx());
+    // mxx
+    // System.out.println(Thread.currentThread().getName() + ": finish setHasProx=" + merger.hasProx() + " seg=" + merge.info.name);
 
     segmentInfos.subList(start, start + merge.segments.size()).clear();
     assert !segmentInfos.contains(merge.info);
@@ -4237,7 +4251,8 @@
                                  docStoreOffset,
                                  docStoreSegment,
                                  docStoreIsCompoundFile,
-                                 false);
+                                 false,
+                                 null);
 
 
     Map details = new HashMap();
@@ -4317,7 +4332,7 @@
     if (infoStream != null)
       message("merging " + merge.segString(directory));
 
-    merger = new SegmentMerger(this, mergedName, merge);
+    merger = new SegmentMerger(this, mergedName, merge, codecs);
 
     merge.readers = new SegmentReader[numSegments];
     merge.readersClone = new SegmentReader[numSegments];
@@ -4390,8 +4405,17 @@
       // This is where all the work happens:
       mergedDocCount = merge.info.docCount = merger.merge(merge.mergeDocStores);
 
+      // Record which codec was used to write the segment
+      merge.info.setCodec(merger.getCodec());
+      
       assert mergedDocCount == totDocCount;
 
+      // Very important to do this before opening the reader
+      // because codec must know if prox was written for
+      // this segment:
+      //System.out.println("merger set hasProx=" + merger.hasProx() + " seg=" + merge.info.name);
+      merge.info.setHasProx(merger.hasProx());
+
       // TODO: in the non-realtime case, we may want to only
       // keep deletes (it's costly to open entire reader
       // when we just need deletes)
@@ -4430,7 +4454,7 @@
               } catch (Throwable t) {
               }
               // This was a private clone and we had the only reference
-              assert merge.readersClone[i].getRefCount() == 0;
+              // assert merge.readersClone[i].getRefCount() == 0: "refCount should be 0 but is " + merge.readersClone[i].getRefCount();
             }
           }
         } else {
@@ -4442,7 +4466,7 @@
             if (merge.readersClone[i] != null) {
               merge.readersClone[i].close();
               // This was a private clone and we had the only reference
-              assert merge.readersClone[i].getRefCount() == 0;
+              //assert merge.readersClone[i].getRefCount() == 0;
             }
           }
         }
@@ -4463,7 +4487,7 @@
       final String compoundFileName = mergedName + "." + IndexFileNames.COMPOUND_FILE_EXTENSION;
 
       try {
-        merger.createCompoundFile(compoundFileName);
+        merger.createCompoundFile(compoundFileName, merge.info);
         success = true;
       } catch (IOException ioe) {
         synchronized(this) {

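To see how the new Codecs parameter threads through the writer, here is a hedged sketch (not part of this commit) of constructing an IndexWriter on this branch with an explicit Codecs; passing null instead would make init() fall back to Codecs.getDefault(), mirroring the new private IndexReader.open(...). The directory path and analyzer choice are illustrative only, and the 8-argument constructor is still marked "nocommit -- need IW.Config!!" above.

    // Hypothetical sketch; the 8-argument constructor and Codecs.getDefault()
    // are taken from this patch.
    import java.io.File;
    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.codecs.Codecs;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;

    public class CodecsWriterSketch {
      public static void main(String[] args) throws Exception {
        Directory dir = FSDirectory.open(new File("/tmp/flex-index"));  // illustrative path
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(),
            true,                                   // create a new index
            null,                                   // default deletion policy
            IndexWriter.MaxFieldLength.UNLIMITED,
            null,                                   // null indexing chain (package-private type; illustration only)
            null,                                   // no prior IndexCommit
            Codecs.getDefault());                   // or null for the same default
        try {
          // ... addDocument() calls would go here ...
        } finally {
          writer.close();
          dir.close();
        }
      }
    }
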
Added: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyFields.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyFields.java?rev=824918&view=auto
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyFields.java (added)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyFields.java Tue Oct 13 20:44:51 2009
@@ -0,0 +1,45 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+
+/** Implements new API (FieldsEnum/TermsEnum) on top of old
+ *  API.  Used only for IndexReader impls outside Lucene's
+ *  core. */
+class LegacyFields extends Fields {
+  private final IndexReader r;
+  private TermEnum terms;
+
+  public LegacyFields(IndexReader r) throws IOException {
+    this.r = r;
+  }
+
+  public FieldsEnum iterator() throws IOException {
+    return new LegacyFieldsEnum(r);
+  }
+
+  public Terms terms(String field) throws IOException {
+    // nocommit
+    return new LegacyTerms(r, field);
+  }
+
+  public void close() throws IOException {
+    // nocommit
+  }
+}
\ No newline at end of file

Propchange: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyFields.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyFieldsEnum.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyFieldsEnum.java?rev=824918&view=auto
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyFieldsEnum.java (added)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyFieldsEnum.java Tue Oct 13 20:44:51 2009
@@ -0,0 +1,236 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import org.apache.lucene.util.Bits;
+
+/** Implements new API (FieldsEnum/TermsEnum) on top of old
+ *  API.  Used only for IndexReader impls outside Lucene's
+ *  core. */
+class LegacyFieldsEnum extends FieldsEnum {
+  private final IndexReader r;
+  private TermEnum terms;
+  private String field;
+
+  public LegacyFieldsEnum(IndexReader r) throws IOException {
+    this.r = r;
+    terms = r.terms();
+  }
+
+  private void doSeek(Term t) throws IOException {
+    terms.close();
+    terms = r.terms(t);
+  }
+
+  /*
+  public boolean seek(String field) throws IOException {
+    this.field = field;
+    doSeek(new Term(field, ""));
+    return terms.term() != null && terms.term().field.equals(field);
+  }
+  */
+
+  public String next() throws IOException {
+
+    final Term seekTo = new Term(field, "\uFFFF");
+
+    doSeek(seekTo);
+    if (terms.term() != null) {
+      String newField = terms.term().field;
+      assert !newField.equals(field);
+      field = newField;
+      return field;
+    } else {
+      return null;
+    }
+  }
+
+  public TermsEnum terms() throws IOException {
+    return new LegacyTermsEnum(r, field);
+  }
+
+  public void close() throws IOException {
+    terms.close();
+  }
+
+  // Emulates flex on top of legacy API
+  static class LegacyTermsEnum extends TermsEnum {
+    private final IndexReader r;
+    private final String field;
+    private TermEnum terms;
+    private TermRef current;
+
+    LegacyTermsEnum(IndexReader r, String field) throws IOException {
+      this.r = r;
+      this.field = field;
+      this.terms = r.terms(new Term(field, ""));
+    }
+
+    public SeekStatus seek(TermRef text) throws IOException {
+
+      // nocommit: too slow?
+      terms.close();
+      terms = r.terms(new Term(field, text.toString()));
+      final Term t = terms.term();
+      if (t == null) {
+        current = null;
+        return SeekStatus.END;
+      } else {
+        final TermRef tr = new TermRef(t.text());
+        if (text.termEquals(tr)) {
+          current = tr;
+          return SeekStatus.FOUND;
+        } else {
+          // nocommit reuse TermRef instance
+          current = tr;
+          return SeekStatus.NOT_FOUND;
+        }
+      }
+    }
+
+    public SeekStatus seek(long ord) throws IOException {
+      throw new UnsupportedOperationException();
+    }
+
+    public long ord() throws IOException {
+      throw new UnsupportedOperationException();
+    }
+
+    public TermRef next() throws IOException {
+      if (terms.next()) {
+        // nocommit -- reuse TermRef instance
+        current = new TermRef(terms.term().text());
+        return current;
+      } else {
+        current = null;
+        return null;
+      }
+    }
+
+    public TermRef term() {
+      return current;
+    }
+
+    /*
+    public String text() {
+      return terms.term().text;
+    }
+    */
+
+    public int docFreq() {
+      return terms.docFreq();
+    }
+
+    public DocsEnum docs(Bits skipDocs) throws IOException {
+      return new LegacyDocsEnum(r, field, terms.term(), skipDocs);
+    }
+
+    public void close() throws IOException {
+      terms.close();
+    }
+  }
+
+  // Emulates flex on top of legacy API
+  private static class LegacyDocsEnum extends DocsEnum {
+    final TermDocs td;
+    final Term term;
+    final IndexReader r;
+    final String field;
+    final Bits skipDocs;
+
+    TermPositions tp;
+
+    LegacyDocsEnum(IndexReader r, String field, Term term, Bits skipDocs) throws IOException {
+      this.r = r;
+      this.field = field;
+      this.term = term;
+      td = r.termDocs(term);
+      this.skipDocs = skipDocs;
+    }
+
+    // nocommit -- must enforce skipDocs... but old API will
+    // always secretly skip deleted docs, and we can't work
+    // around that for external readers?
+    public int next() throws IOException {
+      if (td.next()) {
+        return td.doc();
+      } else {
+        return NO_MORE_DOCS;
+      }
+    }
+
+    public int advance(int target) throws IOException {
+      if (td.skipTo(target)) {
+        return td.doc();
+      } else {
+        return NO_MORE_DOCS;
+      }
+    }
+
+    public int freq() {
+      return td.freq();
+    }
+
+    public int read(int[] docs, int[] freqs) throws IOException {
+      return td.read(docs, freqs);
+    }
+
+    public void close() throws IOException {
+      td.close();
+    }
+
+    LegacyPositionsEnum lpe;
+
+    public PositionsEnum positions() throws IOException {
+      if (tp == null) {
+        tp = r.termPositions(term);
+        lpe = new LegacyPositionsEnum(tp);
+      } else {
+        tp.seek(term);
+      }
+      return lpe;
+    }
+  }
+
+  // Emulates flex on top of legacy API
+  private static class LegacyPositionsEnum extends PositionsEnum {
+
+    final TermPositions tp;
+
+    LegacyPositionsEnum(TermPositions tp) {
+      this.tp = tp;
+    }
+
+    public int next() throws IOException {
+      return tp.nextPosition();
+    }
+
+    public int getPayloadLength() {
+      return tp.getPayloadLength();
+    }
+
+    public byte[] getPayload(byte[] data, int offset) throws IOException {
+      return tp.getPayload(data, offset);
+    }
+
+    public boolean hasPayload() {
+      return tp.isPayloadAvailable();
+    }
+  }
+}
\ No newline at end of file

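LegacyFieldsEnum.next() above advances fields by seeking the old TermEnum to Term(currentField, "\uFFFF"), which sorts after every real term of the current field. A standalone illustration of that jump (hypothetical snippet, not in this commit; reader is an assumed open IndexReader):

    // Hypothetical illustration: because the legacy TermEnum orders terms by
    // (field, text), seeking to (field, "\uFFFF") lands on the first term of
    // the next field, since "\uFFFF" sorts after any indexed term text.
    TermEnum te = reader.terms(new Term("body", "\uFFFF"));
    Term t = te.term();
    String nextField = (t == null) ? null : t.field();  // first field after "body", or null
    te.close();
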
Propchange: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyFieldsEnum.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacySegmentMergeInfo.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacySegmentMergeInfo.java?rev=824918&view=auto
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacySegmentMergeInfo.java (added)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacySegmentMergeInfo.java Tue Oct 13 20:44:51 2009
@@ -0,0 +1,85 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+
+final class LegacySegmentMergeInfo {
+  Term term;
+  int base;
+  int ord;  // the position of the segment in a MultiReader
+  TermEnum termEnum;
+  IndexReader reader;
+  int delCount;
+  private TermPositions postings;  // use getPositions()
+  private int[] docMap;  // use getDocMap()
+
+  LegacySegmentMergeInfo(int b, TermEnum te, IndexReader r)
+    throws IOException {
+    base = b;
+    reader = r;
+    termEnum = te;
+    term = te.term();
+  }
+
+  // maps around deleted docs
+  int[] getDocMap() {
+    if (docMap == null) {
+      delCount = 0;
+      // build array which maps document numbers around deletions 
+      if (reader.hasDeletions()) {
+        int maxDoc = reader.maxDoc();
+        docMap = new int[maxDoc];
+        int j = 0;
+        for (int i = 0; i < maxDoc; i++) {
+          if (reader.isDeleted(i)) {
+            delCount++;
+            docMap[i] = -1;
+          } else
+            docMap[i] = j++;
+        }
+      }
+    }
+    return docMap;
+  }
+
+  TermPositions getPositions() throws IOException {
+    if (postings == null) {
+      postings = reader.termPositions();
+    }
+    return postings;
+  }
+
+  final boolean next() throws IOException {
+    if (termEnum.next()) {
+      term = termEnum.term();
+      return true;
+    } else {
+      term = null;
+      return false;
+    }
+  }
+
+  final void close() throws IOException {
+    termEnum.close();
+    if (postings != null) {
+      postings.close();
+    }
+  }
+}
+

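The getDocMap() method above renumbers documents around deletions: deleted docs map to -1 and live docs get consecutive new numbers. A standalone sketch of the same mapping (hypothetical helper, not part of this commit):

    // Hypothetical standalone version of LegacySegmentMergeInfo.getDocMap():
    // deleted docs map to -1, live docs are renumbered consecutively.
    static int[] buildDocMap(boolean[] deleted) {
      int[] docMap = new int[deleted.length];
      int next = 0;
      for (int i = 0; i < deleted.length; i++) {
        docMap[i] = deleted[i] ? -1 : next++;
      }
      return docMap;
    }
    // Example: deleted = {false, true, false} -> docMap = {0, -1, 1}.
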
Propchange: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacySegmentMergeInfo.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacySegmentMergeQueue.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacySegmentMergeQueue.java?rev=824918&view=auto
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacySegmentMergeQueue.java (added)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacySegmentMergeQueue.java Tue Oct 13 20:44:51 2009
@@ -0,0 +1,41 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import org.apache.lucene.util.PriorityQueue;
+
+final class LegacySegmentMergeQueue extends PriorityQueue<LegacySegmentMergeInfo> {
+  LegacySegmentMergeQueue(int size) {
+    initialize(size);
+  }
+
+  protected final boolean lessThan(LegacySegmentMergeInfo a, LegacySegmentMergeInfo b) {
+    int comparison = a.term.compareTo(b.term);
+    if (comparison == 0)
+      return a.base < b.base; 
+    else
+      return comparison < 0;
+  }
+
+  final void close() throws IOException {
+    while (top() != null)
+      ((LegacySegmentMergeInfo)pop()).close();
+  }
+
+}

Propchange: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacySegmentMergeQueue.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyTerms.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyTerms.java?rev=824918&view=auto
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyTerms.java (added)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyTerms.java Tue Oct 13 20:44:51 2009
@@ -0,0 +1,45 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+import java.io.IOException;
+
+/** Implements new API (FieldsEnum/TermsEnum) on top of old
+ *  API.  Used only for IndexReader impls outside Lucene's
+ *  core. */
+class LegacyTerms extends Terms {
+
+  private final IndexReader r;
+  private final String field;
+
+  LegacyTerms(IndexReader r, String field) {
+    this.r = r;
+    this.field = field;
+  }
+
+  public TermsEnum iterator() throws IOException {
+    return new LegacyFieldsEnum.LegacyTermsEnum(r, field);
+  }
+
+  public void close() {
+  }
+}
+
+  
+    

Propchange: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/LegacyTerms.java
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/MultiReader.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/MultiReader.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/MultiReader.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/MultiReader.java Tue Oct 13 20:44:51 2009
@@ -25,10 +25,13 @@
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.FieldSelector;
+import org.apache.lucene.index.DirectoryReader.MultiBits;
+import org.apache.lucene.index.DirectoryReader.MultiFields;
 import org.apache.lucene.index.DirectoryReader.MultiTermDocs;
 import org.apache.lucene.index.DirectoryReader.MultiTermEnum;
 import org.apache.lucene.index.DirectoryReader.MultiTermPositions;
 import org.apache.lucene.search.DefaultSimilarity;
+import org.apache.lucene.util.Bits;
 
 /** An IndexReader which reads multiple indexes, appending
  * their content. */
@@ -40,6 +43,8 @@
   private int maxDoc = 0;
   private int numDocs = -1;
   private boolean hasDeletions = false;
+  private MultiBits deletedDocs;
+  private MultiFields fields;
   
  /**
   * <p>Construct a MultiReader aggregating the named set of (sub)readers.
@@ -49,7 +54,7 @@
   * @param subReaders set of (sub)readers
   * @throws IOException
   */
-  public MultiReader(IndexReader[] subReaders) {
+  public MultiReader(IndexReader[] subReaders) throws IOException {
     initialize(subReaders, true);
   }
 
@@ -62,14 +67,15 @@
    * @param subReaders set of (sub)readers
    * @throws IOException
    */
-  public MultiReader(IndexReader[] subReaders, boolean closeSubReaders) {
+  public MultiReader(IndexReader[] subReaders, boolean closeSubReaders) throws IOException {
     initialize(subReaders, closeSubReaders);
   }
   
-  private void initialize(IndexReader[] subReaders, boolean closeSubReaders) {
+  private void initialize(IndexReader[] subReaders, boolean closeSubReaders) throws IOException {
     this.subReaders = (IndexReader[]) subReaders.clone();
     starts = new int[subReaders.length + 1];    // build starts array
     decrefOnClose = new boolean[subReaders.length];
+    Bits[] subs = new Bits[subReaders.length];
     for (int i = 0; i < subReaders.length; i++) {
       starts[i] = maxDoc;
       maxDoc += subReaders[i].maxDoc();      // compute maxDocs
@@ -81,12 +87,24 @@
         decrefOnClose[i] = false;
       }
       
-      if (subReaders[i].hasDeletions())
+      if (subReaders[i].hasDeletions()) {
         hasDeletions = true;
+      }
+      subs[i] = subReaders[i].getDeletedDocs();
     }
     starts[subReaders.length] = maxDoc;
+    if (hasDeletions) {
+      deletedDocs = new MultiBits(subs, starts);
+    } else {
+      deletedDocs = null;
+    }
+    fields = new MultiFields(subReaders, starts);
   }
-  
+
+  public Fields fields() throws IOException {
+    return fields;
+  }
+
   /**
    * Tries to reopen the subreaders.
    * <br>
@@ -127,6 +145,10 @@
     }
   }
   
+  public Bits getDeletedDocs() {
+    return deletedDocs;
+  }
+
   /**
    * If clone is true then we clone each of the subreaders
    * @param doClone
@@ -343,6 +365,15 @@
     return total;
   }
 
+  public int docFreq(String field, TermRef t) throws IOException {
+    ensureOpen();
+    int total = 0;          // sum freqs in segments
+    for (int i = 0; i < subReaders.length; i++) {
+      total += subReaders[i].docFreq(field, t);
+    }
+    return total;
+  }
+
   public TermDocs termDocs() throws IOException {
     ensureOpen();
     return new MultiTermDocs(this, subReaders, starts);

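The new docFreq(String, TermRef) override above sums the per-subreader frequencies, just like the existing Term-based docFreq. A hedged usage sketch (readerA and readerB are assumed pre-opened readers; note the MultiReader constructors now declare IOException per this patch):

    // Hypothetical usage; MultiReader and TermRef shapes follow this diff.
    IndexReader[] subs = { readerA, readerB };
    MultiReader multi = new MultiReader(subs, false);       // may now throw IOException
    int df = multi.docFreq("body", new TermRef("lucene"));  // summed across subreaders
    Bits deleted = multi.getDeletedDocs();                  // MultiBits, or null if no deletions
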
Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/MultipleTermPositions.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/MultipleTermPositions.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/MultipleTermPositions.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/MultipleTermPositions.java Tue Oct 13 20:44:51 2009
@@ -28,7 +28,8 @@
 /**
  * Allows you to iterate over the {@link TermPositions} for multiple {@link Term}s as
  * a single {@link TermPositions}.
- *
+ * @deprecated This class is being replaced by the package
+ * private MultiDocsEnum in org.apache.lucene.search.
  */
 public class MultipleTermPositions implements TermPositions {
 

Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/ParallelReader.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/ParallelReader.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/ParallelReader.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/ParallelReader.java Tue Oct 13 20:44:51 2009
@@ -21,6 +21,7 @@
 import org.apache.lucene.document.FieldSelector;
 import org.apache.lucene.document.FieldSelectorResult;
 import org.apache.lucene.document.Fieldable;
+import org.apache.lucene.util.Bits;
 
 import java.io.IOException;
 import java.util.*;
@@ -47,7 +48,7 @@
   private List readers = new ArrayList();
   private List decrefOnClose = new ArrayList(); // remember which subreaders to decRef on close
   boolean incRefReaders = false;
-  private SortedMap fieldToReader = new TreeMap();
+  private SortedMap<String, IndexReader> fieldToReader = new TreeMap<String, IndexReader>();
   private Map readerToFields = new HashMap();
   private List storedFieldReaders = new ArrayList();
 
@@ -55,6 +56,8 @@
   private int numDocs;
   private boolean hasDeletions;
 
+  private ParallelFields fields = new ParallelFields();
+
  /** Construct a ParallelReader. 
   * <p>Note that all subreaders are closed if this ParallelReader is closed.</p>
   */
@@ -109,8 +112,10 @@
     Iterator i = fields.iterator();
     while (i.hasNext()) {                         // update fieldToReader map
       String field = (String)i.next();
-      if (fieldToReader.get(field) == null)
+      if (fieldToReader.get(field) == null) {
         fieldToReader.put(field, reader);
+      }
+      this.fields.addField(field, reader);
     }
 
     if (!ignoreStoredFields)
@@ -122,6 +127,57 @@
     }
     decrefOnClose.add(Boolean.valueOf(incRefReaders));
   }
+
+  private class ParallelFieldsEnum extends FieldsEnum {
+    String currentField;
+    IndexReader currentReader;
+    Iterator<String> keys;
+    private final HashMap<String, IndexReader> readerFields = new HashMap<String, IndexReader>();
+
+    ParallelFieldsEnum() {
+      keys = fieldToReader.keySet().iterator();
+    }
+
+    public String next() throws IOException {
+      if (keys.hasNext()) {
+        currentField = (String) keys.next();
+        currentReader = (IndexReader) fieldToReader.get(currentField);
+      } else {
+        currentField = null;
+        currentReader = null;
+      }
+      return currentField;
+    }
+
+    public TermsEnum terms() throws IOException {
+      assert currentReader != null;
+      return currentReader.fields().terms(currentField).iterator();
+    }
+  }
+
+  // Single instance of this, per ParallelReader instance
+  private class ParallelFields extends Fields {
+    final HashMap<String,Terms> fields = new HashMap<String,Terms>();
+
+    public void addField(String field, IndexReader r) throws IOException {
+      fields.put(field, r.fields().terms(field));
+    }
+
+    public FieldsEnum iterator() throws IOException {
+      return new ParallelFieldsEnum();
+    }
+    public Terms terms(String field) throws IOException {
+      return fields.get(field);
+    }
+  }
+
+  public Bits getDeletedDocs() throws IOException {
+    return ((IndexReader) readers.get(0)).getDeletedDocs();
+  }
+
+  public Fields fields() {
+    return fields;
+  }
   
   public synchronized Object clone() {
     try {
@@ -374,6 +430,12 @@
     return reader==null ? 0 : reader.docFreq(term);
   }
 
+  public int docFreq(String field, TermRef term) throws IOException {
+    ensureOpen();
+    IndexReader reader = ((IndexReader)fieldToReader.get(field));
+    return reader == null? 0 : reader.docFreq(field, term);
+  }
+
   public TermDocs termDocs(Term term) throws IOException {
     ensureOpen();
     return new ParallelTermDocs(term);
@@ -468,7 +530,7 @@
 
   private class ParallelTermEnum extends TermEnum {
     private String field;
-    private Iterator fieldIterator;
+    private Iterator<String> fieldIterator;
     private TermEnum termEnum;
 
     public ParallelTermEnum() throws IOException {
@@ -479,12 +541,12 @@
         return;
       }
       if (field != null)
-        termEnum = ((IndexReader)fieldToReader.get(field)).terms();
+        termEnum = fieldToReader.get(field).terms();
     }
 
     public ParallelTermEnum(Term term) throws IOException {
       field = term.field();
-      IndexReader reader = ((IndexReader)fieldToReader.get(field));
+      IndexReader reader = fieldToReader.get(field);
       if (reader!=null)
         termEnum = reader.terms(term);
     }
@@ -506,7 +568,7 @@
       }
       while (fieldIterator.hasNext()) {
         field = (String) fieldIterator.next();
-        termEnum = ((IndexReader)fieldToReader.get(field)).terms(new Term(field));
+        termEnum = fieldToReader.get(field).terms(new Term(field));
         Term term = termEnum.term();
         if (term!=null && term.field()==field)
           return true;

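ParallelReader now routes flex calls per field: ParallelFields.addField() caches r.fields().terms(field) for whichever subreader first declares the field, and docFreq(String, TermRef) dispatches through fieldToReader. A hedged usage sketch (titleReader and bodyReader are assumed subreaders, not part of this commit):

    // Hypothetical usage of the per-field routing added in this patch.
    ParallelReader pr = new ParallelReader();
    pr.add(titleReader);                                   // assumed reader holding "title"
    pr.add(bodyReader);                                    // assumed reader holding "body"
    int df = pr.docFreq("title", new TermRef("lucene"));   // routed to titleReader
    Terms titleTerms = pr.fields().terms("title");         // cached by ParallelFields
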
Added: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/PositionsEnum.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/PositionsEnum.java?rev=824918&view=auto
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/PositionsEnum.java (added)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/PositionsEnum.java Tue Oct 13 20:44:51 2009
@@ -0,0 +1,41 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+
+import org.apache.lucene.util.AttributeSource;
+
+public abstract class PositionsEnum extends AttributeSource {
+
+  // nocommit
+  public String desc;
+
+  /** Returns the next position.  You should only call this
+   *  up to {@link FormatPostingsDocsEnum#freq()} times else
+   *  the behavior is not defined. */
+  public abstract int next() throws IOException;
+
+  public abstract int getPayloadLength();
+
+  // nocommit -- improve this so that readers that do their
+  // own buffering can save a copy
+  public abstract byte[] getPayload(byte[] data, int offset) throws IOException;
+
+  public abstract boolean hasPayload();
+}

Propchange: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/PositionsEnum.java
------------------------------------------------------------------------------
    svn:eol-style = native
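
As a sketch of how a consumer might drive this abstract API: the per-document freq is taken from the enclosing docs enum, per the javadoc above, and whether getPayload fills the passed buffer in place or hands back a different array is left open by the nocommit, so the return value is always kept.

    import java.io.IOException;

    import org.apache.lucene.index.PositionsEnum;

    class PositionsSketch {
      static void readPositions(PositionsEnum positions, int freq) throws IOException {
        byte[] payload = new byte[16];            // reusable scratch buffer
        for (int i = 0; i < freq; i++) {          // never call next() more than freq times
          int position = positions.next();
          if (positions.hasPayload()) {
            int length = positions.getPayloadLength();
            if (payload.length < length) {
              payload = new byte[length];
            }
            payload = positions.getPayload(payload, 0);
            // ... consume position and payload[0..length) ...
          }
        }
      }
    }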

Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/ReadOnlyDirectoryReader.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/ReadOnlyDirectoryReader.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/ReadOnlyDirectoryReader.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/ReadOnlyDirectoryReader.java Tue Oct 13 20:44:51 2009
@@ -18,22 +18,23 @@
  */
 
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.index.codecs.Codecs;
 
 import java.io.IOException;
 import java.util.Map;
 
 class ReadOnlyDirectoryReader extends DirectoryReader {
-  ReadOnlyDirectoryReader(Directory directory, SegmentInfos sis, IndexDeletionPolicy deletionPolicy, int termInfosIndexDivisor) throws IOException {
-    super(directory, sis, deletionPolicy, true, termInfosIndexDivisor);
+  ReadOnlyDirectoryReader(Directory directory, SegmentInfos sis, IndexDeletionPolicy deletionPolicy, int termInfosIndexDivisor, Codecs codecs) throws IOException {
+    super(directory, sis, deletionPolicy, true, termInfosIndexDivisor, codecs);
   }
 
   ReadOnlyDirectoryReader(Directory directory, SegmentInfos infos, SegmentReader[] oldReaders, int[] oldStarts, Map oldNormsCache, boolean doClone,
-                          int termInfosIndexDivisor) throws IOException {
-    super(directory, infos, oldReaders, oldStarts, oldNormsCache, true, doClone, termInfosIndexDivisor);
+                          int termInfosIndexDivisor, Codecs codecs) throws IOException {
+    super(directory, infos, oldReaders, oldStarts, oldNormsCache, true, doClone, termInfosIndexDivisor, codecs);
   }
   
-  ReadOnlyDirectoryReader(IndexWriter writer, SegmentInfos infos, int termInfosIndexDivisor) throws IOException {
-    super(writer, infos, termInfosIndexDivisor);
+  ReadOnlyDirectoryReader(IndexWriter writer, SegmentInfos infos, int termInfosIndexDivisor, Codecs codecs) throws IOException {
+    super(writer, infos, termInfosIndexDivisor, codecs);
   }
   
   protected void acquireWriteLock() {

Added: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentFieldMergeQueue.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentFieldMergeQueue.java?rev=824918&view=auto
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentFieldMergeQueue.java (added)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentFieldMergeQueue.java Tue Oct 13 20:44:51 2009
@@ -0,0 +1,34 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.util.PriorityQueue;
+
+// Used to merge-sort by SegmentMergeInfo.field
+final class SegmentFieldMergeQueue extends PriorityQueue {
+  SegmentFieldMergeQueue(int size) {
+    initialize(size);
+  }
+
+  protected final boolean lessThan(Object a, Object b) {
+    SegmentMergeInfo stiA = (SegmentMergeInfo)a;
+    SegmentMergeInfo stiB = (SegmentMergeInfo)b;
+    // nocommit ok not to break ties?
+    return stiA.field.compareTo(stiB.field) < 0;
+  }
+}

Propchange: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentFieldMergeQueue.java
------------------------------------------------------------------------------
    svn:eol-style = native
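
Schematically, this field-ordered queue would be loaded with one SegmentMergeInfo per source segment and drained in ascending field order. The fragment below is illustrative only: these types are package-private, the variable names (readers, segmentStates) are invented, and the add/top/pop/size calls assume the methods on Lucene's PriorityQueue base class.

    SegmentFieldMergeQueue fieldQueue = new SegmentFieldMergeQueue(readers.length);
    for (SegmentMergeInfo smi : segmentStates) {      // one entry per source segment
      if (smi.nextField()) {                          // positions smi.field on its first field
        fieldQueue.add(smi);
      }
    }
    while (fieldQueue.size() > 0) {
      SegmentMergeInfo top = (SegmentMergeInfo) fieldQueue.top();
      // ... merge the terms of every queued segment whose field equals top.field ...
      fieldQueue.pop();
    }

As the nocommit above flags, lessThan leaves ties between equal field names unbroken, so segments sharing a field pop in no guaranteed order.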

Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentInfo.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentInfo.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentInfo.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentInfo.java Tue Oct 13 20:44:51 2009
@@ -21,6 +21,8 @@
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.util.BitVector;
+import org.apache.lucene.index.codecs.Codec;
+import org.apache.lucene.index.codecs.Codecs;
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
@@ -88,6 +90,11 @@
                                                   // (if it's an older index)
 
   private boolean hasProx;                        // True if this segment has any fields with omitTermFreqAndPositions==false
+  
+  // nocommit: unread field
+  private boolean flexPostings;                   // True if postings were written with new flex format
+  private Codec codec;
+
 
   private Map diagnostics;
 
@@ -95,7 +102,7 @@
     return "si: "+dir.toString()+" "+name+" docCount: "+docCount+" delCount: "+delCount+" delFileName: "+getDelFileName();
   }
   
-  public SegmentInfo(String name, int docCount, Directory dir) {
+  public SegmentInfo(String name, int docCount, Directory dir, Codec codec) {
     this.name = name;
     this.docCount = docCount;
     this.dir = dir;
@@ -108,15 +115,21 @@
     docStoreIsCompoundFile = false;
     delCount = 0;
     hasProx = true;
+    flexPostings = true;
+    this.codec = codec;
   }
 
+  // nocommit -- this ctor is only used by back-compat tests
   public SegmentInfo(String name, int docCount, Directory dir, boolean isCompoundFile, boolean hasSingleNormFile) { 
-    this(name, docCount, dir, isCompoundFile, hasSingleNormFile, -1, null, false, true);
+    this(name, docCount, dir, isCompoundFile, hasSingleNormFile, -1, null, false, true, null);
+    SegmentWriteState state = new SegmentWriteState(null, dir, name, null, null, docCount, docCount, -1, Codecs.getDefault());
+    codec = state.codec = Codecs.getDefault().getWriter(state);
   }
-
-  public SegmentInfo(String name, int docCount, Directory dir, boolean isCompoundFile, boolean hasSingleNormFile,
-                     int docStoreOffset, String docStoreSegment, boolean docStoreIsCompoundFile, boolean hasProx) { 
-    this(name, docCount, dir);
+  
+  public SegmentInfo(String name, int docCount, Directory dir, boolean isCompoundFile, boolean hasSingleNormFile, 
+                     int docStoreOffset, String docStoreSegment, boolean docStoreIsCompoundFile, boolean hasProx,
+                     Codec codec) { 
+    this(name, docCount, dir, codec);
     this.isCompoundFile = (byte) (isCompoundFile ? YES : NO);
     this.hasSingleNormFile = hasSingleNormFile;
     preLockless = false;
@@ -124,6 +137,7 @@
     this.docStoreSegment = docStoreSegment;
     this.docStoreIsCompoundFile = docStoreIsCompoundFile;
     this.hasProx = hasProx;
+    this.codec = codec;
     delCount = 0;
     assert docStoreOffset == -1 || docStoreSegment != null: "dso=" + docStoreOffset + " dss=" + docStoreSegment + " docCount=" + docCount;
   }
@@ -149,6 +163,7 @@
     isCompoundFile = src.isCompoundFile;
     hasSingleNormFile = src.hasSingleNormFile;
     delCount = src.delCount;
+    codec = src.codec;
   }
 
   // must be Map<String, String>
@@ -169,10 +184,11 @@
    * @param format format of the segments info file
    * @param input input handle to read segment info from
    */
-  SegmentInfo(Directory dir, int format, IndexInput input) throws IOException {
+  SegmentInfo(Directory dir, int format, IndexInput input, Codecs codecs) throws IOException {
     this.dir = dir;
     name = input.readString();
     docCount = input.readInt();
+    final String codecName;
     if (format <= SegmentInfos.FORMAT_LOCKLESS) {
       delGen = input.readLong();
       if (format <= SegmentInfos.FORMAT_SHARED_DOC_STORE) {
@@ -215,6 +231,13 @@
       else
         hasProx = true;
 
+      // System.out.println(Thread.currentThread().getName() + ": si.read hasProx=" + hasProx + " seg=" + name);
+      
+      if (format <= SegmentInfos.FORMAT_FLEX_POSTINGS)
+        codecName = input.readString();
+      else
+        codecName = "PreFlex";
+
       if (format <= SegmentInfos.FORMAT_DIAGNOSTICS) {
         diagnostics = input.readStringStringMap();
       } else {
@@ -231,8 +254,10 @@
       docStoreSegment = null;
       delCount = -1;
       hasProx = true;
+      codecName = "PreFlex";
       diagnostics = Collections.EMPTY_MAP;
     }
+    codec = codecs.lookup(codecName);
   }
   
   void setNumFields(int numFields) {
@@ -315,7 +340,7 @@
   }
 
   public Object clone () {
-    SegmentInfo si = new SegmentInfo(name, docCount, dir);
+    SegmentInfo si = new SegmentInfo(name, docCount, dir, codec);
     si.isCompoundFile = isCompoundFile;
     si.delGen = delGen;
     si.delCount = delCount;
@@ -329,6 +354,7 @@
     si.docStoreOffset = docStoreOffset;
     si.docStoreSegment = docStoreSegment;
     si.docStoreIsCompoundFile = docStoreIsCompoundFile;
+    si.codec = codec;
     return si;
   }
 
@@ -560,6 +586,9 @@
     output.writeByte(isCompoundFile);
     output.writeInt(delCount);
     output.writeByte((byte) (hasProx ? 1:0));
+    // mxx
+    //System.out.println(Thread.currentThread().getName() + ": si.write hasProx=" + hasProx + " seg=" + name);
+    output.writeString(codec.name);
     output.writeStringStringMap(diagnostics);
   }
 
@@ -572,6 +601,19 @@
     return hasProx;
   }
 
+  /** Can only be called once. */
+  public void setCodec(Codec codec) {
+    assert this.codec == null;
+    if (codec == null) {
+      throw new IllegalArgumentException("codec must be non-null");
+    }
+    this.codec = codec;
+  }
+
+  Codec getCodec() {
+    return codec;
+  }
+
   private void addIfExists(List files, String fileName) throws IOException {
     if (dir.fileExists(fileName))
       files.add(fileName);
@@ -598,8 +640,12 @@
       files.add(name + "." + IndexFileNames.COMPOUND_FILE_EXTENSION);
     } else {
       final String[] exts = IndexFileNames.NON_STORE_INDEX_EXTENSIONS;
-      for(int i=0;i<exts.length;i++)
+      for(int i=0;i<exts.length;i++) {
+        // nocommit -- skip checking frq, prx, tii, tis if
+        // flex postings
         addIfExists(files, name + "." + exts[i]);
+      }
+      codec.files(dir, this, files);
     }
 
     if (docStoreOffset != -1) {
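
Pulling the write and read hunks above together, the per-segment codec name round-trips through the segments file roughly as follows (a condensed restatement of the changes above, not new logic):

    // write side (SegmentInfo.write): record which codec produced the postings
    output.writeString(codec.name);

    // read side (SegmentInfo ctor): resolve the name back to a Codec, falling
    // back to the pre-flex codec for formats older than FORMAT_FLEX_POSTINGS
    final String codecName;
    if (format <= SegmentInfos.FORMAT_FLEX_POSTINGS) {
      codecName = input.readString();
    } else {
      codecName = "PreFlex";
    }
    codec = codecs.lookup(codecName);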

Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentInfos.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentInfos.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentInfos.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentInfos.java Tue Oct 13 20:44:51 2009
@@ -23,6 +23,7 @@
 import org.apache.lucene.store.ChecksumIndexOutput;
 import org.apache.lucene.store.ChecksumIndexInput;
 import org.apache.lucene.store.NoSuchDirectoryException;
+import org.apache.lucene.index.codecs.Codecs;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -87,9 +88,13 @@
   /** This format adds optional per-segment String
    *  diagnostics storage, and switches userData to Map */
   public static final int FORMAT_DIAGNOSTICS = -9;
+  
+  /** Each segment records whether its postings are written
+   *  in the new flex format */
+  public static final int FORMAT_FLEX_POSTINGS = -10;
 
   /* This must always point to the most recent file format. */
-  static final int CURRENT_FORMAT = FORMAT_DIAGNOSTICS;
+  static final int CURRENT_FORMAT = FORMAT_FLEX_POSTINGS;
   
   public int counter = 0;    // used to name new segments
   /**
@@ -227,7 +232,8 @@
    * @throws CorruptIndexException if the index is corrupt
    * @throws IOException if there is a low-level IO error
    */
-  public final void read(Directory directory, String segmentFileName) throws CorruptIndexException, IOException {
+  public final void read(Directory directory, String segmentFileName, 
+                         Codecs codecs) throws CorruptIndexException, IOException {
     boolean success = false;
 
     // Clear any previous segments:
@@ -253,7 +259,7 @@
       }
       
       for (int i = input.readInt(); i > 0; i--) { // read segmentInfos
-        add(new SegmentInfo(directory, format, input));
+        add(new SegmentInfo(directory, format, input, codecs));
       }
       
       if(format >= 0){    // in old format the version number may be at the end of the file
@@ -300,13 +306,16 @@
    * @throws IOException if there is a low-level IO error
    */
   public final void read(Directory directory) throws CorruptIndexException, IOException {
-
+    read(directory, Codecs.getDefault());
+  }
+  
+  public final void read(Directory directory, final Codecs codecs) throws CorruptIndexException, IOException {
     generation = lastGeneration = -1;
 
     new FindSegmentsFile(directory) {
 
       protected Object doBody(String segmentFileName) throws CorruptIndexException, IOException {
-        read(directory, segmentFileName);
+        read(directory, segmentFileName, codecs);
         return null;
       }
     }.run();
@@ -372,6 +381,8 @@
   public Object clone() {
     SegmentInfos sis = (SegmentInfos) super.clone();
     for(int i=0;i<sis.size();i++) {
+      // nocommit
+      assert sis.info(i).getCodec() != null;
       sis.set(i, sis.info(i).clone());
     }
     sis.userData = new HashMap(userData);
@@ -396,7 +407,7 @@
    * @throws CorruptIndexException if the index is corrupt
    * @throws IOException if there is a low-level IO error
    */
-  public static long readCurrentVersion(Directory directory)
+  public static long readCurrentVersion(Directory directory, final Codecs codecs)
     throws CorruptIndexException, IOException {
 
     return ((Long) new FindSegmentsFile(directory) {
@@ -424,7 +435,7 @@
           // We cannot be sure about the format of the file.
           // Therefore we have to read the whole file and cannot simply seek to the version entry.
           SegmentInfos sis = new SegmentInfos();
-          sis.read(directory, segmentFileName);
+          sis.read(directory, segmentFileName, codecs);
           return Long.valueOf(sis.getVersion());
         }
       }.run()).longValue();
@@ -435,10 +446,10 @@
    * @throws CorruptIndexException if the index is corrupt
    * @throws IOException if there is a low-level IO error
    */
-  public static Map readCurrentUserData(Directory directory)
+  public static Map readCurrentUserData(Directory directory, Codecs codecs)
     throws CorruptIndexException, IOException {
     SegmentInfos sis = new SegmentInfos();
-    sis.read(directory);
+    sis.read(directory, codecs);
     return sis.getUserData();
   }
 

Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentMergeInfo.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentMergeInfo.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentMergeInfo.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentMergeInfo.java Tue Oct 13 20:44:51 2009
@@ -19,22 +19,36 @@
 
 import java.io.IOException;
 
+import org.apache.lucene.index.codecs.Codec;
+
 final class SegmentMergeInfo {
-  Term term;
   int base;
   int ord;  // the position of the segment in a MultiReader
-  TermEnum termEnum;
+  final FieldsEnum fields;
+  TermsEnum terms;
+  String field;
+  TermRef term;
+
   IndexReader reader;
   int delCount;
-  private TermPositions postings;  // use getPositions()
+  //private TermPositions postings;  // use getPositions()
   private int[] docMap;  // use getDocMap()
 
-  SegmentMergeInfo(int b, TermEnum te, IndexReader r)
+  // nocommit
+  private String segment;
+
+  SegmentMergeInfo(int b, IndexReader r)
     throws IOException {
     base = b;
     reader = r;
-    termEnum = te;
-    term = te.term();
+    fields = r.fields().iterator();
+    // nocommit
+    if (Codec.DEBUG) {
+      if (r instanceof SegmentReader) {
+        segment = ((SegmentReader) r).core.segment;
+      }
+      System.out.println("smi create seg=" + segment);
+    }
   }
 
   // maps around deleted docs
@@ -58,28 +72,29 @@
     return docMap;
   }
 
-  TermPositions getPositions() throws IOException {
-    if (postings == null) {
-      postings = reader.termPositions();
+  final boolean nextField() throws IOException {
+    field = fields.next();
+    if (field != null) {
+      terms = fields.terms();
+      return true;
+    } else {
+      return false;
     }
-    return postings;
   }
 
-  final boolean next() throws IOException {
-    if (termEnum.next()) {
-      term = termEnum.term();
+  final boolean nextTerm() throws IOException {
+    term = terms.next();
+    if (term != null) {
+      if (Codec.DEBUG) {
+        System.out.println("  smi.next: term=" + term + " seg=" + segment);
+      }
       return true;
     } else {
-      term = null;
+      if (Codec.DEBUG) {
+        System.out.println("  smi.next: term=null seg=" + segment);
+      }
       return false;
     }
   }
-
-  final void close() throws IOException {
-    termEnum.close();
-    if (postings != null) {
-    postings.close();
-  }
-}
 }
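
The reworked SegmentMergeInfo replaces the single TermEnum walk with a two-level iteration over fields and then terms; schematically (package-private types, merge body elided):

    SegmentMergeInfo smi = new SegmentMergeInfo(base, reader);
    while (smi.nextField()) {        // advances smi.field and resets smi.terms
      while (smi.nextTerm()) {       // advances smi.term within the current field
        // ... merge postings for (smi.field, smi.term) ...
      }
    }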
 

Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentMergeQueue.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentMergeQueue.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentMergeQueue.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/index/SegmentMergeQueue.java Tue Oct 13 20:44:51 2009
@@ -17,7 +17,6 @@
  * limitations under the License.
  */
 
-import java.io.IOException;
 import org.apache.lucene.util.PriorityQueue;
 
 final class SegmentMergeQueue extends PriorityQueue<SegmentMergeInfo> {
@@ -26,16 +25,10 @@
   }
 
   protected final boolean lessThan(SegmentMergeInfo stiA, SegmentMergeInfo stiB) {
-    int comparison = stiA.term.compareTo(stiB.term);
+    int comparison = stiA.term.compareTerm(stiB.term);
     if (comparison == 0)
       return stiA.base < stiB.base; 
     else
       return comparison < 0;
   }
-
-  final void close() throws IOException {
-    while (top() != null)
-      ((SegmentMergeInfo)pop()).close();
-  }
-
 }


