lucene-commits mailing list archives

From: mikemcc...@apache.org
Subject: svn commit: r1045322 - in /lucene/dev/trunk/lucene/src: java/org/apache/lucene/index/ java/org/apache/lucene/index/codecs/ test/org/apache/lucene/index/ test/org/apache/lucene/store/
Date: Mon, 13 Dec 2010 19:20:33 GMT
Author: mikemccand
Date: Mon Dec 13 19:20:32 2010
New Revision: 1045322

URL: http://svn.apache.org/viewvc?rev=1045322&view=rev
Log:
LUCENE-2811: SegmentInfo now tracks whether a segment wrote term vectors
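
For readers following the change: the new flag is stored as a tri-state byte on SegmentInfo (0 = no vectors, 1 = vectors written, 2 = unknown, i.e. the segment predates FORMAT_HAS_VECTORS), and getHasVectors() resolves the unknown case by probing the directory for the term vectors index file. A minimal sketch of that resolution logic follows (Java; this is not the committed code -- the real method uses IndexFileNames.segmentFileName and VECTORS_INDEX_EXTENSION, while the hypothetical helper below simply inlines the ".tvx" suffix):

    import java.io.IOException;
    import org.apache.lucene.store.Directory;

    // Sketch only: mirrors the tri-state hasVectors handling this commit adds to SegmentInfo.
    class HasVectorsSketch {
      // hasVectors: 0 = no, 1 = yes, 2 = must check the filesystem (older index format).
      static boolean getHasVectors(byte hasVectors, Directory dir, String name,
                                   String docStoreSegment, int docStoreOffset) throws IOException {
        if (hasVectors == 1) {
          return true;
        } else if (hasVectors == 0) {
          return false;
        } else {
          // Old-format segment: probe for the vectors index file (.tvx) belonging to
          // either the shared doc store segment or the segment itself.
          final String storesSegment = docStoreOffset != -1 ? docStoreSegment : name;
          return dir.fileExists(storesSegment + ".tvx");
        }
      }
    }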

Modified:
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexWriter.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/MergePolicy.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfo.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentMerger.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentReader.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentWriteState.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsReader.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java
    lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosWriter.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestCodecs.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestDoc.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java
    lucene/dev/trunk/lucene/src/test/org/apache/lucene/store/MockIndexOutputWrapper.java

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/DocumentsWriter.java Mon Dec 13 19:20:32 2010
@@ -450,7 +450,7 @@ final class DocumentsWriter {
     assert docStoreSegment != null;
 
     if (infoStream != null) {
-      message("closeDocStore: files=" + openFiles + "; segment=" + docStoreSegment + "; docStoreOffset=" + docStoreOffset + "; numDocsInStore=" + numDocsInStore + "; isSeparate=" + isSeparate);
+      message("closeDocStore: openFiles=" + openFiles + "; segment=" + docStoreSegment + "; docStoreOffset=" + docStoreOffset + "; numDocsInStore=" + numDocsInStore + "; isSeparate=" + isSeparate);
     }
 
     closedFiles.clear();
@@ -720,17 +720,21 @@ final class DocumentsWriter {
                                                                 docStoreSegment, numDocsInRAM, numDocsInStore, writer.getConfig().getTermIndexInterval(),
                                                                 SegmentCodecs.build(fieldInfos, writer.codecs));

-      newSegment = new SegmentInfo(segment, numDocsInRAM, directory, false, -1, null, false, hasProx(), flushState.segmentCodecs);
+      newSegment = new SegmentInfo(segment, numDocsInRAM, directory, false, -1, null, false, hasProx(), flushState.segmentCodecs, false);
 
       if (!closeDocStore || docStoreOffset != 0) {
         newSegment.setDocStoreSegment(docStoreSegment);
         newSegment.setDocStoreOffset(docStoreOffset);
       }
+      
+      boolean hasVectors = false;
 
       if (closeDocStore) {
         closeDocStore(flushState, writer, deleter, newSegment, mergePolicy, segmentInfos);
       }
 
+      hasVectors |= flushState.hasVectors;
+
       if (numDocsInRAM > 0) {
 
         assert nextDocID == numDocsInRAM;
@@ -749,6 +753,19 @@ final class DocumentsWriter {
         final long startNumBytesUsed = bytesUsed();
         consumer.flush(threads, flushState);
 
+        hasVectors |= flushState.hasVectors;
+
+        if (hasVectors) {
+          if (infoStream != null) {
+            message("new segment has vectors");
+          }
+          newSegment.setHasVectors(true);
+        } else {
+          if (infoStream != null) {
+            message("new segment has no vectors");
+          }
+        }
+
         if (infoStream != null) {
           message("flushedFiles=" + flushState.flushedFiles);
           message("flushed codecs=" + newSegment.getSegmentCodecs());

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexWriter.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexWriter.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/IndexWriter.java Mon Dec 13 19:20:32 2010
@@ -1047,8 +1047,10 @@ public class IndexWriter implements Clos
 
       mergePolicy.close();
 
-      finishMerges(waitForMerges);
-      stopMerges = true;
+      synchronized(this) {
+        finishMerges(waitForMerges);
+        stopMerges = true;
+      }
 
       mergeScheduler.close();
 
@@ -1251,10 +1253,12 @@ public class IndexWriter implements Clos
           synchronized (this) {
             // If docWriter has some aborted files that were
             // never incref'd, then we clean them up here
+            deleter.checkpoint(segmentInfos, false);
             if (docWriter != null) {
               final Collection<String> files = docWriter.abortedFiles();
-              if (files != null)
+              if (files != null) {
                 deleter.deleteNewFiles(files);
+              }
             }
           }
         }
@@ -1879,7 +1883,14 @@ public class IndexWriter implements Clos
     }
 
     try {
-      finishMerges(false);
+      synchronized(this) {
+        finishMerges(false);
+        stopMerges = true;
+      }
+
+      if (infoStream != null ) {
+        message("rollback: done finish merges");
+      }
 
       // Must pre-close these two, in case they increment
       // changeCount so that we can then set it to false
@@ -2230,7 +2241,8 @@ public class IndexWriter implements Clos
       int docCount = merger.merge();                // merge 'em
       
       SegmentInfo info = new SegmentInfo(mergedName, docCount, directory,
-          false, -1, null, false, merger.hasProx(), merger.getSegmentCodecs());
+                                         false, -1, null, false, merger.hasProx(), merger.getSegmentCodecs(),
+                                         merger.hasVectors());
       setDiagnostics(info, "addIndexes(IndexReader...)");
 
       boolean useCompoundFile;
@@ -2541,8 +2553,16 @@ public class IndexWriter implements Clos
       return false;
     } finally {
       flushControl.clearFlushPending();
-      if (!success && infoStream != null) {
-        message("hit exception during flush");
+      if (!success) {
+        if (infoStream != null) {
+          message("hit exception during flush");
+        }
+        if (docWriter != null) {
+          final Collection<String> files = docWriter.abortedFiles();
+          if (files != null) {
+            deleter.deleteNewFiles(files);
+          }
+        }
       }
     }
   }
@@ -2928,6 +2948,7 @@ public class IndexWriter implements Clos
 
     boolean mergeDocStores = false;
     boolean doFlushDocStore = false;
+    boolean hasVectors = false;
     final String currentDocStoreSegment = docWriter.getDocStoreSegment();
 
     // Test each segment to be merged: check if we need to
@@ -2939,6 +2960,10 @@ public class IndexWriter implements Clos
       if (si.hasDeletions())
         mergeDocStores = true;
 
+      if (si.getHasVectors()) {
+        hasVectors = true;
+      }
+
       // If it has its own (private) doc stores we must
       // merge the doc stores
       if (-1 == si.getDocStoreOffset())
@@ -3014,6 +3039,7 @@ public class IndexWriter implements Clos
       updatePendingMerges(1, false);
     }
 
+    merge.hasVectors = hasVectors;
     merge.mergeDocStores = mergeDocStores;
 
     // Bind a new segment name here so even with
@@ -3024,8 +3050,8 @@ public class IndexWriter implements Clos
                                  docStoreSegment,
                                  docStoreIsCompoundFile,
                                  false,
-                                 null);
-
+                                 null,
+                                 false);
 
     Map<String,String> details = new HashMap<String,String>();
     details.put("optimize", Boolean.toString(merge.optimize));
@@ -3033,6 +3059,10 @@ public class IndexWriter implements Clos
     details.put("mergeDocStores", Boolean.toString(mergeDocStores));
     setDiagnostics(merge.info, "merge", details);
 
+    if (infoStream != null) {
+      message("merge seg=" + merge.info.name + " mergeDocStores=" + mergeDocStores);
+    }
+
     // Also enroll the merged segment into mergingSegments;
     // this prevents it from getting selected for a merge
     // after our merge is done but while we are building the
@@ -3252,6 +3282,7 @@ public class IndexWriter implements Clos
 
       // Record which codec was used to write the segment
       merge.info.setSegmentCodecs(merger.getSegmentCodecs());
+      merge.info.setHasVectors(merger.hasVectors() || merge.hasVectors);
 
       if (infoStream != null) {
         message("merge segmentCodecs=" + merger.getSegmentCodecs());
@@ -3446,7 +3477,7 @@ public class IndexWriter implements Clos
       // are called, deleter should know about every
       // file referenced by the current head
       // segmentInfos:
-      assert deleter.exists(fileName) : "IndexFileDeleter doesn't know about file " + fileName;
+      assert deleter.exists(fileName): "IndexFileDeleter doesn't know about file " + fileName;
     }
     return true;
   }

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/MergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/MergePolicy.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/MergePolicy.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/MergePolicy.java Mon Dec 13 19:20:32 2010
@@ -68,6 +68,7 @@ public abstract class MergePolicy implem
 
     SegmentInfo info;               // used by IndexWriter
     boolean mergeDocStores;         // used by IndexWriter
+    boolean hasVectors;             // used by IndexWriter
     boolean optimize;               // used by IndexWriter
     boolean registerDone;           // used by IndexWriter
     long mergeGen;                  // used by IndexWriter
@@ -156,6 +157,9 @@ public abstract class MergePolicy implem
       if (mergeDocStores) {
         b.append(" [mergeDocStores]");
       }
+      if (aborted) {
+        b.append(" [ABORTED]");
+      }
       return b.toString();
     }
     

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfo.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfo.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfo.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentInfo.java Mon Dec 13 19:20:32 2010
@@ -79,13 +79,16 @@ public final class SegmentInfo {
   private int delCount;                           // How many deleted docs in this segment
 
   private boolean hasProx;                        // True if this segment has any fields with omitTermFreqAndPositions==false
-  
+
+  private byte hasVectors;                        // 0 if no; 1 if yes; 2 if must-check-filesystem (old index)
+
   private SegmentCodecs segmentCodecs;
 
   private Map<String,String> diagnostics;
 
   public SegmentInfo(String name, int docCount, Directory dir, boolean isCompoundFile, int docStoreOffset, 
-                     String docStoreSegment, boolean docStoreIsCompoundFile, boolean hasProx, SegmentCodecs segmentCodecs) { 
+                     String docStoreSegment, boolean docStoreIsCompoundFile, boolean hasProx, SegmentCodecs segmentCodecs,
+                     boolean hasVectors) { 
     this.name = name;
     this.docCount = docCount;
     this.dir = dir;
@@ -96,6 +99,7 @@ public final class SegmentInfo {
     this.docStoreIsCompoundFile = docStoreIsCompoundFile;
     this.hasProx = hasProx;
     this.segmentCodecs = segmentCodecs;
+    this.hasVectors = (byte) (hasVectors ? 1 : 0);
     delCount = 0;
     assert docStoreOffset == -1 || docStoreSegment != null: "dso=" + docStoreOffset + " dss=" + docStoreSegment + " docCount=" + docCount;
   }
@@ -111,6 +115,7 @@ public final class SegmentInfo {
     delGen = src.delGen;
     docStoreOffset = src.docStoreOffset;
     docStoreIsCompoundFile = src.docStoreIsCompoundFile;
+    hasVectors = src.hasVectors;
     if (src.normGen == null) {
       normGen = null;
     } else {
@@ -184,6 +189,12 @@ public final class SegmentInfo {
       segmentCodecs.codecs = new Codec[] { codecs.lookup("PreFlex")};
     }
     diagnostics = input.readStringStringMap();
+    
+    if (format <= DefaultSegmentInfosWriter.FORMAT_HAS_VECTORS) {
+      hasVectors = input.readByte();
+    } else {
+      hasVectors = 2;
+    }
   }
   
   /** Returns total size in bytes of all of files used by
@@ -204,6 +215,27 @@ public final class SegmentInfo {
     return sizeInBytes;
   }
 
+  public boolean getHasVectors() throws IOException {
+    if (hasVectors == 1) {
+      return true;
+    } else if (hasVectors == 0) {
+      return false;
+    } else {
+      final String storesSegment;
+      if (getDocStoreOffset() != -1) {
+        storesSegment = getDocStoreSegment();
+      } else {
+        storesSegment = name;
+      }
+      return dir.fileExists(IndexFileNames.segmentFileName(storesSegment, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
+    }
+  }
+
+  public void setHasVectors(boolean v) {
+    hasVectors = (byte) (v ? 1 : 0);
+    clearFiles();
+  }
+
   public boolean hasDeletions() {
     // Cases:
     //
@@ -229,18 +261,14 @@ public final class SegmentInfo {
 
   @Override
   public Object clone() {
-    SegmentInfo si = new SegmentInfo(name, docCount, dir, isCompoundFile, docStoreOffset, docStoreSegment, docStoreIsCompoundFile, hasProx, segmentCodecs);
-    si.isCompoundFile = isCompoundFile;
+    SegmentInfo si = new SegmentInfo(name, docCount, dir, isCompoundFile, docStoreOffset, docStoreSegment, docStoreIsCompoundFile, hasProx, segmentCodecs, false);
     si.delGen = delGen;
     si.delCount = delCount;
-    si.hasProx = hasProx;
     si.diagnostics = new HashMap<String, String>(diagnostics);
     if (normGen != null) {
       si.normGen = normGen.clone();
     }
-    si.docStoreOffset = docStoreOffset;
-    si.docStoreSegment = docStoreSegment;
-    si.docStoreIsCompoundFile = docStoreIsCompoundFile;
+    si.hasVectors = hasVectors;
     return si;
   }
 
@@ -404,6 +432,7 @@ public final class SegmentInfo {
     output.writeByte((byte) (hasProx ? 1:0));
     segmentCodecs.write(output);
     output.writeStringStringMap(diagnostics);
+    output.writeByte(hasVectors);
   }
 
   void setHasProx(boolean hasProx) {
@@ -466,12 +495,30 @@ public final class SegmentInfo {
       if (docStoreIsCompoundFile) {
         fileSet.add(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.COMPOUND_FILE_STORE_EXTENSION));
       } else {
-        for (String ext : IndexFileNames.STORE_INDEX_EXTENSIONS)
-          addIfExists(fileSet, IndexFileNames.segmentFileName(docStoreSegment, "", ext));
+        fileSet.add(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.FIELDS_INDEX_EXTENSION));
+        fileSet.add(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.FIELDS_EXTENSION));
+        if (hasVectors == 1) {
+          fileSet.add(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
+          fileSet.add(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+          fileSet.add(IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
+        } else if (hasVectors == 2) {
+          addIfExists(fileSet, IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
+          addIfExists(fileSet, IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+          addIfExists(fileSet, IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
+        }      
       }
     } else if (!useCompoundFile) {
-      for (String ext : IndexFileNames.STORE_INDEX_EXTENSIONS)
-        addIfExists(fileSet, IndexFileNames.segmentFileName(name, "", ext));
+      fileSet.add(IndexFileNames.segmentFileName(name, "", IndexFileNames.FIELDS_INDEX_EXTENSION));
+      fileSet.add(IndexFileNames.segmentFileName(name, "", IndexFileNames.FIELDS_EXTENSION));
+      if (hasVectors == 1) {
+        fileSet.add(IndexFileNames.segmentFileName(name, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
+        fileSet.add(IndexFileNames.segmentFileName(name, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+        fileSet.add(IndexFileNames.segmentFileName(name, "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
+      } else if (hasVectors == 2) {
+        addIfExists(fileSet, IndexFileNames.segmentFileName(name, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
+        addIfExists(fileSet, IndexFileNames.segmentFileName(name, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+        addIfExists(fileSet, IndexFileNames.segmentFileName(name, "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
+      }      
     }
 
     String delFileName = IndexFileNames.fileNameFromGeneration(name, IndexFileNames.DELETES_EXTENSION, delGen);

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentMerger.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentMerger.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentMerger.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentMerger.java Mon Dec 13 19:20:32 2010
@@ -75,6 +75,7 @@ final class SegmentMerger {
   private final CodecProvider codecs;
   private Codec codec;
   private SegmentWriteState segmentWriteState;
+  private boolean hasVectors;
 
   private PayloadProcessorProvider payloadProcessorProvider;
   
@@ -100,6 +101,10 @@ final class SegmentMerger {
     return fieldInfos.hasProx();
   }
 
+  boolean hasVectors() {
+    return hasVectors;
+  }
+
   /**
    * Add an IndexReader to the collection of readers that are to be merged
    * @param reader
@@ -427,7 +432,7 @@ final class SegmentMerger {
   private final void mergeVectors() throws IOException {
     TermVectorsWriter termVectorsWriter = 
       new TermVectorsWriter(directory, segment, fieldInfos);
-
+    hasVectors = true;
     try {
       int idx = 0;
       for (final IndexReader reader : readers) {

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentReader.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentReader.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentReader.java Mon Dec 13 19:20:32 2010
@@ -243,7 +243,7 @@ public class SegmentReader extends Index
           throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + fieldsReaderOrig.size() + " but segmentInfo shows " + si.docCount);
         }
 
-        if (fieldInfos.hasVectors()) { // open term vector files only as needed
+        if (si.getHasVectors()) { // open term vector files only as needed
           termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount);
         }
       }

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentWriteState.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentWriteState.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentWriteState.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/SegmentWriteState.java Mon Dec 13 19:20:32 2010
@@ -34,6 +34,7 @@ public class SegmentWriteState {
   public final String docStoreSegmentName;
   public final int numDocs;
   public int numDocsInStore;
+  public boolean hasVectors;
   public final Collection<String> flushedFiles;
 
   final SegmentCodecs segmentCodecs;

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsReader.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsReader.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsReader.java Mon Dec 13 19:20:32 2010
@@ -77,38 +77,30 @@ class TermVectorsReader implements Clone
 
     try {
       String idxName = IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_INDEX_EXTENSION);
-      if (d.fileExists(idxName)) {
-        tvx = d.openInput(idxName, readBufferSize);
-        format = checkValidFormat(tvx, idxName);
-        String fn = IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
-        tvd = d.openInput(fn, readBufferSize);
-        final int tvdFormat = checkValidFormat(tvd, fn);
-        fn = IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION);
-        tvf = d.openInput(fn, readBufferSize);
-        final int tvfFormat = checkValidFormat(tvf, fn);
-
-        assert format == tvdFormat;
-        assert format == tvfFormat;
-
-        numTotalDocs = (int) (tvx.length() >> 4);
-
-        if (-1 == docStoreOffset) {
-          this.docStoreOffset = 0;
-          this.size = numTotalDocs;
-          assert size == 0 || numTotalDocs == size;
-        } else {
-          this.docStoreOffset = docStoreOffset;
-          this.size = size;
-          // Verify the file is long enough to hold all of our
-          // docs
-        assert numTotalDocs >= size + docStoreOffset: "numTotalDocs=" + numTotalDocs + " size=" + size + " docStoreOffset=" + docStoreOffset;
-        }
+      tvx = d.openInput(idxName, readBufferSize);
+      format = checkValidFormat(tvx, idxName);
+      String fn = IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
+      tvd = d.openInput(fn, readBufferSize);
+      final int tvdFormat = checkValidFormat(tvd, fn);
+      fn = IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION);
+      tvf = d.openInput(fn, readBufferSize);
+      final int tvfFormat = checkValidFormat(tvf, fn);
+
+      assert format == tvdFormat;
+      assert format == tvfFormat;
+
+      numTotalDocs = (int) (tvx.length() >> 4);
+
+      if (-1 == docStoreOffset) {
+        this.docStoreOffset = 0;
+        this.size = numTotalDocs;
+        assert size == 0 || numTotalDocs == size;
       } else {
-        // If all documents flushed in a segment had hit
-        // non-aborting exceptions, it's possible that
-        // FieldInfos.hasVectors returns true yet the term
-        // vector files don't exist.
-        format = 0;
+        this.docStoreOffset = docStoreOffset;
+        this.size = size;
+        // Verify the file is long enough to hold all of our
+        // docs
+        assert numTotalDocs >= size + docStoreOffset: "numTotalDocs=" + numTotalDocs + " size=" + size + " docStoreOffset=" + docStoreOffset;
       }
 
       this.fieldInfos = fieldInfos;

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java Mon Dec 13 19:20:32 2010
@@ -37,6 +37,7 @@ final class TermVectorsTermsWriter exten
   IndexOutput tvd;
   IndexOutput tvf;
   int lastDocID;
+  boolean hasVectors;
 
   public TermVectorsTermsWriter(DocumentsWriter docWriter) {
     this.docWriter = docWriter;
@@ -57,6 +58,7 @@ final class TermVectorsTermsWriter exten
     // because, although FieldInfos.hasVectors() will return
     // true, the TermVectorsReader gracefully handles
     // non-existence of the term vectors files.
+    state.hasVectors = hasVectors;
 
     if (tvx != null) {
 
@@ -108,6 +110,8 @@ final class TermVectorsTermsWriter exten
       docWriter.removeOpenFile(docName);
 
       lastDocID = 0;
+      state.hasVectors = hasVectors;
+      hasVectors = false;
     }    
   }
 
@@ -146,7 +150,7 @@ final class TermVectorsTermsWriter exten
 
   synchronized void initTermVectorsWriter() throws IOException {        
     if (tvx == null) {
-      
+
       final String docStoreSegment = docWriter.getDocStoreSegment();
 
       if (docStoreSegment == null)
@@ -159,6 +163,7 @@ final class TermVectorsTermsWriter exten
       String idxName = IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_INDEX_EXTENSION);
       String docName = IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
       String fldName = IndexFileNames.segmentFileName(docStoreSegment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION);
+      hasVectors = true;
       tvx = docWriter.directory.createOutput(idxName);
       tvd = docWriter.directory.createOutput(docName);
       tvf = docWriter.directory.createOutput(fldName);
@@ -218,6 +223,7 @@ final class TermVectorsTermsWriter exten
 
   @Override
   public void abort() {
+    hasVectors = false;
     if (tvx != null) {
       try {
         tvx.close();

Modified: lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosWriter.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosWriter.java (original)
+++ lucene/dev/trunk/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosWriter.java Mon Dec 13 19:20:32 2010
@@ -39,9 +39,12 @@ public class DefaultSegmentInfosWriter e
    *  in the new flex format */
   public static final int FORMAT_4_0 = -10;
 
+  /** Each segment records whether it has term vectors */
+  public static final int FORMAT_HAS_VECTORS = -11;
+
   /** This must always point to the most recent file format.
    * whenever you add a new format, make it 1 smaller (negative version logic)! */
-  public static final int FORMAT_CURRENT = FORMAT_4_0;
+  public static final int FORMAT_CURRENT = FORMAT_HAS_VECTORS;
   
   /** This must always point to the first supported file format. */
   public static final int FORMAT_MINIMUM = FORMAT_DIAGNOSTICS;

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java Mon Dec 13 19:20:32 2010
@@ -39,7 +39,6 @@ import org.apache.lucene.store.Directory
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 
 public class TestAddIndexes extends LuceneTestCase {
@@ -642,9 +641,11 @@ public class TestAddIndexes extends Luce
         addDoc(writer);
       writer.close();
 
-      dir2 = new MockDirectoryWrapper(random, new RAMDirectory());
+      dir2 = newDirectory();
       writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+      writer2.setInfoStream(VERBOSE ? System.out : null);
       writer2.commit();
+      
 
       readers = new IndexReader[NUM_COPY];
       for(int i=0;i<NUM_COPY;i++)
@@ -914,14 +915,17 @@ public class TestAddIndexes extends Luce
 
   // LUCENE-1335: test simultaneous addIndexes & close
   public void testAddIndexesWithRollback() throws Throwable {
-    
+
     final int NUM_COPY = 50;
     CommitAndAddIndexes3 c = new CommitAndAddIndexes3(NUM_COPY);
     c.launchThreads(-1);
 
-    Thread.sleep(500);
+    Thread.sleep(_TestUtil.nextInt(random, 100, 500));
 
     // Close w/o first stopping/joining the threads
+    if (VERBOSE) {
+      System.out.println("TEST: now force rollback");
+    }
     c.didClose = true;
     c.writer2.rollback();
 

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestCodecs.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestCodecs.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestCodecs.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestCodecs.java Mon Dec 13 19:20:32 2010
@@ -270,7 +270,7 @@ public class TestCodecs extends LuceneTe
 
     final Directory dir = newDirectory();
     this.write(fieldInfos, dir, fields);
-    final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, -1, SEGMENT, false, true, SegmentCodecs.build(fieldInfos, CodecProvider.getDefault()));
+    final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, -1, SEGMENT, false, true, SegmentCodecs.build(fieldInfos, CodecProvider.getDefault()), fieldInfos.hasVectors());
     si.setHasProx(false);
 
     final FieldsProducer reader = si.getSegmentCodecs().codec().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, 64, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR));
@@ -318,7 +318,7 @@ public class TestCodecs extends LuceneTe
     final Directory dir = newDirectory();
 
     this.write(fieldInfos, dir, fields);
-    final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, -1, SEGMENT, false, true, SegmentCodecs.build(fieldInfos, CodecProvider.getDefault()));
+    final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, -1, SEGMENT, false, true, SegmentCodecs.build(fieldInfos, CodecProvider.getDefault()), fieldInfos.hasVectors());
 
     final FieldsProducer terms = si.getSegmentCodecs().codec().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, 1024, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR));
 

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestDoc.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestDoc.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestDoc.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestDoc.java Mon Dec 13 19:20:32 2010
@@ -201,7 +201,8 @@ public class TestDoc extends LuceneTestC
       r2.close();
       
       final SegmentInfo info = new SegmentInfo(merged, si1.docCount + si2.docCount, si1.dir,
-                                               useCompoundFile, -1, null, false, merger.hasProx(), merger.getSegmentCodecs());
+                                               useCompoundFile, -1, null, false, merger.hasProx(), merger.getSegmentCodecs(),
+                                               merger.hasVectors());
       
       if (useCompoundFile) {
         Collection<String> filesToDelete = merger.createCompoundFile(merged + ".cfs", info);

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java Mon Dec 13 19:20:32 2010
@@ -881,10 +881,14 @@ public class TestIndexWriter extends Luc
     public void testEmptyDocAfterFlushingRealDoc() throws IOException {
       Directory dir = newDirectory();
       IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      writer.setInfoStream(VERBOSE ? System.out : null);
       Document doc = new Document();
       doc.add(newField("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
       writer.addDocument(doc);
       writer.commit();
+      if (VERBOSE) {
+        System.out.println("\nTEST: now add empty doc");
+      }
       writer.addDocument(new Document());
       writer.close();
       _TestUtil.checkIndex(dir);
@@ -1027,7 +1031,11 @@ public class TestIndexWriter extends Luc
     Directory dir = newDirectory();
     int delID = 0;
     for(int i=0;i<20;i++) {
+      if (VERBOSE) {
+        System.out.println("TEST: iter=" + i);
+      }
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+      writer.setInfoStream(VERBOSE ? System.out : null);
       LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
       lmp.setMergeFactor(2);
       lmp.setUseCompoundFile(false);

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java Mon Dec 13 19:20:32 2010
@@ -51,7 +51,7 @@ public class TestIndexWriterOnDiskFull e
         System.out.println("TEST: pass=" + pass);
       }
       boolean doAbort = pass == 1;
-      long diskFree = 200;
+      long diskFree = _TestUtil.nextInt(random, 100, 300);
       while(true) {
         if (VERBOSE) {
           System.out.println("TEST: cycle: diskFree=" + diskFree);
@@ -120,7 +120,7 @@ public class TestIndexWriterOnDiskFull e
           dir.close();
           // Now try again w/ more space:
 
-          diskFree += 500;
+          diskFree += _TestUtil.nextInt(random, 400, 600);
         } else {
           //_TestUtil.syncConcurrentMerges(writer);
           dir.setMaxSizeInBytes(0);

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java Mon Dec 13 19:20:32 2010
@@ -80,7 +80,8 @@ public class TestSegmentMerger extends L
     assertTrue(docsMerged == 2);
     //Should be able to open a new SegmentReader against the new directory
     SegmentReader mergedReader = SegmentReader.get(false, mergedDir, new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, -1,
-        null, false, merger.hasProx(), merger.getSegmentCodecs()), BufferedIndexInput.BUFFER_SIZE, true, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+                                                                                     null, false, merger.hasProx(), merger.getSegmentCodecs(), merger.hasVectors()),
+                                                   BufferedIndexInput.BUFFER_SIZE, true, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
 
     assertTrue(mergedReader != null);
     assertTrue(mergedReader.numDocs() == 2);

Modified: lucene/dev/trunk/lucene/src/test/org/apache/lucene/store/MockIndexOutputWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/src/test/org/apache/lucene/store/MockIndexOutputWrapper.java?rev=1045322&r1=1045321&r2=1045322&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/src/test/org/apache/lucene/store/MockIndexOutputWrapper.java (original)
+++ lucene/dev/trunk/lucene/src/test/org/apache/lucene/store/MockIndexOutputWrapper.java Mon Dec 13 19:20:32 2010
@@ -106,6 +106,7 @@ public class MockIndexOutputWrapper exte
       message += ")";
       if (LuceneTestCase.VERBOSE) {
         System.out.println(Thread.currentThread().getName() + ": MDW: now throw fake disk full");
+        new Throwable().printStackTrace(System.out);
       }
       throw new IOException(message);
     } else {


