lucene-java-commits mailing list archives

From mikemcc...@apache.org
Subject svn commit: r824918 [10/11] - in /lucene/java/branches/flex_1458: contrib/analyzers/common/src/java/org/apache/lucene/analysis/query/ contrib/benchmark/src/java/org/apache/lucene/benchmark/quality/utils/ contrib/benchmark/src/test/org/apache/lucene/ben...
Date Tue, 13 Oct 2009 20:44:59 GMT
Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/search/spans/TermSpans.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/search/spans/TermSpans.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/search/spans/TermSpans.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/search/spans/TermSpans.java Tue Oct 13 20:44:51 2009
@@ -17,7 +17,8 @@
 
 
 import org.apache.lucene.index.Term;
-import org.apache.lucene.index.TermPositions;
+import org.apache.lucene.index.DocsEnum;
+import org.apache.lucene.index.PositionsEnum;
 
 import java.io.IOException;
 import java.util.Collections;
@@ -28,47 +29,46 @@
  * Public for extension only
  */
 public class TermSpans extends Spans {
-  protected TermPositions positions;
-  protected Term term;
+  protected final DocsEnum docs;
+  protected PositionsEnum positions;
+  protected final Term term;
   protected int doc;
   protected int freq;
   protected int count;
   protected int position;
 
-
-  public TermSpans(TermPositions positions, Term term) throws IOException {
-
-    this.positions = positions;
+  public TermSpans(DocsEnum docs, Term term) throws IOException {
+    this.docs = docs;
     this.term = term;
     doc = -1;
   }
 
   public boolean next() throws IOException {
     if (count == freq) {
-      if (!positions.next()) {
-        doc = Integer.MAX_VALUE;
+      doc = docs.next();
+      if (doc == DocsEnum.NO_MORE_DOCS) {
         return false;
       }
-      doc = positions.doc();
-      freq = positions.freq();
+      freq = docs.freq();
+      positions = docs.positions();
       count = 0;
     }
-    position = positions.nextPosition();
+    position = positions.next();
     count++;
     return true;
   }
 
   public boolean skipTo(int target) throws IOException {
-    if (!positions.skipTo(target)) {
-      doc = Integer.MAX_VALUE;
+    doc = docs.advance(target);
+    if (doc == DocsEnum.NO_MORE_DOCS) {
       return false;
     }
 
-    doc = positions.doc();
-    freq = positions.freq();
+    freq = docs.freq();
     count = 0;
+    positions = docs.positions();
 
-    position = positions.nextPosition();
+    position = positions.next();
     count++;
 
     return true;
@@ -95,7 +95,7 @@
 
   // TODO: Remove warning after API has been finalized
  public boolean isPayloadAvailable() {
-    return positions.isPayloadAvailable();
+    return positions.hasPayload();
   }
 
   public String toString() {
@@ -103,8 +103,7 @@
             (doc == -1 ? "START" : (doc == Integer.MAX_VALUE) ? "END" : doc + "-" + position);
   }
 
-
-  public TermPositions getPositions() {
+  public PositionsEnum getPositions() {
     return positions;
   }
 }
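
For readers following the API change above, a minimal sketch of the iteration pattern TermSpans now uses, assuming a DocsEnum obtained from the flex branch API; the calls mirror those in the patch (next()/advance() return the doc id or DocsEnum.NO_MORE_DOCS, positions() returns a per-document PositionsEnum) and are not a finalized public API:

    import java.io.IOException;
    import org.apache.lucene.index.DocsEnum;
    import org.apache.lucene.index.PositionsEnum;

    class DocsEnumWalker {
      // Walks every document and position of a DocsEnum.
      static void walk(DocsEnum docs) throws IOException {
        int doc;
        while ((doc = docs.next()) != DocsEnum.NO_MORE_DOCS) {
          final int freq = docs.freq();                // term freq within this doc
          final PositionsEnum positions = docs.positions();
          for (int i = 0; i < freq; i++) {
            final int position = positions.next();     // next position in the doc
            if (positions.hasPayload()) {
              // payload present; TermSpans exposes this via isPayloadAvailable()
            }
          }
        }
      }
    }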

Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/store/Directory.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/store/Directory.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/store/Directory.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/store/Directory.java Tue Oct 13 20:44:51 2009
@@ -19,8 +19,6 @@
 
 import java.io.IOException;
 
-import org.apache.lucene.index.IndexFileNameFilter;
-
 /** A Directory is a flat list of files.  Files may be written once, when they
  * are created.  Once a file is created it may only be opened for read, or
  * deleted.  Random access is permitted both when reading and writing.
@@ -158,6 +156,9 @@
       return this.toString();
   }
 
+  // nocommit -- note runtime change that all files are
+  // copied
+
   /**
    * Copy contents of a directory src to a directory dest.
    * If a file in src already exists in dest then the
@@ -168,9 +169,8 @@
    * are undefined and you could easily hit a
    * FileNotFoundException.
    *
-   * <p><b>NOTE:</b> this method only copies files that look
-   * like index files (ie, have extensions matching the
-   * known extensions of index files).
+   * <p><b>NOTE:</b> this method copies all files, not only
+   * files that look like index files
    *
    * @param src source directory
    * @param dest destination directory
@@ -180,14 +180,9 @@
   public static void copy(Directory src, Directory dest, boolean closeDirSrc) throws IOException {
     final String[] files = src.listAll();
 
-    IndexFileNameFilter filter = IndexFileNameFilter.getFilter();
-
     byte[] buf = new byte[BufferedIndexOutput.BUFFER_SIZE];
     for (int i = 0; i < files.length; i++) {
 
-      if (!filter.accept(null, files[i]))
-        continue;
-
       IndexOutput os = null;
       IndexInput is = null;
       try {

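A hedged usage sketch of the runtime change noted above: after this patch Directory.copy copies every file in src, including files that do not look like index files (directory locations below are placeholders):

    import java.io.File;
    import java.io.IOException;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;
    import org.apache.lucene.store.RAMDirectory;

    class CopyAllFiles {
      public static void main(String[] args) throws IOException {
        Directory src = FSDirectory.open(new File(args[0]));
        Directory dest = new RAMDirectory();
        // Third argument closes src when the copy finishes; with the filter
        // removed, a stray non-index file in src is now copied too.
        Directory.copy(src, dest, true);
      }
    }
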
Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/store/FileSwitchDirectory.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/store/FileSwitchDirectory.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/store/FileSwitchDirectory.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/store/FileSwitchDirectory.java Tue Oct 13 20:44:51 2009
@@ -18,8 +18,6 @@
  */
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
 import java.util.Set;
 
 /**

Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/store/RAMDirectory.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/store/RAMDirectory.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/store/RAMDirectory.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/store/RAMDirectory.java Tue Oct 13 20:44:51 2009
@@ -19,7 +19,6 @@
 
 import java.io.IOException;
 import java.io.FileNotFoundException;
-import java.io.File;
 import java.io.Serializable;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -193,7 +192,8 @@
       file = (RAMFile)fileMap.get(name);
     }
     if (file == null)
-      throw new FileNotFoundException(name);
+      // nocommit
+      throw new FileNotFoundException(name + " dir=" + this);
     return new RAMInputStream(file);
   }
 

Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/ArrayUtil.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/ArrayUtil.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/ArrayUtil.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/ArrayUtil.java Tue Oct 13 20:44:51 2009
@@ -204,6 +204,29 @@
     return grow(array, 1 + array.length);
   }
 
+  public static char[] shrink(char[] array, int targetSize) {
+    final int newSize = getShrinkSize(array.length, targetSize);
+    if (newSize != array.length) {
+      char[] newArray = new char[newSize];
+      System.arraycopy(array, 0, newArray, 0, newSize);
+      return newArray;
+    } else
+      return array;
+  }
+
+  public static char[] grow(char[] array, int minSize) {
+    if (array.length < minSize) {
+      char[] newArray = new char[getNextSize(minSize)];
+      System.arraycopy(array, 0, newArray, 0, array.length);
+      return newArray;
+    } else
+      return array;
+  }
+
+  public static char[] grow(char[] array) {
+    return grow(array, 1 + array.length);
+  }
+
   public static byte[] shrink(byte[] array, int targetSize) {
     final int newSize = getShrinkSize(array.length, targetSize);
     if (newSize != array.length) {

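The new char[] overloads behave like the existing byte[] ones: grow reallocates only when the requested minimum exceeds the current length, and shrink only when getShrinkSize picks a smaller size. A small hedged usage sketch (the buffer handling is illustrative):

    import org.apache.lucene.util.ArrayUtil;

    class CharBufferAppend {
      // Appends 'extra' to 'buffer' at offset 'used', growing as needed,
      // and returns the (possibly reallocated) buffer.
      static char[] append(char[] buffer, int used, char[] extra) {
        buffer = ArrayUtil.grow(buffer, used + extra.length);
        System.arraycopy(extra, 0, buffer, used, extra.length);
        return buffer;
      }
    }
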
Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/AttributeSource.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/AttributeSource.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/AttributeSource.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/AttributeSource.java Tue Oct 13 20:44:51 2009
@@ -406,7 +406,7 @@
     } else
       return false;
   }
-  
+
   public String toString() {
     StringBuilder sb = new StringBuilder().append('(');
     if (hasAttributes()) {

Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/BitVector.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/BitVector.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/BitVector.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/BitVector.java Tue Oct 13 20:44:51 2009
@@ -32,7 +32,7 @@
   <li>store and load, as bit set or d-gaps, depending on sparseness;</li> 
   </ul>
   */
-public final class BitVector implements Cloneable {
+public final class BitVector implements Cloneable, Bits {
 
   private byte[] bits;
   private int size;

Added: lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/Bits.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/Bits.java?rev=824918&view=auto
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/Bits.java (added)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/Bits.java Tue Oct 13 20:44:51 2009
@@ -0,0 +1,22 @@
+package org.apache.lucene.util;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+public interface Bits {
+  public boolean get(int index);
+}

Propchange: lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/Bits.java
------------------------------------------------------------------------------
    svn:eol-style = native
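
Bits is a minimal random-access bit abstraction; BitVector (above) now implements it, so existing bit sets can be passed around behind the interface. A short hedged sketch:

    import org.apache.lucene.util.BitVector;
    import org.apache.lucene.util.Bits;

    class BitsExample {
      public static void main(String[] args) {
        BitVector vector = new BitVector(64);
        vector.set(10);
        Bits bits = vector;                  // BitVector implements Bits as of this commit
        System.out.println(bits.get(10));    // true
        System.out.println(bits.get(11));    // false
      }
    }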

Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/NumericUtils.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/NumericUtils.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/NumericUtils.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/NumericUtils.java Tue Oct 13 20:44:51 2009
@@ -21,6 +21,7 @@
 import org.apache.lucene.document.NumericField; // for javadocs
 import org.apache.lucene.search.NumericRangeQuery; // for javadocs
 import org.apache.lucene.search.NumericRangeFilter; // for javadocs
+import org.apache.lucene.index.TermRef;
 
 /**
  * This is a helper class to generate prefix-encoded representations for numerical values
@@ -219,6 +220,26 @@
     return (sortableBits << shift) ^ 0x8000000000000000L;
   }
 
+  public static long prefixCodedToLong(final TermRef term) {
+    final int shift = term.bytes[term.offset]-SHIFT_START_LONG;
+    if (shift>63 || shift<0)
+      throw new NumberFormatException("Invalid shift value in prefixCoded string (is encoded value really an INT?)");
+    long sortableBits = 0L;
+    final int limit = term.offset + term.length;
+    for (int i=term.offset+1; i<limit; i++) {
+      sortableBits <<= 7;
+      final byte ch = term.bytes[i];
+      if (ch < 0) {
+        throw new NumberFormatException(
+          "Invalid prefixCoded numerical value representation (char "+
+          Integer.toHexString((int)(ch&0xff))+" at position "+(i-term.offset)+" is invalid)"
+        );
+      }
+      sortableBits |= (long) ch;
+    }
+    return (sortableBits << shift) ^ 0x8000000000000000L;
+  }
+
   /**
    * Returns an int from prefixCoded characters.
    * Rightmost bits will be zero for lower precision codes.
@@ -246,6 +267,26 @@
     return (sortableBits << shift) ^ 0x80000000;
   }
 
+  public static int prefixCodedToInt(final TermRef term) {
+    final int shift = term.bytes[term.offset]-SHIFT_START_INT;
+    if (shift>31 || shift<0)
+      throw new NumberFormatException("Invalid shift value in prefixCoded string (is encoded value really an INT?)");
+    int sortableBits = 0;
+    final int limit = term.offset + term.length;
+    for (int i=term.offset+1; i<limit; i++) {
+      sortableBits <<= 7;
+      final byte ch = term.bytes[i];
+      if (ch < 0) {
+        throw new NumberFormatException(
+          "Invalid prefixCoded numerical value representation (char "+
+          Integer.toHexString((int)(ch&0xff))+" at position "+(i-term.offset)+" is invalid)"
+        );
+      }
+      sortableBits |= (int) ch;
+    }
+    return (sortableBits << shift) ^ 0x80000000;
+  }
+
   /**
    * Converts a <code>double</code> value to a sortable signed <code>long</code>.
    * The value is converted by getting their IEEE 754 floating-point &quot;double format&quot;

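A hedged round-trip sketch for the new TermRef overloads, assuming the existing String-based encoder NumericUtils.longToPrefixCoded and the TermRef(String) constructor used elsewhere in this commit (see TestCodecs below); the decode walks the 7-bit groups exactly as the added code does:

    import org.apache.lucene.index.TermRef;
    import org.apache.lucene.util.NumericUtils;

    class PrefixCodedRoundTrip {
      public static void main(String[] args) {
        long value = 1234L;
        String encoded = NumericUtils.longToPrefixCoded(value);
        long decoded = NumericUtils.prefixCodedToLong(new TermRef(encoded));
        System.out.println(decoded == value);    // expected: true
      }
    }
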
Modified: lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/UnicodeUtil.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/UnicodeUtil.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/UnicodeUtil.java (original)
+++ lucene/java/branches/flex_1458/src/java/org/apache/lucene/util/UnicodeUtil.java Tue Oct 13 20:44:51 2009
@@ -73,14 +73,16 @@
   private static final long HALF_MASK = 0x3FFL;
 
   public static final class UTF8Result {
-    public byte[] result = new byte[10];
+    public byte[] result;
     public int length;
 
+    public UTF8Result() {
+      result = new byte[10];
+    }
+
     public void setLength(int newLength) {
       if (result.length < newLength) {
-        byte[] newArray = new byte[(int) (1.5*newLength)];
-        System.arraycopy(result, 0, newArray, 0, length);
-        result = newArray;
+        result = ArrayUtil.grow(result, newLength);
       }
       length = newLength;
     }
@@ -91,12 +93,15 @@
     public int[] offsets = new int[10];
     public int length;
 
+    /*
+    public String toString() {
+      return new String(result, 0, length);
+    }
+    */
+
     public void setLength(int newLength) {
-      if (result.length < newLength) {
-        char[] newArray = new char[(int) (1.5*newLength)];
-        System.arraycopy(result, 0, newArray, 0, length);
-        result = newArray;
-      }
+      if (result.length < newLength)
+        result = ArrayUtil.grow(result, newLength);
       length = newLength;
     }
 
@@ -104,6 +109,13 @@
       setLength(other.length);
       System.arraycopy(other.result, 0, result, 0, length);
     }
+
+    public void copyText(String other) {
+      final int otherLength = other.length();
+      setLength(otherLength);
+      other.getChars(0, otherLength, result, 0);
+      length = otherLength;
+    }
   }
 
   /** Encode characters from a char[] source, starting at

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/TestSearchForDuplicates.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/TestSearchForDuplicates.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/TestSearchForDuplicates.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/TestSearchForDuplicates.java Tue Oct 13 20:44:51 2009
@@ -89,6 +89,9 @@
       for (int j = 0; j < MAX_DOCS; j++) {
         Document d = new Document();
         d.add(new Field(PRIORITY_FIELD, HIGH_PRIORITY, Field.Store.YES, Field.Index.ANALYZED));
+
+        // NOTE: this ID_FIELD produces no tokens since
+        // SimpleAnalyzer discards numbers
         d.add(new Field(ID_FIELD, Integer.toString(j), Field.Store.YES, Field.Index.ANALYZED));
         writer.addDocument(d);
       }

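The NOTE added above can be checked in isolation: SimpleAnalyzer tokenizes on letters only, so a purely numeric value yields no tokens. A small hedged sketch (the field name is arbitrary):

    import java.io.StringReader;
    import org.apache.lucene.analysis.SimpleAnalyzer;
    import org.apache.lucene.analysis.TokenStream;

    class NumericIdTokens {
      public static void main(String[] args) throws Exception {
        TokenStream ts = new SimpleAnalyzer().tokenStream("id", new StringReader("42"));
        System.out.println(ts.incrementToken());   // false: the digits are discarded
      }
    }
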
Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java Tue Oct 13 20:44:51 2009
@@ -27,6 +27,7 @@
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.MockRAMDirectory;
+import org.apache.lucene.util._TestUtil;
 
 import org.apache.lucene.search.PhraseQuery;
 
@@ -45,6 +46,7 @@
     addDocs(writer, 100);
     assertEquals(100, writer.docCount());
     writer.close();
+    _TestUtil.checkIndex(dir);
 
     writer = newWriter(aux, true);
     writer.setUseCompoundFile(false); // use one without a compound file
@@ -65,6 +67,7 @@
     writer.addIndexesNoOptimize(new Directory[] { aux, aux2 });
     assertEquals(190, writer.docCount());
     writer.close();
+    _TestUtil.checkIndex(dir);
 
     // make sure the old index is correct
     verifyNumDocs(aux, 40);
@@ -125,12 +128,13 @@
 
   public void testWithPendingDeletes() throws IOException {
     // main directory
-    Directory dir = new RAMDirectory();
+    Directory dir = new MockRAMDirectory();
     // auxiliary directory
-    Directory aux = new RAMDirectory();
+    Directory aux = new MockRAMDirectory();
 
     setUpDirs(dir, aux);
     IndexWriter writer = newWriter(dir, false);
+
     writer.addIndexesNoOptimize(new Directory[] {aux});
 
     // Adds 10 docs, then replaces them with another 10

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestAtomicUpdate.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestAtomicUpdate.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestAtomicUpdate.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestAtomicUpdate.java Tue Oct 13 20:44:51 2009
@@ -16,16 +16,20 @@
  * limitations under the License.
  */
 
-import org.apache.lucene.util.*;
-import org.apache.lucene.store.*;
-import org.apache.lucene.document.*;
-import org.apache.lucene.analysis.*;
-import org.apache.lucene.search.*;
-import org.apache.lucene.queryParser.*;
-
-import java.util.Random;
 import java.io.File;
 import java.io.IOException;
+import java.util.Random;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.MockRAMDirectory;
+import org.apache.lucene.util.English;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
 
 public class TestAtomicUpdate extends LuceneTestCase {
   private static final Analyzer ANALYZER = new SimpleAnalyzer();
@@ -126,8 +130,8 @@
     TimedThread[] threads = new TimedThread[4];
 
     IndexWriter writer = new MockIndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
-    writer.setMaxBufferedDocs(7);
-    writer.setMergeFactor(3);
+    writer.setMaxBufferedDocs(4);
+    writer.setMergeFactor(2);
 
     // Establish a base index of 100 docs:
     for(int i=0;i<100;i++) {
@@ -145,33 +149,34 @@
     assertEquals(100, r.numDocs());
     r.close();
 
+    int upto = 0;
+
     IndexerThread indexerThread = new IndexerThread(writer, threads);
-    threads[0] = indexerThread;
+    threads[upto++] = indexerThread;
     indexerThread.start();
     
-    IndexerThread indexerThread2 = new IndexerThread(writer, threads);
-    threads[1] = indexerThread2;
-    indexerThread2.start();
+    //IndexerThread indexerThread2 = new IndexerThread(writer, threads);
+    //threads[upto++] = indexerThread2;
+    //indexerThread2.start();
       
     SearcherThread searcherThread1 = new SearcherThread(directory, threads);
-    threads[2] = searcherThread1;
+    threads[upto++] = searcherThread1;
     searcherThread1.start();
 
-    SearcherThread searcherThread2 = new SearcherThread(directory, threads);
-    threads[3] = searcherThread2;
-    searcherThread2.start();
-
-    indexerThread.join();
-    indexerThread2.join();
-    searcherThread1.join();
-    searcherThread2.join();
+    //SearcherThread searcherThread2 = new SearcherThread(directory, threads);
+    //threads[upto++] = searcherThread2;
+    //searcherThread2.start();
+
+    for(int i=0;i<upto;i++) {
+      threads[i].join();
+    }
 
     writer.close();
 
-    assertTrue("hit unexpected exception in indexer", !indexerThread.failed);
-    assertTrue("hit unexpected exception in indexer2", !indexerThread2.failed);
-    assertTrue("hit unexpected exception in search1", !searcherThread1.failed);
-    assertTrue("hit unexpected exception in search2", !searcherThread2.failed);
+    for(int i=0;i<upto;i++) {
+      assertTrue("hit unexpected exception in thread " + i, !threads[i].failed);
+    }
+
     //System.out.println("    Writer: " + indexerThread.count + " iterations");
     //System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
     //System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");

Added: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestCodecs.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestCodecs.java?rev=824918&view=auto
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestCodecs.java (added)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestCodecs.java Tue Oct 13 20:44:51 2009
@@ -0,0 +1,583 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.util.*;
+import org.apache.lucene.index.codecs.*;
+import org.apache.lucene.index.codecs.standard.*;
+import org.apache.lucene.store.*;
+import java.util.*;
+
+// nocommit -- test multiple codecs here?
+
+// TODO
+//   - fix this test to run once for all codecs
+//   - make more docs per term, to test > 1 level skipping
+//   - test all combinations of payloads/not and omitTF/not
+//   - test w/ different indexDivisor
+//   - test field where payload length rarely changes
+//   - 0-term fields
+//   - seek/skip to same term/doc i'm already on
+//   - mix in deleted docs
+//   - seek, skip beyond end -- assert returns false
+//   - seek, skip to things that don't exist -- ensure it
+//     goes to 1 before next one known to exist
+//   - skipTo(term)
+//   - skipTo(doc)
+
+public class TestCodecs extends LuceneTestCase {
+
+  // nocommit -- switch to newRandom():
+  private static final Random RANDOM = new Random(42);
+  private static String[] fieldNames = new String[] {"one", "two", "three", "four"};
+
+  private final static int NUM_TEST_ITER = 4000;
+  // nocommit
+  //private final static int NUM_TEST_THREADS = 3;
+  private final static int NUM_TEST_THREADS = 2;
+  private final static int NUM_FIELDS = 4;
+  private final static int NUM_TERMS_RAND = 50; // must be > 16 to test skipping
+  private final static int DOC_FREQ_RAND = 500; // must be > 16 to test skipping
+  private final static int TERM_DOC_FREQ_RAND = 20;
+
+  // start is inclusive and end is exclusive
+  public int nextInt(int start, int end) {
+    return start + RANDOM.nextInt(end-start);
+  }
+
+  private int nextInt(int lim) {
+    return RANDOM.nextInt(lim);
+  }
+
+  private boolean nextBoolean() {
+    return 0 == nextInt(2);
+  }
+
+  char[] getRandomText() {
+
+    final int len = 1+nextInt(10);
+    char[] buffer = new char[len+1];
+    for(int i=0;i<len;i++) {
+      buffer[i] = (char) nextInt(97, 123);
+      /*
+      final int t = nextInt(5);
+      if (0 == t && i < len-1) {
+        // Make a surrogate pair
+        // High surrogate
+        buffer[i++] = (char) nextInt(0xd800, 0xdc00);
+        // Low surrogate
+        buffer[i] = (char) nextInt(0xdc00, 0xe000);
+      } else if (t <= 1)
+        buffer[i] = (char) nextInt(0x80);
+      else if (2 == t)
+        buffer[i] = (char) nextInt(0x80, 0x800);
+      else if (3 == t)
+        buffer[i] = (char) nextInt(0x800, 0xd800);
+      else
+        buffer[i] = (char) nextInt(0xe000, 0xffff);
+    */
+    }
+    buffer[len] = 0xffff;
+    return buffer;
+  }
+
+  class FieldData implements Comparable {
+    final FieldInfo fieldInfo;
+    final TermData[] terms;
+    final boolean omitTF;
+    final boolean storePayloads;
+
+    public FieldData(String name, FieldInfos fieldInfos, TermData[] terms, boolean omitTF, boolean storePayloads) {
+      this.omitTF = omitTF;
+      this.storePayloads = storePayloads;
+      fieldInfos.add(name, true);
+      fieldInfo = fieldInfos.fieldInfo(name);
+      fieldInfo.omitTermFreqAndPositions = omitTF;
+      fieldInfo.storePayloads = storePayloads;
+      this.terms = terms;
+      for(int i=0;i<terms.length;i++)
+        terms[i].field = this;
+      
+      Arrays.sort(terms);
+    }
+
+    public int compareTo(Object other) {
+      return fieldInfo.name.compareTo(((FieldData) other).fieldInfo.name);
+    }
+
+    public void write(FieldsConsumer consumer) throws Throwable {
+      if (Codec.DEBUG)
+        System.out.println("WRITE field=" + fieldInfo.name);
+      Arrays.sort(terms);
+      final TermsConsumer termsConsumer = consumer.addField(fieldInfo);
+      for(int i=0;i<terms.length;i++)
+        terms[i].write(termsConsumer);
+      termsConsumer.finish();
+    }
+  }
+
+  class PositionData {
+    int pos;
+    byte[] payload;
+
+    PositionData(int pos, byte[] payload) {
+      this.pos = pos;
+      this.payload = payload;
+    }
+  }
+
+  class TermData implements Comparable {
+    char[] text;
+    String text2;
+    int[] docs;
+    PositionData[][] positions;
+    FieldData field;
+    
+    public TermData(String text, int[] docs, PositionData[][] positions) {
+      this.text = new char[text.length()+1];
+      text.getChars(0, text.length(), this.text, 0);
+      this.text[text.length()] = 0xffff;
+      this.text2 = text;
+      this.docs = docs;
+      this.positions = positions;
+    }
+
+    public int compareTo(Object o) {
+      return text2.compareTo(((TermData) o).text2);
+    }    
+
+    public void write(TermsConsumer termsConsumer) throws Throwable {
+      if (Codec.DEBUG)
+        System.out.println("  term=" + text2);
+      final DocsConsumer docsConsumer = termsConsumer.startTerm(text, 0);
+      for(int i=0;i<docs.length;i++) {
+        final int termDocFreq;
+        if (field.omitTF)
+          termDocFreq = 0;
+        else
+          termDocFreq = positions[i].length;
+        final PositionsConsumer posConsumer = docsConsumer.addDoc(docs[i], termDocFreq);
+        if (!field.omitTF) {
+          for(int j=0;j<positions[i].length;j++) {
+            PositionData pos = positions[i][j];
+            if (pos.payload != null)
+              posConsumer.addPosition(pos.pos, pos.payload, 0, pos.payload.length);
+            else
+              posConsumer.addPosition(pos.pos, null, 0, 0);
+          }
+          posConsumer.finishDoc();
+        } else
+          assert posConsumer==null;
+      }
+      termsConsumer.finishTerm(text, 0, docs.length);
+    }
+  }
+
+  final private static String SEGMENT = "0";
+
+  TermData[] makeRandomTerms(boolean omitTF, boolean storePayloads) {
+    final int numTerms = 1+nextInt(NUM_TERMS_RAND);
+    //final int numTerms = 2;
+    TermData[] terms = new TermData[numTerms];
+
+    final HashSet termsSeen = new HashSet();
+
+    for(int i=0;i<numTerms;i++) {
+
+      // Make term text
+      char[] text;
+      String text2;
+      while(true) {
+        text = getRandomText();
+        text2 = new String(text, 0, text.length-1);
+        if (!termsSeen.contains(text2)) {
+          termsSeen.add(text2);
+          break;
+        }
+      }
+      
+      final int docFreq = 1+nextInt(DOC_FREQ_RAND);
+      int[] docs = new int[docFreq];
+      PositionData[][] positions;
+
+      if (!omitTF)
+        positions = new PositionData[docFreq][];
+      else
+        positions = null;
+
+      int docID = 0;
+      for(int j=0;j<docFreq;j++) {
+        docID += nextInt(1, 10);
+        docs[j] = docID;
+
+        if (!omitTF) {
+          final int termFreq = 1+nextInt(TERM_DOC_FREQ_RAND);
+          positions[j] = new PositionData[termFreq];
+          int position = 0;
+          for(int k=0;k<termFreq;k++) {
+            position += nextInt(1, 10);
+
+            byte[] payload;
+            if (storePayloads && nextInt(4) == 0) {
+              payload = new byte[1+nextInt(5)];
+              for(int l=0;l<payload.length;l++)
+                payload[l] = (byte) nextInt(255);
+            } else
+              payload = null;
+
+            positions[j][k] = new PositionData(position, payload);
+          }
+        }
+      }
+
+      terms[i] = new TermData(text2, docs, positions);
+    }
+
+    return terms;
+  }
+
+  public void testFixedPostings() throws Throwable {
+
+    final int NUM_TERMS = 100;
+    TermData[] terms = new TermData[NUM_TERMS];
+    for(int i=0;i<NUM_TERMS;i++) {
+      int[] docs = new int[] {1};
+      String text = Integer.toString(i, Character.MAX_RADIX);
+      terms[i] = new TermData(text, docs, null);
+    }
+
+    final FieldInfos fieldInfos = new FieldInfos();
+    
+    FieldData field = new FieldData("field", fieldInfos, terms, true, false);
+    FieldData[] fields = new FieldData[] {field};
+
+    Directory dir = new MockRAMDirectory();
+    write(fieldInfos, dir, fields);
+    SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, Codecs.getDefault().getWriter(null));
+    si.setHasProx(false);
+
+    FieldsProducer reader = si.getCodec().fieldsProducer(dir, fieldInfos, si, 64, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+    
+    FieldsEnum fieldsEnum = reader.iterator();
+    assertNotNull(fieldsEnum.next());
+    TermsEnum termsEnum = fieldsEnum.terms();
+    for(int i=0;i<NUM_TERMS;i++) {
+      TermRef term = termsEnum.next();
+      assertNotNull(term);
+      assertEquals(terms[i].text2, term.toString());
+    }
+    assertNull(termsEnum.next());
+
+    for(int i=0;i<NUM_TERMS;i++) {
+      assertEquals(termsEnum.seek(new TermRef(terms[i].text2)), TermsEnum.SeekStatus.FOUND);
+    }
+
+    assertNull(fieldsEnum.next());
+  }
+
+  public void testRandomPostings() throws Throwable {
+
+    // Codec.DEBUG = true;
+
+    final FieldInfos fieldInfos = new FieldInfos();
+    
+    FieldData[] fields = new FieldData[NUM_FIELDS];
+    for(int i=0;i<NUM_FIELDS;i++) {
+      boolean omitTF = 0==(i%3);
+      boolean storePayloads = 1==(i%3);
+      fields[i] = new FieldData(fieldNames[i], fieldInfos, makeRandomTerms(omitTF, storePayloads), omitTF, storePayloads);
+    }
+
+    Directory dir = new MockRAMDirectory();
+
+    write(fieldInfos, dir, fields);
+    SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, Codecs.getDefault().getWriter(null));
+
+    if (Codec.DEBUG) {
+      System.out.println("\nTEST: now read");
+    }
+
+    FieldsProducer terms = si.getCodec().fieldsProducer(dir, fieldInfos, si, 1024, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+
+    Verify[] threads = new Verify[NUM_TEST_THREADS-1];
+    for(int i=0;i<NUM_TEST_THREADS-1;i++) {
+      threads[i] = new Verify(fields, terms);
+      threads[i].setDaemon(true);
+      threads[i].start();
+    }
+    
+    new Verify(fields, terms).run();
+
+    for(int i=0;i<NUM_TEST_THREADS-1;i++) {
+      threads[i].join();
+      assert !threads[i].failed;
+    }
+
+    terms.close();
+    dir.close();
+  }
+
+  private String getDesc(FieldData field, TermData term) {
+    return field.fieldInfo.name + ":" + term.text2;
+  }
+
+  private String getDesc(FieldData field, TermData term, int doc) {
+    return getDesc(field, term) + ":" + doc;
+  }
+  
+  private class Verify extends Thread {
+    final Fields termsDict;
+    final FieldData[] fields;
+    volatile boolean failed;
+
+    Verify(FieldData[] fields, Fields termsDict) {
+      this.fields = fields;
+      this.termsDict = termsDict;
+    }
+    
+    public void run() {
+      try {
+        _run();
+      } catch (Throwable t) {
+        failed = true;
+        throw new RuntimeException(t);
+      }
+    }
+
+    private void verifyDocs(int[] docs, PositionData[][] positions, DocsEnum docsEnum, boolean doPos) throws Throwable {
+      for(int i=0;i<docs.length;i++) {
+        int doc = docsEnum.next();
+        assertTrue(doc != DocsEnum.NO_MORE_DOCS);
+        assertEquals(docs[i], doc);
+        if (doPos) {
+          verifyPositions(positions[i], docsEnum.positions());
+        }
+      }
+      assertEquals(DocsEnum.NO_MORE_DOCS, docsEnum.next());
+    }
+
+    byte[] data = new byte[10];
+
+    private void verifyPositions(PositionData[] positions, PositionsEnum posEnum) throws Throwable {
+      for(int i=0;i<positions.length;i++) {
+        int pos = posEnum.next();
+        if (Codec.DEBUG) {
+          System.out.println("TEST pos " + (1+i) + " of " + positions.length + " pos=" + pos);
+        }
+        assertEquals(positions[i].pos, pos);
+        if (positions[i].payload != null) {
+          assertTrue(posEnum.hasPayload());
+          assertEquals(positions[i].payload.length, posEnum.getPayloadLength());
+          if (nextInt(3) < 2) {
+            if (Codec.DEBUG) {
+              System.out.println("TEST do check payload len=" + posEnum.getPayloadLength());
+            }
+
+            // Verify the payload bytes
+            posEnum.getPayload(data, 0);
+            for(int j=0;j<positions[i].payload.length;j++) {
+              assertEquals(data[j], positions[i].payload[j]);
+            }
+          } else {
+            if (Codec.DEBUG) {
+              System.out.println("TEST skip check payload len=" + posEnum.getPayloadLength());
+            }
+          }
+        } else {
+          assertFalse(posEnum.hasPayload());
+        }
+      }
+    }
+
+    public void _run() throws Throwable {
+      
+      final FieldsEnum fieldsEnum = termsDict.iterator();
+
+      for(int iter=0;iter<NUM_TEST_ITER;iter++) {
+        final FieldData field = fields[nextInt(fields.length)];
+        if (Codec.DEBUG) {
+          System.out.println("verify field=" + field.fieldInfo.name);
+        }
+
+        final TermsEnum termsEnum = termsDict.terms(field.fieldInfo.name).iterator();
+
+        // Test straight enum of the terms:
+        if (Codec.DEBUG) {
+          System.out.println("\nTEST: pure enum");
+        }
+
+        int upto = 0;
+        while(true) {
+          TermRef term = termsEnum.next();
+          if (term == null) {
+            break;
+          }
+          if (Codec.DEBUG) {
+            System.out.println("check " + upto + ": " + field.terms[upto].text2);
+          }
+          assertTrue(new TermRef(field.terms[upto++].text2).termEquals(term));
+        }
+        assertEquals(upto, field.terms.length);
+
+        // Test random seek:
+        if (Codec.DEBUG) {
+          System.out.println("\nTEST: random seek");
+        }
+        TermData term = field.terms[nextInt(field.terms.length)];
+        TermsEnum.SeekStatus status = termsEnum.seek(new TermRef(term.text2));
+        assertEquals(status, TermsEnum.SeekStatus.FOUND);
+        assertEquals(term.docs.length, termsEnum.docFreq());
+        verifyDocs(term.docs, term.positions, termsEnum.docs(null), !field.omitTF);
+
+        // Test random seek by ord:
+        int idx = nextInt(field.terms.length);
+        term = field.terms[idx];
+        status = termsEnum.seek(idx);
+        assertEquals(status, TermsEnum.SeekStatus.FOUND);
+        assertTrue(termsEnum.term().termEquals(new TermRef(term.text2)));
+        assertEquals(term.docs.length, termsEnum.docFreq());
+        verifyDocs(term.docs, term.positions, termsEnum.docs(null), !field.omitTF);
+
+        // Test seek to non-existent terms:
+        if (Codec.DEBUG)
+          System.out.println("\nTEST: seek to non-existent term");
+        for(int i=0;i<100;i++) {
+          char[] text = getRandomText();
+          String text2 = new String(text, 0, text.length-1) + ".";
+          status = termsEnum.seek(new TermRef(text2));
+          assertTrue(status == TermsEnum.SeekStatus.NOT_FOUND ||
+                     status == TermsEnum.SeekStatus.END);
+        }
+        
+        // Seek to each term, backwards:
+        if (Codec.DEBUG) {
+          System.out.println("\n" + Thread.currentThread().getName() + ": TEST: seek backwards through terms");
+        }
+        for(int i=field.terms.length-1;i>=0;i--) {
+          if (Codec.DEBUG) {
+            System.out.println(Thread.currentThread().getName() + ": TEST: term=" + field.terms[i].text2 + " has docFreq=" + field.terms[i].docs.length);
+          }
+          assertEquals(Thread.currentThread().getName() + ": field=" + field.fieldInfo.name + " term=" + field.terms[i].text2, TermsEnum.SeekStatus.FOUND, termsEnum.seek(new TermRef(field.terms[i].text2)));
+          assertEquals(field.terms[i].docs.length, termsEnum.docFreq());
+        }
+
+        // Seek to each term by ord, backwards
+        if (Codec.DEBUG) {
+          System.out.println("\n" + Thread.currentThread().getName() + ": TEST: seek backwards through terms, by ord");
+        }
+        for(int i=field.terms.length-1;i>=0;i--) {
+          if (Codec.DEBUG) {
+            System.out.println(Thread.currentThread().getName() + ": TEST: term=" + field.terms[i].text2 + " has docFreq=" + field.terms[i].docs.length);
+          }
+          assertEquals(Thread.currentThread().getName() + ": field=" + field.fieldInfo.name + " term=" + field.terms[i].text2, TermsEnum.SeekStatus.FOUND, termsEnum.seek(i));
+          assertEquals(field.terms[i].docs.length, termsEnum.docFreq());
+          assertTrue(termsEnum.term().termEquals(new TermRef(field.terms[i].text2)));
+        }
+
+        // Seek to non-existent empty-string term
+        status = termsEnum.seek(new TermRef(""));
+        assertNotNull(status);
+        assertEquals(status, TermsEnum.SeekStatus.NOT_FOUND);
+
+        // Make sure we're now pointing to first term
+        assertTrue(termsEnum.term().termEquals(new TermRef(field.terms[0].text2)));
+
+        // Test docs enum
+        if (Codec.DEBUG) {
+          System.out.println("\nTEST: docs/positions");
+        }
+        termsEnum.seek(new TermRef(""));
+        upto = 0;
+        do {
+          term = field.terms[upto];
+          if (nextInt(3) == 1) {
+            if (Codec.DEBUG) {
+              System.out.println("\nTEST [" + getDesc(field, term) + "]: iterate docs...");
+            }
+            DocsEnum docs = termsEnum.docs(null);
+            int upto2 = -1;
+            while(upto2 < term.docs.length-1) {
+              // Maybe skip:
+              final int left = term.docs.length-upto2;
+              int doc;
+              if (nextInt(3) == 1 && left >= 1) {
+                int inc = 1+nextInt(left-1);
+                upto2 += inc;
+                if (Codec.DEBUG) {
+                  System.out.println("TEST [" + getDesc(field, term) + "]: skip: " + left + " docs left; skip to doc=" + term.docs[upto2] + " [" + upto2 + " of " + term.docs.length + "]");
+                }
+
+                doc = docs.advance(term.docs[upto2]);
+                // nocommit -- test skipping to non-existent doc
+                assertEquals(term.docs[upto2], doc);
+              } else {
+                doc = docs.next();
+                assertTrue(doc != -1);
+                if (Codec.DEBUG) {
+                  System.out.println("TEST [" + getDesc(field, term) + "]: got next doc...");
+                }
+                upto2++;
+              }
+              assertEquals(term.docs[upto2], doc);
+              if (!field.omitTF) {
+                assertEquals(term.positions[upto2].length, docs.freq());
+                if (nextInt(2) == 1) {
+                  if (Codec.DEBUG) {
+                    System.out.println("TEST [" + getDesc(field, term, term.docs[upto2]) + "]: check positions for doc " + term.docs[upto2] + "...");
+                  }
+                  verifyPositions(term.positions[upto2], docs.positions());
+                } else if (Codec.DEBUG) {
+                  System.out.println("TEST: skip positions...");
+                }
+              } else if (Codec.DEBUG) {
+                System.out.println("TEST: skip positions: omitTF=true");
+              }
+            }
+
+            assertEquals(DocsEnum.NO_MORE_DOCS, docs.next());
+
+          } else if (Codec.DEBUG) {
+            System.out.println("\nTEST [" + getDesc(field, term) + "]: skip docs");
+          }
+          upto++;
+
+        } while (termsEnum.next() != null);
+
+        assertEquals(upto, field.terms.length);
+        
+        //termsEnum.close();
+      }
+    }
+  }
+
+  private void write(FieldInfos fieldInfos, Directory dir, FieldData[] fields) throws Throwable {
+
+    // nocommit -- randomize this:
+    final int termIndexInterval = 16;
+
+    SegmentWriteState state = new SegmentWriteState(null, dir, SEGMENT, fieldInfos, null, 10000, 10000, termIndexInterval,
+                                                    Codecs.getDefault());
+
+    final FieldsConsumer consumer = state.codec.fieldsConsumer(state);
+    Arrays.sort(fields);
+    for(int i=0;i<fields.length;i++) {
+      fields[i].write(consumer);
+    }
+    consumer.close();
+  }
+}

Propchange: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestCodecs.java
------------------------------------------------------------------------------
    svn:eol-style = native
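
For readers skimming the new test, the write path it exercises is a chain of consumers; below is a hedged sketch using only the calls that appear in FieldData.write and TermData.write above (the class sits in the same package as the test so that FieldInfo is visible, and term text follows TermData's 0xffff-terminated convention):

    package org.apache.lucene.index;

    import org.apache.lucene.index.codecs.*;

    class WriteOneTerm {
      // Writes a single term with one posting (docID 0, position 0, no payload),
      // using the same call sequence as TermData.write in TestCodecs.
      static void write(FieldsConsumer consumer, FieldInfo fieldInfo, char[] termText) throws Throwable {
        TermsConsumer terms = consumer.addField(fieldInfo);
        DocsConsumer docs = terms.startTerm(termText, 0);
        PositionsConsumer positions = docs.addDoc(0, 1);   // docID=0, termDocFreq=1
        positions.addPosition(0, null, 0, 0);              // position 0, no payload
        positions.finishDoc();
        terms.finishTerm(termText, 0, 1);                  // docFreq=1
        terms.finish();
      }
    }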

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestDirectoryReader.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestDirectoryReader.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestDirectoryReader.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestDirectoryReader.java Tue Oct 13 20:44:51 2009
@@ -122,7 +122,7 @@
   }
         
   
-  public void _testTermVectors() {
+  public void _testTermVectors() throws IOException {
     MultiReader reader = new MultiReader(readers);
     assertTrue(reader != null);
   }

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestDoc.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestDoc.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestDoc.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestDoc.java Tue Oct 13 20:44:51 2009
@@ -186,13 +186,17 @@
       merger.merge();
       merger.closeReaders();
       
+      final SegmentInfo info = new SegmentInfo(merged, si1.docCount + si2.docCount, si1.dir,
+                                               useCompoundFile, true, -1, null, false, merger.hasProx(),
+                                               merger.getCodec());
+      
       if (useCompoundFile) {
-        List filesToDelete = merger.createCompoundFile(merged + ".cfs");
+        List filesToDelete = merger.createCompoundFile(merged + ".cfs", info);
         for (Iterator iter = filesToDelete.iterator(); iter.hasNext();)
           si1.dir.deleteFile((String) iter.next());
       }
 
-      return new SegmentInfo(merged, si1.docCount + si2.docCount, si1.dir, useCompoundFile, true);
+      return info;
    }
 
 

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexReader.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexReader.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexReader.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexReader.java Tue Oct 13 20:44:51 2009
@@ -40,6 +40,7 @@
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.document.SetBasedFieldSelector;
 import org.apache.lucene.index.IndexReader.FieldOption;
+import org.apache.lucene.index.codecs.Codecs;
 import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.ScoreDoc;
@@ -53,6 +54,7 @@
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
+import org.apache.lucene.index.codecs.Codec;
 
 public class TestIndexReader extends LuceneTestCase
 {
@@ -894,15 +896,18 @@
         d.add(new Field("id", Integer.toString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
         d.add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED));
         writer.addDocument(d);
+        if (0==i%10)
+          writer.commit();
       }
       writer.close();
 
-      long diskUsage = startDir.sizeInBytes();
-      long diskFree = diskUsage+100;      
+      long diskUsage = ((MockRAMDirectory) startDir).getRecomputedActualSizeInBytes();
+      long diskFree = diskUsage+100;
 
       IOException err = null;
 
       boolean done = false;
+      boolean gotExc = false;
 
       // Iterate w/ ever increasing free disk space:
       while(!done) {
@@ -959,7 +964,7 @@
               int docId = 12;
               for(int i=0;i<13;i++) {
                 reader.deleteDocument(docId);
-                reader.setNorm(docId, "contents", (float) 2.0);
+                reader.setNorm(docId, "content", (float) 2.0);
                 docId += 12;
               }
             }
@@ -974,6 +979,7 @@
               e.printStackTrace(System.out);
             }
             err = e;
+            gotExc = true;
             if (1 == x) {
               e.printStackTrace();
               fail(testName + " hit IOException after disk space was freed up");
@@ -986,29 +992,7 @@
           // new IndexFileDeleter, have it delete
           // unreferenced files, then verify that in fact
           // no files were deleted:
-          String[] startFiles = dir.listAll();
-          SegmentInfos infos = new SegmentInfos();
-          infos.read(dir);
-          new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
-          String[] endFiles = dir.listAll();
-
-          Arrays.sort(startFiles);
-          Arrays.sort(endFiles);
-
-          //for(int i=0;i<startFiles.length;i++) {
-          //  System.out.println("  startFiles: " + i + ": " + startFiles[i]);
-          //}
-
-          if (!Arrays.equals(startFiles, endFiles)) {
-            String successStr;
-            if (success) {
-              successStr = "success";
-            } else {
-              successStr = "IOException";
-              err.printStackTrace();
-            }
-            fail("reader.close() failed to delete unreferenced files after " + successStr + " (" + diskFree + " bytes): before delete:\n    " + arrayToString(startFiles) + "\n  after delete:\n    " + arrayToString(endFiles));
-          }
+          TestIndexWriter.assertNoUnreferencedFiles(dir, "reader.close() failed to delete unreferenced files");
 
           // Finally, verify index is not corrupt, and, if
           // we succeeded, we see all docs changed, and if
@@ -1063,6 +1047,8 @@
           newReader.close();
 
           if (result2 == END_COUNT) {
+            if (!gotExc)
+              fail("never hit disk full");
             break;
           }
         }
@@ -1425,7 +1411,7 @@
       writer.close();
 
       SegmentInfos sis = new SegmentInfos();
-      sis.read(d);
+      sis.read(d, Codecs.getDefault());
       IndexReader r = IndexReader.open(d, false);
       IndexCommit c = r.getIndexCommit();
 
@@ -1607,6 +1593,7 @@
   // LUCENE-1579: Ensure that on a cloned reader, segments
   // reuse the doc values arrays in FieldCache
   public void testFieldCacheReuseAfterClone() throws Exception {
+    //Codec.DEBUG = true;
     Directory dir = new MockRAMDirectory();
     IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
     Document doc = new Document();
@@ -1760,7 +1747,6 @@
     } catch (IllegalStateException ise) {
       // expected
     }
-    assertFalse(((SegmentReader) r.getSequentialSubReaders()[0]).termsIndexLoaded());
 
     assertEquals(-1, r.getTermInfosIndexDivisor());
     writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
@@ -1773,8 +1759,14 @@
     IndexReader[] subReaders = r2.getSequentialSubReaders();
     assertEquals(2, subReaders.length);
     for(int i=0;i<2;i++) {
-      assertFalse(((SegmentReader) subReaders[i]).termsIndexLoaded());
+      try {
+        subReaders[i].docFreq(new Term("field", "f"));
+        fail("did not hit expected exception");
+      } catch (IllegalStateException ise) {
+        // expected
+      }
     }
+
     r2.close();
     dir.close();
   }

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexWriter.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexWriter.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexWriter.java Tue Oct 13 20:44:51 2009
@@ -48,6 +48,7 @@
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.index.codecs.Codecs;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.PhraseQuery;
 import org.apache.lucene.search.Query;
@@ -64,6 +65,7 @@
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.SingleInstanceLockFactory;
+import org.apache.lucene.store.NoLockFactory;
 import org.apache.lucene.util.UnicodeUtil;
 import org.apache.lucene.util._TestUtil;
 
@@ -526,9 +528,8 @@
       String[] startFiles = dir.listAll();
       SegmentInfos infos = new SegmentInfos();
       infos.read(dir);
-      new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
+      new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null, Codecs.getDefault());
       String[] endFiles = dir.listAll();
-
       Arrays.sort(startFiles);
       Arrays.sort(endFiles);
 
@@ -3489,6 +3490,7 @@
     TermPositions tps = s.getIndexReader().termPositions(new Term("field", "a"));
     assertTrue(tps.next());
     assertEquals(1, tps.freq());
+    // would be -1 if we called w.setAllowMinus1Position();
     assertEquals(0, tps.nextPosition());
     w.close();
 
@@ -4290,10 +4292,6 @@
 
       assertTrue(dir.fileExists("myrandomfile"));
 
-      // Make sure this does not copy myrandomfile:
-      Directory dir2 = new RAMDirectory(dir);
-      assertTrue(!dir2.fileExists("myrandomfile"));
-
     } finally {
       dir.close();
       _TestUtil.rmDir(indexDir);

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexWriterDelete.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexWriterDelete.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestIndexWriterDelete.java Tue Oct 13 20:44:51 2009
@@ -18,7 +18,6 @@
  */
 
 import java.io.IOException;
-import java.util.Arrays;
 
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
@@ -780,30 +779,22 @@
       }
     }
 
-    String[] startFiles = dir.listAll();
-    SegmentInfos infos = new SegmentInfos();
-    infos.read(dir);
-    new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
-    String[] endFiles = dir.listAll();
-
-    if (!Arrays.equals(startFiles, endFiles)) {
-      fail("docswriter abort() failed to delete unreferenced files:\n  before delete:\n    "
-           + arrayToString(startFiles) + "\n  after delete:\n    "
-           + arrayToString(endFiles));
-    }
-
+    TestIndexWriter.assertNoUnreferencedFiles(dir, "docsWriter.abort() failed to delete unreferenced files");
     modifier.close();
-
   }
 
-  private String arrayToString(String[] l) {
-    String s = "";
-    for (int i = 0; i < l.length; i++) {
-      if (i > 0) {
-        s += "\n    ";
-      }
-      s += l[i];
+  public void testDeleteNullQuery() throws IOException {
+    Directory dir = new MockRAMDirectory();
+    IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+
+    for (int i = 0; i < 5; i++) {
+      addDoc(modifier, i, 2*i);
     }
-    return s;
+
+    modifier.deleteDocuments(new TermQuery(new Term("nada", "nada")));
+    modifier.commit();
+    assertEquals(5, modifier.numDocs());
+    modifier.close();
+    dir.close();
   }
 }

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestLazyProxSkipping.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestLazyProxSkipping.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestLazyProxSkipping.java Tue Oct 13 20:44:51 2009
@@ -47,8 +47,9 @@
     private class SeekCountingDirectory extends RAMDirectory {
       public IndexInput openInput(String name) throws IOException {
         IndexInput ii = super.openInput(name);
-        if (name.endsWith(".prx")) {
+        if (name.endsWith(".prx") || name.endsWith(".pos") ) {
           // we decorate the proxStream with a wrapper class that lets us count the number of calls to seek()
+          // nocommit -- fix this:
           ii = new SeeksCountingStream(ii);
         }
         return ii;
@@ -115,7 +116,7 @@
         performTest(10);
     }
     
-    public void testSeek() throws IOException {
+    public void xxxtestSeek() throws IOException {
         Directory directory = new RAMDirectory();
         IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
         for (int i = 0; i < 10; i++) {

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java Tue Oct 13 20:44:51 2009
@@ -29,8 +29,9 @@
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Field.Index;
 import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 
 /**
@@ -42,8 +43,18 @@
  * 
  */
 public class TestMultiLevelSkipList extends LuceneTestCase {
+  
+  class CountingRAMDirectory extends MockRAMDirectory {
+    public IndexInput openInput(String fileName) throws IOException {
+      IndexInput in = super.openInput(fileName);
+      if (fileName.endsWith(".frq"))
+        in = new CountingStream(in);
+      return in;
+    }
+  }
+
   public void testSimpleSkip() throws IOException {
-    RAMDirectory dir = new RAMDirectory();
+    Directory dir = new CountingRAMDirectory();
     IndexWriter writer = new IndexWriter(dir, new PayloadAnalyzer(), true,
                                          IndexWriter.MaxFieldLength.LIMITED);
     Term term = new Term("test", "a");
@@ -57,9 +68,8 @@
     writer.close();
 
     IndexReader reader = SegmentReader.getOnlySegmentReader(dir);
-    SegmentTermPositions tp = (SegmentTermPositions) reader.termPositions();
-    tp.freqStream = new CountingStream(tp.freqStream);
-
+    TermPositions tp = reader.termPositions();
+    
     for (int i = 0; i < 2; i++) {
       counter = 0;
       tp.seek(term);
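
CountingRAMDirectory above wraps the .frq input in a CountingStream that is defined further down in this test, outside the hunk. A sketch of the delegation pattern such a wrapper follows, assuming it is an inner class of the test (so it can see the test's counter field) and that it only needs to bump the counter on low-level reads; the real class may count different calls:

    class CountingStream extends IndexInput {
      private final IndexInput input;

      CountingStream(IndexInput input) {
        this.input = input;
      }

      public byte readByte() throws IOException {
        counter++;                          // counter is a field of the enclosing test
        return input.readByte();
      }

      public void readBytes(byte[] b, int offset, int len) throws IOException {
        counter++;
        input.readBytes(b, offset, len);
      }

      public void close() throws IOException { input.close(); }
      public long getFilePointer() { return input.getFilePointer(); }
      public void seek(long pos) throws IOException { input.seek(pos); }
      public long length() { return input.length(); }
    }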

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestNorms.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestNorms.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestNorms.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestNorms.java Tue Oct 13 20:44:51 2009
@@ -29,6 +29,7 @@
 import org.apache.lucene.search.Similarity;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util._TestUtil;
 
 import java.io.File;
 import java.io.IOException;
@@ -73,14 +74,8 @@
    * Including optimize. 
    */
   public void testNorms() throws IOException {
-    // tmp dir
-    String tempDir = System.getProperty("java.io.tmpdir");
-    if (tempDir == null) {
-      throw new IOException("java.io.tmpdir undefined, cannot run test");
-    }
-    
     // test with a single index: index1
-    File indexDir1 = new File(tempDir, "lucenetestindex1");
+    File indexDir1 = _TestUtil.getTempDir("lucenetestindex1");
     Directory dir1 = FSDirectory.open(indexDir1);
 
     norms = new ArrayList();
@@ -98,14 +93,14 @@
     modifiedNorms = new ArrayList();
     numDocNorms = 0;
     
-    File indexDir2 = new File(tempDir, "lucenetestindex2");
+    File indexDir2 = _TestUtil.getTempDir("lucenetestindex2");
     Directory dir2 = FSDirectory.open(indexDir2);
 
     createIndex(dir2);
     doTestNorms(dir2);
 
     // add index1 and index2 to a third index: index3
-    File indexDir3 = new File(tempDir, "lucenetestindex3");
+    File indexDir3 = _TestUtil.getTempDir("lucenetestindex3");
     Directory dir3 = FSDirectory.open(indexDir3);
 
     createIndex(dir3);
@@ -136,6 +131,9 @@
     dir1.close();
     dir2.close();
     dir3.close();
+    _TestUtil.rmDir(indexDir1);
+    _TestUtil.rmDir(indexDir2);
+    _TestUtil.rmDir(indexDir3);
   }
 
   private void doTestNorms(Directory dir) throws IOException {
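
The TestNorms change above swaps hand-built java.io.tmpdir paths for _TestUtil.getTempDir(...) and pairs each directory with a _TestUtil.rmDir(...) at the end of the test. A minimal sketch of that lifecycle, assuming getTempDir hands back a fresh File under the JVM's temp directory:

    File indexDir = _TestUtil.getTempDir("lucenetestindex");
    Directory dir = FSDirectory.open(indexDir);
    try {
      // ... build the index and run assertions against dir ...
    } finally {
      dir.close();
      _TestUtil.rmDir(indexDir);   // remove the on-disk files once the test is done
    }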

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestPayloads.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestPayloads.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestPayloads.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestPayloads.java Tue Oct 13 20:44:51 2009
@@ -38,7 +38,7 @@
 import org.apache.lucene.document.Field;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.UnicodeUtil;
 import org.apache.lucene.util._TestUtil;
@@ -98,7 +98,7 @@
     // payload bit in the FieldInfo
     public void testPayloadFieldBit() throws Exception {
         rnd = newRandom();
-        Directory ram = new RAMDirectory();
+        Directory ram = new MockRAMDirectory();
         PayloadAnalyzer analyzer = new PayloadAnalyzer();
         IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
         Document d = new Document();
@@ -154,7 +154,7 @@
     public void testPayloadsEncoding() throws Exception {
         rnd = newRandom();
         // first perform the test using a RAMDirectory
-        Directory dir = new RAMDirectory();
+        Directory dir = new MockRAMDirectory();
         performTest(dir);
         
         // now use a FSDirectory and repeat same test
@@ -256,11 +256,17 @@
         TermPositions tp = reader.termPositions(terms[0]);
         tp.next();
         tp.nextPosition();
+        // NOTE: prior rev of this test failed to call
+        // next() here first:
+        tp.next();
         // now we don't read this payload
         tp.nextPosition();
         assertEquals("Wrong payload length.", 1, tp.getPayloadLength());
         byte[] payload = tp.getPayload(null, 0);
         assertEquals(payload[0], payloadData[numTerms]);
+        // NOTE: prior rev of this test failed to call
+        // next() here first:
+        tp.next();
         tp.nextPosition();
         
         // we don't read this payload and skip to a different document
@@ -465,7 +471,7 @@
         final int numDocs = 50;
         final ByteArrayPool pool = new ByteArrayPool(numThreads, 5);
         
-        Directory dir = new RAMDirectory();
+        Directory dir = new MockRAMDirectory();
         final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
         final String field = "test";
         

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentMerger.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentMerger.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentMerger.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentMerger.java Tue Oct 13 20:44:51 2009
@@ -69,7 +69,8 @@
     merger.closeReaders();
     assertTrue(docsMerged == 2);
     //Should be able to open a new SegmentReader against the new directory
-    SegmentReader mergedReader = SegmentReader.get(new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, true));
+    SegmentReader mergedReader = SegmentReader.get(new SegmentInfo(mergedSegment, docsMerged, mergedDir, false, true,
+                                                                   -1, null, false, merger.hasProx(), merger.getCodec()));
     assertTrue(mergedReader != null);
     assertTrue(mergedReader.numDocs() == 2);
     Document newDoc1 = mergedReader.document(0);

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentReader.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentReader.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentReader.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentReader.java Tue Oct 13 20:44:51 2009
@@ -136,6 +136,9 @@
     TermPositions positions = reader.termPositions();
     positions.seek(new Term(DocHelper.TEXT_FIELD_1_KEY, "field"));
     assertTrue(positions != null);
+    // NOTE: prior rev of this test failed to call
+    // next() here first:
+    assertTrue(positions.next());
     assertTrue(positions.doc() == 0);
     assertTrue(positions.nextPosition() >= 0);
   }    
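
The positions.next() added above reflects the enumeration contract these tests now rely on: a TermPositions must be advanced to a document with next() before doc(), freq(), or nextPosition() are read for that document. A small sketch of a loop that respects the contract, assuming an IndexReader and a Term are already in scope:

    TermPositions tp = reader.termPositions(term);
    try {
      while (tp.next()) {                     // advance to a document first
        for (int i = 0; i < tp.freq(); i++) {
          int pos = tp.nextPosition();
          if (tp.isPayloadAvailable()) {
            byte[] payload = tp.getPayload(new byte[tp.getPayloadLength()], 0);
            // ... inspect pos and payload ...
          }
        }
      }
    } finally {
      tp.close();
    }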

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentTermDocs.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentTermDocs.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentTermDocs.java Tue Oct 13 20:44:51 2009
@@ -55,14 +55,13 @@
     SegmentReader reader = SegmentReader.get(true, info, indexDivisor);
     assertTrue(reader != null);
     assertEquals(indexDivisor, reader.getTermInfosIndexDivisor());
-    SegmentTermDocs segTermDocs = new SegmentTermDocs(reader);
-    assertTrue(segTermDocs != null);
-    segTermDocs.seek(new Term(DocHelper.TEXT_FIELD_2_KEY, "field"));
-    if (segTermDocs.next() == true)
-    {
-      int docId = segTermDocs.doc();
+    TermDocs termDocs = reader.termDocs();
+    assertTrue(termDocs != null);
+    termDocs.seek(new Term(DocHelper.TEXT_FIELD_2_KEY, "field"));
+      if (termDocs.next() == true) {
+      int docId = termDocs.doc();
       assertTrue(docId == 0);
-      int freq = segTermDocs.freq();
+      int freq = termDocs.freq();
       assertTrue(freq == 3);  
     }
     reader.close();
@@ -77,20 +76,20 @@
       //After adding the document, we should be able to read it back in
       SegmentReader reader = SegmentReader.get(true, info, indexDivisor);
       assertTrue(reader != null);
-      SegmentTermDocs segTermDocs = new SegmentTermDocs(reader);
-      assertTrue(segTermDocs != null);
-      segTermDocs.seek(new Term("textField2", "bad"));
-      assertTrue(segTermDocs.next() == false);
+      TermDocs termDocs = reader.termDocs();
+      assertTrue(termDocs != null);
+      termDocs.seek(new Term("textField2", "bad"));
+      assertTrue(termDocs.next() == false);
       reader.close();
     }
     {
       //After adding the document, we should be able to read it back in
       SegmentReader reader = SegmentReader.get(true, info, indexDivisor);
       assertTrue(reader != null);
-      SegmentTermDocs segTermDocs = new SegmentTermDocs(reader);
-      assertTrue(segTermDocs != null);
-      segTermDocs.seek(new Term("junk", "bad"));
-      assertTrue(segTermDocs.next() == false);
+      TermDocs termDocs = reader.termDocs();
+      assertTrue(termDocs != null);
+      termDocs.seek(new Term("junk", "bad"));
+      assertTrue(termDocs.next() == false);
       reader.close();
     }
   }
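
The hunks above drop direct construction of SegmentTermDocs in favor of the public reader.termDocs() factory. A self-contained sketch of that public API, assuming a Directory that already contains documents with the field being looked up; the TermDocsExample class name is only for illustration:

    import java.io.IOException;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.index.TermDocs;
    import org.apache.lucene.store.Directory;

    public class TermDocsExample {
      // Walk the postings of a single term through the public TermDocs API.
      public static void dumpTerm(Directory dir, String field, String text) throws IOException {
        IndexReader reader = IndexReader.open(dir, true);   // read-only reader
        try {
          TermDocs termDocs = reader.termDocs();
          termDocs.seek(new Term(field, text));
          while (termDocs.next()) {
            System.out.println("doc=" + termDocs.doc() + " freq=" + termDocs.freq());
          }
          termDocs.close();
        } finally {
          reader.close();
        }
      }
    }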

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentTermEnum.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentTermEnum.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentTermEnum.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestSegmentTermEnum.java Tue Oct 13 20:44:51 2009
@@ -61,23 +61,6 @@
     verifyDocFreq();
   }
 
-  public void testPrevTermAtEnd() throws IOException
-  {
-    Directory dir = new MockRAMDirectory();
-    IndexWriter writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-    addDoc(writer, "aaa bbb");
-    writer.close();
-    SegmentReader reader = SegmentReader.getOnlySegmentReader(dir);
-    SegmentTermEnum termEnum = (SegmentTermEnum) reader.terms();
-    assertTrue(termEnum.next());
-    assertEquals("aaa", termEnum.term().text());
-    assertTrue(termEnum.next());
-    assertEquals("aaa", termEnum.prev().text());
-    assertEquals("bbb", termEnum.term().text());
-    assertFalse(termEnum.next());
-    assertEquals("bbb", termEnum.prev().text());
-  }
-
   private void verifyDocFreq()
       throws IOException
   {

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestStressIndexing2.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestStressIndexing2.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestStressIndexing2.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/index/TestStressIndexing2.java Tue Oct 13 20:44:51 2009
@@ -72,6 +72,7 @@
     // dir1 = FSDirectory.open("foofoofoo");
     Directory dir2 = new MockRAMDirectory();
     // mergeFactor=2; maxBufferedDocs=2; Map docs = indexRandom(1, 3, 2, dir1);
+
     Map docs = indexRandom(10, 100, 100, dir1);
     indexSerial(docs, dir2);
 
@@ -96,8 +97,12 @@
       int range=r.nextInt(20)+1;
       Directory dir1 = new MockRAMDirectory();
       Directory dir2 = new MockRAMDirectory();
+      //System.out.println("iter=" + iter + " range=" + range);
+      //System.out.println("TEST: index random");
       Map docs = indexRandom(nThreads, iter, range, dir1);
+      //System.out.println("TEST: index serial");
       indexSerial(docs, dir2);
+      //System.out.println("TEST: verify");
       verifyEquals(dir1, dir2, "id");
     }
   }
@@ -199,7 +204,8 @@
         threads[i].join();
       }
 
-      // w.optimize();
+      // nocommit -- comment out again
+      //w.optimize();
       w.close();    
 
       for (int i=0; i<threads.length; i++) {
@@ -210,6 +216,7 @@
       }
     }
 
+    //System.out.println("TEST: checkindex");
     _TestUtil.checkIndex(dir);
 
     return docs;
@@ -269,6 +276,7 @@
     TermEnum termEnum = r1.terms (new Term (idField, ""));
     do {
       Term term = termEnum.term();
+      //System.out.println("TEST: match id term=" + term);
       if (term==null || term.field() != idField) break;
 
       termDocs1.seek (termEnum);
@@ -322,9 +330,12 @@
     } while (termEnum.next());
 
     termEnum.close();
+    //System.out.println("TEST: done match id");
 
     // Verify postings
+    //System.out.println("TEST: create te1");
     TermEnum termEnum1 = r1.terms (new Term ("", ""));
+    //System.out.println("TEST: create te2");
     TermEnum termEnum2 = r2.terms (new Term ("", ""));
 
     // pack both doc and freq into single element for easy sorting
@@ -339,6 +350,7 @@
       for(;;) {
         len1=0;
         term1 = termEnum1.term();
+        //System.out.println("TEST: term1=" + term1);
         if (term1==null) break;
         termDocs1.seek(termEnum1);
         while (termDocs1.next()) {
@@ -356,6 +368,7 @@
       for(;;) {
         len2=0;
         term2 = termEnum2.term();
+        //System.out.println("TEST: term2=" + term2);
         if (term2==null) break;
         termDocs2.seek(termEnum2);
         while (termDocs2.next()) {
@@ -368,13 +381,13 @@
         if (!termEnum2.next()) break;
       }
 
-      if (!hasDeletes)
-        assertEquals(termEnum1.docFreq(), termEnum2.docFreq());
-
       assertEquals(len1, len2);
       if (len1==0) break;  // no more terms
 
-      assertEquals(term1, term2);
+      if (!hasDeletes)
+        assertEquals(termEnum1.docFreq(), termEnum2.docFreq());
+
+      assertEquals("len1=" + len1 + " len2=" + len2 + " deletes?=" + hasDeletes, term1, term2);
 
       // sort info2 to get it into ascending docid
       Arrays.sort(info2, 0, len2);

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/JustCompileSearch.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/JustCompileSearch.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/JustCompileSearch.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/JustCompileSearch.java Tue Oct 13 20:44:51 2009
@@ -24,7 +24,8 @@
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.index.TermPositions;
+import org.apache.lucene.index.DocsEnum;
+import org.apache.lucene.index.TermRef;
 import org.apache.lucene.util.PriorityQueue;
 
 /**
@@ -174,7 +175,7 @@
   
   static final class JustCompileExtendedFieldCacheLongParser implements FieldCache.LongParser {
 
-    public long parseLong(String string) {
+    public long parseLong(TermRef string) {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }
     
@@ -182,7 +183,7 @@
   
   static final class JustCompileExtendedFieldCacheDoubleParser implements FieldCache.DoubleParser {
     
-    public double parseDouble(String string) {
+    public double parseDouble(TermRef term) {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }
     
@@ -278,9 +279,9 @@
 
   static final class JustCompilePhraseScorer extends PhraseScorer {
 
-    JustCompilePhraseScorer(Weight weight, TermPositions[] tps, int[] offsets,
+    JustCompilePhraseScorer(Weight weight, DocsEnum[] docs, int[] offsets,
         Similarity similarity, byte[] norms) {
-      super(weight, tps, offsets, similarity, norms);
+      super(weight, docs, offsets, similarity, norms);
     }
 
     protected float phraseFreq() throws IOException {
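
The parser hunks above move FieldCache.LongParser and FieldCache.DoubleParser over to the flex branch's TermRef type in place of String. A sketch of a custom parser under the new signature; converting the TermRef back to text via toString() is an assumption about the flex API, not something this diff shows:

    // Sketch only: assumes TermRef.toString() yields the term's text.
    FieldCache.LongParser parser = new FieldCache.LongParser() {
      public long parseLong(TermRef term) {
        return Long.parseLong(term.toString());
      }
    };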

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/QueryUtils.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/QueryUtils.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/QueryUtils.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/QueryUtils.java Tue Oct 13 20:44:51 2009
@@ -375,7 +375,6 @@
         this.scorer = scorer;
       }
       public void collect(int doc) throws IOException {
-        //System.out.println("doc="+doc);
         float score = scorer.score();
         try {
           

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestBoolean2.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestBoolean2.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestBoolean2.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestBoolean2.java Tue Oct 13 20:44:51 2009
@@ -146,6 +146,7 @@
   }
 
   public void testRandomQueries() throws Exception {
+    // nocommit -- remove 17 seed
     Random rnd = newRandom();
 
     String[] vals = {"w1","w2","w3","w4","w5","xx","yy","zzz"};

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestFieldCache.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestFieldCache.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestFieldCache.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestFieldCache.java Tue Oct 13 20:44:51 2009
@@ -98,7 +98,7 @@
     assertSame("Second request with explicit parser return same array", bytes, cache.getBytes(reader, "theByte", FieldCache.DEFAULT_BYTE_PARSER));
     assertTrue("bytes Size: " + bytes.length + " is not: " + NUM_DOCS, bytes.length == NUM_DOCS);
     for (int i = 0; i < bytes.length; i++) {
-      assertTrue(bytes[i] + " does not equal: " + (Byte.MAX_VALUE - i), bytes[i] == (byte) (Byte.MAX_VALUE - i));
+      assertTrue(bytes[i] + " does not equal: " + (Byte.MAX_VALUE - i) + " doc=" + i, bytes[i] == (byte) (Byte.MAX_VALUE - i));
 
     }
     

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java Tue Oct 13 20:44:51 2009
@@ -22,7 +22,7 @@
 import org.apache.lucene.index.TermEnum;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
@@ -46,7 +46,7 @@
     }
 
     public void testPhrasePrefix() throws IOException {
-        RAMDirectory indexStore = new RAMDirectory();
+        MockRAMDirectory indexStore = new MockRAMDirectory();
         IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
         add("blueberry pie", writer);
         add("blueberry strudel", writer);
@@ -102,6 +102,8 @@
                 termsWithPrefix.add(te.term());
             }
         } while (te.next());
+        ir.close();
+
         query3.add((Term[])termsWithPrefix.toArray(new Term[0]));
         query3.add(new Term("body", "pizza"));
 
@@ -125,7 +127,6 @@
         
         searcher.close();
         indexStore.close();
-
     }
     
     private void add(String s, IndexWriter writer) throws IOException {
@@ -140,7 +141,7 @@
       // and all terms required.
       // The contained PhraseMultiQuery must contain exactly one term array.
 
-      RAMDirectory indexStore = new RAMDirectory();
+      MockRAMDirectory indexStore = new MockRAMDirectory();
       IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
       add("blueberry pie", writer);
       add("blueberry chewing gum", writer);
@@ -165,10 +166,11 @@
 
       assertEquals("Wrong number of hits", 2, hits.length);
       searcher.close();
+      indexStore.close();
   }
     
   public void testPhrasePrefixWithBooleanQuery() throws IOException {
-    RAMDirectory indexStore = new RAMDirectory();
+    MockRAMDirectory indexStore = new MockRAMDirectory();
     IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(new HashSet(0)), true, IndexWriter.MaxFieldLength.LIMITED);
     add("This is a test", "object", writer);
     add("a note", "note", writer);
@@ -189,6 +191,23 @@
     ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs;
     assertEquals("Wrong number of hits", 0, hits.length);
     searcher.close();
+    indexStore.close();
+  }
+
+  public void testNoDocs() throws Exception {
+    MockRAMDirectory indexStore = new MockRAMDirectory();
+    IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(new HashSet(0)), true, IndexWriter.MaxFieldLength.LIMITED);
+    add("a note", "note", writer);
+    writer.close();
+
+    IndexSearcher searcher = new IndexSearcher(indexStore, true);
+
+    MultiPhraseQuery q = new MultiPhraseQuery();
+    q.add(new Term("body", "a"));
+    q.add(new Term[] { new Term("body", "nope"), new Term("body", "nope") });
+    assertEquals("Wrong number of hits", 0, searcher.search(q, null, 1).totalHits);
+    searcher.close();
+    indexStore.close();
   }
   
   public void testHashCodeAndEquals(){
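
Both testPhrasePrefix and the new testNoDocs above build a MultiPhraseQuery one position at a time, where each position holds either a single Term or an array of alternative Terms. A minimal sketch of that pattern, assuming an IndexSearcher over a "body" field is already open:

    // Match "blueberry" followed by either "pie" or "strudel" at the next position.
    MultiPhraseQuery q = new MultiPhraseQuery();
    q.add(new Term("body", "blueberry"));
    q.add(new Term[] {
        new Term("body", "pie"),
        new Term("body", "strudel")
    });
    ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs;
    System.out.println("hits: " + hits.length);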

Modified: lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestPositionIncrement.java
URL: http://svn.apache.org/viewvc/lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestPositionIncrement.java?rev=824918&r1=824917&r2=824918&view=diff
==============================================================================
--- lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestPositionIncrement.java (original)
+++ lucene/java/branches/flex_1458/src/test/org/apache/lucene/search/TestPositionIncrement.java Tue Oct 13 20:44:51 2009
@@ -58,6 +58,8 @@
  */
 public class TestPositionIncrement extends BaseTokenStreamTestCase {
 
+  final static boolean VERBOSE = false;
+
   public void testSetPosition() throws Exception {
     Analyzer analyzer = new Analyzer() {
       public TokenStream tokenStream(String fieldName, Reader reader) {
@@ -232,6 +234,7 @@
   
   public void testPayloadsPos0() throws Exception {
     for(int x=0;x<2;x++) {
+      
       Directory dir = new MockRAMDirectory();
       IndexWriter writer = new IndexWriter(dir,
                                            new TestPayloadAnalyzer(), true,
@@ -277,16 +280,23 @@
 
       count = 0;
       boolean sawZero = false;
-      //System.out.println("\ngetPayloadSpans test");
+      if (VERBOSE) {
+        System.out.println("\ngetPayloadSpans test");
+      }
       Spans pspans = snq.getSpans(is.getIndexReader());
       while (pspans.next()) {
-        //System.out.println(pspans.doc() + " - " + pspans.start() + " - "+ pspans.end());
+        if (VERBOSE) {
+          System.out.println("doc " + pspans.doc() + ": span " + pspans.start() + " to "+ pspans.end());
+        }
         Collection payloads = pspans.getPayload();
         sawZero |= pspans.start() == 0;
         for (Iterator it = payloads.iterator(); it.hasNext();) {
           count++;
-          it.next();
-          //System.out.println(new String((byte[]) it.next()));
+          if (!VERBOSE) {
+            it.next();
+          } else {
+            System.out.println("  payload: " + new String((byte[]) it.next()));
+          }
         }
       }
       assertEquals(5, count);
@@ -364,7 +374,9 @@
       }
       posIncrAttr.setPositionIncrement(posIncr);
       pos += posIncr;
-      // System.out.println("term=" + termAttr.term() + " pos=" + pos);
+      if (TestPositionIncrement.VERBOSE) {
+        System.out.println("term=" + termAttr.term() + " pos=" + pos);
+      }
       i++;
       return true;
     } else {


