lucene-commits mailing list archives

From: rm...@apache.org
Subject: svn commit: r1201715 [1/2] - in /lucene/dev/branches/lucene2621: ./ dev-tools/idea/lucene/contrib/ lucene/ lucene/contrib/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/ lucene/src/java/org/apache/lucene/document/ lucene/src/java/org/apache/l...
Date: Mon, 14 Nov 2011 14:15:22 GMT
Author: rmuir
Date: Mon Nov 14 14:15:19 2011
New Revision: 1201715

URL: http://svn.apache.org/viewvc?rev=1201715&view=rev
Log:
merge trunk (1201394:1201711)

Added:
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/CachePropertyUtil.java
      - copied unchanged from r1201711, lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/CachePropertyUtil.java
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHCache.java
      - copied unchanged from r1201711, lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHCache.java
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHCacheSupport.java
      - copied unchanged from r1201711, lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHCacheSupport.java
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHWriterBase.java
      - copied unchanged from r1201711, lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHWriterBase.java
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SortedMapBackedCache.java
      - copied unchanged from r1201711, lucene/dev/trunk/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SortedMapBackedCache.java
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/dataimport-cache-ephemeral.xml
      - copied unchanged from r1201711, lucene/dev/trunk/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/dataimport-cache-ephemeral.xml
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDIHCacheTestCase.java
      - copied unchanged from r1201711, lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDIHCacheTestCase.java
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestEphemeralCache.java
      - copied unchanged from r1201711, lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestEphemeralCache.java
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java
      - copied unchanged from r1201711, lucene/dev/trunk/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java
Modified:
    lucene/dev/branches/lucene2621/   (props changed)
    lucene/dev/branches/lucene2621/dev-tools/idea/lucene/contrib/   (props changed)
    lucene/dev/branches/lucene2621/lucene/   (props changed)
    lucene/dev/branches/lucene2621/lucene/CHANGES.txt
    lucene/dev/branches/lucene2621/lucene/contrib/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/TestSpanRegexQuery.java   (props changed)
    lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/document/FieldType.java
    lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/codecs/lucene3x/TermInfosReaderIndex.java   (props changed)
    lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/Bytes.java
    lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/FixedSortedBytesImpl.java
    lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/IndexDocValues.java
    lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/SortedBytesMergeUtils.java
    lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/VarSortedBytesImpl.java
    lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/search/FieldComparator.java
    lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/search/SortField.java
    lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/search/TestSort.java
    lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/FacetTestBase.java
    lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/search/CategoryListIteratorTest.java
    lucene/dev/branches/lucene2621/modules/queryparser/src/test/org/apache/lucene/queryparser/xml/builders/TestNumericRangeFilterBuilder.java   (props changed)
    lucene/dev/branches/lucene2621/solr/   (props changed)
    lucene/dev/branches/lucene2621/solr/CHANGES.txt   (contents, props changed)
    lucene/dev/branches/lucene2621/solr/LICENSE.txt   (props changed)
    lucene/dev/branches/lucene2621/solr/NOTICE.txt   (props changed)
    lucene/dev/branches/lucene2621/solr/README.txt   (props changed)
    lucene/dev/branches/lucene2621/solr/build.xml   (props changed)
    lucene/dev/branches/lucene2621/solr/client/   (props changed)
    lucene/dev/branches/lucene2621/solr/common-build.xml   (props changed)
    lucene/dev/branches/lucene2621/solr/contrib/   (props changed)
    lucene/dev/branches/lucene2621/solr/contrib/clustering/src/test-files/   (props changed)
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler-extras/src/java/   (props changed)
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/   (props changed)
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/CachedSqlEntityProcessor.java
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHWriter.java
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataConfig.java
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/EntityProcessorBase.java
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrWriter.java
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/ThreadedContext.java
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/   (props changed)
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test-files/dih/solr/conf/dataimport-schema.xml
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test/org/   (props changed)
    lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestCachedSqlEntityProcessor.java
    lucene/dev/branches/lucene2621/solr/contrib/uima/src/java/   (props changed)
    lucene/dev/branches/lucene2621/solr/contrib/uima/src/test-files/   (props changed)
    lucene/dev/branches/lucene2621/solr/core/   (props changed)
    lucene/dev/branches/lucene2621/solr/core/src/java/   (props changed)
    lucene/dev/branches/lucene2621/solr/core/src/test/   (props changed)
    lucene/dev/branches/lucene2621/solr/core/src/test-files/solr/conf/schema-numeric.xml
    lucene/dev/branches/lucene2621/solr/core/src/test/org/apache/solr/schema/NumericFieldsTest.java
    lucene/dev/branches/lucene2621/solr/dev-tools/   (props changed)
    lucene/dev/branches/lucene2621/solr/example/   (props changed)
    lucene/dev/branches/lucene2621/solr/example/solr/conf/schema.xml
    lucene/dev/branches/lucene2621/solr/lib/   (props changed)
    lucene/dev/branches/lucene2621/solr/scripts/   (props changed)
    lucene/dev/branches/lucene2621/solr/site/   (props changed)
    lucene/dev/branches/lucene2621/solr/site-src/   (props changed)
    lucene/dev/branches/lucene2621/solr/solrj/   (props changed)
    lucene/dev/branches/lucene2621/solr/solrj/src/java/   (props changed)
    lucene/dev/branches/lucene2621/solr/solrj/src/test/org/apache/solr/client/   (props changed)
    lucene/dev/branches/lucene2621/solr/solrj/src/test/org/apache/solr/client/solrj/   (props changed)
    lucene/dev/branches/lucene2621/solr/solrj/src/test/org/apache/solr/common/   (props changed)
    lucene/dev/branches/lucene2621/solr/test-framework/   (props changed)
    lucene/dev/branches/lucene2621/solr/testlogging.properties   (props changed)
    lucene/dev/branches/lucene2621/solr/webapp/   (props changed)

Modified: lucene/dev/branches/lucene2621/lucene/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/CHANGES.txt?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/CHANGES.txt (original)
+++ lucene/dev/branches/lucene2621/lucene/CHANGES.txt Mon Nov 14 14:15:19 2011
@@ -719,7 +719,7 @@ Bug fixes
   (Robert Muir)
   
 * LUCENE-3548: Fix CharsRef#append to extend length of the existing char[]
-  and presever existing chars. (Simon Willnauer) 
+  and preserve existing chars. (Simon Willnauer) 
 
 API Changes
 

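[Editor's note] The CHANGES.txt entry above refers to LUCENE-3548. A minimal sketch (not part of the commit) of the fixed contract, assuming the CharsRef(String) constructor and the append(char[], int, int) signature present in this branch:

    import org.apache.lucene.util.CharsRef;

    public class CharsRefAppendDemo {
      public static void main(String[] args) {
        CharsRef ref = new CharsRef("abc");
        char[] extra = "def".toCharArray();
        // After LUCENE-3548, append grows the backing char[] when needed
        // and preserves the existing characters instead of overwriting them.
        ref.append(extra, 0, extra.length);
        System.out.println(ref);   // expected: abcdef
      }
    }
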
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/document/FieldType.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/document/FieldType.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/document/FieldType.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/document/FieldType.java Mon Nov 14 14:15:19 2011
@@ -145,33 +145,33 @@ public class FieldType implements Indexa
       if (result.length() > 0)
         result.append(",");
       result.append("indexed");
-    }
-    if (tokenized()) {
-      if (result.length() > 0)
-        result.append(",");
-      result.append("tokenized");
-    }
-    if (storeTermVectors()) {
-      if (result.length() > 0)
-        result.append(",");
-      result.append("termVector");
-    }
-    if (storeTermVectorOffsets()) {
-      if (result.length() > 0)
-        result.append(",");
-      result.append("termVectorOffsets");
-    }
-    if (storeTermVectorPositions()) {
-      if (result.length() > 0)
-        result.append(",");
-      result.append("termVectorPosition");
-    }
-    if (omitNorms()) {
-      result.append(",omitNorms");
-    }
-    if (indexOptions != IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) {
-      result.append(",indexOptions=");
-      result.append(indexOptions);
+      if (tokenized()) {
+        if (result.length() > 0)
+          result.append(",");
+        result.append("tokenized");
+      }
+      if (storeTermVectors()) {
+        if (result.length() > 0)
+          result.append(",");
+        result.append("termVector");
+      }
+      if (storeTermVectorOffsets()) {
+        if (result.length() > 0)
+          result.append(",");
+        result.append("termVectorOffsets");
+      }
+      if (storeTermVectorPositions()) {
+        if (result.length() > 0)
+          result.append(",");
+        result.append("termVectorPosition");
+      }
+      if (omitNorms()) {
+        result.append(",omitNorms");
+      }
+      if (indexOptions != IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) {
+        result.append(",indexOptions=");
+        result.append(indexOptions);
+      }
     }
     
     return result.toString();

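[Editor's note] The FieldType#toString rework above nests the tokenized/termVector/omitNorms/indexOptions flags inside the indexed() check, so they are only reported for indexed fields. A minimal sketch (not part of the commit) of the resulting behavior, assuming setters such as setStored/setIndexed/setTokenized exist in this snapshot of FieldType:

    import org.apache.lucene.document.FieldType;

    public class FieldTypeToStringDemo {
      public static void main(String[] args) {
        FieldType storedOnly = new FieldType();
        storedOnly.setStored(true);          // not indexed: index-related flags are omitted
        System.out.println(storedOnly);      // e.g. "stored"

        FieldType indexed = new FieldType();
        indexed.setIndexed(true);            // hypothetical setter names for this snapshot
        indexed.setTokenized(true);
        System.out.println(indexed);         // e.g. "indexed,tokenized"
      }
    }
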
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/Bytes.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/Bytes.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/Bytes.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/Bytes.java Mon Nov 14 14:15:19 2011
@@ -32,17 +32,17 @@ import org.apache.lucene.store.IOContext
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.ArrayUtil;
+import org.apache.lucene.util.ByteBlockPool.Allocator;
+import org.apache.lucene.util.ByteBlockPool.DirectTrackingAllocator;
 import org.apache.lucene.util.ByteBlockPool;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefHash.TrackingDirectBytesStartArray;
 import org.apache.lucene.util.BytesRefHash;
 import org.apache.lucene.util.CodecUtil;
 import org.apache.lucene.util.Counter;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.PagedBytes;
 import org.apache.lucene.util.RamUsageEstimator;
-import org.apache.lucene.util.ByteBlockPool.Allocator;
-import org.apache.lucene.util.ByteBlockPool.DirectTrackingAllocator;
-import org.apache.lucene.util.BytesRefHash.TrackingDirectBytesStartArray;
 import org.apache.lucene.util.packed.PackedInts;
 
 /**
@@ -586,7 +586,11 @@ public final class Bytes {
       this.idxIn = idxIn;
       ordToOffsetIndex = hasOffsets ? PackedInts.getReader(idxIn) : null; 
       docToOrdIndex = PackedInts.getReader(idxIn);
+    }
 
+    @Override
+    public PackedInts.Reader getDocToOrd() {
+      return docToOrdIndex;
     }
     
     @Override

Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/FixedSortedBytesImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/FixedSortedBytesImpl.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/FixedSortedBytesImpl.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/FixedSortedBytesImpl.java Mon Nov 14 14:15:19 2011
@@ -195,6 +195,11 @@ class FixedSortedBytesImpl {
     }
 
     @Override
+    public PackedInts.Reader getDocToOrd() {
+      return docToOrdIndex;
+    }
+
+    @Override
     public BytesRef getByOrd(int ord, BytesRef bytesRef) {
       try {
         datIn.seek(basePointer + size * ord);

Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/IndexDocValues.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/IndexDocValues.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/IndexDocValues.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/IndexDocValues.java Mon Nov 14 14:15:19 2011
@@ -26,6 +26,7 @@ import org.apache.lucene.index.FieldsEnu
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.codecs.DocValuesFormat;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.packed.PackedInts;
 
 /**
  * {@link IndexDocValues} provides a dense per-document typed storage for fast
@@ -223,7 +224,7 @@ public abstract class IndexDocValues imp
       return null;
     }
   }
-  
+
   /**
    * A sorted variant of {@link Source} for <tt>byte[]</tt> values per document.
    * <p>
@@ -258,6 +259,18 @@ public abstract class IndexDocValues imp
     public abstract BytesRef getByOrd(int ord, BytesRef bytesRef);
 
     /**
+     * Returns the PackedInts.Reader impl that maps document to ord.
+     */
+    public abstract PackedInts.Reader getDocToOrd();
+    
+    /**
+     * Returns the comparator used to order the BytesRefs.
+     */
+    public Comparator<BytesRef> getComparator() {
+      return comparator;
+    }
+
+    /**
      * Performs a lookup by value.
      * 
      * @param value
@@ -304,4 +317,98 @@ public abstract class IndexDocValues imp
      */
     public abstract int getValueCount();
   }
+
+  /** Returns a Source that always returns default (missing)
+   *  values for all documents. */
+  public static Source getDefaultSource(final ValueType type) {
+    return new Source(type) {
+      @Override
+      public long getInt(int docID) {
+        return 0;
+      }
+
+      @Override
+      public double getFloat(int docID) {
+        return 0.0;
+      }
+
+      @Override
+      public BytesRef getBytes(int docID, BytesRef ref) {
+        ref.length = 0;
+        return ref;
+      }
+    };
+  }
+
+  /** Returns a SortedSource that always returns default (missing)
+   *  values for all documents. */
+  public static SortedSource getDefaultSortedSource(final ValueType type, final int size) {
+
+    final PackedInts.Reader docToOrd = new PackedInts.Reader() {
+      @Override
+      public long get(int index) {
+        return 0;
+      }
+
+      @Override
+      public int getBitsPerValue() {
+        return 0;
+      }
+
+      @Override
+      public int size() {
+        return size;
+      }
+
+      @Override
+      public boolean hasArray() {
+        return false;
+      }
+
+      @Override
+      public Object getArray() {
+        return null;
+      }
+    };
+
+    return new SortedSource(type, BytesRef.getUTF8SortedAsUnicodeComparator()) {
+
+      @Override
+      public BytesRef getBytes(int docID, BytesRef ref) {
+        ref.length = 0;
+        return ref;
+      }
+
+      @Override
+      public int ord(int docID) {
+        return 0;
+      }
+
+      @Override
+      public BytesRef getByOrd(int ord, BytesRef bytesRef) {
+        assert ord == 0;
+        bytesRef.length = 0;
+        return bytesRef;
+      }
+
+      @Override
+      public PackedInts.Reader getDocToOrd() {
+        return docToOrd;
+      }
+
+      @Override
+      public int getByValue(BytesRef value, BytesRef spare) {
+        if (value.length == 0) {
+          return 0;
+        } else {
+          return -1;
+        }
+      }
+
+      @Override
+        public int getValueCount() {
+        return 1;
+      }
+    };
+  }
 }

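[Editor's note] The new getDefaultSource/getDefaultSortedSource factories above let sort code treat a segment that has no values for a field as if every document held the default (missing) value. A minimal sketch (not part of the commit) of the fallback pattern the FieldComparator changes later in this diff rely on; the class, method, and parameter names are purely illustrative:

    import java.io.IOException;

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.values.IndexDocValues;
    import org.apache.lucene.index.values.IndexDocValues.Source;
    import org.apache.lucene.index.values.ValueType;

    class DefaultSourceDemo {
      /** Reads a per-document double, treating a segment without doc values as all zeros. */
      static double floatValue(IndexReader segmentReader, String field, int docID) throws IOException {
        IndexDocValues dv = segmentReader.docValues(field);
        Source source = dv != null
            ? dv.getSource()
            : IndexDocValues.getDefaultSource(ValueType.FLOAT_64); // every doc reads as 0.0
        return source.getFloat(docID);
      }
    }
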
Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/SortedBytesMergeUtils.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/SortedBytesMergeUtils.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/SortedBytesMergeUtils.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/SortedBytesMergeUtils.java Mon Nov 14 14:15:19 2011
@@ -300,6 +300,11 @@ final class SortedBytesMergeUtils {
     }
 
     @Override
+    public PackedInts.Reader getDocToOrd() {
+      return null;
+    }
+
+    @Override
     public int getValueCount() {
       return 1;
     }

Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/VarSortedBytesImpl.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/VarSortedBytesImpl.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/VarSortedBytesImpl.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/index/values/VarSortedBytesImpl.java Mon Nov 14 14:15:19 2011
@@ -215,6 +215,11 @@ final class VarSortedBytesImpl {
     }
 
     @Override
+    public PackedInts.Reader getDocToOrd() {
+      return docToOrdIndex;
+    }
+
+    @Override
     public BytesRef getByOrd(int ord, BytesRef bytesRef) {
       try {
         final long offset = ordToOffsetIndex.get(ord);

Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/search/FieldComparator.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/search/FieldComparator.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/search/FieldComparator.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/search/FieldComparator.java Mon Nov 14 14:15:19 2011
@@ -18,10 +18,14 @@ package org.apache.lucene.search;
  */
 
 import java.io.IOException;
+import java.util.Comparator;
 
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.values.IndexDocValues.SortedSource;
 import org.apache.lucene.index.values.IndexDocValues.Source;
 import org.apache.lucene.index.values.IndexDocValues;
+import org.apache.lucene.index.values.ValueType;
 import org.apache.lucene.search.FieldCache.ByteParser;
 import org.apache.lucene.search.FieldCache.DocTerms;
 import org.apache.lucene.search.FieldCache.DocTermsIndex;
@@ -399,6 +403,8 @@ public abstract class FieldComparator<T>
       final IndexDocValues docValues = context.reader.docValues(field);
       if (docValues != null) {
         currentReaderValues = docValues.getSource(); 
+      } else {
+        currentReaderValues = IndexDocValues.getDefaultSource(ValueType.FLOAT_64);
       }
       return this;
     }
@@ -690,6 +696,8 @@ public abstract class FieldComparator<T>
       IndexDocValues docValues = context.reader.docValues(field);
       if (docValues != null) {
         currentReaderValues = docValues.getSource();
+      } else {
+        currentReaderValues = IndexDocValues.getDefaultSource(ValueType.FIXED_INTS_64);
       }
       return this;
     }
@@ -911,30 +919,53 @@ public abstract class FieldComparator<T>
    *  than {@link TermValComparator}.  For very small
    *  result sets it may be slower. */
   public static final class TermOrdValComparator extends FieldComparator<BytesRef> {
-    /** @lucene.internal */
+    /* Ords for each slot.
+       @lucene.internal */
     final int[] ords;
-    /** @lucene.internal */
+
+    /* Values for each slot.
+       @lucene.internal */
     final BytesRef[] values;
-    /** @lucene.internal */
+
+    /* Which reader last copied a value into the slot. When
+       we compare two slots, we just compare-by-ord if the
+       readerGen is the same; else we must compare the
+       values (slower).
+       @lucene.internal */
     final int[] readerGen;
 
-    /** @lucene.internal */
+    /* Gen of current reader we are on.
+       @lucene.internal */
     int currentReaderGen = -1;
-    private DocTermsIndex termsIndex;
+
+    /* Current reader's doc ord/values.
+       @lucene.internal */
+    DocTermsIndex termsIndex;
+
     private final String field;
 
-    /** @lucene.internal */
+    /* Bottom slot, or -1 if queue isn't full yet
+       @lucene.internal */
     int bottomSlot = -1;
-    /** @lucene.internal */
+
+    /* Bottom ord (same as ords[bottomSlot] once bottomSlot
+       is set).  Cached for faster compares.
+       @lucene.internal */
     int bottomOrd;
-    /** @lucene.internal */
+
+    /* True if current bottom slot matches the current
+       reader.
+       @lucene.internal */
     boolean bottomSameReader;
-    /** @lucene.internal */
+
+    /* Bottom value (same as values[bottomSlot] once
+       bottomSlot is set).  Cached for faster compares.
+      @lucene.internal */
     BytesRef bottomValue;
-    /** @lucene.internal */
+
     final BytesRef tempBR = new BytesRef();
 
-    public TermOrdValComparator(int numHits, String field, int sortPos, boolean reversed) {
+    public TermOrdValComparator(int numHits, String field) {
       ords = new int[numHits];
       values = new BytesRef[numHits];
       readerGen = new int[numHits];
@@ -1325,6 +1356,396 @@ public abstract class FieldComparator<T>
     }
   }
 
+  /** Sorts by field's natural Term sort order, using
+   *  ordinals; this is just like {@link
+   *  TermOrdValComparator} except it uses DocValues to
+   *  retrieve the sort ords saved during indexing. */
+  public static final class TermOrdValDocValuesComparator extends FieldComparator<BytesRef> {
+    /* Ords for each slot.
+       @lucene.internal */
+    final int[] ords;
+
+    /* Values for each slot.
+       @lucene.internal */
+    final BytesRef[] values;
+
+    /* Which reader last copied a value into the slot. When
+       we compare two slots, we just compare-by-ord if the
+       readerGen is the same; else we must compare the
+       values (slower).
+       @lucene.internal */
+    final int[] readerGen;
+
+    /* Gen of current reader we are on.
+       @lucene.internal */
+    int currentReaderGen = -1;
+
+    /* Current reader's doc ord/values.
+       @lucene.internal */
+    SortedSource termsIndex;
+
+    /* Comparator for comparing by value.
+       @lucene.internal */
+    Comparator<BytesRef> comp;
+
+    private final String field;
+
+    /* Bottom slot, or -1 if queue isn't full yet
+       @lucene.internal */
+    int bottomSlot = -1;
+
+    /* Bottom ord (same as ords[bottomSlot] once bottomSlot
+       is set).  Cached for faster compares.
+       @lucene.internal */
+    int bottomOrd;
+
+    /* True if current bottom slot matches the current
+       reader.
+       @lucene.internal */
+    boolean bottomSameReader;
+
+    /* Bottom value (same as values[bottomSlot] once
+       bottomSlot is set).  Cached for faster compares.
+      @lucene.internal */
+    BytesRef bottomValue;
+
+    /** @lucene.internal */
+    final BytesRef tempBR = new BytesRef();
+
+    public TermOrdValDocValuesComparator(int numHits, String field) {
+      ords = new int[numHits];
+      values = new BytesRef[numHits];
+      readerGen = new int[numHits];
+      this.field = field;
+    }
+
+    @Override
+    public int compare(int slot1, int slot2) {
+      if (readerGen[slot1] == readerGen[slot2]) {
+        return ords[slot1] - ords[slot2];
+      }
+
+      final BytesRef val1 = values[slot1];
+      final BytesRef val2 = values[slot2];
+      if (val1 == null) {
+        if (val2 == null) {
+          return 0;
+        }
+        return -1;
+      } else if (val2 == null) {
+        return 1;
+      }
+      return comp.compare(val1, val2);
+    }
+
+    @Override
+    public int compareBottom(int doc) {
+      throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void copy(int slot, int doc) {
+      throw new UnsupportedOperationException();
+    }
+
+    // TODO: would be nice to share these specialized impls
+    // w/ TermOrdValComparator
+
+    /** Base class for specialized (per bit width of the
+     * ords) per-segment comparator.  NOTE: this is messy;
+     * we do this only because hotspot can't reliably inline
+     * the underlying array access when looking up doc->ord
+     * @lucene.internal
+     */
+    abstract class PerSegmentComparator extends FieldComparator<BytesRef> {
+      
+      @Override
+      public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
+        return TermOrdValDocValuesComparator.this.setNextReader(context);
+      }
+
+      @Override
+      public int compare(int slot1, int slot2) {
+        return TermOrdValDocValuesComparator.this.compare(slot1, slot2);
+      }
+
+      @Override
+      public void setBottom(final int bottom) {
+        TermOrdValDocValuesComparator.this.setBottom(bottom);
+      }
+
+      @Override
+      public BytesRef value(int slot) {
+        return TermOrdValDocValuesComparator.this.value(slot);
+      }
+
+      @Override
+      public int compareValues(BytesRef val1, BytesRef val2) {
+        assert val1 != null;
+        assert val2 != null;
+        return comp.compare(val1, val2);
+      }
+    }
+
+    // Used per-segment when bit width of doc->ord is 8:
+    private final class ByteOrdComparator extends PerSegmentComparator {
+      private final byte[] readerOrds;
+      private final SortedSource termsIndex;
+      private final int docBase;
+
+      public ByteOrdComparator(byte[] readerOrds, SortedSource termsIndex, int docBase) {
+        this.readerOrds = readerOrds;
+        this.termsIndex = termsIndex;
+        this.docBase = docBase;
+      }
+
+      @Override
+      public int compareBottom(int doc) {
+        assert bottomSlot != -1;
+        if (bottomSameReader) {
+          // ord is precisely comparable, even in the equal case
+          return bottomOrd - (readerOrds[doc]&0xFF);
+        } else {
+          // ord is only approx comparable: if they are not
+          // equal, we can use that; if they are equal, we
+          // must fallback to compare by value
+          final int order = readerOrds[doc]&0xFF;
+          final int cmp = bottomOrd - order;
+          if (cmp != 0) {
+            return cmp;
+          }
+
+          termsIndex.getByOrd(order, tempBR);
+          return comp.compare(bottomValue, tempBR);
+        }
+      }
+
+      @Override
+      public void copy(int slot, int doc) {
+        final int ord = readerOrds[doc]&0xFF;
+        ords[slot] = ord;
+        if (values[slot] == null) {
+          values[slot] = new BytesRef();
+        }
+        termsIndex.getByOrd(ord, values[slot]);
+        readerGen[slot] = currentReaderGen;
+      }
+    }
+
+    // Used per-segment when bit width of doc->ord is 16:
+    private final class ShortOrdComparator extends PerSegmentComparator {
+      private final short[] readerOrds;
+      private final SortedSource termsIndex;
+      private final int docBase;
+
+      public ShortOrdComparator(short[] readerOrds, SortedSource termsIndex, int docBase) {
+        this.readerOrds = readerOrds;
+        this.termsIndex = termsIndex;
+        this.docBase = docBase;
+      }
+
+      @Override
+      public int compareBottom(int doc) {
+        assert bottomSlot != -1;
+        if (bottomSameReader) {
+          // ord is precisely comparable, even in the equal case
+          return bottomOrd - (readerOrds[doc]&0xFFFF);
+        } else {
+          // ord is only approx comparable: if they are not
+          // equal, we can use that; if they are equal, we
+          // must fallback to compare by value
+          final int order = readerOrds[doc]&0xFFFF;
+          final int cmp = bottomOrd - order;
+          if (cmp != 0) {
+            return cmp;
+          }
+
+          termsIndex.getByOrd(order, tempBR);
+          return comp.compare(bottomValue, tempBR);
+        }
+      }
+
+      @Override
+      public void copy(int slot, int doc) {
+        final int ord = readerOrds[doc]&0xFFFF;
+        ords[slot] = ord;
+        if (values[slot] == null) {
+          values[slot] = new BytesRef();
+        }
+        termsIndex.getByOrd(ord, values[slot]);
+        readerGen[slot] = currentReaderGen;
+      }
+    }
+
+    // Used per-segment when bit width of doc->ord is 32:
+    private final class IntOrdComparator extends PerSegmentComparator {
+      private final int[] readerOrds;
+      private final SortedSource termsIndex;
+      private final int docBase;
+
+      public IntOrdComparator(int[] readerOrds, SortedSource termsIndex, int docBase) {
+        this.readerOrds = readerOrds;
+        this.termsIndex = termsIndex;
+        this.docBase = docBase;
+      }
+
+      @Override
+      public int compareBottom(int doc) {
+        assert bottomSlot != -1;
+        if (bottomSameReader) {
+          // ord is precisely comparable, even in the equal case
+          return bottomOrd - readerOrds[doc];
+        } else {
+          // ord is only approx comparable: if they are not
+          // equal, we can use that; if they are equal, we
+          // must fallback to compare by value
+          final int order = readerOrds[doc];
+          final int cmp = bottomOrd - order;
+          if (cmp != 0) {
+            return cmp;
+          }
+          termsIndex.getByOrd(order, tempBR);
+          return comp.compare(bottomValue, tempBR);
+        }
+      }
+
+      @Override
+      public void copy(int slot, int doc) {
+        final int ord = readerOrds[doc];
+        ords[slot] = ord;
+        if (values[slot] == null) {
+          values[slot] = new BytesRef();
+        }
+        termsIndex.getByOrd(ord, values[slot]);
+        readerGen[slot] = currentReaderGen;
+      }
+    }
+
+    // Used per-segment when bit width is not a native array
+    // size (8, 16, 32):
+    private final class AnyOrdComparator extends PerSegmentComparator {
+      private final PackedInts.Reader readerOrds;
+      private final int docBase;
+
+      public AnyOrdComparator(PackedInts.Reader readerOrds, int docBase) {
+        this.readerOrds = readerOrds;
+        this.docBase = docBase;
+      }
+
+      @Override
+      public int compareBottom(int doc) {
+        assert bottomSlot != -1;
+        if (bottomSameReader) {
+          // ord is precisely comparable, even in the equal case
+          return bottomOrd - (int) readerOrds.get(doc);
+        } else {
+          // ord is only approx comparable: if they are not
+          // equal, we can use that; if they are equal, we
+          // must fallback to compare by value
+          final int order = (int) readerOrds.get(doc);
+          final int cmp = bottomOrd - order;
+          if (cmp != 0) {
+            return cmp;
+          }
+          termsIndex.getByOrd(order, tempBR);
+          return comp.compare(bottomValue, tempBR);
+        }
+      }
+
+      @Override
+      public void copy(int slot, int doc) {
+        final int ord = (int) readerOrds.get(doc);
+        ords[slot] = ord;
+        if (values[slot] == null) {
+          values[slot] = new BytesRef();
+        }
+        termsIndex.getByOrd(ord, values[slot]);
+        readerGen[slot] = currentReaderGen;
+      }
+    }
+
+    @Override
+    public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
+      final int docBase = context.docBase;
+
+      final IndexDocValues dv = context.reader.docValues(field);
+      if (dv == null) {
+        termsIndex = IndexDocValues.getDefaultSortedSource(ValueType.BYTES_VAR_SORTED, context.reader.maxDoc());
+      } else {
+        termsIndex = dv.getSource().asSortedSource();
+        if (termsIndex == null) {
+          termsIndex = IndexDocValues.getDefaultSortedSource(ValueType.BYTES_VAR_SORTED, context.reader.maxDoc());
+        }
+      }
+
+      comp = termsIndex.getComparator();
+
+      FieldComparator perSegComp = null;
+      final PackedInts.Reader docToOrd = termsIndex.getDocToOrd();
+      if (docToOrd.hasArray()) {
+        final Object arr = docToOrd.getArray();
+        assert arr != null;
+        if (arr instanceof byte[]) {
+          // 8 bit packed
+          perSegComp = new ByteOrdComparator((byte[]) arr, termsIndex, docBase);
+        } else if (arr instanceof short[]) {
+          // 16 bit packed
+          perSegComp = new ShortOrdComparator((short[]) arr, termsIndex, docBase);
+        } else if (arr instanceof int[]) {
+          // 32 bit packed
+          perSegComp = new IntOrdComparator((int[]) arr, termsIndex, docBase);
+        }
+      }
+
+      if (perSegComp == null) {
+        perSegComp = new AnyOrdComparator(docToOrd, docBase);
+      }
+        
+      currentReaderGen++;
+      if (bottomSlot != -1) {
+        perSegComp.setBottom(bottomSlot);
+      }
+
+      return perSegComp;
+    }
+    
+    @Override
+    public void setBottom(final int bottom) {
+      bottomSlot = bottom;
+
+      bottomValue = values[bottomSlot];
+      if (currentReaderGen == readerGen[bottomSlot]) {
+        bottomOrd = ords[bottomSlot];
+        bottomSameReader = true;
+      } else {
+        if (bottomValue == null) {
+          // 0 ord is null for all segments
+          assert ords[bottomSlot] == 0;
+          bottomOrd = 0;
+          bottomSameReader = true;
+          readerGen[bottomSlot] = currentReaderGen;
+        } else {
+          final int index = termsIndex.getByValue(bottomValue, tempBR);
+          if (index < 0) {
+            bottomOrd = -index - 2;
+            bottomSameReader = false;
+          } else {
+            bottomOrd = index;
+            // exact value match
+            bottomSameReader = true;
+            readerGen[bottomSlot] = currentReaderGen;            
+            ords[bottomSlot] = bottomOrd;
+          }
+        }
+      }
+    }
+
+    @Override
+    public BytesRef value(int slot) {
+      return values[slot];
+    }
+  }
+
   /** Sorts by field's natural Term sort order.  All
    *  comparisons are done using BytesRef.compareTo, which is
    *  slow for medium to large result sets but possibly
@@ -1410,6 +1831,74 @@ public abstract class FieldComparator<T>
     }
   }
 
+  /** Sorts by field's natural Term sort order.  All
+   *  comparisons are done using BytesRef.compareTo, which is
+   *  slow for medium to large result sets but possibly
+   *  very fast for very small results sets.  The BytesRef
+   *  values are obtained using {@link IndexReader#docValues}. */
+  public static final class TermValDocValuesComparator extends FieldComparator<BytesRef> {
+
+    private BytesRef[] values;
+    private Source docTerms;
+    private final String field;
+    private BytesRef bottom;
+    private final BytesRef tempBR = new BytesRef();
+
+    TermValDocValuesComparator(int numHits, String field) {
+      values = new BytesRef[numHits];
+      this.field = field;
+    }
+
+    @Override
+    public int compare(int slot1, int slot2) {
+      assert values[slot1] != null;
+      assert values[slot2] != null;
+      return values[slot1].compareTo(values[slot2]);
+    }
+
+    @Override
+    public int compareBottom(int doc) {
+      assert bottom != null;
+      return bottom.compareTo(docTerms.getBytes(doc, tempBR));
+    }
+
+    @Override
+    public void copy(int slot, int doc) {
+      if (values[slot] == null) {
+        values[slot] = new BytesRef();
+      }
+      docTerms.getBytes(doc, values[slot]);
+    }
+
+    @Override
+    public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
+      final IndexDocValues dv = context.reader.docValues(field);
+      if (dv != null) {
+        docTerms = dv.getSource();
+      } else {
+        docTerms = IndexDocValues.getDefaultSource(ValueType.BYTES_VAR_DEREF);
+      }
+      return this;
+    }
+    
+    @Override
+    public void setBottom(final int bottom) {
+      this.bottom = values[bottom];
+    }
+
+    @Override
+    public BytesRef value(int slot) {
+      return values[slot];
+    }
+
+    @Override
+    public int compareValues(BytesRef val1, BytesRef val2) {
+      assert val1 != null;
+      assert val2 != null;
+      return val1.compareTo(val2);
+    }
+  }
+
   final protected static int binarySearch(BytesRef br, DocTermsIndex a, BytesRef key) {
     return binarySearch(br, a, key, 1, a.numOrd()-1);
   }

Modified: lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/search/SortField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/search/SortField.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/search/SortField.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/java/org/apache/lucene/search/SortField.java Mon Nov 14 14:15:19 2011
@@ -254,6 +254,7 @@ public class SortField {
   @Override
   public String toString() {
     StringBuilder buffer = new StringBuilder();
+    String dv = useIndexValues ? " [dv]" : "";
     switch (type) {
       case SCORE:
         buffer.append("<score>");
@@ -264,11 +265,11 @@ public class SortField {
         break;
 
       case STRING:
-        buffer.append("<string: \"").append(field).append("\">");
+        buffer.append("<string" + dv + ": \"").append(field).append("\">");
         break;
 
       case STRING_VAL:
-        buffer.append("<string_val: \"").append(field).append("\">");
+        buffer.append("<string_val" + dv + ": \"").append(field).append("\">");
         break;
 
       case BYTE:
@@ -280,7 +281,7 @@ public class SortField {
         break;
 
       case INT:
-        buffer.append("<int: \"").append(field).append("\">");
+        buffer.append("<int" + dv + ": \"").append(field).append("\">");
         break;
 
       case LONG:
@@ -288,11 +289,11 @@ public class SortField {
         break;
 
       case FLOAT:
-        buffer.append("<float: \"").append(field).append("\">");
+        buffer.append("<float" + dv + ": \"").append(field).append("\">");
         break;
 
       case DOUBLE:
-        buffer.append("<double: \"").append(field).append("\">");
+        buffer.append("<double" + dv + ": \"").append(field).append("\">");
         break;
 
       case CUSTOM:
@@ -415,10 +416,18 @@ public class SortField {
       return comparatorSource.newComparator(field, numHits, sortPos, reverse);
 
     case STRING:
-      return new FieldComparator.TermOrdValComparator(numHits, field, sortPos, reverse);
+      if (useIndexValues) {
+        return new FieldComparator.TermOrdValDocValuesComparator(numHits, field);
+      } else {
+        return new FieldComparator.TermOrdValComparator(numHits, field);
+      }
 
     case STRING_VAL:
-      return new FieldComparator.TermValComparator(numHits, field);
+      if (useIndexValues) {
+        return new FieldComparator.TermValDocValuesComparator(numHits, field);
+      } else {
+        return new FieldComparator.TermValComparator(numHits, field);
+      }
 
     case REWRITEABLE:
       throw new IllegalStateException("SortField needs to be rewritten through Sort.rewrite(..) and SortField.rewrite(..)");

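[Editor's note] With the SortField changes above, a STRING or STRING_VAL sort routes to the new DocValues-backed comparators when the field's useIndexValues flag is set. A minimal sketch (not part of the commit) mirroring the useDocValues(...) helper the tests call below; the setUseIndexValues name is an assumption inferred from the useIndexValues flag in this diff, not a confirmed API:

    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;
    import org.apache.lucene.search.TopDocs;

    class DocValuesSortSketch {
      static TopDocs sortByDocValuesString(IndexSearcher searcher, String field) throws Exception {
        SortField sf = new SortField(field, SortField.Type.STRING);
        sf.setUseIndexValues(true); // hypothetical toggle; selects TermOrdValDocValuesComparator
        return searcher.search(new MatchAllDocsQuery(), 10, new Sort(sf));
      }
    }
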
Modified: lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/search/TestSort.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/search/TestSort.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/search/TestSort.java (original)
+++ lucene/dev/branches/lucene2621/lucene/src/test/org/apache/lucene/search/TestSort.java Mon Nov 14 14:15:19 2011
@@ -25,20 +25,21 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.IndexDocValuesField;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.IndexDocValuesField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.CorruptIndexException;
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.MultiReader;
 import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.SlowMultiReaderWrapper;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.codecs.Codec;
 import org.apache.lucene.index.values.ValueType;
@@ -81,6 +82,7 @@ public class TestSort extends LuceneTest
   public static void beforeClass() throws Exception {
     NUM_STRINGS = atLeast(6000);
   }
+
   // document data:
   // the tracer field is used to determine which document was hit
   // the contents field is used to search and sort by relevance
@@ -111,7 +113,7 @@ public class TestSort extends LuceneTest
   {   "c",   "m",            "5",           "5.0",           "5",    null,    null,              "5",           "5", "5", "5", null},
   {   "d",   "m",            null,          null,           null,    null,    null,              null,           null, null, null, null}
   }; 
-  
+
   // create an index of all the documents, or just the x, or just the y documents
   private IndexSearcher getIndex (boolean even, boolean odd)
   throws IOException {
@@ -119,6 +121,21 @@ public class TestSort extends LuceneTest
     dirs.add(indexStore);
     RandomIndexWriter writer = new RandomIndexWriter(random, indexStore, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 
+    final ValueType stringDVType;
+    if (dvStringSorted) {
+      // Index sorted
+      stringDVType = random.nextBoolean() ? ValueType.BYTES_VAR_SORTED : ValueType.BYTES_FIXED_SORTED;
+    } else {
+      // Index non-sorted
+      if (random.nextBoolean()) {
+        // Fixed
+        stringDVType = random.nextBoolean() ? ValueType.BYTES_FIXED_STRAIGHT : ValueType.BYTES_FIXED_DEREF;
+      } else {
+        // Var
+        stringDVType = random.nextBoolean() ? ValueType.BYTES_VAR_STRAIGHT : ValueType.BYTES_VAR_DEREF;
+      }
+    }
+
     FieldType ft1 = new FieldType();
     ft1.setStored(true);
     FieldType ft2 = new FieldType();
@@ -142,7 +159,13 @@ public class TestSort extends LuceneTest
           }
           doc.add(f);
         }
-        if (data[i][4] != null) doc.add (new StringField ("string",   data[i][4]));
+        if (data[i][4] != null) {
+          Field f = new StringField ("string", data[i][4]);
+          if (supportsDocValues) {
+            f = IndexDocValuesField.build(f, stringDVType);
+          }
+          doc.add(f);
+        }
         if (data[i][5] != null) doc.add (new StringField ("custom",   data[i][5]));
         if (data[i][6] != null) doc.add (new StringField ("i18n",     data[i][6]));
         if (data[i][7] != null) doc.add (new StringField ("long",     data[i][7]));
@@ -185,21 +208,52 @@ public class TestSort extends LuceneTest
             setMaxBufferedDocs(4).
             setMergePolicy(newLogMergePolicy(97))
     );
-    FieldType customType = new FieldType();
-    customType.setStored(true);
+    FieldType onlyStored = new FieldType();
+    onlyStored.setStored(true);
+    final int fixedLen = getRandomNumber(2, 8);
+    final int fixedLen2 = getRandomNumber(1, 4);
     for (int i=0; i<NUM_STRINGS; i++) {
-        Document doc = new Document();
-        String num = getRandomCharString(getRandomNumber(2, 8), 48, 52);
-        doc.add (new Field ("tracer", num, customType));
-        //doc.add (new Field ("contents", Integer.toString(i), Field.Store.NO, Field.Index.ANALYZED));
-        doc.add (new StringField ("string", num));
-        String num2 = getRandomCharString(getRandomNumber(1, 4), 48, 50);
-        doc.add (new StringField ("string2", num2));
-        doc.add (new Field ("tracer2", num2, customType));
-        for(IndexableField f : doc.getFields()) {
-          ((Field) f).setBoost(2.0f);
-        }
-        writer.addDocument (doc);
+      Document doc = new Document();
+      String num = getRandomCharString(getRandomNumber(2, 8), 48, 52);
+      doc.add (new Field ("tracer", num, onlyStored));
+      //doc.add (new Field ("contents", Integer.toString(i), Field.Store.NO, Field.Index.ANALYZED));
+      Field f = new StringField("string", num);
+      if (supportsDocValues) {
+        f = IndexDocValuesField.build(f, ValueType.BYTES_VAR_SORTED);
+      }
+      doc.add (f);
+      String num2 = getRandomCharString(getRandomNumber(1, 4), 48, 50);
+      f = new StringField ("string2", num2);
+      if (supportsDocValues) {
+        f = IndexDocValuesField.build(f, ValueType.BYTES_VAR_SORTED);
+      }
+      doc.add (f);
+      doc.add (new Field ("tracer2", num2, onlyStored));
+      for(IndexableField f2 : doc.getFields()) {
+        ((Field) f2).setBoost(2.0f);
+      }
+
+      String numFixed = getRandomCharString(fixedLen, 48, 52);
+      doc.add (new Field ("fixed_tracer", numFixed, onlyStored));
+      //doc.add (new Field ("contents", Integer.toString(i), Field.Store.NO, Field.Index.ANALYZED));
+      f = new StringField("string_fixed", numFixed);
+      if (supportsDocValues) {
+        f = IndexDocValuesField.build(f, ValueType.BYTES_FIXED_SORTED);
+      }
+      doc.add (f);
+      String num2Fixed = getRandomCharString(fixedLen2, 48, 52);
+      f = new StringField ("string2_fixed", num2Fixed);
+      if (supportsDocValues) {
+        f = IndexDocValuesField.build(f, ValueType.BYTES_FIXED_SORTED);
+      }
+      doc.add (f);
+      doc.add (new Field ("tracer2_fixed", num2Fixed, onlyStored));
+
+      for(IndexableField f2 : doc.getFields()) {
+        ((Field) f2).setBoost(2.0f);
+      }
+
+      writer.addDocument (doc);
     }
     //writer.forceMerge(1);
     //System.out.println(writer.getSegmentCount());
@@ -249,10 +303,15 @@ public class TestSort extends LuceneTest
     return getIndex (false, false);
   }
 
+  // Set to true if the DV "string" field is indexed as a
+  // sorted source:
+  private boolean dvStringSorted;
+  
   @Override
   public void setUp() throws Exception {
     super.setUp();
     
+    dvStringSorted = random.nextBoolean();
     full = getFullIndex();
     searchX = getXIndex();
     searchY = getYIndex();
@@ -339,6 +398,20 @@ public class TestSort extends LuceneTest
       sort.setSort (useDocValues(new SortField ("double", SortField.Type.DOUBLE)), SortField.FIELD_DOC );
       assertMatches (full, queryX, sort, "AGICE");
       assertMatches (full, queryY, sort, "DJHBF");
+
+      sort.setSort (useDocValues(new SortField ("string", getDVStringSortType())), SortField.FIELD_DOC );
+      assertMatches (full, queryX, sort, "AIGEC");
+      assertMatches (full, queryY, sort, "DJHFB");
+    }
+  }
+
+  private SortField.Type getDVStringSortType() {
+    if (dvStringSorted) {
+      // If you index as sorted source you can still sort by
+      // value instead:
+      return random.nextBoolean() ? SortField.Type.STRING : SortField.Type.STRING_VAL;
+    } else {
+      return SortField.Type.STRING_VAL;
     }
   }
   
@@ -405,42 +478,72 @@ public class TestSort extends LuceneTest
   /**
    * Test String sorting: small queue to many matches, multi field sort, reverse sort
    */
-  public void testStringSort() throws IOException {
-    ScoreDoc[] result = null;
-    IndexSearcher searcher = getFullStrings();
+  public void testStringSort() throws Exception {
+    // Normal string field, var length
     sort.setSort(
         new SortField("string", SortField.Type.STRING),
         new SortField("string2", SortField.Type.STRING, true),
         SortField.FIELD_DOC);
+    verifyStringSort(sort);
 
-    result = searcher.search(new MatchAllDocsQuery(), null, 500, sort).scoreDocs;
+    // Normal string field, fixed length
+    sort.setSort(
+        new SortField("string_fixed", SortField.Type.STRING),
+        new SortField("string2_fixed", SortField.Type.STRING, true),
+        SortField.FIELD_DOC);
+    verifyStringSort(sort);
 
+    // Doc values field, var length
+    assumeFalse("cannot work with preflex codec",
+                "Lucene3x".equals(Codec.getDefault().getName()));
+    sort.setSort(
+                 useDocValues(new SortField("string", getDVStringSortType())),
+                 useDocValues(new SortField("string2", getDVStringSortType(), true)),
+                 SortField.FIELD_DOC);
+    verifyStringSort(sort);
+
+    // Doc values field, fixed length
+    sort.setSort(
+                 useDocValues(new SortField("string_fixed", getDVStringSortType())),
+                 useDocValues(new SortField("string2_fixed", getDVStringSortType(), true)),
+                 SortField.FIELD_DOC);
+    verifyStringSort(sort);
+  }
+
+  private void verifyStringSort(Sort sort) throws Exception {
+    final IndexSearcher searcher = getFullStrings();
+    final ScoreDoc[] result = searcher.search(new MatchAllDocsQuery(), null, _TestUtil.nextInt(random, 500, searcher.getIndexReader().maxDoc()), sort).scoreDocs;
     StringBuilder buff = new StringBuilder();
     int n = result.length;
     String last = null;
     String lastSub = null;
     int lastDocId = 0;
     boolean fail = false;
+    final String fieldSuffix = sort.getSort()[0].getField().endsWith("_fixed") ? "_fixed" : "";
     for (int x = 0; x < n; ++x) {
       Document doc2 = searcher.doc(result[x].doc);
-      IndexableField[] v = doc2.getFields("tracer");
-      IndexableField[] v2 = doc2.getFields("tracer2");
+      IndexableField[] v = doc2.getFields("tracer" + fieldSuffix);
+      IndexableField[] v2 = doc2.getFields("tracer2" + fieldSuffix);
       for (int j = 0; j < v.length; ++j) {
+        buff.append(v[j] + "(" + v2[j] + ")(" + result[x].doc+")\n");
         if (last != null) {
           int cmp = v[j].stringValue().compareTo(last);
           if (!(cmp >= 0)) { // ensure first field is in order
             fail = true;
             System.out.println("fail:" + v[j] + " < " + last);
+            buff.append("  WRONG tracer\n");
           }
           if (cmp == 0) { // ensure second field is in reverse order
             cmp = v2[j].stringValue().compareTo(lastSub);
             if (cmp > 0) {
               fail = true;
               System.out.println("rev field fail:" + v2[j] + " > " + lastSub);
+              buff.append("  WRONG tracer2\n");
             } else if(cmp == 0) { // ensure docid is in order
               if (result[x].doc < lastDocId) {
                 fail = true;
                 System.out.println("doc fail:" + result[x].doc + " > " + lastDocId);
+                buff.append("  WRONG docID\n");
               }
             }
           }
@@ -448,11 +551,10 @@ public class TestSort extends LuceneTest
         last = v[j].stringValue();
         lastSub = v2[j].stringValue();
         lastDocId = result[x].doc;
-        buff.append(v[j] + "(" + v2[j] + ")(" + result[x].doc+") ");
       }
     }
-    if(fail) {
-      System.out.println("topn field1(field2)(docID):" + buff);
+    if (fail) {
+      System.out.println("topn field1(field2)(docID):\n" + buff);
     }
     assertFalse("Found sort results out of order", fail);
     searcher.close();
@@ -549,6 +651,16 @@ public class TestSort extends LuceneTest
     
     sort.setSort (useDocValues(new SortField ("float", SortField.Type.FLOAT)), new SortField ("string", SortField.Type.STRING) );
     assertMatches (empty, queryX, sort, "");
+
+    sort.setSort (useDocValues(new SortField ("string", getDVStringSortType(), true)), SortField.FIELD_DOC );
+    assertMatches (empty, queryX, sort, "");
+
+    sort.setSort (useDocValues(new SortField ("float", SortField.Type.FLOAT)),
+                  useDocValues(new SortField ("string", getDVStringSortType())) );
+    assertMatches (empty, queryX, sort, "");
+    
+    sort.setSort (useDocValues(new SortField ("float", SortField.Type.FLOAT)), useDocValues(new SortField ("string", getDVStringSortType())) );
+    assertMatches (empty, queryX, sort, "");
   }
 
   static class MyFieldComparator extends FieldComparator<Integer> {
@@ -642,11 +754,18 @@ public class TestSort extends LuceneTest
       sort.setSort (useDocValues(new SortField ("float", SortField.Type.FLOAT, true)) );
       assertMatches (full, queryX, sort, "AECIG");
       assertMatches (full, queryY, sort, "BFJHD");
+
+      sort.setSort (useDocValues(new SortField ("string", getDVStringSortType(), true)) );
+      assertMatches (full, queryX, sort, "CEGIA");
+      assertMatches (full, queryY, sort, "BFHJD");
     }
   }
 
   // test sorting when the sort field is empty (undefined) for some of the documents
   public void testEmptyFieldSort() throws Exception {
+
+    // NOTE: do not test DocValues fields here, since you
+    // can't sort when some documents don't have the field
     sort.setSort (new SortField ("string", SortField.Type.STRING) );
     assertMatches (full, queryF, sort, "ZJI");
 
@@ -662,14 +781,6 @@ public class TestSort extends LuceneTest
     sort.setSort (new SortField ("float", SortField.Type.FLOAT) );
     assertMatches (full, queryF, sort, "ZJI");
 
-    if (supportsDocValues) {
-      sort.setSort (useDocValues(new SortField ("int", SortField.Type.INT)) );
-      assertMatches (full, queryF, sort, "IZJ");
-    
-      sort.setSort (useDocValues(new SortField ("float", SortField.Type.FLOAT)) );
-      assertMatches (full, queryF, sort, "ZJI");
-    }
-
     // using a nonexisting field as first sort key shouldn't make a difference:
     sort.setSort (new SortField ("nosuchfield", SortField.Type.STRING),
         new SortField ("float", SortField.Type.FLOAT) );
@@ -679,7 +790,6 @@ public class TestSort extends LuceneTest
     assertMatches (full, queryF, sort, "IJZ");
 
     // When a field is null for both documents, the next SortField should be used.
-                // Works for
     sort.setSort (new SortField ("int", SortField.Type.INT),
                                 new SortField ("string", SortField.Type.STRING),
         new SortField ("float", SortField.Type.FLOAT) );
@@ -688,7 +798,7 @@ public class TestSort extends LuceneTest
     // Reverse the last criterium to make sure the test didn't pass by chance
     sort.setSort (new SortField ("int", SortField.Type.INT),
                                 new SortField ("string", SortField.Type.STRING),
-        new SortField ("float", SortField.Type.FLOAT, true) );
+                  new SortField ("float", SortField.Type.FLOAT, true) );
     assertMatches (full, queryG, sort, "ZYXW");
 
     // Do the same for a ParallelMultiSearcher
@@ -696,13 +806,13 @@ public class TestSort extends LuceneTest
     IndexSearcher parallelSearcher=new IndexSearcher (full.getIndexReader(), exec);
 
     sort.setSort (new SortField ("int", SortField.Type.INT),
-                                new SortField ("string", SortField.Type.STRING),
-        new SortField ("float", SortField.Type.FLOAT) );
+                  new SortField ("string", SortField.Type.STRING),
+                  new SortField ("float", SortField.Type.FLOAT) );
     assertMatches (parallelSearcher, queryG, sort, "ZWXY");
 
     sort.setSort (new SortField ("int", SortField.Type.INT),
-                                new SortField ("string", SortField.Type.STRING),
-        new SortField ("float", SortField.Type.FLOAT, true) );
+                  new SortField ("string", SortField.Type.STRING),
+                  new SortField ("float", SortField.Type.FLOAT, true) );
     assertMatches (parallelSearcher, queryG, sort, "ZYXW");
     parallelSearcher.close();
     exec.shutdown();
@@ -719,6 +829,20 @@ public class TestSort extends LuceneTest
 
     sort.setSort (new SortField ("float", SortField.Type.FLOAT), new SortField ("string", SortField.Type.STRING) );
     assertMatches (full, queryX, sort, "GICEA");
+
+    if (supportsDocValues) {
+      sort.setSort (useDocValues(new SortField ("int", SortField.Type.INT)),
+                    useDocValues(new SortField ("float", SortField.Type.FLOAT)));
+      assertMatches (full, queryX, sort, "IGEAC");
+
+      sort.setSort (useDocValues(new SortField ("int", SortField.Type.INT, true)),
+                    useDocValues(new SortField (null, SortField.Type.DOC, true)));
+      assertMatches (full, queryX, sort, "CEAGI");
+
+      sort.setSort (useDocValues(new SortField ("float", SortField.Type.FLOAT)),
+                    useDocValues(new SortField ("string", getDVStringSortType())));
+      assertMatches (full, queryX, sort, "GICEA");
+    }
   }
 
   // test a variety of sorts using a parallel multisearcher
@@ -1064,6 +1188,21 @@ public class TestSort extends LuceneTest
 
       sort.setSort(useDocValues(new SortField ("int", SortField.Type.INT, true)));
       assertMatches(multi, queryF, sort, "JZI");
+
+      sort.setSort(useDocValues(new SortField("string", getDVStringSortType())));
+      assertMatches(multi, queryA, sort, "DJAIHGFEBC");
+      
+      sort.setSort(useDocValues(new SortField("string", getDVStringSortType(), true)));
+      assertMatches(multi, queryA, sort, "CBEFGHIAJD");
+      
+      sort.setSort(useDocValues(new SortField("float", SortField.Type.FLOAT)),useDocValues(new SortField("string", getDVStringSortType())));
+      assertMatches(multi, queryA, sort, "GDHJICEFAB");
+
+      sort.setSort(useDocValues(new SortField ("string", getDVStringSortType())));
+      assertMatches(multi, queryF, sort, "ZJI");
+
+      sort.setSort(useDocValues(new SortField ("string", getDVStringSortType(), true)));
+      assertMatches(multi, queryF, sort, "IJZ");
     }
     
     // up to this point, all of the searches should have "sane" 
@@ -1080,6 +1219,16 @@ public class TestSort extends LuceneTest
   // make sure the documents returned by the search match the expected list
   private void assertMatches(String msg, IndexSearcher searcher, Query query, Sort sort,
       String expectedResult) throws IOException {
+
+    for(SortField sortField : sort.getSort()) {
+      if (sortField.getUseIndexValues() && sortField.getType() == SortField.Type.STRING) {
+        if (searcher.getIndexReader() instanceof SlowMultiReaderWrapper) {
+          // Cannot use STRING DocValues sort with SlowMultiReaderWrapper
+          return;
+        }
+      }
+    }
+
     //ScoreDoc[] result = searcher.search (query, null, 1000, sort).scoreDocs;
     TopDocs hits = searcher.search(query, null, Math.max(1, expectedResult.length()), sort);
     ScoreDoc[] result = hits.scoreDocs;

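The new TestSort assertions above exercise string sorting backed by DocValues through the test's useDocValues(...) helper and getDVStringSortType(). A minimal sketch of what such a helper plausibly looks like, assuming SortField exposes a setter matching the getUseIndexValues() accessor checked in assertMatches (the setter name is an assumption; the patch only shows the getter, and the real tests use getDVStringSortType() where this sketch uses SortField.Type.STRING):

    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;

    class DocValuesSortSketch {
      // Assumed helper: flag the SortField so its comparator reads per-document
      // values (DocValues) rather than the FieldCache. setUseIndexValues(true)
      // is an assumption mirroring the getUseIndexValues() call seen above.
      static SortField useDocValues(SortField sf) {
        sf.setUseIndexValues(true);
        return sf;
      }

      // Example: sort by the "string" field, breaking ties by docID, mirroring
      // the first new assertion added to the empty-index test above.
      static Sort byStringThenDoc() {
        return new Sort(useDocValues(new SortField("string", SortField.Type.STRING)),
                        SortField.FIELD_DOC);
      }
    }
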
Modified: lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/FacetTestBase.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/FacetTestBase.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/FacetTestBase.java (original)
+++ lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/FacetTestBase.java Mon Nov 14 14:15:19 2011
@@ -30,6 +30,7 @@ import org.apache.lucene.search.IndexSea
 import org.apache.lucene.store.Directory;
 
 import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.facet.index.CategoryDocumentBuilder;
@@ -45,6 +46,8 @@ import org.apache.lucene.facet.taxonomy.
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
 
 /**
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -66,14 +69,17 @@ import org.apache.lucene.facet.taxonomy.
 /** Base faceted search test. */
 public abstract class FacetTestBase extends LuceneTestCase {
   
-  /** Documents text field. */
-  protected static final String CONTENT_FIELD = "content";
+  /** Holds a search and taxonomy Directories pair. */
+  private static final class SearchTaxoDirPair {
+    Directory searchDir, taxoDir;
+    SearchTaxoDirPair() {}
+  }
   
-  /** Directory for the index */
-  protected Directory indexDir;
+  private static HashMap<Integer, SearchTaxoDirPair> dirsPerPartitionSize;
+  private static File TEST_DIR;
   
-  /** Directory for the taxonomy */
-  protected Directory taxoDir;
+  /** Documents text field. */
+  protected static final String CONTENT_FIELD = "content";
   
   /** taxonomy Reader for the test. */
   protected TaxonomyReader taxoReader;
@@ -84,6 +90,19 @@ public abstract class FacetTestBase exte
   /** Searcher for the test. */
   protected IndexSearcher searcher;
   
+  @BeforeClass
+  public static void beforeClassFacetTestBase() throws Exception {
+    TEST_DIR = _TestUtil.getTempDir("facets");
+    dirsPerPartitionSize = new HashMap<Integer, FacetTestBase.SearchTaxoDirPair>(); 
+  }
+  
+  @AfterClass
+  public static void afterClassFacetTestBase() throws Exception {
+    for (SearchTaxoDirPair pair : dirsPerPartitionSize.values()) {
+      IOUtils.close(pair.searchDir, pair.taxoDir);
+    }
+  }
+  
   /** documents text (for the text field). */
   private static final String[] DEFAULT_CONTENT = {
       "the white car is the one I want.",
@@ -122,34 +141,39 @@ public abstract class FacetTestBase exte
   }
 
   /** Prepare index (in RAM/Disk) with some documents and some facets */
-  protected final void initIndex(int partitionSize, boolean onDisk) throws Exception {
+  protected final void initIndex(int partitionSize, boolean forceDisk) throws Exception {
     if (VERBOSE) {
-      System.out.println("Partition Size: " + partitionSize+"  onDisk: "+onDisk);
+      System.out.println("Partition Size: " + partitionSize+"  forceDisk: "+forceDisk);
     }
 
-    if (onDisk) {
-      File indexFile = _TestUtil.getTempDir("index");
-      indexDir = newFSDirectory(indexFile);
-      taxoDir = newFSDirectory(new File(indexFile,"facets"));
-    } else { 
-      indexDir = newDirectory();
-      taxoDir = newDirectory();
+    SearchTaxoDirPair pair = dirsPerPartitionSize.get(Integer.valueOf(partitionSize));
+    if (pair == null) {
+      pair = new SearchTaxoDirPair();
+      if (forceDisk) {
+        pair.searchDir = newFSDirectory(new File(TEST_DIR, "index"));
+        pair.taxoDir = newFSDirectory(new File(TEST_DIR, "taxo"));
+      } else {
+        pair.searchDir = newDirectory();
+        pair.taxoDir = newDirectory();
+      }
+      
+      RandomIndexWriter iw = new RandomIndexWriter(random, pair.searchDir, getIndexWriterConfig(getAnalyzer()));
+      TaxonomyWriter taxo = new DirectoryTaxonomyWriter(pair.taxoDir, OpenMode.CREATE);
+      
+      populateIndex(iw, taxo, getFacetIndexingParams(partitionSize));
+      
+      // commit changes (taxonomy prior to search index for consistency)
+      taxo.commit();
+      iw.commit();
+      taxo.close();
+      iw.close();
+      
+      dirsPerPartitionSize.put(Integer.valueOf(partitionSize), pair);
     }
     
-    RandomIndexWriter iw = new RandomIndexWriter(random, indexDir, getIndexWriterConfig(getAnalyzer()));
-    TaxonomyWriter taxo = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
-    
-    populateIndex(iw, taxo, getFacetIndexingParams(partitionSize));
-    
-    // commit changes (taxonomy prior to search index for consistency)
-    taxo.commit();
-    iw.commit();
-    taxo.close();
-    iw.close();
-    
     // prepare for searching
-    taxoReader = new DirectoryTaxonomyReader(taxoDir);
-    indexReader = IndexReader.open(indexDir);
+    taxoReader = new DirectoryTaxonomyReader(pair.taxoDir);
+    indexReader = IndexReader.open(pair.searchDir);
     searcher = newSearcher(indexReader);
   }
   
@@ -207,16 +231,10 @@ public abstract class FacetTestBase exte
   /** Close all indexes */
   protected void closeAll() throws Exception {
     // close and nullify everything
-    taxoReader.close();
+    IOUtils.close(taxoReader, indexReader, searcher);
     taxoReader = null;
-    indexReader.close();
     indexReader = null;
-    searcher.close();
     searcher = null;
-    indexDir.close();
-    indexDir = null;
-    taxoDir.close();
-    taxoDir = null;
   }
   
   /**

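The FacetTestBase refactoring above builds one search/taxonomy directory pair per partition size, reuses it across the test class, and closes everything once in @AfterClass via IOUtils.close(...). A minimal standalone sketch of that cache-and-close-once pattern, with hypothetical names that are not part of the patch:

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;

    class PerKeyResourceCache<K, V extends Closeable> {
      private final Map<K, V> cache = new HashMap<K, V>();

      // Reuse an already-built resource for this key, if any.
      V get(K key) { return cache.get(key); }

      // Register a freshly built resource so later tests can reuse it.
      void put(K key, V value) { cache.put(key, value); }

      // Close everything once, at class teardown.
      void closeAll() throws IOException {
        for (V v : cache.values()) {
          v.close();
        }
        cache.clear();
      }
    }
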
Modified: lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/search/CategoryListIteratorTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/search/CategoryListIteratorTest.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/search/CategoryListIteratorTest.java (original)
+++ lucene/dev/branches/lucene2621/modules/facet/src/test/org/apache/lucene/facet/search/CategoryListIteratorTest.java Mon Nov 14 14:15:19 2011
@@ -5,7 +5,10 @@ import java.io.Reader;
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.lucene.analysis.*;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
 import org.apache.lucene.document.Document;
@@ -15,8 +18,6 @@ import org.apache.lucene.index.Payload;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
-import org.junit.Test;
-
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.UnsafeByteArrayOutputStream;
 import org.apache.lucene.util.encoding.DGapIntEncoder;
@@ -24,6 +25,7 @@ import org.apache.lucene.util.encoding.I
 import org.apache.lucene.util.encoding.SortingIntEncoder;
 import org.apache.lucene.util.encoding.UniqueValuesIntEncoder;
 import org.apache.lucene.util.encoding.VInt8IntEncoder;
+import org.junit.Test;
 
 /**
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -134,8 +136,6 @@ public class CategoryListIteratorTest ex
     Directory dir = newDirectory();
     DataTokenStream dts = new DataTokenStream("1",new SortingIntEncoder(
         new UniqueValuesIntEncoder(new DGapIntEncoder(new VInt8IntEncoder()))));
-    DataTokenStream dts2 = new DataTokenStream("2",new SortingIntEncoder(
-        new UniqueValuesIntEncoder(new DGapIntEncoder(new VInt8IntEncoder()))));
     // this test requires that no payloads ever be randomly present!
     final Analyzer noPayloadsAnalyzer = new Analyzer() {
       @Override
@@ -147,36 +147,23 @@ public class CategoryListIteratorTest ex
     RandomIndexWriter writer = new RandomIndexWriter(random, dir, 
         newIndexWriterConfig(TEST_VERSION_CURRENT, noPayloadsAnalyzer).setMergePolicy(newLogMergePolicy()));
     for (int i = 0; i < data.length; i++) {
-      dts.setIdx(i);
       Document doc = new Document();
-      if (i==0 || i == 2) {
-        doc.add(new TextField("f", dts)); // only docs 0 & 2 have payloads!
+      if (i == 0) {
+        dts.setIdx(i);
+        doc.add(new TextField("f", dts)); // only doc 0 has payloads!
+      } else {
+        doc.add(new TextField("f", "1"));
       }
-      dts2.setIdx(i);
-      doc.add(new TextField("f", dts2));
       writer.addDocument(doc);
       writer.commit();
     }
 
-    // add more documents to expose the bug.
-    // for some reason, this bug is not exposed unless these additional documents are added.
-    for (int i = 0; i < 10; ++i) {
-      Document d = new Document();
-      dts.setIdx(2);
-      d.add(new TextField("f", dts2));
-      writer.addDocument(d);
-      if (i %10 == 0) {
-        writer.commit();
-      }
-      
-    }
-
     IndexReader reader = writer.getReader();
     writer.close();
 
     CategoryListIterator cli = new PayloadIntDecodingIterator(reader, new Term(
         "f","1"), dts.encoder.createMatchingDecoder());
-    cli.init();
+    assertTrue("Failed to initialize payload iterator", cli.init());
     int totalCats = 0;
     for (int i = 0; i < data.length; i++) {
       // doc no. i
@@ -186,21 +173,19 @@ public class CategoryListIteratorTest ex
       }
       boolean hasDoc = cli.skipTo(i);
       if (hasDoc) {
-        assertTrue("Document "+i+" must not have a payload!", i==0 || i==2 );
+        assertTrue("Document " + i + " must not have a payload!", i == 0);
         long cat;
         while ((cat = cli.nextCategory()) < Integer.MAX_VALUE) {
           assertTrue("expected category not found: " + cat, values.contains((int) cat));
           ++totalCats;
         }
       } else {
-        assertFalse("Document "+i+" must have a payload!", i==0 || i==2 );
+        assertFalse("Document " + i + " must have a payload!", i == 0);
       }
 
     }
-    assertEquals("Wrong number of total categories!", 4, totalCats);
+    assertEquals("Wrong number of total categories!", 2, totalCats);
 
-    // Ok.. went through the first 4 docs, now lets try the 6th doc (docid 5)
-    assertFalse("Doc #6 (docid=5) should not have a payload!",cli.skipTo(5));
     reader.close();
     dir.close();
   }

Modified: lucene/dev/branches/lucene2621/solr/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/CHANGES.txt?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/CHANGES.txt (original)
+++ lucene/dev/branches/lucene2621/solr/CHANGES.txt Mon Nov 14 14:15:19 2011
@@ -183,6 +183,9 @@ New Features
     LMDirichletSimilarity: LM with Dirichlet smoothing
     LMJelinekMercerSimilarity: LM with Jelinek-Mercer smoothing
  (David Mark Nemeskey, Robert Muir)
+ 
+* SOLR-2134 Trie* fields should support sortMissingLast=true, and deprecate Sortable* Field Types
+  (Ryan McKinley, Mike McCandless, Uwe Schindler, Erick Erickson)
 
 Optimizations
 ----------------------

Modified: lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/CachedSqlEntityProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/CachedSqlEntityProcessor.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/CachedSqlEntityProcessor.java (original)
+++ lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/CachedSqlEntityProcessor.java Mon Nov 14 14:15:19 2011
@@ -16,66 +16,26 @@
  */
 package org.apache.solr.handler.dataimport;
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
 /**
  * This class enables caching of data obtained from the DB to avoid too many sql
  * queries
  * <p/>
  * <p>
  * Refer to <a
- * href="http://wiki.apache.org/solr/DataImportHandler">http://wiki.apache.org/solr/DataImportHandler</a>
- * for more details.
+ * href="http://wiki.apache.org/solr/DataImportHandler">http://wiki.apache
+ * .org/solr/DataImportHandler</a> for more details.
  * </p>
  * <p/>
  * <b>This API is experimental and subject to change</b>
- *
+ * 
  * @since solr 1.3
+ * @deprecated - Use SqlEntityProcessor with cacheImpl parameter.
  */
+@Deprecated
 public class CachedSqlEntityProcessor extends SqlEntityProcessor {
-  private boolean isFirst;
-
-  @Override
-  @SuppressWarnings("unchecked")
-  public void init(Context context) {
-    super.init(context);
-    super.cacheInit();
-    isFirst = true;
-  }
-
-  @Override
-  public Map<String, Object> nextRow() {
-    if (dataSourceRowCache != null)
-      return getFromRowCacheTransformed();
-    if (!isFirst)
-      return null;
-    String query = context.replaceTokens(context.getEntityAttribute("query"));
-    isFirst = false;
-    if (simpleCache != null) {
-      return getSimpleCacheData(query);
-    } else {
-      return getIdCacheData(query);
+    @Override
+    protected void initCache(Context context) {
+      cacheSupport = new DIHCacheSupport(context, "SortedMapBackedCache");
     }
 
-  }
-
-  @Override
-  protected List<Map<String, Object>> getAllNonCachedRows() {
-    List<Map<String, Object>> rows = new ArrayList<Map<String, Object>>();
-    String q = getQuery();
-    initQuery(context.replaceTokens(q));
-    if (rowIterator == null)
-      return rows;
-    while (rowIterator.hasNext()) {
-      Map<String, Object> arow = rowIterator.next();
-      if (arow == null) {
-        break;
-      } else {
-        rows.add(arow);
-      }
-    }
-    return rows;
-  }
 }

Modified: lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHWriter.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHWriter.java (original)
+++ lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DIHWriter.java Mon Nov 14 14:15:19 2011
@@ -15,6 +15,9 @@ package org.apache.solr.handler.dataimpo
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+import java.util.Map;
+import java.util.Set;
+
 import org.apache.solr.common.SolrInputDocument;
 
 /**
@@ -90,4 +93,13 @@ public interface DIHWriter {
 	 */
 	public void init(Context context) ;
 
+	
+	/**
+	 * <p>
+	 *  Specify the keys to be modified by a delta update (required by writers that can store duplicate keys)
+	 * </p>
+	 * @param deltaKeys
+	 */
+	public void setDeltaKeys(Set<Map<String, Object>> deltaKeys) ;
+
 }

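The new setDeltaKeys(...) method lets DocBuilder hand the set of changed primary keys to the writer before a delta import (see the writer.setDeltaKeys(allPks) call in the DocBuilder diff below). A hypothetical, standalone sketch of how a writer might retain those keys; it is only the delta-key bookkeeping, not the full DIHWriter contract:

    import java.util.Collections;
    import java.util.Map;
    import java.util.Set;

    class DeltaKeyTracker {
      private Set<Map<String, Object>> deltaKeys = Collections.emptySet();

      // Called once per delta run with the primary keys that changed.
      public void setDeltaKeys(Set<Map<String, Object>> deltaKeys) {
        this.deltaKeys = deltaKeys == null
            ? Collections.<Map<String, Object>>emptySet()
            : deltaKeys;
      }

      // A writer that can store duplicate keys could consult this before writing.
      boolean isChanged(Map<String, Object> key) {
        return deltaKeys.contains(key);
      }
    }
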
Modified: lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataConfig.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataConfig.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataConfig.java (original)
+++ lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataConfig.java Mon Nov 14 14:15:19 2011
@@ -109,6 +109,8 @@ public class DataConfig {
     public DataSource dataSrc;
 
     public Map<String, List<Field>> colNameVsField = new HashMap<String, List<Field>>();
+    
+    public boolean initalized = false;
 
     public Entity() {
     }

Modified: lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java?rev=1201715&r1=1201714&r2=1201715&view=diff
==============================================================================
--- lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java (original)
+++ lucene/dev/branches/lucene2621/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java Mon Nov 14 14:15:19 2011
@@ -336,6 +336,7 @@ public class DocBuilder {
       // Make sure that documents are not re-created
     }
     deletedKeys = null;
+    writer.setDeltaKeys(allPks);
 
     statusMessages.put("Total Changed Documents", allPks.size());
     VariableResolverImpl vri = getVariableResolver();
@@ -428,7 +429,7 @@ public class DocBuilder {
       for (int i = 0; i < threads; i++) {
         entityProcessorWrapper.add(new ThreadedEntityProcessorWrapper(entityProcessor, DocBuilder.this, this, getVariableResolver()));
       }
-      context = new ThreadedContext(this, DocBuilder.this);
+      context = new ThreadedContext(this, DocBuilder.this, getVariableResolver());
     }
 
 
@@ -557,7 +558,6 @@ public class DocBuilder {
           }
         }
       } finally {
-        epw.destroy();
         currentEntityProcWrapper.remove();
         Context.CURRENT_CONTEXT.remove();
       }
@@ -590,10 +590,35 @@ public class DocBuilder {
     }
   }
 
+  private void resetEntity(DataConfig.Entity entity) {
+    entity.initalized = false;
+    if (entity.entities != null) {
+      for (DataConfig.Entity child : entity.entities) {
+        resetEntity(child);
+      }
+    }
+  }
+  
+  private void buildDocument(VariableResolverImpl vr, DocWrapper doc,
+      Map<String,Object> pk, DataConfig.Entity entity, boolean isRoot,
+      ContextImpl parentCtx) {
+    List<EntityProcessorWrapper> entitiesToDestroy = new ArrayList<EntityProcessorWrapper>();
+    try {
+      buildDocument(vr, doc, pk, entity, isRoot, parentCtx, entitiesToDestroy);
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    } finally {
+      for (EntityProcessorWrapper entityWrapper : entitiesToDestroy) {
+        entityWrapper.destroy();
+      }
+      resetEntity(entity);
+    }
+  }
+
   @SuppressWarnings("unchecked")
   private void buildDocument(VariableResolverImpl vr, DocWrapper doc,
                              Map<String, Object> pk, DataConfig.Entity entity, boolean isRoot,
-                             ContextImpl parentCtx) {
+                             ContextImpl parentCtx, List<EntityProcessorWrapper> entitiesToDestroy) {
 
     EntityProcessorWrapper entityProcessor = getEntityProcessor(entity);
 
@@ -602,6 +627,10 @@ public class DocBuilder {
             session, parentCtx, this);
     entityProcessor.init(ctx);
     Context.CURRENT_CONTEXT.set(ctx);
+    if (!entity.initalized) {
+      entitiesToDestroy.add(entityProcessor);
+      entity.initalized = true;
+    }
     
     if (requestParameters.start > 0) {
       getDebugLogger().log(DIHLogLevels.DISABLE_LOGGING, null, null);
@@ -666,7 +695,7 @@ public class DocBuilder {
             vr.addNamespace(entity.name, arow);
             for (DataConfig.Entity child : entity.entities) {
               buildDocument(vr, doc,
-                  child.isDocRoot ? pk : null, child, false, ctx);
+                  child.isDocRoot ? pk : null, child, false, ctx, entitiesToDestroy);
             }
             vr.removeNamespace(entity.name);
           }
@@ -729,7 +758,6 @@ public class DocBuilder {
       if (verboseDebug) {
         getDebugLogger().log(DIHLogLevels.END_ENTITY, null, null);
       }
-      entityProcessor.destroy();
     }
   }
 

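The DocBuilder change above moves entity-processor cleanup out of the recursion: each processor is registered in entitiesToDestroy the first time its entity is initialized, destroyed once in a finally block, and resetEntity then clears the initialized flags. A minimal, hypothetical sketch of that deferred-cleanup shape (names invented for illustration, not taken from the patch):

    import java.util.ArrayList;
    import java.util.List;

    class DeferredDestroyExample {
      interface Destroyable { void destroy(); }

      static void build(Destroyable root, List<? extends Destroyable> children) {
        List<Destroyable> toDestroy = new ArrayList<Destroyable>();
        try {
          buildOne(root, toDestroy);
          for (Destroyable child : children) {
            buildOne(child, toDestroy);   // recursion would pass the same list down
          }
        } finally {
          for (Destroyable d : toDestroy) {
            d.destroy();                  // destroy once, after the whole document
          }
        }
      }

      private static void buildOne(Destroyable d, List<Destroyable> toDestroy) {
        if (!toDestroy.contains(d)) {     // register each processor only once
          toDestroy.add(d);
        }
        // ... process rows for this entity here ...
      }
    }
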

