lucene-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From sh...@apache.org
Subject svn commit: r1479862 [19/38] - in /lucene/dev/branches/lucene4258: ./ dev-tools/ dev-tools/idea/.idea/ dev-tools/idea/.idea/libraries/ dev-tools/maven/ dev-tools/maven/solr/ dev-tools/maven/solr/core/src/java/ dev-tools/maven/solr/solrj/src/java/ dev-t...
Date Tue, 07 May 2013 11:21:14 GMT
Modified: lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/FacetsCollector.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/FacetsCollector.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/FacetsCollector.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/FacetsCollector.java Tue May  7 11:20:55 2013
@@ -87,7 +87,7 @@ public abstract class FacetsCollector ex
     }
     
     @Override
-    public final void setNextReader(AtomicReaderContext context) throws IOException {
+    protected final void doSetNextReader(AtomicReaderContext context) throws IOException {
       if (bits != null) {
         matchingDocs.add(new MatchingDocs(this.context, bits, totalHits, scores));
       }
@@ -133,7 +133,7 @@ public abstract class FacetsCollector ex
     public final void setScorer(Scorer scorer) throws IOException {}
     
     @Override
-    public final void setNextReader(AtomicReaderContext context) throws IOException {
+    protected final void doSetNextReader(AtomicReaderContext context) throws IOException {
       if (bits != null) {
         matchingDocs.add(new MatchingDocs(this.context, bits, totalHits, null));
       }
@@ -175,7 +175,7 @@ public abstract class FacetsCollector ex
    * given {@link FacetsAccumulator}.
    */
   public static FacetsCollector create(FacetsAccumulator accumulator) {
-    if (accumulator.getAggregator().requiresDocScores()) {
+    if (accumulator.requiresDocScores()) {
       return new DocsAndScoresCollector(accumulator);
     } else {
       return new DocsOnlyCollector(accumulator);
@@ -183,6 +183,7 @@ public abstract class FacetsCollector ex
   }
 
   private final FacetsAccumulator accumulator;
+  private List<FacetResult> cachedResults;
   
   protected final List<MatchingDocs> matchingDocs = new ArrayList<MatchingDocs>();
 
@@ -196,15 +197,24 @@ public abstract class FacetsCollector ex
    */
   protected abstract void finish();
   
+  /** Performs the actual work of {@link #setNextReader(AtomicReaderContext)}. */
+  protected abstract void doSetNextReader(AtomicReaderContext context) throws IOException;
+  
   /**
    * Returns a {@link FacetResult} per {@link FacetRequest} set in
-   * {@link FacetSearchParams}. Note that if one of the {@link FacetRequest
-   * requests} is for a {@link CategoryPath} that does not exist in the taxonomy,
-   * no matching {@link FacetResult} will be returned.
+   * {@link FacetSearchParams}. Note that if a {@link FacetRequest} defines a
+   * {@link CategoryPath} which does not exist in the taxonomy, an empty
+   * {@link FacetResult} will be returned for it.
    */
   public final List<FacetResult> getFacetResults() throws IOException {
-    finish();
-    return accumulator.accumulate(matchingDocs);
+    // LUCENE-4893: if results are not cached, counts are multiplied as many
+    // times as this method is called. 
+    if (cachedResults == null) {
+      finish();
+      cachedResults = accumulator.accumulate(matchingDocs);
+    }
+    
+    return cachedResults;
   }
   
   /**
@@ -218,12 +228,22 @@ public abstract class FacetsCollector ex
   
   /**
    * Allows to reuse the collector between search requests. This method simply
-   * clears all collected documents (and scores) information, and does not
-   * attempt to reuse allocated memory spaces.
+   * clears all collected documents (and scores) information (as well as cached
+   * results), and does not attempt to reuse allocated memory spaces.
    */
   public final void reset() {
     finish();
     matchingDocs.clear();
+    cachedResults = null;
   }
 
+  @Override
+  public final void setNextReader(AtomicReaderContext context) throws IOException {
+    // clear cachedResults - needed in case someone called getFacetResults()
+    // before doing a search and didn't call reset(). Defensive code to prevent
+    // traps.
+    cachedResults = null;
+    doSetNextReader(context);
+  }
+  
 }

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/FastCountingFacetsAggregator.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/FastCountingFacetsAggregator.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/FastCountingFacetsAggregator.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/FastCountingFacetsAggregator.java Tue May  7 11:20:55 2013
@@ -83,6 +83,7 @@ public final class FastCountingFacetsAgg
           byte b = buf.bytes[offset++];
           if (b >= 0) {
             prev = ord = ((ord << 7) | b) + prev;
+            assert ord < counts.length: "ord=" + ord + " vs maxOrd=" + counts.length;
             ++counts[ord];
             ord = 0;
           } else {

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/FloatFacetResultsHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/FloatFacetResultsHandler.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/FloatFacetResultsHandler.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/FloatFacetResultsHandler.java Tue May  7 11:20:55 2013
@@ -43,18 +43,19 @@ public final class FloatFacetResultsHand
     return values[ordinal];
   }
 
-  
   @Override
   protected final int addSiblings(int ordinal, int[] siblings, PriorityQueue<FacetResultNode> pq) {
     FacetResultNode top = pq.top();
     int numResults = 0;
     while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
       float value = values[ordinal];
-      if (value > top.value) {
-        top.value = value;
-        top.ordinal = ordinal;
-        top = pq.updateTop();
+      if (value > 0.0f) {
         ++numResults;
+        if (value > top.value) {
+          top.value = value;
+          top.ordinal = ordinal;
+          top = pq.updateTop();
+        }
       }
       ordinal = siblings[ordinal];
     }
@@ -66,9 +67,8 @@ public final class FloatFacetResultsHand
     while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
       float value = values[ordinal];
       if (value > 0) {
-        FacetResultNode node = new FacetResultNode();
+        FacetResultNode node = new FacetResultNode(ordinal, value);
         node.label = taxonomyReader.getPath(ordinal);
-        node.value = value;
         nodes.add(node);
       }
       ordinal = siblings[ordinal];

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/IntFacetResultsHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/IntFacetResultsHandler.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/IntFacetResultsHandler.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/IntFacetResultsHandler.java Tue May  7 11:20:55 2013
@@ -49,11 +49,13 @@ public final class IntFacetResultsHandle
     int numResults = 0;
     while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
       int value = values[ordinal];
-      if (value > top.value) {
-        top.value = value;
-        top.ordinal = ordinal;
-        top = pq.updateTop();
+      if (value > 0) {
         ++numResults;
+        if (value > top.value) {
+          top.value = value;
+          top.ordinal = ordinal;
+          top = pq.updateTop();
+        }
       }
       ordinal = siblings[ordinal];
     }
@@ -65,9 +67,8 @@ public final class IntFacetResultsHandle
     while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
       int value = values[ordinal];
       if (value > 0) {
-        FacetResultNode node = new FacetResultNode();
+        FacetResultNode node = new FacetResultNode(ordinal, value);
         node.label = taxonomyReader.getPath(ordinal);
-        node.value = value;
         nodes.add(node);
       }
       ordinal = siblings[ordinal];

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/StandardFacetsAccumulator.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/StandardFacetsAccumulator.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/StandardFacetsAccumulator.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/StandardFacetsAccumulator.java Tue May  7 11:20:55 2013
@@ -198,11 +198,7 @@ public class StandardFacetsAccumulator e
         IntermediateFacetResult tmpResult = fr2tmpRes.get(fr);
         if (tmpResult == null) {
           // Add empty FacetResult:
-          FacetResultNode root = new FacetResultNode();
-          root.ordinal = TaxonomyReader.INVALID_ORDINAL;
-          root.label = fr.categoryPath;
-          root.value = 0;
-          res.add(new FacetResult(fr, root, 0));
+          res.add(emptyResult(taxonomyReader.getOrdinal(fr.categoryPath), fr));
           continue;
         }
         FacetResult facetRes = frHndlr.renderFacetResult(tmpResult);

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/SumScoreFacetsAggregator.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/SumScoreFacetsAggregator.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/SumScoreFacetsAggregator.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/SumScoreFacetsAggregator.java Tue May  7 11:20:55 2013
@@ -42,11 +42,13 @@ public class SumScoreFacetsAggregator im
     int doc = 0;
     int length = matchingDocs.bits.length();
     float[] scores = facetArrays.getFloatArray();
+    int scoresIdx = 0;
     while (doc < length && (doc = matchingDocs.bits.nextSetBit(doc)) != -1) {
       cli.getOrdinals(doc, ordinals);
       int upto = ordinals.offset + ordinals.length;
+      final float score = matchingDocs.scores[scoresIdx++];
       for (int i = ordinals.offset; i < upto; i++) {
-        scores[ordinals.ints[i]] += matchingDocs.scores[doc];
+        scores[ordinals.ints[i]] += score;
       }
       ++doc;
     }

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/TopKFacetResultsHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/TopKFacetResultsHandler.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/TopKFacetResultsHandler.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/TopKFacetResultsHandler.java Tue May  7 11:20:55 2013
@@ -256,7 +256,6 @@ public class TopKFacetResultsHandler ext
      * Create a Facet Result.
      * @param facetRequest Request for which this result was obtained.
      * @param facetResultNode top result node for this facet result.
-     * @param totalFacets - number of children of the targetFacet, up till the requested depth.
      */
     TopKFacetResult(FacetRequest facetRequest, FacetResultNode facetResultNode, int totalFacets) {
       super(facetRequest, facetResultNode, totalFacets);

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/TopKInEachNodeHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/TopKInEachNodeHandler.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/TopKInEachNodeHandler.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/search/TopKInEachNodeHandler.java Tue May  7 11:20:55 2013
@@ -706,8 +706,7 @@ public class TopKInEachNodeHandler exten
       value = tmp.rootNodeValue;
     }
     FacetResultNode root = generateNode(ordinal, value, tmp.mapToAACOs);
-    return new FacetResult (tmp.facetRequest, root, tmp.totalNumOfFacetsConsidered); 
-
+    return new FacetResult(tmp.facetRequest, root, tmp.totalNumOfFacetsConsidered);
   }
 
   private FacetResultNode generateNode(int ordinal, double val,  IntToObjectMap<AACO> mapToAACOs) {

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesAccumulator.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesAccumulator.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesAccumulator.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/sortedset/SortedSetDocValuesAccumulator.java Tue May  7 11:20:55 2013
@@ -107,7 +107,6 @@ public class SortedSetDocValuesAccumulat
 
           if (matchingDocs.totalHits < numSegOrds/10) {
             // Remap every ord to global ord as we iterate:
-            final int[] segCounts = new int[numSegOrds];
             int doc = 0;
             while (doc < maxDoc && (doc = matchingDocs.bits.nextSetBit(doc)) != -1) {
               segValues.setDocument(doc);
@@ -259,22 +258,26 @@ public class SortedSetDocValuesAccumulat
 
       //System.out.println("collect");
       int dimCount = 0;
+      int childCount = 0;
       FacetResultNode reuse = null;
       for(int ord=ordRange.start; ord<=ordRange.end; ord++) {
         //System.out.println("  ord=" + ord + " count= "+ counts[ord] + " bottomCount=" + bottomCount);
-        if (counts[ord] > bottomCount) {
-          dimCount += counts[ord];
-          //System.out.println("    keep");
-          if (reuse == null) {
-            reuse = new FacetResultNode(ord, counts[ord]);
-          } else {
-            reuse.ordinal = ord;
-            reuse.value = counts[ord];
-          }
-          reuse = q.insertWithOverflow(reuse);
-          if (q.size() == request.numResults) {
-            bottomCount = (int) q.top().value;
-            //System.out.println("    new bottom=" + bottomCount);
+        if (counts[ord] > 0) {
+          childCount++;
+          if (counts[ord] > bottomCount) {
+            dimCount += counts[ord];
+            //System.out.println("    keep");
+            if (reuse == null) {
+              reuse = new FacetResultNode(ord, counts[ord]);
+            } else {
+              reuse.ordinal = ord;
+              reuse.value = counts[ord];
+            }
+            reuse = q.insertWithOverflow(reuse);
+            if (q.size() == request.numResults) {
+              bottomCount = (int) q.top().value;
+              //System.out.println("    new bottom=" + bottomCount);
+            }
           }
         }
       }
@@ -295,7 +298,7 @@ public class SortedSetDocValuesAccumulat
       }
       rootNode.subResults = Arrays.asList(childNodes);
       
-      results.add(new FacetResult(request, rootNode, childNodes.length));
+      results.add(new FacetResult(request, rootNode, childCount));
     }
 
     return results;

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyReader.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyReader.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyReader.java Tue May  7 11:20:55 2013
@@ -65,6 +65,31 @@ import org.apache.lucene.store.AlreadyCl
  */
 public abstract class TaxonomyReader implements Closeable {
   
+  /** An iterator over a category's children. */
+  public static class ChildrenIterator {
+    
+    private final int[] siblings;
+    private int child;
+    
+    ChildrenIterator(int child, int[] siblings) {
+      this.siblings = siblings;
+      this.child = child;
+    }
+
+    /**
+     * Return the next child ordinal, or {@link TaxonomyReader#INVALID_ORDINAL}
+     * if no more children.
+     */
+    public int next() {
+      int res = child;
+      if (child != TaxonomyReader.INVALID_ORDINAL) {
+        child = siblings[child];
+      }
+      return res;
+    }
+    
+  }
+  
   /**
    * The root category (the category with the empty path) always has the ordinal
    * 0, to which we give a name ROOT_ORDINAL. {@link #getOrdinal(CategoryPath)}
@@ -167,6 +192,13 @@ public abstract class TaxonomyReader imp
    */
   public abstract ParallelTaxonomyArrays getParallelTaxonomyArrays() throws IOException;
   
+  /** Returns an iterator over the children of the given ordinal. */
+  public ChildrenIterator getChildren(final int ordinal) throws IOException {
+    ParallelTaxonomyArrays arrays = getParallelTaxonomyArrays();
+    int child = ordinal >= 0 ? arrays.children()[ordinal] : INVALID_ORDINAL;
+    return new ChildrenIterator(child, arrays.siblings());
+  }
+  
   /**
    * Retrieve user committed data.
    * 
@@ -214,4 +246,16 @@ public abstract class TaxonomyReader imp
     refCount.incrementAndGet();
   }
 
+  /** Expert: increments the refCount of this TaxonomyReader
+   *  instance only if it has not been closed yet.  Returns
+   *  true on success. */
+  public final boolean tryIncRef() {
+    int count;
+    while ((count = refCount.get()) > 0) {
+      if (refCount.compareAndSet(count, count+1)) {
+        return true;
+      }
+    }
+    return false;
+  }
 }

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java Tue May  7 11:20:55 2013
@@ -9,7 +9,7 @@ import org.apache.lucene.facet.collectio
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
-import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.CorruptIndexException; // javadocs
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.IndexWriter;

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyWriter.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyWriter.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyWriter.java Tue May  7 11:20:55 2013
@@ -29,7 +29,7 @@ import org.apache.lucene.facet.taxonomy.
 import org.apache.lucene.facet.taxonomy.writercache.lru.LruTaxonomyWriterCache;
 import org.apache.lucene.index.AtomicReader;
 import org.apache.lucene.index.AtomicReaderContext;
-import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.CorruptIndexException; // javadocs
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.IndexReader;
@@ -44,7 +44,7 @@ import org.apache.lucene.index.TermsEnum
 import org.apache.lucene.index.TieredMergePolicy;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.LockObtainFailedException;
+import org.apache.lucene.store.LockObtainFailedException; // javadocs
 import org.apache.lucene.store.NativeFSLockFactory;
 import org.apache.lucene.store.SimpleFSLockFactory;
 import org.apache.lucene.util.BytesRef;
@@ -225,7 +225,7 @@ public class DirectoryTaxonomyWriter imp
       }
       // no commit data, or no epoch in it means an old taxonomy, so set its epoch to 1, for lack
       // of a better value.
-      indexEpoch = epochStr == null ? 1 : Long.parseLong(epochStr);
+      indexEpoch = epochStr == null ? 1 : Long.parseLong(epochStr, 16);
     }
     
     if (openMode == OpenMode.CREATE) {
@@ -354,8 +354,7 @@ public class DirectoryTaxonomyWriter imp
   @Override
   public synchronized void close() throws IOException {
     if (!isClosed) {
-      indexWriter.setCommitData(combinedCommitData(indexWriter.getCommitData()));
-      indexWriter.commit();
+      commit();
       doClose();
     }
   }
@@ -616,7 +615,11 @@ public class DirectoryTaxonomyWriter imp
   @Override
   public synchronized void commit() throws IOException {
     ensureOpen();
-    indexWriter.setCommitData(combinedCommitData(indexWriter.getCommitData()));
+    // LUCENE-4972: if we always call setCommitData, we create empty commits
+    String epochStr = indexWriter.getCommitData().get(INDEX_EPOCH);
+    if (epochStr == null || Long.parseLong(epochStr, 16) != indexEpoch) {
+      indexWriter.setCommitData(combinedCommitData(indexWriter.getCommitData()));
+    }
     indexWriter.commit();
   }
 
@@ -626,7 +629,7 @@ public class DirectoryTaxonomyWriter imp
     if (commitData != null) {
       m.putAll(commitData);
     }
-    m.put(INDEX_EPOCH, Long.toString(indexEpoch));
+    m.put(INDEX_EPOCH, Long.toString(indexEpoch, 16));
     return m;
   }
   
@@ -647,7 +650,11 @@ public class DirectoryTaxonomyWriter imp
   @Override
   public synchronized void prepareCommit() throws IOException {
     ensureOpen();
-    indexWriter.setCommitData(combinedCommitData(indexWriter.getCommitData()));
+    // LUCENE-4972: if we always call setCommitData, we create empty commits
+    String epochStr = indexWriter.getCommitData().get(INDEX_EPOCH);
+    if (epochStr == null || Long.parseLong(epochStr, 16) != indexEpoch) {
+      indexWriter.setCommitData(combinedCommitData(indexWriter.getCommitData()));
+    }
     indexWriter.prepareCommit();
   }
   
@@ -991,9 +998,12 @@ public class DirectoryTaxonomyWriter imp
     return indexWriter;
   }
   
-  /** Used by {@link DirectoryTaxonomyReader} to support NRT. */
-  final long getTaxonomyEpoch() {
+  /** Expert: returns current index epoch, if this is a
+   * near-real-time reader.  Used by {@link
+   * DirectoryTaxonomyReader} to support NRT. 
+   *
+   * @lucene.internal */
+  public final long getTaxonomyEpoch() {
     return indexEpoch;
   }
-  
 }

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/util/PrintTaxonomyStats.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/util/PrintTaxonomyStats.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/util/PrintTaxonomyStats.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/java/org/apache/lucene/facet/util/PrintTaxonomyStats.java Tue May  7 11:20:55 2013
@@ -22,8 +22,8 @@ import java.io.IOException;
 import java.io.PrintStream;
 
 import org.apache.lucene.facet.taxonomy.CategoryPath;
-import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader.ChildrenIterator;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
@@ -55,45 +55,40 @@ public class PrintTaxonomyStats {
   }
 
   public static void printStats(TaxonomyReader r, PrintStream out, boolean printTree) throws IOException {
-    ParallelTaxonomyArrays arrays = r.getParallelTaxonomyArrays();
-    //int[] parents = arrays.parents();
-    int[] children = arrays.children();
-    int[] siblings = arrays.siblings();
     out.println(r.getSize() + " total categories.");
 
-    int childOrd = children[TaxonomyReader.ROOT_ORDINAL];
-    while(childOrd != -1) {
-      CategoryPath cp = r.getPath(childOrd);
-      int childOrd2 = children[childOrd];
+    ChildrenIterator it = r.getChildren(TaxonomyReader.ROOT_ORDINAL);
+    int child;
+    while ((child = it.next()) != TaxonomyReader.INVALID_ORDINAL) {
+      ChildrenIterator childrenIt = r.getChildren(child);
       int numImmediateChildren = 0;
-      while(childOrd2 != -1) {
+      while (childrenIt.next() != TaxonomyReader.INVALID_ORDINAL) {
         numImmediateChildren++;
-        childOrd2 = siblings[childOrd2];
       }
-      out.println("/" + cp + ": " + numImmediateChildren + " immediate children; " + (1+countAllChildren(r, childOrd, children, siblings)) + " total categories");
+      CategoryPath cp = r.getPath(child);
+      out.println("/" + cp + ": " + numImmediateChildren + " immediate children; " + (1+countAllChildren(r, child)) + " total categories");
       if (printTree) {
-        printAllChildren(out, r, childOrd, children, siblings, "  ", 1);
+        printAllChildren(out, r, child, "  ", 1);
       }
-      childOrd = siblings[childOrd];
     }
   }
 
-  private static int countAllChildren(TaxonomyReader r, int ord, int[] children, int[] siblings) throws IOException {
-    int childOrd = children[ord];
+  private static int countAllChildren(TaxonomyReader r, int ord) throws IOException {
     int count = 0;
-    while(childOrd != -1) {
-      count += 1+countAllChildren(r, childOrd, children, siblings);
-      childOrd = siblings[childOrd];
+    ChildrenIterator it = r.getChildren(ord);
+    int child;
+    while ((child = it.next()) != TaxonomyReader.INVALID_ORDINAL) {
+      count += 1 + countAllChildren(r, child);
     }
     return count;
   }
 
-  private static void printAllChildren(PrintStream out, TaxonomyReader r, int ord, int[] children, int[] siblings, String indent, int depth) throws IOException {
-    int childOrd = children[ord];
-    while(childOrd != -1) {
-      out.println(indent + "/" + r.getPath(childOrd).components[depth]);
-      printAllChildren(out, r, childOrd, children, siblings, indent + "  ", depth+1);
-      childOrd = siblings[childOrd];
+  private static void printAllChildren(PrintStream out, TaxonomyReader r, int ord, String indent, int depth) throws IOException {
+    ChildrenIterator it = r.getChildren(ord);
+    int child;
+    while ((child = it.next()) != TaxonomyReader.INVALID_ORDINAL) {
+      out.println(indent + "/" + r.getPath(child).components[depth]);
+      printAllChildren(out, r, child, indent + "  ", depth+1);
     }
   }
 }

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/sampling/OversampleWithDepthTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/sampling/OversampleWithDepthTest.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/sampling/OversampleWithDepthTest.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/sampling/OversampleWithDepthTest.java Tue May  7 11:20:55 2013
@@ -119,7 +119,7 @@ public class OversampleWithDepthTest ext
     StandardFacetsAccumulator sfa = new SamplingAccumulator(sampler, fsp, r, tr);
     FacetsCollector fcWithSampling = FacetsCollector.create(sfa);
     
-    IndexSearcher s = new IndexSearcher(r);
+    IndexSearcher s = newSearcher(r);
     s.search(new MatchAllDocsQuery(), fcWithSampling);
     
     // there's only one expected result, return just it.

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/CountingFacetsAggregatorTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/CountingFacetsAggregatorTest.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/CountingFacetsAggregatorTest.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/CountingFacetsAggregatorTest.java Tue May  7 11:20:55 2013
@@ -292,7 +292,7 @@ public class CountingFacetsAggregatorTes
     // test the collector w/ FacetRequests and different numResults
     DirectoryReader indexReader = DirectoryReader.open(indexDir);
     TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
-    IndexSearcher searcher = new IndexSearcher(indexReader);
+    IndexSearcher searcher = newSearcher(indexReader);
     
     FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(CP_A, NUM_CHILDREN_CP_A), 
         new CountFacetRequest(CP_B, NUM_CHILDREN_CP_B));
@@ -317,7 +317,7 @@ public class CountingFacetsAggregatorTes
   public void testAllCounts() throws Exception {
     DirectoryReader indexReader = DirectoryReader.open(indexDir);
     TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
-    IndexSearcher searcher = new IndexSearcher(indexReader);
+    IndexSearcher searcher = newSearcher(indexReader);
     
     FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(CP_A, NUM_CHILDREN_CP_A), 
         new CountFacetRequest(CP_B, NUM_CHILDREN_CP_B));
@@ -349,7 +349,7 @@ public class CountingFacetsAggregatorTes
   public void testBigNumResults() throws Exception {
     DirectoryReader indexReader = DirectoryReader.open(indexDir);
     TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
-    IndexSearcher searcher = new IndexSearcher(indexReader);
+    IndexSearcher searcher = newSearcher(indexReader);
     
     FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(CP_A, Integer.MAX_VALUE), 
         new CountFacetRequest(CP_B, Integer.MAX_VALUE));
@@ -373,7 +373,7 @@ public class CountingFacetsAggregatorTes
   public void testNoParents() throws Exception {
     DirectoryReader indexReader = DirectoryReader.open(indexDir);
     TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
-    IndexSearcher searcher = new IndexSearcher(indexReader);
+    IndexSearcher searcher = newSearcher(indexReader);
     FacetSearchParams fsp = new FacetSearchParams(fip, new CountFacetRequest(CP_C, NUM_CHILDREN_CP_C), 
         new CountFacetRequest(CP_D, NUM_CHILDREN_CP_D));
     FacetsCollector fc = FacetsCollector.create(randomAccumulator(fsp, indexReader, taxoReader));

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/DrillDownQueryTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/DrillDownQueryTest.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/DrillDownQueryTest.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/DrillDownQueryTest.java Tue May  7 11:20:55 2013
@@ -43,6 +43,7 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryUtils;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
@@ -148,6 +149,7 @@ public class DrillDownQueryTest extends 
     // Making sure the query yields 25 documents with the facet "a"
     DrillDownQuery q = new DrillDownQuery(defaultParams);
     q.add(new CategoryPath("a"));
+    QueryUtils.check(q);
     TopDocs docs = searcher.search(q, 100);
     assertEquals(25, docs.totalHits);
     

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestDemoFacets.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestDemoFacets.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestDemoFacets.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestDemoFacets.java Tue May  7 11:20:55 2013
@@ -203,7 +203,7 @@ public class TestDemoFacets extends Face
     FacetSearchParams fsp = new FacetSearchParams(fip,
                                                   new CountFacetRequest(new CategoryPath("a", '/'), 10));
 
-    // Aggregatses the facet counts:
+    // Aggregate the facet counts:
     FacetsCollector c = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);
 
     // MatchAllDocsQuery is for "browsing" (counts facets
@@ -215,6 +215,11 @@ public class TestDemoFacets extends Face
     assertEquals(1, results.size());
     assertEquals(1, (int) results.get(0).getFacetResultNode().value);
 
+    // LUCENE-4913:
+    for(FacetResultNode childNode : results.get(0).getFacetResultNode().subResults) {
+      assertTrue(childNode.ordinal != 0);
+    }
+
     searcher.getIndexReader().close();
     taxoReader.close();
     dir.close();

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestDrillSideways.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestDrillSideways.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestDrillSideways.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestDrillSideways.java Tue May  7 11:20:55 2013
@@ -65,8 +65,8 @@ import org.apache.lucene.store.Directory
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.InPlaceMergeSorter;
 import org.apache.lucene.util.InfoStream;
-import org.apache.lucene.util.SorterTemplate;
 import org.apache.lucene.util._TestUtil;
 
 public class TestDrillSideways extends FacetTestCase {
@@ -120,12 +120,14 @@ public class TestDrillSideways extends F
         new CountFacetRequest(new CategoryPath("Publish Date"), 10), 
         new CountFacetRequest(new CategoryPath("Author"), 10));
 
+    DrillSideways ds = new DrillSideways(searcher, taxoReader);
+
     // Simple case: drill-down on a single field; in this
     // case the drill-sideways + drill-down counts ==
     // drill-down of just the query: 
     DrillDownQuery ddq = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
     ddq.add(new CategoryPath("Author", "Lisa"));
-    DrillSidewaysResult r = new DrillSideways(searcher, taxoReader).search(null, ddq, 10, fsp);
+    DrillSidewaysResult r = ds.search(null, ddq, 10, fsp);
 
     assertEquals(2, r.hits.totalHits);
     assertEquals(2, r.facetResults.size());
@@ -143,23 +145,26 @@ public class TestDrillSideways extends F
     // just the query:
     ddq = new DrillDownQuery(fsp.indexingParams);
     ddq.add(new CategoryPath("Author", "Lisa"));
-    r = new DrillSideways(searcher, taxoReader).search(null, ddq, 10, fsp);
+    r = ds.search(null, ddq, 10, fsp);
 
     assertEquals(2, r.hits.totalHits);
     assertEquals(2, r.facetResults.size());
     // Publish Date is only drill-down, and Lisa published
     // one in 2012 and one in 2010:
     assertEquals("Publish Date: 2012=1 2010=1", toString(r.facetResults.get(0)));
+    assertEquals(2, r.facetResults.get(0).getNumValidDescendants());
+
     // Author is drill-sideways + drill-down: Lisa
     // (drill-down) published twice, and Frank/Susan/Bob
     // published once:
     assertEquals("Author: Lisa=2 Frank=1 Susan=1 Bob=1", toString(r.facetResults.get(1)));
+    assertEquals(4, r.facetResults.get(1).getNumValidDescendants());
 
     // Another simple case: drill-down on a single field
     // but OR of two values
     ddq = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
     ddq.add(new CategoryPath("Author", "Lisa"), new CategoryPath("Author", "Bob"));
-    r = new DrillSideways(searcher, taxoReader).search(null, ddq, 10, fsp);
+    r = ds.search(null, ddq, 10, fsp);
     assertEquals(3, r.hits.totalHits);
     assertEquals(2, r.facetResults.size());
     // Publish Date is only drill-down: Lisa and Bob
@@ -174,7 +179,7 @@ public class TestDrillSideways extends F
     ddq = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
     ddq.add(new CategoryPath("Author", "Lisa"));
     ddq.add(new CategoryPath("Publish Date", "2010"));
-    r = new DrillSideways(searcher, taxoReader).search(null, ddq, 10, fsp);
+    r = ds.search(null, ddq, 10, fsp);
     assertEquals(1, r.hits.totalHits);
     assertEquals(2, r.facetResults.size());
     // Publish Date is drill-sideways + drill-down: Lisa
@@ -192,7 +197,7 @@ public class TestDrillSideways extends F
     ddq.add(new CategoryPath("Author", "Lisa"),
             new CategoryPath("Author", "Bob"));
     ddq.add(new CategoryPath("Publish Date", "2010"));
-    r = new DrillSideways(searcher, taxoReader).search(null, ddq, 10, fsp);
+    r = ds.search(null, ddq, 10, fsp);
     assertEquals(2, r.hits.totalHits);
     assertEquals(2, r.facetResults.size());
     // Publish Date is both drill-sideways + drill-down:
@@ -208,7 +213,7 @@ public class TestDrillSideways extends F
     fsp = new FacetSearchParams(
         new CountFacetRequest(new CategoryPath("Publish Date"), 10), 
         new CountFacetRequest(new CategoryPath("Foobar"), 10));
-    r = new DrillSideways(searcher, taxoReader).search(null, ddq, 10, fsp);
+    r = ds.search(null, ddq, 10, fsp);
     assertEquals(0, r.hits.totalHits);
     assertEquals(2, r.facetResults.size());
     assertEquals("Publish Date:", toString(r.facetResults.get(0)));
@@ -221,7 +226,7 @@ public class TestDrillSideways extends F
     fsp = new FacetSearchParams(
         new CountFacetRequest(new CategoryPath("Publish Date"), 10), 
         new CountFacetRequest(new CategoryPath("Author"), 10));
-    r = new DrillSideways(searcher, taxoReader).search(null, ddq, 10, fsp);
+    r = ds.search(null, ddq, 10, fsp);
     assertEquals(2, r.hits.totalHits);
     assertEquals(2, r.facetResults.size());
     // Publish Date is only drill-down, and Lisa published
@@ -232,13 +237,27 @@ public class TestDrillSideways extends F
     // published once:
     assertEquals("Author: Lisa=2 Frank=1 Susan=1 Bob=1", toString(r.facetResults.get(1)));
 
+    // LUCENE-4915: test drilling down on a dimension but
+    // NOT facet counting it:
+    ddq = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
+    ddq.add(new CategoryPath("Author", "Lisa"),
+            new CategoryPath("Author", "Tom"));
+    fsp = new FacetSearchParams(
+              new CountFacetRequest(new CategoryPath("Publish Date"), 10));
+    r = ds.search(null, ddq, 10, fsp);
+    assertEquals(2, r.hits.totalHits);
+    assertEquals(1, r.facetResults.size());
+    // Publish Date is only drill-down, and Lisa published
+    // one in 2012 and one in 2010:
+    assertEquals("Publish Date: 2012=1 2010=1", toString(r.facetResults.get(0)));
+
     // Test main query gets null scorer:
     fsp = new FacetSearchParams(
         new CountFacetRequest(new CategoryPath("Publish Date"), 10), 
         new CountFacetRequest(new CategoryPath("Author"), 10));
     ddq = new DrillDownQuery(fsp.indexingParams, new TermQuery(new Term("foobar", "baz")));
     ddq.add(new CategoryPath("Author", "Lisa"));
-    r = new DrillSideways(searcher, taxoReader).search(null, ddq, 10, fsp);
+    r = ds.search(null, ddq, 10, fsp);
 
     assertEquals(0, r.hits.totalHits);
     assertEquals(2, r.facetResults.size());
@@ -589,25 +608,43 @@ public class TestDrillSideways extends F
     TaxonomyReader tr = new DirectoryTaxonomyReader(tw);
     tw.close();
 
-    IndexSearcher s = new IndexSearcher(r);
+    IndexSearcher s = newSearcher(r);
 
     int numIters = atLeast(10);
 
     for(int iter=0;iter<numIters;iter++) {
-      List<FacetRequest> requests = new ArrayList<FacetRequest>();
-      for(int i=0;i<numDims;i++) {
-        requests.add(new CountFacetRequest(new CategoryPath("dim" + i), dimValues[numDims-1].length));
-      }
 
-      FacetSearchParams fsp = new FacetSearchParams(requests);
       String contentToken = random().nextInt(30) == 17 ? null : randomContentToken(true);
       int numDrillDown = _TestUtil.nextInt(random(), 1, Math.min(4, numDims));
-      String[][] drillDowns = new String[numDims][];
       if (VERBOSE) {
         System.out.println("\nTEST: iter=" + iter + " baseQuery=" + contentToken + " numDrillDown=" + numDrillDown + " useSortedSetDV=" + doUseDV);
       }
 
+      List<FacetRequest> requests = new ArrayList<FacetRequest>();
+      while(true) {
+        for(int i=0;i<numDims;i++) {
+          // LUCENE-4915: sometimes don't request facet
+          // counts on the dim(s) we drill down on
+          if (random().nextDouble() <= 0.9) {
+            if (VERBOSE) {
+              System.out.println("  do facet request on dim=" + i);
+            }
+            requests.add(new CountFacetRequest(new CategoryPath("dim" + i), dimValues[numDims-1].length));
+          } else {
+            if (VERBOSE) {
+              System.out.println("  skip facet request on dim=" + i);
+            }
+          }
+        }
+        if (!requests.isEmpty()) {
+          break;
+        }
+      }
+      FacetSearchParams fsp = new FacetSearchParams(requests);
+      String[][] drillDowns = new String[numDims][];
+
       int count = 0;
+      boolean anyMultiValuedDrillDowns = false;
       while (count < numDrillDown) {
         int dim = random().nextInt(numDims);
         if (drillDowns[dim] == null) {
@@ -617,6 +654,7 @@ public class TestDrillSideways extends F
           } else {
             int orCount = _TestUtil.nextInt(random(), 1, Math.min(5, dimValues[dim].length));
             drillDowns[dim] = new String[orCount];
+            anyMultiValuedDrillDowns |= orCount > 1;
             for(int i=0;i<orCount;i++) {
               while (true) {
                 String value = dimValues[dim][random().nextInt(dimValues[dim].length)];
@@ -715,7 +753,23 @@ public class TestDrillSideways extends F
                              }
                            }, fsp);
 
-      SimpleFacetResult expected = slowDrillSidewaysSearch(s, docs, contentToken, drillDowns, dimValues, filter);
+      // Also separately verify that DS respects the
+      // scoreSubDocsAtOnce method, to ensure that all
+      // subScorers are on the same docID:
+      if (!anyMultiValuedDrillDowns) {
+        // Can only do this test when there are no OR'd
+        // drill-down values, beacuse in that case it's
+        // easily possible for one of the DD terms to be on
+        // a future docID:
+        new DrillSideways(s, tr) {
+          @Override
+          protected boolean scoreSubDocsAtOnce() {
+            return true;
+          }
+        }.search(ddq, new AssertingSubDocsAtOnceCollector(), fsp);
+      }
+
+      SimpleFacetResult expected = slowDrillSidewaysSearch(s, requests, docs, contentToken, drillDowns, dimValues, filter);
 
       Sort sort = new Sort(new SortField("id", SortField.Type.STRING));
       DrillSideways ds;
@@ -735,6 +789,7 @@ public class TestDrillSideways extends F
         ds = new DrillSideways(s, tr);
       }
 
+      // Retrieve all facets:
       DrillSidewaysResult actual = ds.search(ddq, filter, null, numDocs, sort, true, true, fsp);
 
       TopDocs hits = s.search(baseQuery, numDocs);
@@ -742,18 +797,24 @@ public class TestDrillSideways extends F
       for(ScoreDoc sd : hits.scoreDocs) {
         scores.put(s.doc(sd.doc).get("id"), sd.score);
       }
-      verifyEquals(dimValues, s, expected, actual, scores, -1, doUseDV);
+      if (VERBOSE) {
+        System.out.println("  verify all facets");
+      }
+      verifyEquals(requests, dimValues, s, expected, actual, scores, -1, doUseDV);
 
-      // Make sure topN works:
+      // Retrieve topN facets:
       int topN = _TestUtil.nextInt(random(), 1, 20);
 
-      requests = new ArrayList<FacetRequest>();
-      for(int i=0;i<numDims;i++) {
-        requests.add(new CountFacetRequest(new CategoryPath("dim" + i), topN));
+      List<FacetRequest> newRequests = new ArrayList<FacetRequest>();
+      for(FacetRequest oldRequest : requests) {
+        newRequests.add(new CountFacetRequest(oldRequest.categoryPath, topN));
       }
-      fsp = new FacetSearchParams(requests);
+      fsp = new FacetSearchParams(newRequests);
       actual = ds.search(ddq, filter, null, numDocs, sort, true, true, fsp);
-      verifyEquals(dimValues, s, expected, actual, scores, topN, doUseDV);
+      if (VERBOSE) {
+        System.out.println("  verify topN=" + topN);
+      }
+      verifyEquals(newRequests, dimValues, s, expected, actual, scores, topN, doUseDV);
 
       // Make sure drill down doesn't change score:
       TopDocs ddqHits = s.search(ddq, filter, numDocs);
@@ -803,6 +864,7 @@ public class TestDrillSideways extends F
   private static class SimpleFacetResult {
     List<Doc> hits;
     int[][] counts;
+    int[] uniqueCounts;
   }
   
   private int[] getTopNOrds(final int[] counts, final String[] values, int topN) {
@@ -813,9 +875,7 @@ public class TestDrillSideways extends F
 
     // Naive (on purpose, to reduce bug in tester/gold):
     // sort all ids, then return top N slice:
-    new SorterTemplate() {
-
-      private int pivot;
+    new InPlaceMergeSorter() {
 
       @Override
       protected void swap(int i, int j) {
@@ -839,26 +899,7 @@ public class TestDrillSideways extends F
         }
       }
 
-      @Override
-      protected void setPivot(int i) {
-        pivot = ids[i];
-      }
-
-      @Override
-      protected int comparePivot(int j) {
-        int counti = counts[pivot];
-        int countj = counts[ids[j]];
-        // Sort by count descending...
-        if (counti > countj) {
-          return -1;
-        } else if (counti < countj) {
-          return 1;
-        } else {
-          // ... then by ord ascending:
-          return new BytesRef(values[pivot]).compareTo(new BytesRef(values[ids[j]]));
-        }
-      }
-    }.mergeSort(0, ids.length-1);
+    }.sort(0, ids.length);
 
     if (topN > ids.length) {
       topN = ids.length;
@@ -877,7 +918,8 @@ public class TestDrillSideways extends F
     return topNIDs;
   }
 
-  private SimpleFacetResult slowDrillSidewaysSearch(IndexSearcher s, List<Doc> docs, String contentToken, String[][] drillDowns,
+  private SimpleFacetResult slowDrillSidewaysSearch(IndexSearcher s, List<FacetRequest> requests, List<Doc> docs,
+                                                    String contentToken, String[][] drillDowns,
                                                     String[][] dimValues, Filter onlyEven) throws Exception {
     int numDims = dimValues.length;
 
@@ -953,18 +995,27 @@ public class TestDrillSideways extends F
     SimpleFacetResult res = new SimpleFacetResult();
     res.hits = hits;
     res.counts = new int[numDims][];
-    for(int dim=0;dim<numDims;dim++) {
+    res.uniqueCounts = new int[numDims];
+    for (int i = 0; i < requests.size(); i++) {
+      int dim = Integer.parseInt(requests.get(i).categoryPath.components[0].substring(3));
       if (drillDowns[dim] != null) {
         res.counts[dim] = drillSidewaysCounts[dim].counts[dim];
       } else {
         res.counts[dim] = drillDownCounts.counts[dim];
       }
+      int uniqueCount = 0;
+      for (int j = 0; j < res.counts[dim].length; j++) {
+        if (res.counts[dim][j] != 0) {
+          uniqueCount++;
+        }
+      }
+      res.uniqueCounts[dim] = uniqueCount;
     }
 
     return res;
   }
 
-  void verifyEquals(String[][] dimValues, IndexSearcher s, SimpleFacetResult expected,
+  void verifyEquals(List<FacetRequest> requests, String[][] dimValues, IndexSearcher s, SimpleFacetResult expected,
                     DrillSidewaysResult actual, Map<String,Float> scores, int topN, boolean isSortedSetDV) throws Exception {
     if (VERBOSE) {
       System.out.println("  verify totHits=" + expected.hits.size());
@@ -981,9 +1032,28 @@ public class TestDrillSideways extends F
       assertEquals(scores.get(expected.hits.get(i).id), actual.hits.scoreDocs[i].score, 0.0f);
     }
 
-    assertEquals(expected.counts.length, actual.facetResults.size());
+    int numExpected = 0;
     for(int dim=0;dim<expected.counts.length;dim++) {
-      FacetResult fr = actual.facetResults.get(dim);
+      if (expected.counts[dim] != null) {
+        numExpected++;
+      }
+    }
+
+    assertEquals(numExpected, actual.facetResults.size());
+
+    for(int dim=0;dim<expected.counts.length;dim++) {
+      if (expected.counts[dim] == null) {
+        continue;
+      }
+      int idx = -1;
+      for(int i=0;i<requests.size();i++) {
+        if (Integer.parseInt(requests.get(i).categoryPath.components[0].substring(3)) == dim) {
+          idx = i;
+          break;
+        }
+      }
+      assert idx != -1;
+      FacetResult fr = actual.facetResults.get(idx);
       List<FacetResultNode> subResults = fr.getFacetResultNode().subResults;
       if (VERBOSE) {
         System.out.println("    dim" + dim);
@@ -991,7 +1061,7 @@ public class TestDrillSideways extends F
       }
 
       Map<String,Integer> actualValues = new HashMap<String,Integer>();
-      int idx = 0;
+      idx = 0;
       for(FacetResultNode childNode : subResults) {
         actualValues.put(childNode.label.components[1], (int) childNode.value);
         if (VERBOSE) {
@@ -1055,6 +1125,8 @@ public class TestDrillSideways extends F
         }
         assertEquals(setCount, actualValues.size());
       }
+
+      assertEquals("dim=" + dim, expected.uniqueCounts[dim], fr.getNumValidDescendants());
     }
   }
 

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetsCollector.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetsCollector.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetsCollector.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestFacetsCollector.java Tue May  7 11:20:55 2013
@@ -1,6 +1,7 @@
 package org.apache.lucene.facet.search;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -16,15 +17,18 @@ import org.apache.lucene.facet.params.Ca
 import org.apache.lucene.facet.params.FacetIndexingParams;
 import org.apache.lucene.facet.params.FacetSearchParams;
 import org.apache.lucene.facet.params.PerDimensionIndexingParams;
+import org.apache.lucene.facet.search.FacetRequest.ResultMode;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.MultiCollector;
+import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TopScoreDocCollector;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.IOUtils;
@@ -60,7 +64,9 @@ public class TestFacetsCollector extends
     FacetFields facetFields = new FacetFields(taxonomyWriter);
     for(int i = atLeast(30); i > 0; --i) {
       Document doc = new Document();
-      doc.add(new StringField("f", "v", Store.NO));
+      if (random().nextBoolean()) { // don't match all documents
+        doc.add(new StringField("f", "v", Store.NO));
+      }
       facetFields.addFields(doc, Collections.singletonList(new CategoryPath("a")));
       iw.addDocument(doc);
     }
@@ -80,12 +86,16 @@ public class TestFacetsCollector extends
     };
     FacetsCollector fc = FacetsCollector.create(fa);
     TopScoreDocCollector topDocs = TopScoreDocCollector.create(10, false);
-    new IndexSearcher(r).search(new MatchAllDocsQuery(), MultiCollector.wrap(fc, topDocs));
+    ConstantScoreQuery csq = new ConstantScoreQuery(new MatchAllDocsQuery());
+    csq.setBoost(2.0f);
+    
+    newSearcher(r).search(csq, MultiCollector.wrap(fc, topDocs));
     
     List<FacetResult> res = fc.getFacetResults();
-    double value = res.get(0).getFacetResultNode().value;
-    double expected = topDocs.topDocs().getMaxScore() * r.numDocs();
-    assertEquals(expected, value, 1E-10);
+    float value = (float) res.get(0).getFacetResultNode().value;
+    TopDocs td = topDocs.topDocs();
+    int expected = (int) (td.getMaxScore() * td.totalHits);
+    assertEquals(expected, (int) value);
     
     IOUtils.close(taxo, taxoDir, r, indexDir);
   }
@@ -120,7 +130,7 @@ public class TestFacetsCollector extends
         new CountFacetRequest(new CategoryPath("a"), 10), 
         new CountFacetRequest(new CategoryPath("b"), 10));
     FacetsCollector fc = FacetsCollector.create(sParams, r, taxo);
-    new IndexSearcher(r).search(new MatchAllDocsQuery(), fc);
+    newSearcher(r).search(new MatchAllDocsQuery(), fc);
     
     for (FacetResult res : fc.getFacetResults()) {
       assertEquals("unexpected count for " + res, r.maxDoc(), (int) res.getFacetResultNode().value);
@@ -172,7 +182,7 @@ public class TestFacetsCollector extends
     
     FacetsCollector fc = FacetsCollector.create(fa);
     TopScoreDocCollector topDocs = TopScoreDocCollector.create(10, false);
-    new IndexSearcher(r).search(new MatchAllDocsQuery(), MultiCollector.wrap(fc, topDocs));
+    newSearcher(r).search(new MatchAllDocsQuery(), MultiCollector.wrap(fc, topDocs));
     
     List<FacetResult> facetResults = fc.getFacetResults();
     FacetResult fresA = facetResults.get(0);
@@ -184,5 +194,195 @@ public class TestFacetsCollector extends
     
     IOUtils.close(taxo, taxoDir, r, indexDir);
   }
+  
+  @Test
+  public void testCountRoot() throws Exception {
+    // LUCENE-4882: FacetsAccumulator threw NPE if a FacetRequest was defined on CP.EMPTY
+    Directory indexDir = newDirectory();
+    Directory taxoDir = newDirectory();
+    
+    TaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxoDir);
+    IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    
+    FacetFields facetFields = new FacetFields(taxonomyWriter);
+    for(int i = atLeast(30); i > 0; --i) {
+      Document doc = new Document();
+      facetFields.addFields(doc, Arrays.asList(new CategoryPath("a"), new CategoryPath("b")));
+      iw.addDocument(doc);
+    }
+    
+    taxonomyWriter.close();
+    iw.close();
+    
+    DirectoryReader r = DirectoryReader.open(indexDir);
+    DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
+    
+    FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(CategoryPath.EMPTY, 10));
+    
+    final FacetsAccumulator fa = random().nextBoolean() ? new FacetsAccumulator(fsp, r, taxo) : new StandardFacetsAccumulator(fsp, r, taxo);
+    FacetsCollector fc = FacetsCollector.create(fa);
+    newSearcher(r).search(new MatchAllDocsQuery(), fc);
+    
+    FacetResult res = fc.getFacetResults().get(0);
+    for (FacetResultNode node : res.getFacetResultNode().subResults) {
+      assertEquals(r.numDocs(), (int) node.value);
+    }
+    
+    IOUtils.close(taxo, taxoDir, r, indexDir);
+  }
 
+  @Test
+  public void testGetFacetResultsTwice() throws Exception {
+    // LUCENE-4893: counts were multiplied as many times as getFacetResults was called.
+    Directory indexDir = newDirectory();
+    Directory taxoDir = newDirectory();
+    
+    TaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxoDir);
+    IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    
+    FacetFields facetFields = new FacetFields(taxonomyWriter);
+    Document doc = new Document();
+    facetFields.addFields(doc, Arrays.asList(new CategoryPath("a/1", '/'), new CategoryPath("b/1", '/')));
+    iw.addDocument(doc);
+    taxonomyWriter.close();
+    iw.close();
+    
+    DirectoryReader r = DirectoryReader.open(indexDir);
+    DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
+    
+    FacetSearchParams fsp = new FacetSearchParams(
+        new CountFacetRequest(new CategoryPath("a"), 10), 
+        new CountFacetRequest(new CategoryPath("b"), 10));
+    final FacetsAccumulator fa = random().nextBoolean() ? new FacetsAccumulator(fsp, r, taxo) : new StandardFacetsAccumulator(fsp, r, taxo);
+    final FacetsCollector fc = FacetsCollector.create(fa);
+    newSearcher(r).search(new MatchAllDocsQuery(), fc);
+    
+    List<FacetResult> res1 = fc.getFacetResults();
+    List<FacetResult> res2 = fc.getFacetResults();
+    assertSame("calling getFacetResults twice should return the exact same result", res1, res2);
+    
+    IOUtils.close(taxo, taxoDir, r, indexDir);
+  }
+  
+  @Test
+  public void testReset() throws Exception {
+    Directory indexDir = newDirectory();
+    Directory taxoDir = newDirectory();
+    
+    TaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxoDir);
+    IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    
+    FacetFields facetFields = new FacetFields(taxonomyWriter);
+    Document doc = new Document();
+    facetFields.addFields(doc, Arrays.asList(new CategoryPath("a/1", '/'), new CategoryPath("b/1", '/')));
+    iw.addDocument(doc);
+    taxonomyWriter.close();
+    iw.close();
+    
+    DirectoryReader r = DirectoryReader.open(indexDir);
+    DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
+    
+    FacetSearchParams fsp = new FacetSearchParams(
+        new CountFacetRequest(new CategoryPath("a"), 10), 
+        new CountFacetRequest(new CategoryPath("b"), 10));
+    final FacetsAccumulator fa = random().nextBoolean() ? new FacetsAccumulator(fsp, r, taxo) : new StandardFacetsAccumulator(fsp, r, taxo);
+    final FacetsCollector fc = FacetsCollector.create(fa);
+    // this should populate the cached results, but doing search should clear the cache
+    fc.getFacetResults();
+    newSearcher(r).search(new MatchAllDocsQuery(), fc);
+    
+    List<FacetResult> res1 = fc.getFacetResults();
+    // verify that we didn't get the cached result
+    assertEquals(2, res1.size());
+    for (FacetResult res : res1) {
+      assertEquals(1, res.getFacetResultNode().subResults.size());
+      assertEquals(1, (int) res.getFacetResultNode().subResults.get(0).value);
+    }
+    fc.reset();
+    List<FacetResult> res2 = fc.getFacetResults();
+    assertNotSame("reset() should clear the cached results", res1, res2);
+    
+    IOUtils.close(taxo, taxoDir, r, indexDir);
+  }
+  
+  @Test
+  public void testParentOrdinal() throws Exception {
+    // LUCENE-4913: root ordinal was always 0 when all children were requested
+    Directory indexDir = newDirectory();
+    Directory taxoDir = newDirectory();
+    
+    TaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxoDir);
+    IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    
+    FacetFields facetFields = new FacetFields(taxonomyWriter);
+    Document doc = new Document();
+    facetFields.addFields(doc, Arrays.asList(new CategoryPath("a/1", '/')));
+    iw.addDocument(doc);
+    taxonomyWriter.close();
+    iw.close();
+    
+    DirectoryReader r = DirectoryReader.open(indexDir);
+    DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
+
+    // assert IntFacetResultHandler
+    FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new CategoryPath("a"), 10));
+    FacetsAccumulator fa = random().nextBoolean() ? new FacetsAccumulator(fsp, r, taxo) : new StandardFacetsAccumulator(fsp, r, taxo);
+    FacetsCollector fc = FacetsCollector.create(fa);
+    newSearcher(r).search(new MatchAllDocsQuery(), fc);
+    assertTrue("invalid ordinal for child node: 0", 0 != fc.getFacetResults().get(0).getFacetResultNode().subResults.get(0).ordinal);
+    
+    // assert IntFacetResultHandler
+    fsp = new FacetSearchParams(new SumScoreFacetRequest(new CategoryPath("a"), 10));
+    if (random().nextBoolean()) {
+      fa = new FacetsAccumulator(fsp, r, taxo) {
+        @Override
+        public FacetsAggregator getAggregator() {
+          return new SumScoreFacetsAggregator();
+        }
+      };
+    } else {
+      fa = new StandardFacetsAccumulator(fsp, r, taxo);
+    }
+    fc = FacetsCollector.create(fa);
+    newSearcher(r).search(new MatchAllDocsQuery(), fc);
+    assertTrue("invalid ordinal for child node: 0", 0 != fc.getFacetResults().get(0).getFacetResultNode().subResults.get(0).ordinal);
+    
+    IOUtils.close(taxo, taxoDir, r, indexDir);
+  }
+  
+  @Test
+  public void testNumValidDescendants() throws Exception {
+    // LUCENE-4885: FacetResult.numValidDescendants was not set properly by FacetsAccumulator
+    Directory indexDir = newDirectory();
+    Directory taxoDir = newDirectory();
+    
+    TaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxoDir);
+    IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    
+    FacetFields facetFields = new FacetFields(taxonomyWriter);
+    for (int i = 0; i < 10; i++) {
+      Document doc = new Document();
+      facetFields.addFields(doc, Arrays.asList(new CategoryPath("a", Integer.toString(i))));
+      iw.addDocument(doc);
+    }
+    
+    taxonomyWriter.close();
+    iw.close();
+    
+    DirectoryReader r = DirectoryReader.open(indexDir);
+    DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
+    
+    CountFacetRequest cfr = new CountFacetRequest(new CategoryPath("a"), 2);
+    cfr.setResultMode(random().nextBoolean() ? ResultMode.GLOBAL_FLAT : ResultMode.PER_NODE_IN_TREE);
+    FacetSearchParams fsp = new FacetSearchParams(cfr);
+    final FacetsAccumulator fa = random().nextBoolean() ? new FacetsAccumulator(fsp, r, taxo) : new StandardFacetsAccumulator(fsp, r, taxo);
+    FacetsCollector fc = FacetsCollector.create(fa);
+    newSearcher(r).search(new MatchAllDocsQuery(), fc);
+    
+    FacetResult res = fc.getFacetResults().get(0);
+    assertEquals(10, res.getNumValidDescendants());
+    
+    IOUtils.close(taxo, taxoDir, r, indexDir);
+  }
+  
 }

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestStandardFacetsAccumulator.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestStandardFacetsAccumulator.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestStandardFacetsAccumulator.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/search/TestStandardFacetsAccumulator.java Tue May  7 11:20:55 2013
@@ -93,7 +93,7 @@ public class TestStandardFacetsAccumulat
 
     DirectoryReader indexReader = DirectoryReader.open(indexDir);
     TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
-    IndexSearcher indexSearcher = new IndexSearcher(indexReader);
+    IndexSearcher indexSearcher = newSearcher(indexReader);
     
     // search for "f:a", only segments 1 and 3 should match results
     Query q = new TermQuery(new Term("f", "a"));

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java Tue May  7 11:20:55 2013
@@ -1,12 +1,16 @@
 package org.apache.lucene.facet.taxonomy.directory;
 
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
 import java.util.Random;
+import java.util.Set;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader.ChildrenIterator;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
@@ -461,5 +465,69 @@ public class TestDirectoryTaxonomyReader
     
     src.close();
   }
+
+  @Test
+  public void testGetChildren() throws Exception {
+    Directory dir = newDirectory();
+    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(dir);
+    int numCategories = atLeast(10);
+    int numA = 0, numB = 0;
+    Random random = random();
+    for (int i = 0; i < numCategories; i++) {
+      if (random.nextBoolean()) {
+        taxoWriter.addCategory(new CategoryPath("a", Integer.toString(i)));
+        ++numA;
+      } else {
+        taxoWriter.addCategory(new CategoryPath("b", Integer.toString(i)));
+        ++numB;
+      }
+    }
+    // add category with no children
+    taxoWriter.addCategory(new CategoryPath("c"));
+    taxoWriter.close();
+    
+    DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(dir);
+
+    // non existing category
+    ChildrenIterator it = taxoReader.getChildren(taxoReader.getOrdinal(new CategoryPath("invalid")));
+    assertEquals(TaxonomyReader.INVALID_ORDINAL, it.next());
+
+    // a category with no children
+    it = taxoReader.getChildren(taxoReader.getOrdinal(new CategoryPath("c")));
+    assertEquals(TaxonomyReader.INVALID_ORDINAL, it.next());
+
+    // arbitrary negative ordinal
+    it = taxoReader.getChildren(-2);
+    assertEquals(TaxonomyReader.INVALID_ORDINAL, it.next());
+
+    // root's children
+    Set<String> roots = new HashSet<String>(Arrays.asList("a", "b", "c"));
+    it = taxoReader.getChildren(TaxonomyReader.ROOT_ORDINAL);
+    while (!roots.isEmpty()) {
+      CategoryPath root = taxoReader.getPath(it.next());
+      assertEquals(1, root.length);
+      assertTrue(roots.remove(root.components[0]));
+    }
+    assertEquals(TaxonomyReader.INVALID_ORDINAL, it.next());
+    
+    for (int i = 0; i < 2; i++) {
+      CategoryPath cp = i == 0 ? new CategoryPath("a") : new CategoryPath("b");
+      int ordinal = taxoReader.getOrdinal(cp);
+      it = taxoReader.getChildren(ordinal);
+      int numChildren = 0;
+      int child;
+      while ((child = it.next()) != TaxonomyReader.INVALID_ORDINAL) {
+        CategoryPath path = taxoReader.getPath(child);
+        assertEquals(2, path.length);
+        assertEquals(path.components[0], i == 0 ? "a" : "b");
+        ++numChildren;
+      }
+      int expected = i == 0 ? numA : numB;
+      assertEquals("invalid num children", expected, numChildren);
+    }
+    taxoReader.close();
+    
+    dir.close();
+  }
   
 }

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java Tue May  7 11:20:55 2013
@@ -359,5 +359,58 @@ public class TestDirectoryTaxonomyWriter
     taxoWriter.close();
     dir.close();
   }
+
+  @Test
+  public void testCommitNoEmptyCommits() throws Exception {
+    // LUCENE-4972: DTW used to create empty commits even if no changes were made
+    Directory dir = newDirectory();
+    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(dir);
+    taxoWriter.addCategory(new CategoryPath("a"));
+    taxoWriter.commit();
+    
+    long gen1 = SegmentInfos.getLastCommitGeneration(dir);
+    taxoWriter.commit();
+    long gen2 = SegmentInfos.getLastCommitGeneration(dir);
+    assertEquals("empty commit should not have changed the index", gen1, gen2);
+    
+    taxoWriter.close();
+    dir.close();
+  }
+  
+  @Test
+  public void testCloseNoEmptyCommits() throws Exception {
+    // LUCENE-4972: DTW used to create empty commits even if no changes were made
+    Directory dir = newDirectory();
+    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(dir);
+    taxoWriter.addCategory(new CategoryPath("a"));
+    taxoWriter.commit();
+    
+    long gen1 = SegmentInfos.getLastCommitGeneration(dir);
+    taxoWriter.close();
+    long gen2 = SegmentInfos.getLastCommitGeneration(dir);
+    assertEquals("empty commit should not have changed the index", gen1, gen2);
+    
+    taxoWriter.close();
+    dir.close();
+  }
+  
+  @Test
+  public void testPrepareCommitNoEmptyCommits() throws Exception {
+    // LUCENE-4972: DTW used to create empty commits even if no changes were made
+    Directory dir = newDirectory();
+    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(dir);
+    taxoWriter.addCategory(new CategoryPath("a"));
+    taxoWriter.prepareCommit();
+    taxoWriter.commit();
+    
+    long gen1 = SegmentInfos.getLastCommitGeneration(dir);
+    taxoWriter.prepareCommit();
+    taxoWriter.commit();
+    long gen2 = SegmentInfos.getLastCommitGeneration(dir);
+    assertEquals("empty commit should not have changed the index", gen1, gen2);
+    
+    taxoWriter.close();
+    dir.close();
+  }
   
 }

Modified: lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/util/TestFacetsPayloadMigrationReader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/util/TestFacetsPayloadMigrationReader.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/util/TestFacetsPayloadMigrationReader.java (original)
+++ lucene/dev/branches/lucene4258/lucene/facet/src/test/org/apache/lucene/facet/util/TestFacetsPayloadMigrationReader.java Tue May  7 11:20:55 2013
@@ -229,7 +229,7 @@ public class TestFacetsPayloadMigrationR
       FacetIndexingParams fip) throws Exception {
     DirectoryReader indexReader = DirectoryReader.open(indexDir);
     TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
-    IndexSearcher searcher = new IndexSearcher(indexReader);
+    IndexSearcher searcher = newSearcher(indexReader);
 
     assertFalse("index should not have deletions", indexReader.hasDeletions());
     

Modified: lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractDistinctValuesCollector.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractDistinctValuesCollector.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractDistinctValuesCollector.java (original)
+++ lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractDistinctValuesCollector.java Tue May  7 11:20:55 2013
@@ -57,7 +57,7 @@ public abstract class AbstractDistinctVa
 
     public GroupCount(GROUP_VALUE_TYPE groupValue) {
       this.groupValue = groupValue;
-      this.uniqueValues = new HashSet<GROUP_VALUE_TYPE>();
+      this.uniqueValues = new HashSet<>();
     }
   }
 

Modified: lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractFirstPassGroupingCollector.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractFirstPassGroupingCollector.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractFirstPassGroupingCollector.java (original)
+++ lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractFirstPassGroupingCollector.java Tue May  7 11:20:55 2013
@@ -85,7 +85,7 @@ abstract public class AbstractFirstPassG
     }
 
     spareSlot = topNGroups;
-    groupMap = new HashMap<GROUP_VALUE_TYPE, CollectedSearchGroup<GROUP_VALUE_TYPE>>(topNGroups);
+    groupMap = new HashMap<>(topNGroups);
   }
 
   /**
@@ -113,7 +113,7 @@ abstract public class AbstractFirstPassG
       buildSortedSet();
     }
 
-    final Collection<SearchGroup<GROUP_VALUE_TYPE>> result = new ArrayList<SearchGroup<GROUP_VALUE_TYPE>>();
+    final Collection<SearchGroup<GROUP_VALUE_TYPE>> result = new ArrayList<>();
     int upto = 0;
     final int sortFieldCount = groupSort.getSort().length;
     for(CollectedSearchGroup<GROUP_VALUE_TYPE> group : orderedGroups) {
@@ -121,7 +121,7 @@ abstract public class AbstractFirstPassG
         continue;
       }
       //System.out.println("  group=" + (group.groupValue == null ? "null" : group.groupValue.utf8ToString()));
-      SearchGroup<GROUP_VALUE_TYPE> searchGroup = new SearchGroup<GROUP_VALUE_TYPE>();
+      SearchGroup<GROUP_VALUE_TYPE> searchGroup = new SearchGroup<>();
       searchGroup.groupValue = group.groupValue;
       if (fillFields) {
         searchGroup.sortValues = new Object[sortFieldCount];
@@ -193,7 +193,7 @@ abstract public class AbstractFirstPassG
         // just keep collecting them
 
         // Add a new CollectedSearchGroup:
-        CollectedSearchGroup<GROUP_VALUE_TYPE> sg = new CollectedSearchGroup<GROUP_VALUE_TYPE>();
+        CollectedSearchGroup<GROUP_VALUE_TYPE> sg = new CollectedSearchGroup<>();
         sg.groupValue = copyDocGroupValue(groupValue, null);
         sg.comparatorSlot = groupMap.size();
         sg.topDoc = docBase + doc;
@@ -311,7 +311,7 @@ abstract public class AbstractFirstPassG
       }
     };
 
-    orderedGroups = new TreeSet<CollectedSearchGroup<GROUP_VALUE_TYPE>>(comparator);
+    orderedGroups = new TreeSet<>(comparator);
     orderedGroups.addAll(groupMap.values());
     assert orderedGroups.size() > 0;
 

Modified: lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractGroupFacetCollector.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractGroupFacetCollector.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractGroupFacetCollector.java (original)
+++ lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractGroupFacetCollector.java Tue May  7 11:20:55 2013
@@ -46,7 +46,7 @@ public abstract class AbstractGroupFacet
     this.groupField = groupField;
     this.facetField = facetField;
     this.facetPrefix = facetPrefix;
-    segmentResults = new ArrayList<SegmentResult>();
+    segmentResults = new ArrayList<>();
   }
 
   /**
@@ -148,7 +148,7 @@ public abstract class AbstractGroupFacet
     private int currentMin;
 
     public GroupedFacetResult(int size, int minCount, boolean orderByCount, int totalCount, int totalMissingCount) {
-      this.facetEntries = new TreeSet<FacetEntry>(orderByCount ? orderByCountAndValue : orderByValue);
+      this.facetEntries = new TreeSet<>(orderByCount ? orderByCountAndValue : orderByValue);
       this.totalMissingCount = totalMissingCount;
       this.totalCount = totalCount;
       maxSize = size;
@@ -183,16 +183,16 @@ public abstract class AbstractGroupFacet
      * @return a list of facet entries to be rendered based on the specified offset and limit
      */
     public List<FacetEntry> getFacetEntries(int offset, int limit) {
-      List<FacetEntry> entries = new LinkedList<FacetEntry>();
-      limit += offset;
+      List<FacetEntry> entries = new LinkedList<>();
 
-      int i = 0;
+      int skipped = 0;
+      int included = 0;
       for (FacetEntry facetEntry : facetEntries) {
-        if (i < offset) {
-          i++;
+        if (skipped < offset) {
+          skipped++;
           continue;
         }
-        if (i++ >= limit) {
+        if (included++ >= limit) {
           break;
         }
         entries.add(facetEntry);

Modified: lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractSecondPassGroupingCollector.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractSecondPassGroupingCollector.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractSecondPassGroupingCollector.java (original)
+++ lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractSecondPassGroupingCollector.java Tue May  7 11:20:55 2013
@@ -62,7 +62,7 @@ public abstract class AbstractSecondPass
     this.withinGroupSort = withinGroupSort;
     this.groups = groups;
     this.maxDocsPerGroup = maxDocsPerGroup;
-    groupMap = new HashMap<GROUP_VALUE_TYPE, SearchGroupDocs<GROUP_VALUE_TYPE>>(groups.size());
+    groupMap = new HashMap<>(groups.size());
 
     for (SearchGroup<GROUP_VALUE_TYPE> group : groups) {
       //System.out.println("  prep group=" + (group.groupValue == null ? "null" : group.groupValue.utf8ToString()));
@@ -75,7 +75,7 @@ public abstract class AbstractSecondPass
         collector = TopFieldCollector.create(withinGroupSort, maxDocsPerGroup, fillSortFields, getScores, getMaxScores, true);
       }
       groupMap.put(group.groupValue,
-          new SearchGroupDocs<GROUP_VALUE_TYPE>(group.groupValue,
+          new SearchGroupDocs<>(group.groupValue,
               collector));
     }
   }
@@ -128,7 +128,7 @@ public abstract class AbstractSecondPass
     for(SearchGroup<?> group : groups) {
       final SearchGroupDocs<GROUP_VALUE_TYPE> groupDocs = groupMap.get(group.groupValue);
       final TopDocs topDocs = groupDocs.collector.topDocs(withinGroupOffset, maxDocsPerGroup);
-      groupDocsResult[groupIDX++] = new GroupDocs<GROUP_VALUE_TYPE>(Float.NaN,
+      groupDocsResult[groupIDX++] = new GroupDocs<>(Float.NaN,
                                                                     topDocs.getMaxScore(),
                                                                     topDocs.totalHits,
                                                                     topDocs.scoreDocs,
@@ -137,7 +137,7 @@ public abstract class AbstractSecondPass
       maxScore = Math.max(maxScore, topDocs.getMaxScore());
     }
 
-    return new TopGroups<GROUP_VALUE_TYPE>(groupSort.getSort(),
+    return new TopGroups<>(groupSort.getSort(),
                                            withinGroupSort == null ? null : withinGroupSort.getSort(),
                                            totalHitCount, totalGroupedHitCount, groupDocsResult,
                                            maxScore);

Modified: lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java (original)
+++ lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java Tue May  7 11:20:55 2013
@@ -365,7 +365,7 @@ public class BlockGroupingCollector exte
 
       // TODO: we could aggregate scores across children
       // by Sum/Avg instead of passing NaN:
-      groups[downTo] = new GroupDocs<Object>(Float.NaN,
+      groups[downTo] = new GroupDocs<>(Float.NaN,
                                              topDocs.getMaxScore(),
                                              og.count,
                                              topDocs.scoreDocs,
@@ -382,7 +382,7 @@ public class BlockGroupingCollector exte
     }
     */
 
-    return new TopGroups<Object>(new TopGroups<Object>(groupSort.getSort(),
+    return new TopGroups<>(new TopGroups<>(groupSort.getSort(),
                                        withinGroupSort == null ? null : withinGroupSort.getSort(),
                                        totalHitCount, totalGroupedHitCount, groups, maxScore),
                          totalGroupCount);

Modified: lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupingSearch.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupingSearch.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupingSearch.java (original)
+++ lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/GroupingSearch.java Tue May  7 11:20:55 2013
@@ -17,11 +17,16 @@ package org.apache.lucene.search.groupin
  * limitations under the License.
  */
 
-import java.io.IOException;
-import java.util.*;
-
 import org.apache.lucene.queries.function.ValueSource;
-import org.apache.lucene.search.*;
+import org.apache.lucene.search.CachingCollector;
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.FieldCache;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MultiCollector;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.grouping.function.FunctionAllGroupHeadsCollector;
 import org.apache.lucene.search.grouping.function.FunctionAllGroupsCollector;
 import org.apache.lucene.search.grouping.function.FunctionFirstPassGroupingCollector;
@@ -34,6 +39,13 @@ import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.mutable.MutableValue;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
 /**
  * Convenience class to perform grouping in a non distributed environment.
  *
@@ -173,7 +185,7 @@ public class GroupingSearch {
 
     final Collector firstRound;
     if (allGroupHeads || allGroups) {
-      List<Collector> collectors = new ArrayList<Collector>();
+      List<Collector> collectors = new ArrayList<>();
       collectors.add(firstPassCollector);
       if (allGroups) {
         collectors.add(allGroupsCollector);

Modified: lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/SearchGroup.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/SearchGroup.java?rev=1479862&r1=1479861&r2=1479862&view=diff
==============================================================================
--- lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/SearchGroup.java (original)
+++ lucene/dev/branches/lucene4258/lucene/grouping/src/java/org/apache/lucene/search/grouping/SearchGroup.java Tue May  7 11:20:55 2013
@@ -198,9 +198,9 @@ public class SearchGroup<GROUP_VALUE_TYP
     private final Map<T,MergedGroup<T>> groupsSeen;
 
     public GroupMerger(Sort groupSort) throws IOException {
-      groupComp = new GroupComparator<T>(groupSort);
-      queue = new TreeSet<MergedGroup<T>>(groupComp);
-      groupsSeen = new HashMap<T,MergedGroup<T>>();
+      groupComp = new GroupComparator<>(groupSort);
+      queue = new TreeSet<>(groupComp);
+      groupsSeen = new HashMap<>();
     }
 
     @SuppressWarnings({"unchecked","rawtypes"})



Mime
View raw message