lucenenet-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From paulir...@apache.org
Subject [45/53] [abbrv] git commit: Finish porting Facet.Search
Date Thu, 07 Nov 2013 13:54:00 GMT
Finish porting Facet.Search


Project: http://git-wip-us.apache.org/repos/asf/lucenenet/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucenenet/commit/f682a4a1
Tree: http://git-wip-us.apache.org/repos/asf/lucenenet/tree/f682a4a1
Diff: http://git-wip-us.apache.org/repos/asf/lucenenet/diff/f682a4a1

Branch: refs/heads/branch_4x
Commit: f682a4a189d88706404b476cbf5ba3c23938860f
Parents: 38c33d5
Author: Paul Irwin <paulirwin@gmail.com>
Authored: Wed Nov 6 13:02:56 2013 -0500
Committer: Paul Irwin <paulirwin@gmail.com>
Committed: Wed Nov 6 13:02:56 2013 -0500

----------------------------------------------------------------------
 src/contrib/Facet/Contrib.Facet.csproj          |  13 +
 .../Facet/Search/AdaptiveFacetsAccumulator.cs   |  66 +++
 src/contrib/Facet/Search/ArraysPool.cs          |  68 +++
 .../CachedOrdsCountingFacetsAggregator.cs       |  35 ++
 src/contrib/Facet/Search/DrillDownQuery.cs      |   2 +-
 src/contrib/Facet/Search/DrillSideways.cs       | 248 +++++++++
 src/contrib/Facet/Search/DrillSidewaysQuery.cs  | 205 ++++++++
 src/contrib/Facet/Search/DrillSidewaysScorer.cs | 497 +++++++++++++++++++
 src/contrib/Facet/Search/OrdinalsCache.cs       |  83 ++++
 .../Facet/Search/PerCategoryListAggregator.cs   |  47 ++
 src/contrib/Facet/Search/ReusingFacetArrays.cs  |  34 ++
 src/contrib/Facet/Search/ScoringAggregator.cs   |  50 ++
 .../Facet/Search/SearcherTaxonomyManager.cs     |  94 ++++
 .../Facet/Search/SumScoreFacetRequest.cs        |  34 ++
 .../Facet/Search/SumScoreFacetsAggregator.cs    |  70 +++
 src/core/Lucene.Net.csproj                      |   1 +
 src/core/Search/MultiCollector.cs               | 107 ++++
 src/core/Support/Arrays.cs                      |  17 +
 18 files changed, 1670 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Contrib.Facet.csproj
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Contrib.Facet.csproj b/src/contrib/Facet/Contrib.Facet.csproj
index 2bc69f5..b65ae1c 100644
--- a/src/contrib/Facet/Contrib.Facet.csproj
+++ b/src/contrib/Facet/Contrib.Facet.csproj
@@ -106,12 +106,18 @@
     <Compile Include="Sampling\SamplingParams.cs" />
     <Compile Include="Sampling\SamplingWrapper.cs" />
     <Compile Include="Sampling\TakmiSampleFixer.cs" />
+    <Compile Include="Search\AdaptiveFacetsAccumulator.cs" />
+    <Compile Include="Search\ArraysPool.cs" />
+    <Compile Include="Search\CachedOrdsCountingFacetsAggregator.cs" />
     <Compile Include="Search\CountFacetRequest.cs" />
     <Compile Include="Search\CountingAggregator.cs" />
     <Compile Include="Search\CountingFacetsAggregator.cs" />
     <Compile Include="Search\DepthOneFacetResultsHandler.cs" />
     <Compile Include="Search\DocValuesCategoryListIterator.cs" />
     <Compile Include="Search\DrillDownQuery.cs" />
+    <Compile Include="Search\DrillSideways.cs" />
+    <Compile Include="Search\DrillSidewaysQuery.cs" />
+    <Compile Include="Search\DrillSidewaysScorer.cs" />
     <Compile Include="Search\FacetArrays.cs" />
     <Compile Include="Search\FacetRequest.cs" />
     <Compile Include="Search\FacetResult.cs" />
@@ -130,7 +136,14 @@
     <Compile Include="Search\IScoredDocIDs.cs" />
     <Compile Include="Search\IScoredDocIDsIterator.cs" />
     <Compile Include="Search\MatchingDocsAsScoredDocIDs.cs" />
+    <Compile Include="Search\OrdinalsCache.cs" />
+    <Compile Include="Search\PerCategoryListAggregator.cs" />
+    <Compile Include="Search\ReusingFacetArrays.cs" />
+    <Compile Include="Search\ScoringAggregator.cs" />
+    <Compile Include="Search\SearcherTaxonomyManager.cs" />
     <Compile Include="Search\StandardFacetsAccumulator.cs" />
+    <Compile Include="Search\SumScoreFacetRequest.cs" />
+    <Compile Include="Search\SumScoreFacetsAggregator.cs" />
     <Compile Include="Search\TopKFacetResultsHandler.cs" />
     <Compile Include="Search\TopKInEachNodeHandler.cs" />
     <Compile Include="SortedSet\SortedSetDocValuesAccumulator.cs" />

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Search/AdaptiveFacetsAccumulator.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Search/AdaptiveFacetsAccumulator.cs b/src/contrib/Facet/Search/AdaptiveFacetsAccumulator.cs
new file mode 100644
index 0000000..3775c64
--- /dev/null
+++ b/src/contrib/Facet/Search/AdaptiveFacetsAccumulator.cs
@@ -0,0 +1,66 @@
+using Lucene.Net.Facet.Params;
+using Lucene.Net.Facet.Sampling;
+using Lucene.Net.Facet.Taxonomy;
+using Lucene.Net.Index;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Facet.Search
+{
+    public sealed class AdaptiveFacetsAccumulator : StandardFacetsAccumulator
+    {
+        private Sampler sampler = new RandomSampler();
+
+        public AdaptiveFacetsAccumulator(FacetSearchParams searchParams, IndexReader indexReader, TaxonomyReader taxonomyReader)
+            : base(searchParams, indexReader, taxonomyReader)
+        {
+        }
+
+        public AdaptiveFacetsAccumulator(FacetSearchParams searchParams, IndexReader indexReader, TaxonomyReader taxonomyReader, FacetArrays facetArrays)
+            : base(searchParams, indexReader, taxonomyReader, facetArrays)
+        {
+        }
+
+        public Sampler Sampler
+        {
+            get
+            {
+                return this.sampler;
+            }
+            set
+            {
+                this.sampler = value;
+            }
+        }
+
+        public override List<FacetResult> Accumulate(IScoredDocIDs docids)
+        {
+            StandardFacetsAccumulator delegee = AppropriateFacetCountingAccumulator(docids);
+            if (delegee == this)
+            {
+                return base.Accumulate(docids);
+            }
+
+            return delegee.Accumulate(docids);
+        }
+
+        private StandardFacetsAccumulator AppropriateFacetCountingAccumulator(IScoredDocIDs docids)
+        {
+            if (!MayComplement())
+            {
+                return this;
+            }
+
+            if (sampler == null || !sampler.ShouldSample(docids))
+            {
+                return this;
+            }
+
+            SamplingAccumulator samplingAccumulator = new SamplingAccumulator(sampler, searchParams, indexReader, taxonomyReader);
+            samplingAccumulator.ComplementThreshold = ComplementThreshold;
+            return samplingAccumulator;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Search/ArraysPool.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Search/ArraysPool.cs b/src/contrib/Facet/Search/ArraysPool.cs
new file mode 100644
index 0000000..315341c
--- /dev/null
+++ b/src/contrib/Facet/Search/ArraysPool.cs
@@ -0,0 +1,68 @@
+using Lucene.Net.Support;
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Facet.Search
+{
+    public sealed class ArraysPool
+    {
+        private readonly ConcurrentQueue<int[]> intsPool;
+        private readonly ConcurrentQueue<float[]> floatsPool;
+        public readonly int arrayLength;
+
+        public ArraysPool(int arrayLength, int maxArrays)
+        {
+            if (maxArrays == 0)
+            {
+                throw new ArgumentException(@"maxArrays cannot be 0 - don't use this class if you don't intend to pool arrays");
+            }
+
+            this.arrayLength = arrayLength;
+            this.intsPool = new ConcurrentQueue<int[]>();
+            this.floatsPool = new ConcurrentQueue<float[]>();
+        }
+
+        public int[] AllocateIntArray()
+        {
+            int[] arr;
+            if (!intsPool.TryDequeue(out arr))
+            {
+                return new int[arrayLength];
+            }
+
+            Arrays.Fill(arr, 0);
+            return arr;
+        }
+
+        public float[] AllocateFloatArray()
+        {
+            float[] arr;
+            if (!floatsPool.TryDequeue(out arr))
+            {
+                return new float[arrayLength];
+            }
+
+            Arrays.Fill(arr, 0F);
+            return arr;
+        }
+
+        public void Free(int[] arr)
+        {
+            if (arr != null)
+            {
+                intsPool.Enqueue(arr);
+            }
+        }
+
+        public void Free(float[] arr)
+        {
+            if (arr != null)
+            {
+                floatsPool.Enqueue(arr);
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Search/CachedOrdsCountingFacetsAggregator.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Search/CachedOrdsCountingFacetsAggregator.cs b/src/contrib/Facet/Search/CachedOrdsCountingFacetsAggregator.cs
new file mode 100644
index 0000000..fed8c56
--- /dev/null
+++ b/src/contrib/Facet/Search/CachedOrdsCountingFacetsAggregator.cs
@@ -0,0 +1,35 @@
+using Lucene.Net.Facet.Params;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Facet.Search
+{
+    public class CachedOrdsCountingFacetsAggregator : IntRollupFacetsAggregator
+    {
+        public override void Aggregate(FacetsCollector.MatchingDocs matchingDocs, CategoryListParams clp, FacetArrays facetArrays)
+        {
+            var ords = OrdinalsCache.GetCachedOrds(matchingDocs.context, clp);
+            if (ords == null)
+            {
+                return;
+            }
+
+            int[] counts = facetArrays.GetIntArray();
+            int doc = 0;
+            int length = matchingDocs.bits.Length;
+            while (doc < length && (doc = matchingDocs.bits.NextSetBit(doc)) != -1)
+            {
+                int start = ords.offsets[doc];
+                int end = ords.offsets[doc + 1];
+                for (int i = start; i < end; i++)
+                {
+                    ++counts[ords.ordinals[i]];
+                }
+
+                ++doc;
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Search/DrillDownQuery.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Search/DrillDownQuery.cs b/src/contrib/Facet/Search/DrillDownQuery.cs
index a9cf63a..3476d00 100644
--- a/src/contrib/Facet/Search/DrillDownQuery.cs
+++ b/src/contrib/Facet/Search/DrillDownQuery.cs
@@ -23,7 +23,7 @@ namespace Lucene.Net.Facet.Search
 
         private readonly BooleanQuery query;
         private readonly IDictionary<string, int?> drillDownDims = new HashMap<string, int?>();
-        readonly FacetIndexingParams fip;
+        internal readonly FacetIndexingParams fip;
 
         internal DrillDownQuery(FacetIndexingParams fip, BooleanQuery query, IDictionary<string, int?> drillDownDims)
         {

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Search/DrillSideways.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Search/DrillSideways.cs b/src/contrib/Facet/Search/DrillSideways.cs
new file mode 100644
index 0000000..e854a97
--- /dev/null
+++ b/src/contrib/Facet/Search/DrillSideways.cs
@@ -0,0 +1,248 @@
+using Lucene.Net.Facet.Params;
+using Lucene.Net.Facet.Taxonomy;
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Facet.Search
+{
+    public class DrillSideways
+    {
+        protected readonly IndexSearcher searcher;
+        protected readonly TaxonomyReader taxoReader;
+
+        public DrillSideways(IndexSearcher searcher, TaxonomyReader taxoReader)
+        {
+            this.searcher = searcher;
+            this.taxoReader = taxoReader;
+        }
+
+        private static DrillDownQuery MoveDrillDownOnlyClauses(DrillDownQuery in_renamed, FacetSearchParams fsp)
+        {
+            ISet<String> facetDims = new HashSet<String>();
+            foreach (FacetRequest fr in fsp.facetRequests)
+            {
+                if (fr.categoryPath.length == 0)
+                {
+                    throw new ArgumentException(@"all FacetRequests must have CategoryPath with length > 0");
+                }
+
+                facetDims.Add(fr.categoryPath.components[0]);
+            }
+
+            BooleanClause[] clauses = in_renamed.BooleanQuery.Clauses;
+            IDictionary<String, int?> drillDownDims = in_renamed.Dims;
+            int startClause;
+
+            if (clauses.Length == drillDownDims.Count)
+            {
+                startClause = 0;
+            }
+            else
+            {
+                startClause = 1;
+            }
+
+            List<Query> nonFacetClauses = new List<Query>();
+            List<Query> facetClauses = new List<Query>();
+            for (int i = startClause; i < clauses.Length; i++)
+            {
+                Query q = clauses[i].Query;
+                string dim = in_renamed.GetDim(q);
+                if (!facetDims.Contains(dim))
+                {
+                    nonFacetClauses.Add(q);
+                }
+                else
+                {
+                    facetClauses.Add(q);
+                }
+            }
+
+            if (nonFacetClauses.Count > 0)
+            {
+                BooleanQuery newBaseQuery = new BooleanQuery(true);
+                if (startClause == 1)
+                {
+                    newBaseQuery.Add(clauses[0].Query, Occur.MUST);
+                }
+
+                foreach (Query q in nonFacetClauses)
+                {
+                    newBaseQuery.Add(q, Occur.MUST);
+                }
+
+                return new DrillDownQuery(fsp.indexingParams, newBaseQuery, facetClauses);
+            }
+            else
+            {
+                return in_renamed;
+            }
+        }
+
+        public virtual DrillSidewaysResult Search(DrillDownQuery query, Collector hitCollector, FacetSearchParams fsp)
+        {
+            if (query.fip != fsp.indexingParams)
+            {
+                throw new ArgumentException(@"DrillDownQuery's FacetIndexingParams should match FacetSearchParams'");
+            }
+
+            query = MoveDrillDownOnlyClauses(query, fsp);
+            var drillDownDims = query.Dims;
+            if (drillDownDims.Count == 0)
+            {
+                FacetsCollector c = FacetsCollector.Create(GetDrillDownAccumulator(fsp));
+                searcher.Search(query, MultiCollector.Wrap(hitCollector, c));
+                return new DrillSidewaysResult(c.GetFacetResults(), null);
+            }
+
+            BooleanQuery ddq = query.BooleanQuery;
+            BooleanClause[] clauses = ddq.Clauses;
+            Query baseQuery;
+            int startClause;
+            if (clauses.Length == drillDownDims.Count)
+            {
+                baseQuery = new MatchAllDocsQuery();
+                startClause = 0;
+            }
+            else
+            {
+                baseQuery = clauses[0].Query;
+                startClause = 1;
+            }
+
+            Term[][] drillDownTerms = new Term[clauses.Length - startClause][];
+            for (int i = startClause; i < clauses.Length; i++)
+            {
+                Query q = clauses[i].Query;
+                q = ((ConstantScoreQuery)q).Query;
+                if (q is TermQuery)
+                {
+                    drillDownTerms[i - startClause] = new Term[] { ((TermQuery)q).Term };
+                }
+                else
+                {
+                    BooleanQuery q2 = (BooleanQuery)q;
+                    BooleanClause[] clauses2 = q2.Clauses;
+                    drillDownTerms[i - startClause] = new Term[clauses2.Length];
+                    for (int j = 0; j < clauses2.Length; j++)
+                    {
+                        drillDownTerms[i - startClause][j] = ((TermQuery)clauses2[j].Query).Term;
+                    }
+                }
+            }
+
+            FacetsCollector drillDownCollector = FacetsCollector.Create(GetDrillDownAccumulator(fsp));
+            FacetsCollector[] drillSidewaysCollectors = new FacetsCollector[drillDownDims.Count];
+            int idx = 0;
+            foreach (string dim in drillDownDims.Keys)
+            {
+                List<FacetRequest> requests = new List<FacetRequest>();
+                foreach (FacetRequest fr in fsp.facetRequests)
+                {
+                    if (fr.categoryPath.components[0].Equals(dim))
+                    {
+                        requests.Add(fr);
+                    }
+                }
+
+                if (requests.Count == 0)
+                {
+                    throw new ArgumentException("could not find FacetRequest for drill-sideways dimension \"" + dim + "\"");
+                }
+
+                drillSidewaysCollectors[idx++] = FacetsCollector.Create(GetDrillSidewaysAccumulator(dim, new FacetSearchParams(fsp.indexingParams, requests)));
+            }
+
+            DrillSidewaysQuery dsq = new DrillSidewaysQuery(baseQuery, drillDownCollector, drillSidewaysCollectors, drillDownTerms);
+            searcher.Search(dsq, hitCollector);
+            int numDims = drillDownDims.Count;
+            List<FacetResult>[] drillSidewaysResults = new List<FacetResult>[numDims];
+            List<FacetResult> drillDownResults = null;
+            List<FacetResult> mergedResults = new List<FacetResult>();
+            int[] requestUpto = new int[drillDownDims.Count];
+            for (int i = 0; i < fsp.facetRequests.Count; i++)
+            {
+                FacetRequest fr = fsp.facetRequests[i];
+                int? dimIndex = drillDownDims[fr.categoryPath.components[0]];
+                if (dimIndex == null)
+                {
+                    if (drillDownResults == null)
+                    {
+                        drillDownResults = drillDownCollector.GetFacetResults();
+                    }
+
+                    mergedResults.Add(drillDownResults[i]);
+                }
+                else
+                {
+                    int dim = dimIndex.Value;
+                    List<FacetResult> sidewaysResult = drillSidewaysResults[dim];
+                    if (sidewaysResult == null)
+                    {
+                        sidewaysResult = drillSidewaysCollectors[dim].GetFacetResults();
+                        drillSidewaysResults[dim] = sidewaysResult;
+                    }
+
+                    mergedResults.Add(sidewaysResult[requestUpto[dim]]);
+                    requestUpto[dim]++;
+                }
+            }
+
+            return new DrillSidewaysResult(mergedResults, null);
+        }
+
+        public virtual DrillSidewaysResult Search(DrillDownQuery query, Filter filter, FieldDoc after, int topN, Sort sort, bool doDocScores, bool doMaxScore, FacetSearchParams fsp)
+        {
+            if (filter != null)
+            {
+                query = new DrillDownQuery(filter, query);
+            }
+
+            if (sort != null)
+            {
+                TopFieldCollector hitCollector = TopFieldCollector.Create(sort, Math.Min(topN, searcher.IndexReader.MaxDoc), after, true, doDocScores, doMaxScore, true);
+                DrillSidewaysResult r = Search(query, hitCollector, fsp);
+                r.hits = hitCollector.TopDocs();
+                return r;
+            }
+            else
+            {
+                return Search(after, query, topN, fsp);
+            }
+        }
+
+        public virtual DrillSidewaysResult Search(ScoreDoc after, DrillDownQuery query, int topN, FacetSearchParams fsp)
+        {
+            TopScoreDocCollector hitCollector = TopScoreDocCollector.Create(Math.Min(topN, searcher.IndexReader.MaxDoc), after, true);
+            DrillSidewaysResult r = Search(query, hitCollector, fsp);
+            r.hits = hitCollector.TopDocs();
+            return r;
+        }
+
+        protected virtual FacetsAccumulator GetDrillDownAccumulator(FacetSearchParams fsp)
+        {
+            return FacetsAccumulator.Create(fsp, searcher.IndexReader, taxoReader);
+        }
+
+        protected virtual FacetsAccumulator GetDrillSidewaysAccumulator(string dim, FacetSearchParams fsp)
+        {
+            return FacetsAccumulator.Create(fsp, searcher.IndexReader, taxoReader);
+        }
+
+        public class DrillSidewaysResult
+        {
+            public readonly List<FacetResult> facetResults;
+            public TopDocs hits;
+            
+            internal DrillSidewaysResult(List<FacetResult> facetResults, TopDocs hits)
+            {
+                this.facetResults = facetResults;
+                this.hits = hits;
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Search/DrillSidewaysQuery.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Search/DrillSidewaysQuery.cs b/src/contrib/Facet/Search/DrillSidewaysQuery.cs
new file mode 100644
index 0000000..36da867
--- /dev/null
+++ b/src/contrib/Facet/Search/DrillSidewaysQuery.cs
@@ -0,0 +1,205 @@
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Facet.Search
+{
+    internal class DrillSidewaysQuery : Query
+    {
+        readonly Query baseQuery;
+        readonly Collector drillDownCollector;
+        readonly Collector[] drillSidewaysCollectors;
+        readonly Term[][] drillDownTerms;
+
+        internal DrillSidewaysQuery(Query baseQuery, Collector drillDownCollector, Collector[] drillSidewaysCollectors, Term[][] drillDownTerms)
+        {
+            this.baseQuery = baseQuery;
+            this.drillDownCollector = drillDownCollector;
+            this.drillSidewaysCollectors = drillSidewaysCollectors;
+            this.drillDownTerms = drillDownTerms;
+        }
+
+        public override string ToString(string field)
+        {
+            return @"DrillSidewaysQuery";
+        }
+
+        public override Query Rewrite(IndexReader reader)
+        {
+            Query newQuery = baseQuery;
+            while (true)
+            {
+                Query rewrittenQuery = newQuery.Rewrite(reader);
+                if (rewrittenQuery == newQuery)
+                {
+                    break;
+                }
+
+                newQuery = rewrittenQuery;
+            }
+
+            if (newQuery == baseQuery)
+            {
+                return this;
+            }
+            else
+            {
+                return new DrillSidewaysQuery(newQuery, drillDownCollector, drillSidewaysCollectors, drillDownTerms);
+            }
+        }
+
+        public override Weight CreateWeight(IndexSearcher searcher)
+        {
+            Weight baseWeight = baseQuery.CreateWeight(searcher);
+            return new AnonymousWeight(this, baseWeight);
+        }
+
+        private sealed class AnonymousWeight : Weight
+        {
+            public AnonymousWeight(DrillSidewaysQuery parent, Weight baseWeight)
+            {
+                this.parent = parent;
+                this.baseWeight = baseWeight;
+            }
+
+            private readonly DrillSidewaysQuery parent;
+            private readonly Weight baseWeight;
+
+            public override Explanation Explain(AtomicReaderContext context, int doc)
+            {
+                return baseWeight.Explain(context, doc);
+            }
+
+            public override Query Query
+            {
+                get
+                {
+                    return parent.baseQuery;
+                }
+            }
+
+            public override float ValueForNormalization
+            {
+                get
+                {
+                    return baseWeight.ValueForNormalization;
+                }
+            }
+
+            public override void Normalize(float norm, float topLevelBoost)
+            {
+                baseWeight.Normalize(norm, topLevelBoost);
+            }
+
+            public override bool ScoresDocsOutOfOrder
+            {
+                get
+                {
+                    return false;
+                }
+            }
+
+            public override Scorer Scorer(AtomicReaderContext context, bool scoreDocsInOrder, bool topScorer, IBits acceptDocs)
+            {
+                DrillSidewaysScorer.DocsEnumsAndFreq[] dims = new DrillSidewaysScorer.DocsEnumsAndFreq[parent.drillDownTerms.Length];
+                TermsEnum termsEnum = null;
+                string lastField = null;
+                int nullCount = 0;
+                for (int dim = 0; dim < dims.Length; dim++)
+                {
+                    dims[dim] = new DrillSidewaysScorer.DocsEnumsAndFreq();
+                    dims[dim].sidewaysCollector = parent.drillSidewaysCollectors[dim];
+                    string field = parent.drillDownTerms[dim][0].Field;
+                    dims[dim].dim = parent.drillDownTerms[dim][0].Text;
+                    if (lastField == null || !lastField.Equals(field))
+                    {
+                        AtomicReader reader = context.AtomicReader;
+                        Terms terms = reader.Terms(field);
+                        if (terms != null)
+                        {
+                            termsEnum = terms.Iterator(null);
+                        }
+
+                        lastField = field;
+                    }
+
+                    if (termsEnum == null)
+                    {
+                        nullCount++;
+                        continue;
+                    }
+
+                    dims[dim].docsEnums = new DocsEnum[parent.drillDownTerms[dim].Length];
+                    for (int i = 0; i < parent.drillDownTerms[dim].Length; i++)
+                    {
+                        if (termsEnum.SeekExact(parent.drillDownTerms[dim][i].Bytes, false))
+                        {
+                            dims[dim].freq = Math.Max(dims[dim].freq, termsEnum.DocFreq);
+                            dims[dim].docsEnums[i] = termsEnum.Docs(null, null);
+                        }
+                    }
+                }
+
+                if (nullCount > 1)
+                {
+                    return null;
+                }
+
+                Array.Sort(dims);
+                Scorer baseScorer = baseWeight.Scorer(context, scoreDocsInOrder, false, acceptDocs);
+                if (baseScorer == null)
+                {
+                    return null;
+                }
+
+                return new DrillSidewaysScorer(this, context, baseScorer, parent.drillDownCollector, dims);
+            }
+        }
+
+        public override int GetHashCode()
+        {
+            int prime = 31;
+            int result = base.GetHashCode();
+            result = prime * result + ((baseQuery == null) ? 0 : baseQuery.GetHashCode());
+            result = prime * result + ((drillDownCollector == null) ? 0 : drillDownCollector.GetHashCode());
+            result = prime * result + Arrays.GetHashCode(drillDownTerms);
+            result = prime * result + Arrays.GetHashCode(drillSidewaysCollectors);
+            return result;
+        }
+
+        public override bool Equals(Object obj)
+        {
+            if (this == obj)
+                return true;
+            if (!base.Equals(obj))
+                return false;
+            if (GetType() != obj.GetType())
+                return false;
+            DrillSidewaysQuery other = (DrillSidewaysQuery)obj;
+            if (baseQuery == null)
+            {
+                if (other.baseQuery != null)
+                    return false;
+            }
+            else if (!baseQuery.Equals(other.baseQuery))
+                return false;
+            if (drillDownCollector == null)
+            {
+                if (other.drillDownCollector != null)
+                    return false;
+            }
+            else if (!drillDownCollector.Equals(other.drillDownCollector))
+                return false;
+            if (!Arrays.Equals(drillDownTerms, other.drillDownTerms))
+                return false;
+            if (!Arrays.Equals(drillSidewaysCollectors, other.drillSidewaysCollectors))
+                return false;
+            return true;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Search/DrillSidewaysScorer.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Search/DrillSidewaysScorer.cs b/src/contrib/Facet/Search/DrillSidewaysScorer.cs
new file mode 100644
index 0000000..7c48e0c
--- /dev/null
+++ b/src/contrib/Facet/Search/DrillSidewaysScorer.cs
@@ -0,0 +1,497 @@
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Util;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Facet.Search
+{
+    /// <summary>
+    /// Scorer used by DrillSideways: drives collection itself (via Score(Collector)),
+    /// sending full matches to the hit collector and drill-down collector, and
+    /// "near miss" documents — those matching the base query and all drill-down
+    /// dimensions except exactly one — to that dimension's sideways collector.
+    /// Ported from Lucene's Java DrillSidewaysScorer.
+    /// </summary>
+    internal class DrillSidewaysScorer : Scorer
+    {
+        private readonly Collector drillDownCollector;
+        private readonly DocsEnumsAndFreq[] dims;
+        private readonly Scorer baseScorer;
+        private readonly AtomicReaderContext context;
+        // Docs are processed in windows of CHUNK doc IDs; MASK maps a docID to its
+        // slot inside the current window (CHUNK must stay a power of two).
+        private static readonly int CHUNK = 2048;
+        private static readonly int MASK = CHUNK - 1;
+        // Doc/score currently being collected; surfaced through DocID / Score() so
+        // downstream collectors can read them while this scorer drives collection.
+        private int collectDocID = -1;
+        private float collectScore;
+
+        /// <summary>
+        /// Creates a scorer over one segment. <paramref name="dims"/> is expected to
+        /// be sorted by ascending frequency — TODO confirm against the caller.
+        /// </summary>
+        internal DrillSidewaysScorer(Weight w, AtomicReaderContext context, Scorer baseScorer, Collector drillDownCollector, DocsEnumsAndFreq[] dims)
+            : base(w)
+        {
+            this.dims = dims;
+            this.context = context;
+            this.baseScorer = baseScorer;
+            this.drillDownCollector = drillDownCollector;
+        }
+
+        /// <summary>
+        /// Bulk-scores the whole segment, choosing one of three strategies based on
+        /// a cheap estimate of the base query's hit count versus the drill-down
+        /// dimensions' frequencies.
+        /// </summary>
+        public override void Score(Collector collector)
+        {
+            // Wire this scorer into every downstream collector before iterating.
+            collector.SetScorer(this);
+            drillDownCollector.SetScorer(this);
+            drillDownCollector.SetNextReader(context);
+            foreach (DocsEnumsAndFreq dim in dims)
+            {
+                dim.sidewaysCollector.SetScorer(this);
+                dim.sidewaysCollector.SetNextReader(context);
+            }
+
+            // Prime the base scorer and every drill-down enum on their first doc.
+            int baseDocID = baseScorer.NextDoc();
+            foreach (DocsEnumsAndFreq dim in dims)
+            {
+                foreach (DocsEnum docsEnum in dim.docsEnums)
+                {
+                    if (docsEnum != null)
+                    {
+                        docsEnum.NextDoc();
+                    }
+                }
+            }
+
+            int numDims = dims.Length;
+            DocsEnum[][] docsEnums = new DocsEnum[numDims][];
+            Collector[] sidewaysCollectors = new Collector[numDims];
+            int maxFreq = 0;
+            for (int dim = 0; dim < numDims; dim++)
+            {
+                docsEnums[dim] = dims[dim].docsEnums;
+                sidewaysCollectors[dim] = dims[dim].sidewaysCollector;
+                maxFreq = Math.Max(maxFreq, dims[dim].freq);
+            }
+
+            // Estimate base hits from the position of its first match: a first hit at
+            // docID d suggests roughly maxDoc/(1+d) total hits.
+            int estBaseHitCount = context.AtomicReader.MaxDoc / (1 + baseDocID);
+            if (estBaseHitCount < maxFreq / 10)
+            {
+                // Base query is much rarer than the drill-downs: advance it doc by doc.
+                DoBaseAdvanceScoring(collector, docsEnums, sidewaysCollectors);
+            }
+            else if (numDims > 1 && (dims[1].freq < estBaseHitCount / 10))
+            {
+                // Second-rarest dim is much rarer than the base: lead with drill-downs.
+                DoDrillDownAdvanceScoring(collector, docsEnums, sidewaysCollectors);
+            }
+            else
+            {
+                // Comparable frequencies: chunked union of all iterators.
+                DoUnionScoring(collector, docsEnums, sidewaysCollectors);
+            }
+        }
+
+        /// <summary>
+        /// Strategy led by the two rarest drill-down dims: unions dims 0 and 1 per
+        /// chunk, advances the base scorer only to surviving docs, then checks the
+        /// remaining dims. counts[slot]/missingDims[slot] track how many dims a doc
+        /// matched and which single dim it missed — mirrors the Java original; the
+        /// exact encoding is subtle, preserve statement order when modifying.
+        /// </summary>
+        private void DoDrillDownAdvanceScoring(Collector collector, DocsEnum[][] docsEnums, Collector[] sidewaysCollectors)
+        {
+            int maxDoc = context.AtomicReader.MaxDoc;
+            int numDims = dims.Length;
+            int[] filledSlots = new int[CHUNK];
+            int[] docIDs = new int[CHUNK];
+            float[] scores = new float[CHUNK];
+            int[] missingDims = new int[CHUNK];
+            int[] counts = new int[CHUNK];
+            docIDs[0] = -1;
+            int nextChunkStart = CHUNK;
+            FixedBitSet seen = new FixedBitSet(CHUNK);
+            while (true)
+            {
+                // First pass: docs matching dim 0 in this chunk (missing dim 1 so far).
+                foreach (DocsEnum docsEnum in docsEnums[0])
+                {
+                    if (docsEnum == null)
+                    {
+                        continue;
+                    }
+
+                    int docID = docsEnum.DocID;
+                    while (docID < nextChunkStart)
+                    {
+                        int slot = docID & MASK;
+                        if (docIDs[slot] != docID)
+                        {
+                            seen.Set(slot);
+                            docIDs[slot] = docID;
+                            missingDims[slot] = 1;
+                            counts[slot] = 1;
+                        }
+
+                        docID = docsEnum.NextDoc();
+                    }
+                }
+
+                // Second pass: merge in dim 1, upgrading docs that matched both dims.
+                foreach (DocsEnum docsEnum in docsEnums[1])
+                {
+                    if (docsEnum == null)
+                    {
+                        continue;
+                    }
+
+                    int docID = docsEnum.DocID;
+                    while (docID < nextChunkStart)
+                    {
+                        int slot = docID & MASK;
+                        if (docIDs[slot] != docID)
+                        {
+                            seen.Set(slot);
+                            docIDs[slot] = docID;
+                            missingDims[slot] = 0;
+                            counts[slot] = 1;
+                        }
+                        else
+                        {
+                            if (missingDims[slot] >= 1)
+                            {
+                                missingDims[slot] = 2;
+                                counts[slot] = 2;
+                            }
+                            else
+                            {
+                                counts[slot] = 1;
+                            }
+                        }
+
+                        docID = docsEnum.NextDoc();
+                    }
+                }
+
+                // Keep only docs the base query also matches; record their scores.
+                int filledCount = 0;
+                int slot0 = 0;
+                while (slot0 < CHUNK && (slot0 = seen.NextSetBit(slot0)) != -1)
+                {
+                    int ddDocID = docIDs[slot0];
+                    int baseDocID = baseScorer.DocID;
+                    if (baseDocID < ddDocID)
+                    {
+                        baseDocID = baseScorer.Advance(ddDocID);
+                    }
+
+                    if (baseDocID == ddDocID)
+                    {
+                        scores[slot0] = baseScorer.Score();
+                        filledSlots[filledCount++] = slot0;
+                        counts[slot0]++;
+                    }
+                    else
+                    {
+                        // Base query rejects this doc; invalidate the slot.
+                        docIDs[slot0] = -1;
+                    }
+
+                    slot0++;
+                }
+
+                seen.Clear(0, CHUNK);
+                if (filledCount == 0)
+                {
+                    if (nextChunkStart >= maxDoc)
+                    {
+                        break;
+                    }
+
+                    nextChunkStart += CHUNK;
+                    continue;
+                }
+
+                // Check the remaining dims (2..numDims-1) against surviving docs.
+                for (int dim = 2; dim < numDims; dim++)
+                {
+                    foreach (DocsEnum docsEnum in docsEnums[dim])
+                    {
+                        if (docsEnum == null)
+                        {
+                            continue;
+                        }
+
+                        int docID = docsEnum.DocID;
+                        while (docID < nextChunkStart)
+                        {
+                            int slot = docID & MASK;
+                            if (docIDs[slot] == docID && counts[slot] >= dim)
+                            {
+                                if (missingDims[slot] >= dim)
+                                {
+                                    missingDims[slot] = dim + 1;
+                                    counts[slot] = dim + 2;
+                                }
+                                else
+                                {
+                                    counts[slot] = dim + 1;
+                                }
+                            }
+
+                            docID = docsEnum.NextDoc();
+                        }
+                    }
+                }
+
+                // Emit: full hit when all dims (+ base) matched; near-miss when
+                // exactly one dim failed (collected sideways into that dim).
+                for (int i = 0; i < filledCount; i++)
+                {
+                    int slot = filledSlots[i];
+                    collectDocID = docIDs[slot];
+                    collectScore = scores[slot];
+                    if (counts[slot] == 1 + numDims)
+                    {
+                        CollectHit(collector, sidewaysCollectors);
+                    }
+                    else if (counts[slot] == numDims)
+                    {
+                        CollectNearMiss(sidewaysCollectors, missingDims[slot]);
+                    }
+                }
+
+                if (nextChunkStart >= maxDoc)
+                {
+                    break;
+                }
+
+                nextChunkStart += CHUNK;
+            }
+        }
+
+        /// <summary>
+        /// Strategy led by the (rare) base query: for each base hit, probe every
+        /// dim's enums; more than one failed dim discards the doc, exactly one
+        /// failed dim makes it a near-miss for that dim.
+        /// </summary>
+        private void DoBaseAdvanceScoring(Collector collector, DocsEnum[][] docsEnums, Collector[] sidewaysCollectors)
+        {
+            int docID = baseScorer.DocID;
+            int numDims = dims.Length;
+        
+            while (docID != NO_MORE_DOCS)
+            {
+                int failedDim = -1;
+                // Emulates Java's labeled "continue nextDoc" from inside the dim loop.
+                bool shouldContinueOuter = false;
+
+                for (int dim = 0; dim < numDims; dim++)
+                {
+                    bool found = false;
+                    foreach (DocsEnum docsEnum in docsEnums[dim])
+                    {
+                        if (docsEnum == null)
+                        {
+                            continue;
+                        }
+
+                        if (docsEnum.DocID < docID)
+                        {
+                            docsEnum.Advance(docID);
+                        }
+
+                        if (docsEnum.DocID == docID)
+                        {
+                            found = true;
+                            break;
+                        }
+                    }
+
+                    if (!found)
+                    {
+                        if (failedDim != -1)
+                        {
+                            // Second failed dim: doc can never be a hit or near-miss.
+                            docID = baseScorer.NextDoc();
+                            shouldContinueOuter = true;
+                            break;
+                        }
+                        else
+                        {
+                            failedDim = dim;
+                        }
+                    }
+                }
+
+                if (shouldContinueOuter)
+                    continue;
+
+                collectDocID = docID;
+                collectScore = baseScorer.Score();
+                if (failedDim == -1)
+                {
+                    CollectHit(collector, sidewaysCollectors);
+                }
+                else
+                {
+                    CollectNearMiss(sidewaysCollectors, failedDim);
+                }
+
+                docID = baseScorer.NextDoc();
+            }
+        }
+
+        // A full hit goes to the main collector, the drill-down collector, and
+        // every dim's sideways collector.
+        private void CollectHit(Collector collector, Collector[] sidewaysCollectors)
+        {
+            collector.Collect(collectDocID);
+            drillDownCollector.Collect(collectDocID);
+            for (int dim = 0; dim < sidewaysCollectors.Length; dim++)
+            {
+                sidewaysCollectors[dim].Collect(collectDocID);
+            }
+        }
+
+        // A near-miss is collected only into the sideways collector of the one dim
+        // that failed to match.
+        private void CollectNearMiss(Collector[] sidewaysCollectors, int dim)
+        {
+            sidewaysCollectors[dim].Collect(collectDocID);
+        }
+
+        /// <summary>
+        /// Default strategy: per chunk, first fill slots from the base query, then
+        /// union each dim's enums over those slots, bumping counts/missingDims as in
+        /// DoDrillDownAdvanceScoring (dim 0 has a simplified first pass).
+        /// </summary>
+        private void DoUnionScoring(Collector collector, DocsEnum[][] docsEnums, Collector[] sidewaysCollectors)
+        {
+            int maxDoc = context.AtomicReader.MaxDoc;
+            int numDims = dims.Length;
+            int[] filledSlots = new int[CHUNK];
+            int[] docIDs = new int[CHUNK];
+            float[] scores = new float[CHUNK];
+            int[] missingDims = new int[CHUNK];
+            int[] counts = new int[CHUNK];
+            docIDs[0] = -1;
+            int nextChunkStart = CHUNK;
+            while (true)
+            {
+                // Seed the chunk with base-query hits and their scores.
+                int filledCount = 0;
+                int docID = baseScorer.DocID;
+                while (docID < nextChunkStart)
+                {
+                    int slot = docID & MASK;
+                    docIDs[slot] = docID;
+                    scores[slot] = baseScorer.Score();
+                    filledSlots[filledCount++] = slot;
+                    missingDims[slot] = 0;
+                    counts[slot] = 1;
+                    docID = baseScorer.NextDoc();
+                }
+
+                if (filledCount == 0)
+                {
+                    if (nextChunkStart >= maxDoc)
+                    {
+                        break;
+                    }
+
+                    nextChunkStart += CHUNK;
+                    continue;
+                }
+
+                // Dim 0: a match upgrades the slot past the "missing dim 0" state.
+                foreach (DocsEnum docsEnum in docsEnums[0])
+                {
+                    if (docsEnum == null)
+                    {
+                        continue;
+                    }
+
+                    docID = docsEnum.DocID;
+                    while (docID < nextChunkStart)
+                    {
+                        int slot = docID & MASK;
+                        if (docIDs[slot] == docID)
+                        {
+                            missingDims[slot] = 1;
+                            counts[slot] = 2;
+                        }
+
+                        docID = docsEnum.NextDoc();
+                    }
+                }
+
+                // Remaining dims follow the shared counts/missingDims progression.
+                for (int dim = 1; dim < numDims; dim++)
+                {
+                    foreach (DocsEnum docsEnum in docsEnums[dim])
+                    {
+                        if (docsEnum == null)
+                        {
+                            continue;
+                        }
+
+                        docID = docsEnum.DocID;
+                        while (docID < nextChunkStart)
+                        {
+                            int slot = docID & MASK;
+                            if (docIDs[slot] == docID && counts[slot] >= dim)
+                            {
+                                if (missingDims[slot] >= dim)
+                                {
+                                    missingDims[slot] = dim + 1;
+                                    counts[slot] = dim + 2;
+                                }
+                                else
+                                {
+                                    counts[slot] = dim + 1;
+                                }
+                            }
+
+                            docID = docsEnum.NextDoc();
+                        }
+                    }
+                }
+
+                // Emit hits and near-misses exactly as in DoDrillDownAdvanceScoring.
+                for (int i = 0; i < filledCount; i++)
+                {
+                    int slot = filledSlots[i];
+                    collectDocID = docIDs[slot];
+                    collectScore = scores[slot];
+                    if (counts[slot] == 1 + numDims)
+                    {
+                        CollectHit(collector, sidewaysCollectors);
+                    }
+                    else if (counts[slot] == numDims)
+                    {
+                        CollectNearMiss(sidewaysCollectors, missingDims[slot]);
+                    }
+                }
+
+                if (nextChunkStart >= maxDoc)
+                {
+                    break;
+                }
+
+                nextChunkStart += CHUNK;
+            }
+        }
+
+        // Doc currently being collected (valid only during Score(Collector)).
+        public override int DocID
+        {
+            get
+            {
+                return collectDocID;
+            }
+        }
+
+        // Score of the doc currently being collected.
+        public override float Score()
+        {
+            return collectScore;
+        }
+
+        public override int Freq
+        {
+            get
+            {
+                // Base query plus one sub-scorer per dim; presumably a nominal value
+                // since this scorer is not iterated externally — TODO confirm.
+                return 1 + dims.Length;
+            }
+        }
+
+        // This scorer only supports bulk scoring via Score(Collector); it cannot be
+        // iterated doc-by-doc.
+        public override int NextDoc()
+        {
+            throw new NotSupportedException();
+        }
+
+        public override int Advance(int target)
+        {
+            throw new NotSupportedException();
+        }
+
+        public override long Cost
+        {
+            get
+            {
+                return baseScorer.Cost;
+            }
+        }
+
+        public override ICollection<ChildScorer> Children
+        {
+            get
+            {
+                return new List<ChildScorer>() { new ChildScorer(baseScorer, @"MUST") };
+            }
+        }
+
+        /// <summary>
+        /// Per-dimension bundle: the dim's postings enums, its document frequency
+        /// (used to sort dims rarest-first via CompareTo), its sideways collector,
+        /// and the dim name.
+        /// </summary>
+        internal class DocsEnumsAndFreq : IComparable<DocsEnumsAndFreq>
+        {
+            internal DocsEnum[] docsEnums;
+            internal int freq;
+            internal Collector sidewaysCollector;
+            internal string dim;
+
+            public int CompareTo(DocsEnumsAndFreq other)
+            {
+                // Ascending frequency order.
+                return freq - other.freq;
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Search/OrdinalsCache.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Search/OrdinalsCache.cs b/src/contrib/Facet/Search/OrdinalsCache.cs
new file mode 100644
index 0000000..968baaa
--- /dev/null
+++ b/src/contrib/Facet/Search/OrdinalsCache.cs
@@ -0,0 +1,83 @@
+using Lucene.Net.Facet.Encoding;
+using Lucene.Net.Facet.Params;
+using Lucene.Net.Index;
+using Lucene.Net.Support;
+using Lucene.Net.Util;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Facet.Search
+{
+    /// <summary>
+    /// Caches fully-decoded category ordinals per BinaryDocValues instance, so the
+    /// (expensive) decode of every document's category list happens once per
+    /// segment. Entries are held in a weak dictionary keyed by the doc-values
+    /// instance, so they are released when the segment reader goes away.
+    /// </summary>
+    public class OrdinalsCache
+    {
+        /// <summary>
+        /// Decoded ordinals for one segment: doc N's ordinals live in
+        /// ordinals[offsets[N] .. offsets[N+1]).
+        /// </summary>
+        public sealed class CachedOrds
+        {
+            public readonly int[] offsets;
+            public readonly int[] ordinals;
+
+            /// <summary>
+            /// Decodes every document's category list from <paramref name="dv"/>
+            /// using the decoder matching <paramref name="clp"/>'s encoder.
+            /// </summary>
+            public CachedOrds(BinaryDocValues dv, int maxDoc, CategoryListParams clp)
+            {
+                BytesRef buf = new BytesRef();
+                offsets = new int[maxDoc + 1];
+                int[] ords = new int[maxDoc];
+                int totOrds = 0;
+                IntDecoder decoder = clp.CreateEncoder().CreateMatchingDecoder();
+                IntsRef values = new IntsRef(32);
+                for (int docID = 0; docID < maxDoc; docID++)
+                {
+                    offsets[docID] = totOrds;
+                    dv.Get(docID, buf);
+                    if (buf.length > 0)
+                    {
+                        // Decode this doc's ordinals and append them to the flat array.
+                        decoder.Decode(buf, values);
+                        if (totOrds + values.length >= ords.Length)
+                        {
+                            ords = ArrayUtil.Grow(ords, totOrds + values.length + 1);
+                        }
+
+                        for (int i = 0; i < values.length; i++)
+                        {
+                            ords[totOrds++] = values.ints[i];
+                        }
+                    }
+                }
+
+                offsets[maxDoc] = totOrds;
+                // Trim the array only when more than ~10% would be wasted.
+                if ((double)totOrds / ords.Length < 0.9)
+                {
+                    this.ordinals = new int[totOrds];
+                    Array.Copy(ords, 0, this.ordinals, 0, totOrds);
+                }
+                else
+                {
+                    this.ordinals = ords;
+                }
+            }
+        }
+
+        // Private lock object: locking typeof(OrdinalsCache) would expose the lock
+        // to external code (CA2002) and risk unrelated deadlocks.
+        private static readonly object cacheLock = new object();
+
+        private static readonly IDictionary<BinaryDocValues, CachedOrds> intsCache = new WeakDictionary<BinaryDocValues, CachedOrds>();
+
+        /// <summary>
+        /// Returns the cached (or newly computed) ordinals for the given segment and
+        /// category list, or null when the segment has no values for the field.
+        /// </summary>
+        public static CachedOrds GetCachedOrds(AtomicReaderContext context, CategoryListParams clp)
+        {
+            lock (cacheLock)
+            {
+                BinaryDocValues dv = context.AtomicReader.GetBinaryDocValues(clp.field);
+                if (dv == null)
+                {
+                    return null;
+                }
+
+                // TryGetValue, not the indexer: IDictionary's indexer throws
+                // KeyNotFoundException on a missing key (the Java original's
+                // Map.get returned null here).
+                CachedOrds ci;
+                if (!intsCache.TryGetValue(dv, out ci))
+                {
+                    ci = new CachedOrds(dv, context.AtomicReader.MaxDoc, clp);
+                    intsCache[dv] = ci;
+                }
+
+                return ci;
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Search/PerCategoryListAggregator.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Search/PerCategoryListAggregator.cs b/src/contrib/Facet/Search/PerCategoryListAggregator.cs
new file mode 100644
index 0000000..7c2ec60
--- /dev/null
+++ b/src/contrib/Facet/Search/PerCategoryListAggregator.cs
@@ -0,0 +1,47 @@
+using Lucene.Net.Facet.Params;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Facet.Search
+{
+    /// <summary>
+    /// An IFacetsAggregator that delegates to a different aggregator per
+    /// CategoryListParams, allowing each category list to be aggregated with its
+    /// own strategy.
+    /// </summary>
+    public class PerCategoryListAggregator : IFacetsAggregator
+    {
+        private readonly IDictionary<CategoryListParams, IFacetsAggregator> aggregators;
+        private readonly FacetIndexingParams fip;
+
+        /// <summary>
+        /// Creates an aggregator that routes each category list to its mapped
+        /// delegate; <paramref name="fip"/> resolves a request's category path back
+        /// to its CategoryListParams for rollup.
+        /// </summary>
+        public PerCategoryListAggregator(IDictionary<CategoryListParams, IFacetsAggregator> aggregators, FacetIndexingParams fip)
+        {
+            this.aggregators = aggregators;
+            this.fip = fip;
+        }
+
+        /// <summary>
+        /// Delegates aggregation of the matching docs to the aggregator mapped for
+        /// <paramref name="clp"/>.
+        /// </summary>
+        public void Aggregate(FacetsCollector.MatchingDocs matchingDocs, CategoryListParams clp, FacetArrays facetArrays)
+        {
+            aggregators[clp].Aggregate(matchingDocs, clp, facetArrays);
+        }
+
+        /// <summary>
+        /// Delegates rollup to the aggregator responsible for the request's
+        /// category list.
+        /// </summary>
+        public void RollupValues(FacetRequest fr, int ordinal, int[] children, int[] siblings, FacetArrays facetArrays)
+        {
+            CategoryListParams clp = fip.GetCategoryListParams(fr.categoryPath);
+            aggregators[clp].RollupValues(fr, ordinal, children, siblings, facetArrays);
+        }
+
+        /// <summary>
+        /// True if any delegate aggregator requires document scores.
+        /// </summary>
+        public bool RequiresDocScores
+        {
+            get
+            {
+                // System.Linq is already imported by this file; Any() replaces the
+                // hand-rolled foreach/early-return loop.
+                return aggregators.Values.Any(a => a.RequiresDocScores);
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Search/ReusingFacetArrays.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Search/ReusingFacetArrays.cs b/src/contrib/Facet/Search/ReusingFacetArrays.cs
new file mode 100644
index 0000000..7aafbae
--- /dev/null
+++ b/src/contrib/Facet/Search/ReusingFacetArrays.cs
@@ -0,0 +1,34 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Facet.Search
+{
+    /// <summary>
+    /// A FacetArrays implementation that allocates and frees its int/float arrays
+    /// through a shared ArraysPool instead of the garbage collector, so arrays are
+    /// reused across facet accumulations.
+    /// </summary>
+    public class ReusingFacetArrays : FacetArrays
+    {
+        private readonly ArraysPool arraysPool;
+
+        // Array length is dictated by the pool so returned arrays fit its buckets.
+        public ReusingFacetArrays(ArraysPool arraysPool)
+            : base(arraysPool.arrayLength)
+        {
+            this.arraysPool = arraysPool;
+        }
+
+        // Pool-backed allocation instead of `new int[...]`.
+        protected override int[] NewIntArray()
+        {
+            return arraysPool.AllocateIntArray();
+        }
+
+        // Pool-backed allocation instead of `new float[...]`.
+        protected override float[] NewFloatArray()
+        {
+            return arraysPool.AllocateFloatArray();
+        }
+
+        // Returns both arrays to the pool rather than dropping the references.
+        protected override void DoFree(float[] floats, int[] ints)
+        {
+            arraysPool.Free(floats);
+            arraysPool.Free(ints);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Search/ScoringAggregator.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Search/ScoringAggregator.cs b/src/contrib/Facet/Search/ScoringAggregator.cs
new file mode 100644
index 0000000..f4c1a53
--- /dev/null
+++ b/src/contrib/Facet/Search/ScoringAggregator.cs
@@ -0,0 +1,50 @@
+using Lucene.Net.Index;
+using Lucene.Net.Util;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Facet.Search
+{
+    /// <summary>
+    /// An IAggregator that sums document scores into a shared float array, one
+    /// accumulator cell per category ordinal. Equality is identity of the backing
+    /// array: two ScoringAggregators are equal only when they write into the very
+    /// same array instance.
+    /// </summary>
+    public class ScoringAggregator : IAggregator
+    {
+        private readonly float[] scoreArray;
+        private readonly int hashCode;
+
+        public ScoringAggregator(float[] counterArray)
+        {
+            scoreArray = counterArray;
+            // Identity-based hash, cached once; a null array hashes to 0.
+            hashCode = counterArray == null ? 0 : counterArray.GetHashCode();
+        }
+
+        /// <summary>
+        /// Adds <paramref name="score"/> to the accumulator of every ordinal the
+        /// document is associated with.
+        /// </summary>
+        public void Aggregate(int docID, float score, IntsRef ordinals)
+        {
+            int i = 0;
+            while (i < ordinals.length)
+            {
+                scoreArray[ordinals.ints[i]] += score;
+                i++;
+            }
+        }
+
+        public override bool Equals(Object obj)
+        {
+            // Fast path: an instance always shares its own backing array.
+            if (ReferenceEquals(this, obj))
+            {
+                return true;
+            }
+
+            if (obj == null || obj.GetType() != this.GetType())
+            {
+                return false;
+            }
+
+            // Reference comparison of the arrays, matching the cached identity hash.
+            return ((ScoringAggregator)obj).scoreArray == this.scoreArray;
+        }
+
+        public override int GetHashCode()
+        {
+            return hashCode;
+        }
+
+        // Segment changes need no per-reader state here; always continue.
+        public bool SetNextReader(AtomicReaderContext context)
+        {
+            return true;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Search/SearcherTaxonomyManager.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Search/SearcherTaxonomyManager.cs b/src/contrib/Facet/Search/SearcherTaxonomyManager.cs
new file mode 100644
index 0000000..03e4b8f
--- /dev/null
+++ b/src/contrib/Facet/Search/SearcherTaxonomyManager.cs
@@ -0,0 +1,94 @@
+using Lucene.Net.Facet.Taxonomy;
+using Lucene.Net.Facet.Taxonomy.Directory;
+using Lucene.Net.Index;
+using Lucene.Net.Search;
+using Lucene.Net.Util;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Facet.Search
+{
+    /// <summary>
+    /// A ReferenceManager that keeps an IndexSearcher and a DirectoryTaxonomyReader
+    /// in sync, reopening them together so facet ordinals in the index always match
+    /// the taxonomy snapshot handed out with the searcher.
+    /// </summary>
+    public class SearcherTaxonomyManager : ReferenceManager<SearcherTaxonomyManager.SearcherAndTaxonomy>
+    {
+        /// <summary>
+        /// The matched pair handed out to callers; both members share one ref count
+        /// managed by the enclosing manager.
+        /// </summary>
+        public class SearcherAndTaxonomy
+        {
+            public readonly IndexSearcher searcher;
+            public readonly DirectoryTaxonomyReader taxonomyReader;
+            
+            internal SearcherAndTaxonomy(IndexSearcher searcher, DirectoryTaxonomyReader taxonomyReader)
+            {
+                this.searcher = searcher;
+                this.taxonomyReader = taxonomyReader;
+            }
+        }
+
+        private readonly SearcherFactory searcherFactory;
+        // Epoch of the taxonomy at construction; used to detect a forbidden
+        // replaceTaxonomy call during refresh.
+        private readonly long taxoEpoch;
+        private readonly DirectoryTaxonomyWriter taxoWriter;
+
+        /// <summary>
+        /// Opens the initial near-real-time searcher/taxonomy pair from the given
+        /// writers. A null <paramref name="searcherFactory"/> falls back to the
+        /// default factory.
+        /// </summary>
+        public SearcherTaxonomyManager(IndexWriter writer, bool applyAllDeletes, SearcherFactory searcherFactory, DirectoryTaxonomyWriter taxoWriter)
+        {
+            if (searcherFactory == null)
+            {
+                searcherFactory = new SearcherFactory();
+            }
+
+            this.searcherFactory = searcherFactory;
+            this.taxoWriter = taxoWriter;
+            DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
+            current = new SearcherAndTaxonomy(SearcherManager.GetSearcher(searcherFactory, DirectoryReader.Open(writer, applyAllDeletes)), taxoReader);
+            taxoEpoch = taxoWriter.TaxonomyEpoch;
+        }
+
+        // Releases one reference from both members of the pair.
+        protected override void DecRef(SearcherAndTaxonomy ref_renamed)
+        {
+            ref_renamed.searcher.IndexReader.DecRef();
+            ref_renamed.taxonomyReader.DecRef();
+        }
+
+        // Acquires a reference on both members atomically from the caller's view:
+        // if the taxonomy incRef fails, the searcher ref just taken is rolled back.
+        protected override bool TryIncRef(SearcherAndTaxonomy ref_renamed)
+        {
+            if (ref_renamed.searcher.IndexReader.TryIncRef())
+            {
+                if (ref_renamed.taxonomyReader.TryIncRef())
+                {
+                    return true;
+                }
+                else
+                {
+                    ref_renamed.searcher.IndexReader.DecRef();
+                }
+            }
+
+            return false;
+        }
+
+        /// <summary>
+        /// Reopens the pair if the index changed; returns null when nothing changed.
+        /// If only the index changed, the existing taxonomy reader is reused (with
+        /// an extra ref). Throws if DirectoryTaxonomyWriter.ReplaceTaxonomy was
+        /// called, since that invalidates all previously handed-out ordinals.
+        /// </summary>
+        protected override SearcherAndTaxonomy RefreshIfNeeded(SearcherAndTaxonomy ref_renamed)
+        {
+            IndexReader r = ref_renamed.searcher.IndexReader;
+            IndexReader newReader = DirectoryReader.OpenIfChanged((DirectoryReader)r);
+            if (newReader == null)
+            {
+                return null;
+            }
+            else
+            {
+                DirectoryTaxonomyReader tr = TaxonomyReader.OpenIfChanged(ref_renamed.taxonomyReader);
+                if (tr == null)
+                {
+                    // Taxonomy unchanged: share the old reader in the new pair.
+                    ref_renamed.taxonomyReader.IncRef();
+                    tr = ref_renamed.taxonomyReader;
+                }
+                else if (taxoWriter.TaxonomyEpoch != taxoEpoch)
+                {
+                    // Clean up both freshly opened readers before failing.
+                    IOUtils.Close(newReader, tr);
+                    throw new InvalidOperationException(@"DirectoryTaxonomyWriter.replaceTaxonomy was called, which is not allowed when using SearcherTaxonomyManager");
+                }
+
+                return new SearcherAndTaxonomy(SearcherManager.GetSearcher(searcherFactory, newReader), tr);
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Search/SumScoreFacetRequest.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Search/SumScoreFacetRequest.cs b/src/contrib/Facet/Search/SumScoreFacetRequest.cs
new file mode 100644
index 0000000..db2d203
--- /dev/null
+++ b/src/contrib/Facet/Search/SumScoreFacetRequest.cs
@@ -0,0 +1,34 @@
+using Lucene.Net.Facet.Taxonomy;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Facet.Search
+{
+    /// <summary>
+    /// A FacetRequest whose facet value is the sum of the scores of the matching
+    /// documents in each category, accumulated into the float facet array.
+    /// </summary>
+    public class SumScoreFacetRequest : FacetRequest
+    {
+        // path: the category to aggregate; num: how many top results to return.
+        public SumScoreFacetRequest(CategoryPath path, int num)
+            : base(path, num)
+        {
+        }
+
+        // Score sums go into the float array, so a ScoringAggregator is used
+        // regardless of the complements setting.
+        public override IAggregator CreateAggregator(bool useComplements, FacetArrays arrays, TaxonomyReader taxonomy)
+        {
+            return new ScoringAggregator(arrays.GetFloatArray());
+        }
+
+        // The facet value of an ordinal is its accumulated score sum.
+        public override double GetValueOf(FacetArrays arrays, int ordinal)
+        {
+            return arrays.GetFloatArray()[ordinal];
+        }
+
+        // Tells the framework this request reads the float (not int) facet array.
+        public override FacetArraysSource FacetArraysSourceValue
+        {
+            get
+            {
+                return FacetArraysSource.FLOAT;
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/contrib/Facet/Search/SumScoreFacetsAggregator.cs
----------------------------------------------------------------------
diff --git a/src/contrib/Facet/Search/SumScoreFacetsAggregator.cs b/src/contrib/Facet/Search/SumScoreFacetsAggregator.cs
new file mode 100644
index 0000000..f0639a0
--- /dev/null
+++ b/src/contrib/Facet/Search/SumScoreFacetsAggregator.cs
@@ -0,0 +1,70 @@
+using Lucene.Net.Facet.Params;
+using Lucene.Net.Facet.Taxonomy;
+using Lucene.Net.Util;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Facet.Search
+{
+    /// <summary>
+    /// An IFacetsAggregator that sums document scores per category ordinal into the
+    /// float facet array, and rolls child sums up into their parents.
+    /// </summary>
+    public class SumScoreFacetsAggregator : IFacetsAggregator
+    {
+        // Scratch buffer reused across documents; makes this instance non-thread-safe.
+        private readonly IntsRef ordinals = new IntsRef(32);
+
+        /// <summary>
+        /// Adds each matching document's score to the accumulators of all its
+        /// category ordinals.
+        /// </summary>
+        public void Aggregate(FacetsCollector.MatchingDocs matchingDocs, CategoryListParams clp, FacetArrays facetArrays)
+        {
+            ICategoryListIterator cli = clp.CreateCategoryListIterator(0);
+            if (!cli.SetNextReader(matchingDocs.context))
+            {
+                // This segment has no category list for these params; nothing to do.
+                return;
+            }
+
+            int doc = 0;
+            int length = matchingDocs.bits.Length;
+            float[] scores = facetArrays.GetFloatArray();
+            int scoresIdx = 0;
+            // Walk the set bits of the matching-docs bitset; assumes
+            // matchingDocs.scores is parallel to that set-bit order — TODO confirm
+            // against FacetsCollector.
+            while (doc < length && (doc = matchingDocs.bits.NextSetBit(doc)) != -1)
+            {
+                cli.GetOrdinals(doc, ordinals);
+                int upto = ordinals.offset + ordinals.length;
+                float score = matchingDocs.scores[scoresIdx++];
+                for (int i = ordinals.offset; i < upto; i++)
+                {
+                    scores[ordinals.ints[i]] += score;
+                }
+
+                ++doc;
+            }
+        }
+
+        // Recursively sums the subtree rooted at `ordinal` and its following
+        // siblings, folding each child's total into its own cell; returns the sum
+        // over the sibling chain.
+        private float RollupScores(int ordinal, int[] children, int[] siblings, float[] scores)
+        {
+            float score = 0F;
+            while (ordinal != TaxonomyReader.INVALID_ORDINAL)
+            {
+                float childScore = scores[ordinal];
+                childScore += RollupScores(children[ordinal], children, siblings, scores);
+                scores[ordinal] = childScore;
+                score += childScore;
+                ordinal = siblings[ordinal];
+            }
+
+            return score;
+        }
+
+        // Adds the rolled-up sum of `ordinal`'s descendants to its own accumulator.
+        public void RollupValues(FacetRequest fr, int ordinal, int[] children, int[] siblings, FacetArrays facetArrays)
+        {
+            float[] scores = facetArrays.GetFloatArray();
+            scores[ordinal] += RollupScores(children[ordinal], children, siblings, scores);
+        }
+
+        // Score sums obviously require per-doc scores from the collector.
+        public bool RequiresDocScores
+        {
+            get
+            {
+                return true;
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/core/Lucene.Net.csproj
----------------------------------------------------------------------
diff --git a/src/core/Lucene.Net.csproj b/src/core/Lucene.Net.csproj
index 5941972..c754a84 100644
--- a/src/core/Lucene.Net.csproj
+++ b/src/core/Lucene.Net.csproj
@@ -585,6 +585,7 @@
     </Compile>
     <Compile Include="Search\MaxNonCompetitiveBoostAttribute.cs" />
     <Compile Include="Search\MinShouldMatchSumScorer.cs" />
+    <Compile Include="Search\MultiCollector.cs" />
     <Compile Include="Search\MultiPhraseQuery.cs">
       <SubType>Code</SubType>
     </Compile>

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/core/Search/MultiCollector.cs
----------------------------------------------------------------------
diff --git a/src/core/Search/MultiCollector.cs b/src/core/Search/MultiCollector.cs
new file mode 100644
index 0000000..9e639c5
--- /dev/null
+++ b/src/core/Search/MultiCollector.cs
@@ -0,0 +1,107 @@
+using Lucene.Net.Index;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Lucene.Net.Search
+{
+    public class MultiCollector : Collector
+    {
+        public static Collector Wrap(params Collector[] collectors)
+        {
+            int n = 0;
+            foreach (Collector c in collectors)
+            {
+                if (c != null)
+                {
+                    n++;
+                }
+            }
+
+            if (n == 0)
+            {
+                throw new ArgumentException(@"At least 1 collector must not be null");
+            }
+            else if (n == 1)
+            {
+                Collector col = null;
+                foreach (Collector c in collectors)
+                {
+                    if (c != null)
+                    {
+                        col = c;
+                        break;
+                    }
+                }
+
+                return col;
+            }
+            else if (n == collectors.Length)
+            {
+                return new MultiCollector(collectors);
+            }
+            else
+            {
+                Collector[] colls = new Collector[n];
+                n = 0;
+                foreach (Collector c in collectors)
+                {
+                    if (c != null)
+                    {
+                        colls[n++] = c;
+                    }
+                }
+
+                return new MultiCollector(colls);
+            }
+        }
+
+        private readonly Collector[] collectors;
+
+        private MultiCollector(params Collector[] collectors)
+        {
+            this.collectors = collectors;
+        }
+
+        public override bool AcceptsDocsOutOfOrder
+        {
+            get
+            {
+                foreach (Collector c in collectors)
+                {
+                    if (!c.AcceptsDocsOutOfOrder)
+                    {
+                        return false;
+                    }
+                }
+
+                return true;
+            }
+        }
+
+        public override void Collect(int doc)
+        {
+            foreach (Collector c in collectors)
+            {
+                c.Collect(doc);
+            }
+        }
+
+        public override void SetNextReader(AtomicReaderContext context)
+        {
+            foreach (Collector c in collectors)
+            {
+                c.SetNextReader(context);
+            }
+        }
+
+        public override void SetScorer(Scorer s)
+        {
+            foreach (Collector c in collectors)
+            {
+                c.SetScorer(s);
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/lucenenet/blob/f682a4a1/src/core/Support/Arrays.cs
----------------------------------------------------------------------
diff --git a/src/core/Support/Arrays.cs b/src/core/Support/Arrays.cs
index 285504e..1232bd7 100644
--- a/src/core/Support/Arrays.cs
+++ b/src/core/Support/Arrays.cs
@@ -7,6 +7,23 @@ namespace Lucene.Net.Support
 {
     public static class Arrays
     {
+        public static int GetHashCode<T>(T[] a)
+        {
+            if (a == null)
+                return 0;
+
+            const int prime = 31;
+
+            int hash = 17;
+
+            foreach (var item in a)
+            {
+                hash = hash * 23 + (item == null ? 0 : item.GetHashCode());
+            }
+
+            return hash;
+        }
+
         public static void Fill<T>(T[] a, T val)
         {
             for (int i = 0; i < a.Length; i++)


Mime
View raw message