lucene-commits mailing list archives

From gerlowsk...@apache.org
Subject [lucene-solr] 01/02: SOLR-13892: Add "join" postfilter implementation
Date Wed, 15 Jan 2020 14:47:18 GMT
This is an automated email from the ASF dual-hosted git repository.

gerlowskija pushed a commit to branch jira/solr-13892
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 45f19108cff9eff6a39868fe73b579d14aa1a2dc
Author: Jason Gerlowski <gerlowskija@apache.org>
AuthorDate: Fri Jan 10 09:14:09 2020 -0500

    SOLR-13892: Add "join" postfilter implementation
---
 .../org/apache/solr/search/JoinQParserPlugin.java  | 221 ++++++++++++++++++-
 .../solr/search/join/MVTermOrdinalCollector.java   |  72 +++++++
 .../solr/search/join/SVTermOrdinalCollector.java   |  69 ++++++
 .../solr/search/join/TopLevelDVTermsCollector.java |  78 +++++++
 .../test-files/solr/collection1/conf/schema12.xml  |   1 +
 solr/core/src/test/org/apache/solr/TestJoin.java   | 178 ++++++++++------
 .../solr/search/TestJoinQueryPerformance.java      | 235 +++++++++++++++++++++
 7 files changed, 784 insertions(+), 70 deletions(-)
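
As a usage sketch (not part of the commit itself): the join is treated as a postfilter only when the local params ask for it explicitly, i.e. a cost above 99 together with cache=false, mirroring the new postFilterEnabled() check and the cost=101/cost=102, cache=false values used by the tests below. A minimal SolrJ example under those assumptions; the Solr URL, collection name, and the dept_ss_dv/dept_id_ss_dv fields (taken from the test schema) are illustrative placeholders only:

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;

    public class JoinPostFilterSketch {
      public static void main(String[] args) throws Exception {
        // Assumed local Solr URL and collection name; join fields mirror the test schema additions.
        try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
          final SolrQuery query = new SolrQuery("*:*");
          // cache=false plus cost > 99 is what routes the join through getFilterCollector()
          // rather than the existing createWeight() code path.
          query.addFilterQuery("{!join from=dept_ss_dv to=dept_id_ss_dv cost=101 cache=false}title:MTS");
          final QueryResponse rsp = client.query("collection1", query);
          System.out.println("numFound=" + rsp.getResults().getNumFound());
        }
      }
    }

Any other cost value, or cache left at its default, keeps the join on the existing scoring/weight-based path.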

diff --git a/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java
index c6fb0de..b56e967 100644
--- a/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java
@@ -18,18 +18,23 @@ package org.apache.solr.search;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Objects;
 
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.MultiPostingsEnum;
 import org.apache.lucene.index.PostingsEnum;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.ConstantScoreScorer;
 import org.apache.lucene.search.ConstantScoreWeight;
 import org.apache.lucene.search.DocIdSet;
@@ -43,6 +48,7 @@ import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.LongBitSet;
 import org.apache.lucene.util.StringHelper;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.SolrParams;
@@ -53,15 +59,25 @@ import org.apache.solr.handler.component.ResponseBuilder;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestInfo;
+import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.schema.TrieField;
 import org.apache.solr.search.join.GraphPointsCollector;
+import org.apache.solr.search.join.MVTermOrdinalCollector;
+import org.apache.solr.search.join.SVTermOrdinalCollector;
 import org.apache.solr.search.join.ScoreJoinQParserPlugin;
+import org.apache.solr.search.join.TopLevelDVTermsCollector;
 import org.apache.solr.util.RTimer;
 import org.apache.solr.util.RefCounted;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class JoinQParserPlugin extends QParserPlugin {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
   public static final String NAME = "join";
+  public static final String COST = "cost";
+  public static final String CACHE = "cache";
 
   @Override
  public QParser createParser(String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req) {
@@ -71,10 +87,16 @@ public class JoinQParserPlugin extends QParserPlugin {
       public Query parse() throws SyntaxError {
         if(localParams!=null && localParams.get(ScoreJoinQParserPlugin.SCORE)!=null){
           return new ScoreJoinQParserPlugin().createParser(qstr, localParams, params, req).parse();
-        }else{
+        } else {
           return parseJoin();
         }
       }
+
+      private boolean postFilterEnabled() {
+        return localParams != null &&
+            localParams.getInt(COST) != null && localParams.getPrimitiveInt(COST) > 99 &&
+            localParams.getBool(CACHE) != null && localParams.getPrimitiveBool(CACHE) == false;
+      }
       
       Query parseJoin() throws SyntaxError {
         final String fromField = getParam("from");
@@ -117,7 +139,9 @@ public class JoinQParserPlugin extends QParserPlugin {
           fromQuery = fromQueryParser.getQuery();
         }
 
-        JoinQuery jq = new JoinQuery(fromField, toField, coreName == null ? fromIndex : coreName, fromQuery);
+
+        final String indexToUse = coreName == null ? fromIndex : coreName;
+        final JoinQuery jq = new JoinQuery(fromField, toField, indexToUse, fromQuery);
         jq.fromCoreOpenTime = fromCoreOpenTime;
         return jq;
       }
@@ -138,12 +162,17 @@ public class JoinQParserPlugin extends QParserPlugin {
 }
 
 
-class JoinQuery extends Query {
+class JoinQuery extends Query implements PostFilter {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
   String fromField;
   String toField;
  String fromIndex; // TODO: name is missleading here compared to JoinQParserPlugin usage - here it must be a core name
   Query q;
   long fromCoreOpenTime;
+  private boolean cache;
+  private boolean cacheSep;
+  private int cost;
 
   public JoinQuery(String fromField, String toField, String coreName, Query subQuery) {
     assert null != fromField;
@@ -175,6 +204,37 @@ class JoinQuery extends Query {
     return new JoinQueryWeight((SolrIndexSearcher) searcher, scoreMode, boost);
   }
 
+  @Override
+  public DelegatingCollector getFilterCollector(IndexSearcher searcher) {
+    log.info("JEGERLOW: Running join-postfilter query");
+    final SolrIndexSearcher solrSearcher = (SolrIndexSearcher) searcher;
+    final JoinQueryWeight weight = new JoinQueryWeight(solrSearcher, ScoreMode.COMPLETE_NO_SCORES, 1.0f);
+    final SolrIndexSearcher fromSearcher = weight.fromSearcher;
+    final SolrIndexSearcher toSearcher = weight.toSearcher;
+    try {
+      ensureJoinFieldExistsAndHasDocValues(fromSearcher, fromField, "from");
+      ensureJoinFieldExistsAndHasDocValues(toSearcher, toField, "to");
+
+      final SortedSetDocValues toValues = DocValues.getSortedSet(toSearcher.getSlowAtomicReader(), toField);
+      ensureDocValuesAreNonEmpty(toValues, toField, "to");
+      final LongBitSet toOrdBitSet = new LongBitSet(toValues.getValueCount());
+
+      final boolean multivalued = fromSearcher.getSchema().getField(fromField).multiValued();
+      long start = System.currentTimeMillis();
+      final BitsetBounds toBitsetBounds = (multivalued) ? populateToBitsetMultivalued(fromSearcher, toValues, toOrdBitSet) : populateToBitsetSinglevalued(fromSearcher, toValues, toOrdBitSet);
+      long end = System.currentTimeMillis();
+      log.debug("Built the join filter in {} millis", Long.toString(end - start));
+
+      if (toBitsetBounds.lower != BitsetBounds.NO_MATCHES) {
+        return new TopLevelDVTermsCollector(toValues, toOrdBitSet, toBitsetBounds.lower, toBitsetBounds.upper);
+      } else {
+        return new NoMatchesCollector();
+      }
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
   private class JoinQueryWeight extends ConstantScoreWeight {
     SolrIndexSearcher fromSearcher;
     RefCounted<SolrIndexSearcher> fromRef;
@@ -556,6 +616,36 @@ class JoinQuery extends Query {
   }
 
   @Override
+  public boolean getCache() {
+    return cache;
+  }
+
+  @Override
+  public void setCache(boolean cache) {
+    this.cache = cache;
+  }
+
+  @Override
+  public int getCost() {
+    return cost;
+  }
+
+  @Override
+  public void setCost(int cost) {
+    this.cost = cost;
+  }
+
+  @Override
+  public boolean getCacheSep() {
+    return cacheSep;
+  }
+
+  @Override
+  public void setCacheSep(boolean cacheSep) {
+    this.cacheSep = cacheSep;
+  }
+
+  @Override
   public String toString(String field) {
     return "{!join from="+fromField+" to="+toField
         + (fromIndex != null ? " fromIndex="+fromIndex : "")
@@ -587,4 +677,129 @@ class JoinQuery extends Query {
     return h;
   }
 
+  private void ensureJoinFieldExistsAndHasDocValues(SolrIndexSearcher solrSearcher, String fieldName, String querySide) {
+    final IndexSchema schema = solrSearcher.getSchema();
+    final SchemaField field = schema.getFieldOrNull(fieldName);
+    if (field == null) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, querySide + " field '" + fieldName + "' does not exist");
+    }
+
+    if (!field.hasDocValues()) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+          "Postfilter join queries require 'to' and 'from' fields to have docvalues enabled: '" +
+              querySide + "' field '" + fieldName + "' doesn't");
+    }
+  }
+
+  private void ensureDocValuesAreNonEmpty(SortedDocValues docValues, String fieldName, String type) {
+    if (docValues.getValueCount() == 0) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'" + type + "' field " + fieldName + " has no docvalues");
+    }
+  }
+
+  private void ensureDocValuesAreNonEmpty(SortedSetDocValues docValues, String fieldName, String type) {
+    if (docValues.getValueCount() == 0) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'" + type + "' field " + fieldName + " has no docvalues");
+    }
+  }
+
+
+  private BitsetBounds populateToBitsetMultivalued(SolrIndexSearcher fromSearcher, SortedSetDocValues toValues, LongBitSet toOrdBitSet) throws IOException {
+    final SortedSetDocValues fromValues = DocValues.getSortedSet(fromSearcher.getSlowAtomicReader(), fromField);
+    ensureDocValuesAreNonEmpty(fromValues, fromField, "from");
+    final LongBitSet fromOrdBitSet = new LongBitSet(fromValues.getValueCount());
+    final Collector fromCollector = new MVTermOrdinalCollector(fromField, fromValues, fromOrdBitSet);
+
+    fromSearcher.search(q, fromCollector);
+    if (fromOrdBitSet.scanIsEmpty()) {
+      log.info("JEGERLOW: from-query found 0 matching ordinals");
+    }
+
+    long fromOrdinal = 0;
+    long firstToOrd = BitsetBounds.NO_MATCHES;
+    long lastToOrd = 0;
+    int count = 0;
+    while (fromOrdinal < fromOrdBitSet.length() && (fromOrdinal = fromOrdBitSet.nextSetBit(fromOrdinal)) >= 0) {
+      ++count;
+      final BytesRef fromBytesRef = fromValues.lookupOrd((int)fromOrdinal);
+      final long toOrdinal = lookupTerm(toValues, fromBytesRef, lastToOrd);//toValues.lookupTerm(fromBytesRef);
+      if (toOrdinal >= 0) {
+        toOrdBitSet.set(toOrdinal);
+        if (firstToOrd == BitsetBounds.NO_MATCHES) firstToOrd = toOrdinal;
+        lastToOrd = toOrdinal;
+      }
+      fromOrdinal++;
+    }
+
+    return new BitsetBounds(firstToOrd, lastToOrd);
+  }
+
+  private BitsetBounds populateToBitsetSinglevalued(SolrIndexSearcher fromSearcher, SortedSetDocValues toValues, LongBitSet toOrdBitSet) throws IOException {
+    final SortedDocValues fromValues = DocValues.getSorted(fromSearcher.getSlowAtomicReader(), fromField);
+    ensureDocValuesAreNonEmpty(fromValues, fromField, "from");
+    final LongBitSet fromOrdBitSet = new LongBitSet(fromValues.getValueCount());
+    final Collector fromCollector = new SVTermOrdinalCollector(fromField, fromValues, fromOrdBitSet);
+
+    fromSearcher.search(q, fromCollector);
+
+    long fromOrdinal = 0;
+    long firstToOrd = BitsetBounds.NO_MATCHES;
+    long lastToOrd = 0;
+    int count = 0;
+    while (fromOrdinal < fromOrdBitSet.length() && (fromOrdinal = fromOrdBitSet.nextSetBit(fromOrdinal)) >= 0) {
+      ++count;
+      final BytesRef fromBytesRef = fromValues.lookupOrd((int)fromOrdinal);
+      final long toOrdinal = lookupTerm(toValues, fromBytesRef, lastToOrd);//toValues.lookupTerm(fromBytesRef);
+      if (toOrdinal >= 0) {
+        toOrdBitSet.set(toOrdinal);
+        if (firstToOrd == BitsetBounds.NO_MATCHES) firstToOrd = toOrdinal;
+        lastToOrd = toOrdinal;
+      }
+      fromOrdinal++;
+    }
+
+    return new BitsetBounds(firstToOrd, lastToOrd);
+  }
+
+  /*
+   * Same binary-search based implementation as SortedSetDocValues.lookupTerm(BytesRef), but with an
+   * optimization to narrow the search space where possible by providing a startOrd instead of beginning
+   * each search at 0.
+   */
+  private long lookupTerm(SortedSetDocValues docValues, BytesRef key, long startOrd) throws IOException {
+    long low = startOrd;
+    long high = docValues.getValueCount()-1;
+
+    while (low <= high) {
+      long mid = (low + high) >>> 1;
+      final BytesRef term = docValues.lookupOrd(mid);
+      int cmp = term.compareTo(key);
+
+      if (cmp < 0) {
+        low = mid + 1;
+      } else if (cmp > 0) {
+        high = mid - 1;
+      } else {
+        return mid; // key found
+      }
+    }
+
+    return -(low + 1);  // key not found.
+  }
+
+  private static class BitsetBounds {
+    public static final long NO_MATCHES = -1L;
+    public final long lower;
+    public final long upper;
+
+    public BitsetBounds(long lower, long upper) {
+      this.lower = lower;
+      this.upper = upper;
+    }
+  }
+
+  private static class NoMatchesCollector extends DelegatingCollector {
+    @Override
+    public void collect(int doc) throws IOException {}
+  }
 }
diff --git a/solr/core/src/java/org/apache/solr/search/join/MVTermOrdinalCollector.java b/solr/core/src/java/org/apache/solr/search/join/MVTermOrdinalCollector.java
new file mode 100644
index 0000000..8bbeabe
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/join/MVTermOrdinalCollector.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.search.join;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.util.LongBitSet;
+import org.apache.solr.search.DelegatingCollector;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Populates a bitset of (top-level) ordinals based on field values in a multi-valued field.
+ */
+public class MVTermOrdinalCollector extends DelegatingCollector {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private int docBase;
+  private SortedSetDocValues topLevelDocValues;
+  private final String fieldName;
+  private final LongBitSet topLevelDocValuesBitSet;
+
+  public MVTermOrdinalCollector(String fieldName, SortedSetDocValues topLevelDocValues, LongBitSet topLevelDocValuesBitSet) {
+    this.fieldName = fieldName;
+    this.topLevelDocValues = topLevelDocValues;
+    this.topLevelDocValuesBitSet = topLevelDocValuesBitSet;
+  }
+
+  public ScoreMode scoreMode() {
+    return ScoreMode.COMPLETE_NO_SCORES;
+  }
+
+  public boolean needsScores(){
+    return false;
+  }
+
+  @Override
+  public void doSetNextReader(LeafReaderContext context) throws IOException {
+    this.docBase = context.docBase;
+  }
+
+  @Override
+  public void collect(int doc) throws IOException {
+    final int globalDoc = docBase + doc;
+
+    if (topLevelDocValues.advanceExact(globalDoc)) {
+      long ord = SortedSetDocValues.NO_MORE_ORDS;
+      while ((ord = topLevelDocValues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
+        topLevelDocValuesBitSet.set(ord);
+      }
+    }
+  }
+}
diff --git a/solr/core/src/java/org/apache/solr/search/join/SVTermOrdinalCollector.java b/solr/core/src/java/org/apache/solr/search/join/SVTermOrdinalCollector.java
new file mode 100644
index 0000000..8e16aa6
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/join/SVTermOrdinalCollector.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.search.join;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.util.LongBitSet;
+import org.apache.solr.search.DelegatingCollector;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Populates a bitset of (top-level) ordinals based on field values in a single-valued field.
+ */
+public class SVTermOrdinalCollector extends DelegatingCollector {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private int docBase;
+  private SortedDocValues topLevelDocValues;
+  private final String fieldName;
+  private final LongBitSet topLevelDocValuesBitSet;
+
+  public SVTermOrdinalCollector(String fieldName, SortedDocValues topLevelDocValues, LongBitSet topLevelDocValuesBitSet) {
+    this.fieldName = fieldName;
+    this.topLevelDocValues = topLevelDocValues;
+    this.topLevelDocValuesBitSet = topLevelDocValuesBitSet;
+  }
+
+  public ScoreMode scoreMode() {
+    return ScoreMode.COMPLETE_NO_SCORES;
+  }
+
+  public boolean needsScores(){
+    return false;
+  }
+
+  @Override
+  public void doSetNextReader(LeafReaderContext context) throws IOException {
+    this.docBase = context.docBase;
+  }
+
+  @Override
+  public void collect(int doc) throws IOException {
+    final int globalDoc = docBase + doc;
+
+    if (topLevelDocValues.advanceExact(globalDoc)) {
+      topLevelDocValuesBitSet.set(topLevelDocValues.ordValue());
+    }
+  }
+}
diff --git a/solr/core/src/java/org/apache/solr/search/join/TopLevelDVTermsCollector.java b/solr/core/src/java/org/apache/solr/search/join/TopLevelDVTermsCollector.java
new file mode 100644
index 0000000..a699f25
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/join/TopLevelDVTermsCollector.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.search.join;
+
+import java.io.IOException;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.search.LeafCollector;
+import org.apache.lucene.search.Scorable;
+import org.apache.lucene.util.LongBitSet;
+import org.apache.solr.search.DelegatingCollector;
+
+/**
+ * Collects all documents with a field value matching a set value in an ordinal bitset.
+ *
+ * Implementation is similar to {@link org.apache.lucene.search.join.TermsCollector}, but uses top-level ordinals
+ * explicitly and has wider visibility.
+ */
+public class TopLevelDVTermsCollector extends DelegatingCollector {
+  private LeafCollector leafCollector;
+  private int docBase;
+  private SortedSetDocValues topLevelDocValues;
+  private LongBitSet topLevelDocValuesBitSet;
+  private long firstOrd;
+  private long lastOrd;
+
+  public TopLevelDVTermsCollector(SortedSetDocValues topLevelDocValues, LongBitSet topLevelDocValuesBitSet, long firstOrd, long lastOrd) {
+    this.topLevelDocValues = topLevelDocValues;
+    this.topLevelDocValuesBitSet = topLevelDocValuesBitSet;
+    this.firstOrd = firstOrd;
+    this.lastOrd = lastOrd;
+  }
+
+  @Override
+  public void setScorer(Scorable scorer) throws IOException {
+    leafCollector.setScorer(scorer);
+  }
+
+  @Override
+  public void doSetNextReader(LeafReaderContext context) throws IOException {
+    this.leafCollector = delegate.getLeafCollector(context);
+    this.docBase = context.docBase;
+  }
+
+  @Override
+  public void collect(int doc) throws IOException {
+    final int globalDoc = doc + docBase;
+
+    if (topLevelDocValues.advanceExact(globalDoc)) {
+      while (true) {
+        final long ord = topLevelDocValues.nextOrd();
+        if (ord == SortedSetDocValues.NO_MORE_ORDS) break;
+        if (ord > lastOrd) break;
+        if (ord < firstOrd) continue;
+        if (topLevelDocValuesBitSet.get(ord)) {
+          leafCollector.collect(doc);
+          break;
+        }
+      }
+    }
+  }
+}
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema12.xml b/solr/core/src/test-files/solr/collection1/conf/schema12.xml
index 9438f16..73d8b9e 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema12.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema12.xml
@@ -698,6 +698,7 @@
   <dynamicField name="*_s" type="string" indexed="true" stored="true" multiValued="true"/>
   <dynamicField name="*_ss" type="string" indexed="true" stored="true" multiValued="true"/>
   <dynamicField name="*_sdv" type="string" indexed="false" stored="false" docValues="true" useDocValuesAsStored="true"/>
+  <dynamicField name="*_ss_dv" type="string" indexed="true" stored="true" docValues="true" multiValued="true"/>
   <dynamicField name="*_bdv" type="boolean" indexed="false" stored="false" docValues="true" useDocValuesAsStored="true"/>
   <dynamicField name="*_t" type="text" indexed="true" stored="true"/>
   <dynamicField name="*_tt" type="text" indexed="true" stored="true"/>
diff --git a/solr/core/src/test/org/apache/solr/TestJoin.java b/solr/core/src/test/org/apache/solr/TestJoin.java
index e263aa9..65c640c 100644
--- a/solr/core/src/test/org/apache/solr/TestJoin.java
+++ b/solr/core/src/test/org/apache/solr/TestJoin.java
@@ -28,7 +28,9 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.Utils;
+import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -51,99 +53,110 @@ public class TestJoin extends SolrTestCaseJ4 {
     initCore("solrconfig.xml","schema12.xml");
   }
 
+  private void indexEmployeeDocs() {
+    assertU(add(doc("id", "1","name", "john", "title", "Director", "dept_ss_dv","Engineering")));
+    assertU(add(doc("id", "2","name", "mark", "title", "VP", "dept_ss_dv","Marketing")));
+    assertU(add(doc("id", "3","name", "nancy", "title", "MTS", "dept_ss_dv","Sales")));
+    assertU(add(doc("id", "4","name", "dave", "title", "MTS", "dept_ss_dv","Support", "dept_ss_dv","Engineering")));
+    assertU(add(doc("id", "5","name", "tina", "title", "VP", "dept_ss_dv","Engineering")));
 
-  @Test
-  public void testJoin() throws Exception {
-    assertU(add(doc("id", "1","name", "john", "title", "Director", "dept_s","Engineering")));
-    assertU(add(doc("id", "2","name", "mark", "title", "VP", "dept_s","Marketing")));
-    assertU(add(doc("id", "3","name", "nancy", "title", "MTS", "dept_s","Sales")));
-    assertU(add(doc("id", "4","name", "dave", "title", "MTS", "dept_s","Support", "dept_s","Engineering")));
-    assertU(add(doc("id", "5","name", "tina", "title", "VP", "dept_s","Engineering")));
-
-    assertU(add(doc("id","10", "dept_id_s", "Engineering", "text","These guys develop stuff")));
-    assertU(add(doc("id","11", "dept_id_s", "Marketing", "text","These guys make you look good")));
-    assertU(add(doc("id","12", "dept_id_s", "Sales", "text","These guys sell stuff")));
-    assertU(add(doc("id","13", "dept_id_s", "Support", "text","These guys help customers")));
+    assertU(add(doc("id","10", "dept_id_ss_dv", "Engineering", "text","These guys develop stuff")));
+    assertU(add(doc("id","11", "dept_id_ss_dv", "Marketing", "text","These guys make you look good")));
+    assertU(add(doc("id","12", "dept_id_ss_dv", "Sales", "text","These guys sell stuff")));
+    assertU(add(doc("id","13", "dept_id_ss_dv", "Support", "text","These guys help customers")));
 
     assertU(commit());
+  }
 
+  /**
+   * Tests join behavior scenarios that work with or without a postfilter.
+   *
+   * Standard vs post-filter execution is chosen using randomization on a per-query basis.
+   */
+  @Test
+  public void testJoinPostfilterCompatible() throws Exception {
+    indexEmployeeDocs();
     ModifiableSolrParams p = params("sort","id asc");
 
-    // test debugging
-    assertJQ(req(p, "q","{!join from=dept_s to=dept_id_s}title:MTS", "fl","id", "debugQuery","true")
-        ,"/debug/join/{!join from=dept_s to=dept_id_s}title:MTS=={'_MATCH_':'fromSetSize,toSetSize', 'fromSetSize':2, 'toSetSize':3}"
-    );
-
-    assertJQ(req(p, "q","{!join from=dept_s to=dept_id_s}title:MTS", "fl","id")
+    assertJQ(joinreq(p, buildJoinRequest("dept_ss_dv", "dept_id_ss_dv", "title:MTS"), "fl","id")
         ,"/response=={'numFound':3,'start':0,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}"
     );
 
-    // empty from
-    assertJQ(req(p, "q","{!join from=noexist_s to=dept_id_s}*:*", "fl","id")
-        ,"/response=={'numFound':0,'start':0,'docs':[]}"
-    );
-
-    // empty to
-    assertJQ(req(p, "q","{!join from=dept_s to=noexist_s}*:*", "fl","id")
-        ,"/response=={'numFound':0,'start':0,'docs':[]}"
-    );
-
-    // self join... return everyone with she same title as Dave
-    assertJQ(req(p, "q","{!join from=title to=title}name:dave", "fl","id")
-        ,"/response=={'numFound':2,'start':0,'docs':[{'id':'3'},{'id':'4'}]}"
-    );
-
     // find people that develop stuff
-    assertJQ(req(p, "q","{!join from=dept_id_s to=dept_s}text:develop", "fl","id")
+    assertJQ(joinreq(p, buildJoinRequest("dept_id_ss_dv", "dept_ss_dv", "text:develop"), "fl","id")
         ,"/response=={'numFound':3,'start':0,'docs':[{'id':'1'},{'id':'4'},{'id':'5'}]}"
     );
 
-    // self join on multivalued text field
-    assertJQ(req(p, "q","{!join from=title to=title}name:dave", "fl","id")
-        ,"/response=={'numFound':2,'start':0,'docs':[{'id':'3'},{'id':'4'}]}"
-    );
-
-    assertJQ(req(p, "q","{!join from=dept_s to=dept_id_s}title:MTS", "fl","id", "debugQuery","true")
-        ,"/response=={'numFound':3,'start':0,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}"
-    );
-    
     // expected outcome for a sub query matching dave joined against departments
-    final String davesDepartments = 
-      "/response=={'numFound':2,'start':0,'docs':[{'id':'10'},{'id':'13'}]}";
+    final String davesDepartments =
+        "/response=={'numFound':2,'start':0,'docs':[{'id':'10'},{'id':'13'}]}";
 
     // straight forward query
-    assertJQ(req(p, "q","{!join from=dept_s to=dept_id_s}name:dave",
-                 "fl","id"),
-             davesDepartments);
+    assertJQ(joinreq(p, buildJoinRequest("dept_ss_dv", "dept_id_ss_dv", "name:dave"), "fl","id"),
+        davesDepartments);
 
     // variable deref for sub-query parsing
-    assertJQ(req(p, "q","{!join from=dept_s to=dept_id_s v=$qq}",
-                 "qq","{!dismax}dave",
-                 "qf","name",
-                 "fl","id", 
-                 "debugQuery","true"),
-             davesDepartments);
+    assertJQ(joinreq(p, buildJoinRequest("dept_ss_dv", "dept_id_ss_dv", "$qq"), "qq","{!dismax}dave", "qf","name", "fl","id", "debugQuery","true"),
+        davesDepartments);
 
     // variable deref for sub-query parsing w/localparams
-    assertJQ(req(p, "q","{!join from=dept_s to=dept_id_s v=$qq}",
-                 "qq","{!dismax qf=name}dave",
-                 "fl","id", 
-                 "debugQuery","true"),
-             davesDepartments);
+    assertJQ(joinreq(p, buildJoinRequest("dept_ss_dv", "dept_id_ss_dv", "$qq"),
+        "qq","{!dismax qf=name}dave",
+        "fl","id",
+        "debugQuery","true"),
+        davesDepartments);
 
     // defType local param to control sub-query parsing
-    assertJQ(req(p, "q","{!join from=dept_s to=dept_id_s defType=dismax}dave",
-                 "qf","name",
-                 "fl","id", 
-                 "debugQuery","true"),
-             davesDepartments);
+    assertJQ(joinreq(p, buildJoinRequest("dept_ss_dv", "dept_id_ss_dv", "dave", "defType=dismax"),
+        "qf","name",
+        "fl","id",
+        "debugQuery","true"),
+        davesDepartments);
 
     // find people that develop stuff - but limit via filter query to a name of "john"
     // this tests filters being pushed down to queries (SOLR-3062)
-    assertJQ(req(p, "q","{!join from=dept_id_s to=dept_s}text:develop", "fl","id", "fq", "name:john")
-             ,"/response=={'numFound':1,'start':0,'docs':[{'id':'1'}]}"
-            );
+    assertJQ(joinreq(p, buildJoinRequest("dept_id_ss_dv", "dept_ss_dv", "text:develop"), "fl","id", "fq", "name:john")
+        ,"/response=={'numFound':1,'start':0,'docs':[{'id':'1'}]}"
+    );
 
+    assertJQ(joinreq(p, buildJoinRequest("dept_ss_dv", "dept_id_ss_dv", "title:MTS"), "fl","id", "debugQuery","true")
+        ,"/response=={'numFound':3,'start':0,'docs':[{'id':'10'},{'id':'12'},{'id':'13'}]}"
+    );
+  }
+
+  /*
+   * Test join behavior scenarios that are only supported when the join runs regularly (i.e. not as a postfilter)
+   */
+  @Test
+  public void testJoinNonPostfilterCompatible() throws Exception {
+    indexEmployeeDocs();
+
+    ModifiableSolrParams p = params("sort","id asc");
+
+    // Join debug statistics
+    assertJQ(req(p, "q","{!join from=dept_ss_dv to=dept_id_ss_dv}title:MTS", "fl","id", "debugQuery","true")
+        ,"/debug/join/{!join from=dept_ss_dv to=dept_id_ss_dv}title:MTS=={'_MATCH_':'fromSetSize,toSetSize', 'fromSetSize':2, 'toSetSize':3}"
+    );
+
+    // Empty/nonexistent "from" field
+    assertJQ(req(p, "q","{!join from=noexist_s to=dept_id_ss_dv}*:*", "fl","id")
+        ,"/response=={'numFound':0,'start':0,'docs':[]}"
+    );
+
+    // Empty/nonexistent "to" field
+    assertJQ(req(p, "q","{!join from=dept_ss_dv to=noexist_s}*:*", "fl","id")
+        ,"/response=={'numFound':0,'start':0,'docs':[]}"
+    );
+
+    // Self join on text field... return everyone with the same title as Dave
+    assertJQ(req(p, "q","{!join from=title to=title}name:dave", "fl","id")
+        ,"/response=={'numFound':2,'start':0,'docs':[{'id':'3'},{'id':'4'}]}"
+    );
+
+    // Self join on multivalued text field
+    assertJQ(req(p, "q","{!join from=title to=title}name:dave", "fl","id")
+        ,"/response=={'numFound':2,'start':0,'docs':[{'id':'3'},{'id':'4'}]}"
+    );
   }
 
 
@@ -288,4 +301,35 @@ public class TestJoin extends SolrTestCaseJ4 {
     return ids;
   }
 
+
+  private static String buildJoinRequest(String fromField, String toField, String fromQuery, String... otherLocalParams) {
+    final String baseJoinParams = "from=" + fromField + " to=" + toField + " v=" + fromQuery;
+    final String optionalParamsJoined = (otherLocalParams != null && otherLocalParams.length > 0) ? String.join(" ", otherLocalParams) : " ";
+    final String allProvidedParams = baseJoinParams + " " + optionalParamsJoined;
+
+    if (random().nextBoolean()) {
+      return "{!join " + allProvidedParams + " cost=101 cache=false}";
+    }
+    return "{!join " + allProvidedParams + " }";
+  }
+
+  /**
+   * Similar interface to {@link SolrTestCaseJ4#req(SolrParams, String...)}, but chooses whether the join runs as a
+   * normal query or postfilter
+   */
+  private static SolrQueryRequest joinreq(SolrParams params, String joinQuery, String... moreParams) {
+    ModifiableSolrParams mp = new ModifiableSolrParams(params);
+    if (joinQuery.contains("cost=101")) {
+      mp.set("q", "*:*");
+      mp.set("fq", joinQuery);
+    } else {
+      mp.set("q", joinQuery);
+    }
+
+    for (int i=0; i<moreParams.length; i+=2) {
+      mp.add(moreParams[i], moreParams[i+1]);
+    }
+    return new LocalSolrQueryRequest(h.getCore(), mp);
+  }
+
 }
diff --git a/solr/core/src/test/org/apache/solr/search/TestJoinQueryPerformance.java b/solr/core/src/test/org/apache/solr/search/TestJoinQueryPerformance.java
new file mode 100644
index 0000000..e172e04
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/search/TestJoinQueryPerformance.java
@@ -0,0 +1,235 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.search;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.UUID;
+
+import com.google.common.collect.Lists;
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrInputDocument;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+
+public class TestJoinQueryPerformance extends SolrTestCaseJ4 {
+  // Dictionary used to load String data
+  private static final String DICT_PATH = "/usr/share/dict/words";
+  private static final int NUM_DICT_TERMS = 235886;
+  private static final String[] LOADED_DICTIONARY = new String[NUM_DICT_TERMS];
+
+  // Performance run parameters: Indexing
+  private static final String FROM_COLLECTION_NAME = "user_acls";
+  private static final int NUM_FROM_DOCS = 5050; // 1 + 2 + 3 + 4 + ...  + 100
+  private static final String TO_COLLECTION_NAME = "products";
+  private static final int NUM_TO_DOCS = 500000;
+  private static final int PERMISSION_CARDINALITY = 50000; // 50K unique groups/roles/whatever
+  private static int BATCH_SIZE = 500;
+  private static int NUM_COMMITS = 500;
+  private static final int VAL_MAX = 1000;
+  private static final int USER_MAX = 100;
+
+  private static String COLLECTION_NAME= "foo";
+
+  /*
+   * As I start out here, I think I'll want a few different axes.
+   *  - "from" collection matches (with "to" matches held constant)
+   *  - "to" collection matches (with "from" matches held constant)
+   *
+   * So I think I should index a finite number of docs
+   */
+
+  @BeforeClass
+  public static void setUpCluster() throws Exception {
+    loadDictionary();
+    //loadCollectionData(DType.USER);
+    //loadCollectionData(DType.DATA);
+  }
+
+  private static void loadDictionary() throws Exception {
+    try (BufferedReader reader = new BufferedReader(new FileReader(DICT_PATH))) {
+      for (int i = 0; i < NUM_DICT_TERMS; i++) {
+        LOADED_DICTIONARY[i] = reader.readLine();
+      }
+    }
+  }
+
+  public enum DType {
+    USER(NUM_FROM_DOCS, FROM_COLLECTION_NAME) {
+      // id - unique string
+      // userid_s - username (user# from 1-100)...each user appears in # entries
+      // permissions_ss - set of 300 string permissions (cardinality 50K)
+      @Override
+      SolrInputDocument buildDoc() {
+        if (userRecordCounts[currentUser - 1] == currentUser) {
+          currentUser++;
+        } else {
+          userRecordCounts[currentUser -1]++;
+        }
+
+        final SolrInputDocument newDoc = new SolrInputDocument("id", UUID.randomUUID().toString());
+        final String userString = "user" + currentUser;
+        final String[] permissions = getAFewDictionaryWords(300, PERMISSION_CARDINALITY);
+
+        newDoc.addField("userid_s", userString);
+        newDoc.addField("permissions_ss", permissions);
+
+        return newDoc;
+      }
+    },
+    DATA(NUM_TO_DOCS, TO_COLLECTION_NAME) {
+      // id - unique string
+      // val_i - random int between 1-1000
+      // cost_d - random cost between 1-1000
+      // body_txt - random text string between 100 - 10000 words
+      // acl_ss - set of 100-3000 string permissions (cardinality 50K)
+      @Override
+      SolrInputDocument buildDoc() {
+        final SolrInputDocument newDoc = new SolrInputDocument("id", UUID.randomUUID().toString());
+        final int val = random().nextInt(1000) + 1;
+        final double cost = random().nextDouble() * 1000d;
+        final String body = String.join(" ", getAFewDictionaryWords(random().nextInt(9900) + 100));
+        final String[] acls = getAFewDictionaryWords(random().nextInt(2900) + 100, PERMISSION_CARDINALITY);
+
+        newDoc.addField("val_i", val);
+        newDoc.addField("cost_d", cost);
+        newDoc.addField("body_txt", body);
+        newDoc.addField("acl_ss", acls);
+
+        return newDoc;
+      }
+    };
+
+    private int numDocs;
+    private String collName;
+    private static int[] userRecordCounts = new int[100];
+    private static int currentUser = 1;
+
+    private DType(int numDocs, String collectionName) {
+      this.numDocs = numDocs;
+      this.collName = collectionName;
+    }
+
+    abstract SolrInputDocument buildDoc();
+  }
+
+  private static void loadCollectionData(DType type) throws Exception {
+    int numDocs = type.numDocs;
+    String collectionName = type.collName;
+    int numLoaded = 0;
+    try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
+      final int numBatches = numDocs / BATCH_SIZE + 1;
+      final int commitEveryBatches = NUM_COMMITS > 0 ? numBatches / NUM_COMMITS : Integer.MAX_VALUE;
+      int batchCount = 0;
+      while (numLoaded < numDocs) {
+        final int sizeOfBatch = numLoaded + BATCH_SIZE > numDocs ? numDocs - numLoaded : BATCH_SIZE;
+        final Collection<SolrInputDocument> batch = buildBatch(type, sizeOfBatch);
+        client.add(collectionName, batch);
+        batchCount++;
+        numLoaded+=sizeOfBatch;
+
+        if (batchCount == commitEveryBatches) {
+          client.commit(collectionName);
+          batchCount = 0;
+        }
+      }
+      client.commit(collectionName);
+    }
+
+  }
+
+  private static Collection<SolrInputDocument> buildBatch(DType type, int sizeOfBatch) {
+    final List<SolrInputDocument> batch = Lists.newArrayList();
+    for (int i = 0; i < sizeOfBatch; i++) {
+      batch.add(type.buildDoc());
+    }
+    return batch;
+  }
+
+  private static String[] getAFewDictionaryWords(int numWords) {
+    return getAFewDictionaryWords(numWords, NUM_DICT_TERMS);
+  }
+
+  private static String[] getAFewDictionaryWords(int numWords, int onlyFirstN) {
+    final String[] words = new String[numWords];
+    for (int i = 0; i < numWords; i++) {
+      words[i] = LOADED_DICTIONARY[random().nextInt(onlyFirstN)];
+    }
+
+    return words;
+  }
+
+
+  @Test
+  public void testJoinPerformanceAsMainQueryHitsIncrease() throws Exception {
+    final String joinQueryBase = "{!join fromIndex=" + FROM_COLLECTION_NAME + " from=permissions_ss to=acl_ss cache=false";
+    final String fromQuery = "userid_s:user25"; // The higher the user number, the more permissions he has attached to his name (1-100)
+    final String standardJoin = joinQueryBase + "}" + fromQuery;
+    final String noScoreJoin = joinQueryBase + " score=none}" + fromQuery;
+    final String postfilterJoin = joinQueryBase + " cost=102}" + fromQuery;
+
+    try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
+      for ( int i = 0; i < VAL_MAX; i+=20) {
+        final String mainQuery = "val_i:[1 TO " + (i+1) + "]";
+        final QueryResponse standardJoinRsp = client.query(TO_COLLECTION_NAME, new SolrQuery("q", mainQuery, "fq", standardJoin), SolrRequest.METHOD.POST);
+        final QueryResponse postfilterJoinRsp = client.query(TO_COLLECTION_NAME, new SolrQuery("q", mainQuery, "fq", postfilterJoin), SolrRequest.METHOD.POST);
+        final QueryResponse noScoreJoinRsp = client.query(TO_COLLECTION_NAME, new SolrQuery("q", mainQuery, "fq", noScoreJoin), SolrRequest.METHOD.POST);
+        final long numFound = postfilterJoinRsp.getResults().getNumFound();
+
+        System.out.println(i + "," + numFound + "," + standardJoinRsp.getQTime() + "," + noScoreJoinRsp.getQTime() + "," + postfilterJoinRsp.getQTime());
+      }
+    }
+  }
+
+  @Test
+  public void testJoinPerformanceAsFromQueryHitsIncrease() throws Exception {
+    final String mainQuery = "val_i:[1 TO 250]"; // Half the docs match the query (250K)
+
+    final String joinQueryBase = "{!join fromIndex=" + FROM_COLLECTION_NAME + " from=permissions_ss to=acl_ss cache=false";
+
+
+    try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
+      for ( int i = 1; i <= USER_MAX; i++) {
+        final String fromQuery = "userid_s:user" + i;
+        final String standardJoin = joinQueryBase + "}" + fromQuery;
+        final String noScoreJoin = joinQueryBase + " score=none}" + fromQuery;
+        final String postfilterJoin = joinQueryBase + " cost=102}" + fromQuery;
+
+        final QueryResponse standardJoinRsp = client.query(TO_COLLECTION_NAME, new SolrQuery("q", mainQuery, "fq", standardJoin), SolrRequest.METHOD.POST);
+        final QueryResponse postfilterJoinRsp = client.query(TO_COLLECTION_NAME, new SolrQuery("q", mainQuery, "fq", postfilterJoin), SolrRequest.METHOD.POST);
+        final QueryResponse noScoreJoinRsp = client.query(TO_COLLECTION_NAME, new SolrQuery("q", mainQuery, "fq", noScoreJoin), SolrRequest.METHOD.POST);
+        final long numFound = postfilterJoinRsp.getResults().getNumFound();
+
+        System.out.println(i + "," + numFound + "," + standardJoinRsp.getQTime() + "," + noScoreJoinRsp.getQTime() + "," + postfilterJoinRsp.getQTime());
+      }
+    }
+  }
+
+  private static String buildTermsQueryString(int numTerms) {
+    final String[] terms = getAFewDictionaryWords(numTerms);
+    return String.join(",", terms);
+  }
+}

