lucene-commits mailing list archives

From sim...@apache.org
Subject svn commit: r1143719 [18/20] - in /lucene/dev/branches/LUCENE2793: ./ dev-tools/eclipse/ dev-tools/idea/.idea/ dev-tools/idea/lucene/contrib/ dev-tools/idea/lucene/contrib/demo/ dev-tools/idea/lucene/contrib/highlighter/ dev-tools/idea/lucene/contrib/i...
Date Thu, 07 Jul 2011 09:04:29 GMT
Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/SolrIndexSearcher.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/SolrIndexSearcher.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/SolrIndexSearcher.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/SolrIndexSearcher.java Thu Jul  7 09:03:58 2011
@@ -493,7 +493,7 @@ public class SolrIndexSearcher extends I
     Terms terms = fields.terms(t.field());
     if (terms == null) return -1;
     BytesRef termBytes = t.bytes();
-    DocsEnum docs = terms.docs(MultiFields.getDeletedDocs(reader), termBytes, null);
+    DocsEnum docs = terms.docs(MultiFields.getLiveDocs(reader), termBytes, null);
     if (docs == null) return -1;
     int id = docs.nextDoc();
     return id == DocIdSetIterator.NO_MORE_DOCS ? -1 : id;
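
This hunk tracks Lucene trunk's rename of deleted-docs to live-docs, which also inverts the Bits semantics: a set bit now marks a live document, and a null return means the reader has no deletions at all. A minimal sketch of the new contract, assuming an arbitrary reader and docid:

    Bits liveDocs = MultiFields.getLiveDocs(reader);   // null => nothing deleted
    boolean isLive = liveDocs == null || liveDocs.get(docid);

The same substitution recurs throughout this commit (DocsEnumState, getDocListNC, FileFloatSource, and so on).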
@@ -542,6 +542,17 @@ public class SolrIndexSearcher extends I
    * The DocSet returned should <b>not</b> be modified.
    */
   public DocSet getDocSet(Query query) throws IOException {
+    if (query instanceof ExtendedQuery) {
+      ExtendedQuery eq = (ExtendedQuery)query;
+      if (!eq.getCache()) {
+        if (query instanceof WrappedQuery) {
+          query = ((WrappedQuery)query).getWrappedQuery();
+        }
+        query = QueryUtils.makeQueryable(query);
+        return getDocSetNC(query, null);
+      }
+    }
+
     // Get the absolute value (positive version) of this query.  If we
     // get back the same reference, we know it's positive.
     Query absQ = QueryUtils.getAbs(query);
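
The early exit added above honors ExtendedQuery.getCache(): a query flagged non-cacheable is unwrapped, converted with QueryUtils.makeQueryable(), and answered by getDocSetNC() without ever touching the filterCache. A hedged usage sketch, assuming a searcher reference and the setCache setter that WrappedQuery exposes in this branch:

    WrappedQuery wq = new WrappedQuery(new TermQuery(new Term("type", "book")));
    wq.setCache(false);                       // opt out of the filterCache
    DocSet matches = searcher.getDocSet(wq);  // takes the uncached path above
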
@@ -574,12 +585,29 @@ public class SolrIndexSearcher extends I
       if (answer!=null) return answer;
     }
     answer = getDocSetNC(q,null);
-    if (filterCache != null) filterCache.put(q,answer);
+    if (filterCache != null) filterCache.put(
+        q,answer);
     return answer;
   }
 
   private static Query matchAllDocsQuery = new MatchAllDocsQuery();
 
+
+  static class ProcessedFilter {
+    DocSet answer;  // the answer, if non-null
+    Filter filter;
+    DelegatingCollector postFilter;
+  }
+
+
+  private static Comparator<Query> sortByCost = new Comparator<Query>() {
+    @Override
+    public int compare(Query q1, Query q2) {
+      return ((ExtendedQuery)q1).getCost() - ((ExtendedQuery)q2).getCost();
+    }
+  };
+
+
   /**
    * Returns the set of document ids matching all queries.
    * This method is cache-aware and attempts to retrieve the answer from the cache if possible.
@@ -589,123 +617,160 @@ public class SolrIndexSearcher extends I
    * The DocSet returned should <b>not</b> be modified.
    */
   public DocSet getDocSet(List<Query> queries) throws IOException {
-    if (queries==null) return null;
-    if (queries.size()==1) return getDocSet(queries.get(0));
-    DocSet answer=null;
+    ProcessedFilter pf = getProcessedFilter(null, queries);
+    if (pf.answer != null) return pf.answer;
 
-    boolean[] neg = new boolean[queries.size()];
-    DocSet[] sets = new DocSet[queries.size()];
 
-    int smallestIndex = -1;
-    int smallestCount = Integer.MAX_VALUE;
-    for (int i=0; i<sets.length; i++) {
-      Query q = queries.get(i);
-      Query posQuery = QueryUtils.getAbs(q);
-      sets[i] = getPositiveDocSet(posQuery);
-      // Negative query if absolute value different from original
-      if (q==posQuery) {
-        neg[i] = false;
-        // keep track of the smallest positive set.
-        // This optimization is only worth it if size() is cached, which it would
-        // be if we don't do any set operations.
-        int sz = sets[i].size();
-        if (sz<smallestCount) {
-          smallestCount=sz;
-          smallestIndex=i;
-          answer = sets[i];
-        }
-      } else {
-        neg[i] = true;
-      }
+    DocSetCollector setCollector = new DocSetCollector(maxDoc()>>6, maxDoc());
+    Collector collector = setCollector;
+    if (pf.postFilter != null) {
+      pf.postFilter.setLastDelegate(collector);
+      collector = pf.postFilter;
     }
 
-    // if no positive queries, start off with all docs
-    if (answer==null) answer = getPositiveDocSet(matchAllDocsQuery);
+    final AtomicReaderContext[] leaves = leafContexts;
 
-    // do negative queries first to shrink set size
-    for (int i=0; i<sets.length; i++) {
-      if (neg[i]) answer = answer.andNot(sets[i]);
-    }
 
-    for (int i=0; i<sets.length; i++) {
-      if (!neg[i] && i!=smallestIndex) answer = answer.intersection(sets[i]);
-    }
-
-    return answer;
-  }
+    for (int i=0; i<leaves.length; i++) {
+      final AtomicReaderContext leaf = leaves[i];
+      final IndexReader reader = leaf.reader;
+      DocIdSet idSet = null;
+      if (pf.filter != null) {
+        idSet = pf.filter.getDocIdSet(leaf);
+        if (idSet == null) continue;
+      }
+      DocIdSetIterator idIter = null;
+      if (idSet != null) {
+        idIter = idSet.iterator();
+        if (idIter == null) continue;
+      }
 
-  Filter getFilter(Query q) throws IOException {
-    if (q == null) return null;
-    // TODO: support pure negative queries?
+      collector.setNextReader(leaf);
+      Bits liveDocs = reader.getLiveDocs();
+      int max = reader.maxDoc();
 
-    // if (q instanceof) {
-    // }
+      if (idIter == null) {
+        for (int docid = 0; docid<max; docid++) {
+          if (liveDocs != null && !liveDocs.get(docid)) continue;
+          collector.collect(docid);
+        }
+      } else {
+        for (int docid = -1; (docid = idIter.advance(docid+1)) < max; ) {
+          collector.collect(docid);
+        }
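+        // NOTE: the advance(docid+1) idiom above needs no explicit sentinel
+        // check: DocIdSetIterator.NO_MORE_DOCS is Integer.MAX_VALUE, which
+        // always fails the "< max" test and ends the loop.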
+      }
+    }
 
-    return getDocSet(q).getTopFilter();
+    return setCollector.getDocSet();
   }
 
 
-  Filter getFilter(DocSet setFilter, List<Query> queries) throws IOException {
-    Filter answer = setFilter == null ? null : setFilter.getTopFilter();
-
-    if (queries == null || queries.size() == 0) {
-      return answer;
+  public ProcessedFilter getProcessedFilter(DocSet setFilter, List<Query> queries) throws IOException {
+    ProcessedFilter pf = new ProcessedFilter();
+    if (queries==null || queries.size()==0) {
+      if (setFilter != null)
+        pf.filter = setFilter.getTopFilter();
+      return pf;
     }
 
-    if (answer == null && queries.size() == 1) {
-      return getFilter(queries.get(0));  
-    }
+    DocSet answer=null;
 
+    boolean[] neg = new boolean[queries.size()+1];
+    DocSet[] sets = new DocSet[queries.size()+1];
+    List<Query> notCached = null;
+    List<Query> postFilters = null;
 
-    DocSet finalSet=null;
+    int end = 0;
+    int smallestIndex = -1;
 
-    int nDocSets =0;
-    boolean[] neg = new boolean[queries.size()];
-    DocSet[] sets = new DocSet[queries.size()];
-    Query[] nocache = new Query[queries.size()];
+    if (setFilter != null) {
+      answer = sets[end++] = setFilter;
+      smallestIndex = end;
+    }
 
-    int smallestIndex = -1;
     int smallestCount = Integer.MAX_VALUE;
     for (Query q : queries) {
-      // if (q instanceof)
-
+      if (q instanceof ExtendedQuery) {
+        ExtendedQuery eq = (ExtendedQuery)q;
+        if (!eq.getCache()) {
+          if (eq.getCost() >= 100 && eq instanceof PostFilter) {
+            if (postFilters == null) postFilters = new ArrayList<Query>(sets.length-end);
+            postFilters.add(q);
+          } else {
+            if (notCached == null) notCached = new ArrayList<Query>(sets.length-end);
+            notCached.add(q);
+          }
+          continue;
+        }
+      }
 
       Query posQuery = QueryUtils.getAbs(q);
-      sets[nDocSets] = getPositiveDocSet(posQuery);
+      sets[end] = getPositiveDocSet(posQuery);
       // Negative query if absolute value different from original
       if (q==posQuery) {
-        neg[nDocSets] = false;
+        neg[end] = false;
         // keep track of the smallest positive set.
         // This optimization is only worth it if size() is cached, which it would
         // be if we don't do any set operations.
-        int sz = sets[nDocSets].size();
+        int sz = sets[end].size();
         if (sz<smallestCount) {
           smallestCount=sz;
-          smallestIndex=nDocSets;
-          finalSet = sets[nDocSets];
+          smallestIndex=end;
+          answer = sets[end];
         }
       } else {
-        neg[nDocSets] = true;
+        neg[end] = true;
       }
 
-      nDocSets++;
+      end++;
     }
 
-    // if no positive queries, start off with all docs
-    if (finalSet==null) finalSet = getPositiveDocSet(matchAllDocsQuery);
+    // Are all of our normal cached filters negative?
+    if (end > 0 && answer==null) {
+      answer = getPositiveDocSet(matchAllDocsQuery);
+    }
 
     // do negative queries first to shrink set size
-    for (int i=0; i<sets.length; i++) {
-      if (neg[i]) finalSet = finalSet.andNot(sets[i]);
+    for (int i=0; i<end; i++) {
+      if (neg[i]) answer = answer.andNot(sets[i]);
     }
 
-    for (int i=0; i<sets.length; i++) {
-      if (!neg[i] && i!=smallestIndex) finalSet = finalSet.intersection(sets[i]);
+    for (int i=0; i<end; i++) {
+      if (!neg[i] && i!=smallestIndex) answer = answer.intersection(sets[i]);
     }
 
-    return finalSet.getTopFilter();
+    if (notCached != null) {
+      Collections.sort(notCached, sortByCost);
+      List<Weight> weights = new ArrayList<Weight>(notCached.size());
+      for (Query q : notCached) {
+        Query qq = QueryUtils.makeQueryable(q);
+        weights.add(createNormalizedWeight(qq));
+      }
+      pf.filter = new FilterImpl(answer, weights);
+    } else {
+      if (postFilters == null) {
+        if (answer == null) {
+          answer = getPositiveDocSet(matchAllDocsQuery);
+        }
+        // "answer" is the only part of the filter, so set it.
+        pf.answer = answer;
+      }
+
+      if (answer != null) {
+        pf.filter = answer.getTopFilter();
+      }
+    }
 
+    if (postFilters != null) {
+      Collections.sort(postFilters, sortByCost);
+      for (int i=postFilters.size()-1; i>=0; i--) {
+        DelegatingCollector prev = pf.postFilter;
+        pf.postFilter = ((PostFilter)postFilters.get(i)).getFilterCollector(this);
+        if (prev != null) pf.postFilter.setDelegate(prev);
+      }
+    }
 
+    return pf;
   }
 
   /** lucene.internal */
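
getProcessedFilter() now sorts filter clauses into three buckets: cached clauses resolved to DocSets as before; non-cached clauses with cost < 100 turned into Weights and intersected lazily by FilterImpl (defined at the bottom of this file); and PostFilter clauses with cost >= 100 chained as DelegatingCollectors that run only over documents surviving everything else, cheapest first. A hedged request-level sketch (values are illustrative) using frange, which implements PostFilter as part of this work:

    fq={!frange l=5 cache=false cost=200}log(popularity)
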
@@ -715,7 +780,7 @@ public class SolrIndexSearcher extends I
     TermQuery key = null;
 
     if (useCache) {
-      key = new TermQuery(new Term(deState.fieldName, new BytesRef(deState.termsEnum.term()), false));
+      key = new TermQuery(new Term(deState.fieldName, new BytesRef(deState.termsEnum.term())));
       DocSet result = filterCache.get(key);
       if (result != null) return result;
     }
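
The boolean dropped from the Term constructor tracks trunk's removal of field-name interning (the StringHelper.intern import disappears from FileFloatSource below for the same reason). A minimal sketch of the remaining two-argument form, with an assumed field and value:

    TermQuery key = new TermQuery(new Term("id", new BytesRef("doc42")));
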
@@ -730,7 +795,7 @@ public class SolrIndexSearcher extends I
     int bitsSet = 0;
     OpenBitSet obs = null;
 
-    DocsEnum docsEnum = deState.termsEnum.docs(deState.deletedDocs, deState.docsEnum);
+    DocsEnum docsEnum = deState.termsEnum.docs(deState.liveDocs, deState.docsEnum);
     if (deState.docsEnum == null) {
       deState.docsEnum = docsEnum;
     }
@@ -819,8 +884,8 @@ public class SolrIndexSearcher extends I
           Terms terms = fields.terms(t.field());
           BytesRef termBytes = t.bytes();
           
-          Bits skipDocs = reader.getDeletedDocs();
-          DocsEnum docsEnum = terms==null ? null : terms.docs(skipDocs, termBytes, null);
+          Bits liveDocs = reader.getLiveDocs();
+          DocsEnum docsEnum = terms==null ? null : terms.docs(liveDocs, termBytes, null);
 
           if (docsEnum != null) {
             DocsEnum.BulkReadResult readResult = docsEnum.getBulkResult();
@@ -861,6 +926,17 @@ public class SolrIndexSearcher extends I
   public DocSet getDocSet(Query query, DocSet filter) throws IOException {
     if (filter==null) return getDocSet(query);
 
+    if (query instanceof ExtendedQuery) {
+      ExtendedQuery eq = (ExtendedQuery)query;
+      if (!eq.getCache()) {
+        if (query instanceof WrappedQuery) {
+          query = ((WrappedQuery)query).getWrappedQuery();
+        }
+        query = QueryUtils.makeQueryable(query);
+        return getDocSetNC(query, filter);
+      }
+    }
+
     // Negative query if absolute value different from original
     Query absQ = QueryUtils.getAbs(query);
     boolean positive = absQ==query;
@@ -942,7 +1018,8 @@ public class SolrIndexSearcher extends I
   static final int NO_CHECK_QCACHE       = 0x80000000;
   static final int GET_DOCSET            = 0x40000000;
   static final int NO_CHECK_FILTERCACHE  = 0x20000000;
-  
+  static final int NO_SET_QCACHE         = 0x10000000;
+
   public static final int GET_DOCLIST           =        0x02; // get the documents actually returned in a response
   public static final int GET_SCORES             =       0x01;
 
@@ -959,21 +1036,33 @@ public class SolrIndexSearcher extends I
     // check for overflow, and check for # docs in index
     if (maxDocRequested < 0 || maxDocRequested > maxDoc()) maxDocRequested = maxDoc();
     int supersetMaxDoc= maxDocRequested;
-    DocList superset;
+    DocList superset = null;
+
+    int flags = cmd.getFlags();
+    Query q = cmd.getQuery();
+    if (q instanceof ExtendedQuery) {
+      ExtendedQuery eq = (ExtendedQuery)q;
+      if (!eq.getCache()) {
+        flags |= (NO_CHECK_QCACHE | NO_SET_QCACHE | NO_CHECK_FILTERCACHE);
+      }
+    }
+
 
     // we can try and look up the complete query in the cache.
     // we can't do that if filter!=null though (we don't want to
     // do hashCode() and equals() for a big DocSet).
-    if (queryResultCache != null && cmd.getFilter()==null) {
+    if (queryResultCache != null && cmd.getFilter()==null
+        && (flags & (NO_CHECK_QCACHE|NO_SET_QCACHE)) != ((NO_CHECK_QCACHE|NO_SET_QCACHE)))
+    {
         // all of the current flags can be reused during warming,
         // so set all of them on the cache key.
-        key = new QueryResultKey(cmd.getQuery(), cmd.getFilterList(), cmd.getSort(), cmd.getFlags());
-        if ((cmd.getFlags() & NO_CHECK_QCACHE)==0) {
+        key = new QueryResultKey(q, cmd.getFilterList(), cmd.getSort(), flags);
+        if ((flags & NO_CHECK_QCACHE)==0) {
           superset = queryResultCache.get(key);
 
           if (superset != null) {
             // check that the cache entry has scores recorded if we need them
-            if ((cmd.getFlags() & GET_SCORES)==0 || superset.hasScores()) {
+            if ((flags & GET_SCORES)==0 || superset.hasScores()) {
               // NOTE: subset() returns null if the DocList has fewer docs than
               // requested
               out.docList = superset.subset(cmd.getOffset(),cmd.getLen());
@@ -983,12 +1072,11 @@ public class SolrIndexSearcher extends I
             // found the docList in the cache... now check if we need the docset too.
             // OPT: possible future optimization - if the doclist contains all the matches,
             // use it to make the docset instead of rerunning the query.
-            if (out.docSet==null && ((cmd.getFlags() & GET_DOCSET)!=0) ) {
+            if (out.docSet==null && ((flags & GET_DOCSET)!=0) ) {
               if (cmd.getFilterList()==null) {
                 out.docSet = getDocSet(cmd.getQuery());
               } else {
-                List<Query> newList = new ArrayList<Query>(cmd.getFilterList()
-.size()+1);
+                List<Query> newList = new ArrayList<Query>(cmd.getFilterList().size()+1);
                 newList.add(cmd.getQuery());
                 newList.addAll(cmd.getFilterList());
                 out.docSet = getDocSet(newList);
@@ -998,9 +1086,10 @@ public class SolrIndexSearcher extends I
           }
         }
 
-        // If we are going to generate the result, bump up to the
-        // next resultWindowSize for better caching.
+      // If we are going to generate the result, bump up to the
+      // next resultWindowSize for better caching.
 
+      if ((flags & NO_SET_QCACHE) == 0) {
         // handle 0 special case as well as avoid idiv in the common case.
         if (maxDocRequested < queryResultWindowSize) {
           supersetMaxDoc=queryResultWindowSize;
@@ -1008,6 +1097,9 @@ public class SolrIndexSearcher extends I
           supersetMaxDoc = ((maxDocRequested -1)/queryResultWindowSize + 1)*queryResultWindowSize;
           if (supersetMaxDoc < 0) supersetMaxDoc=maxDocRequested;
         }
+      } else {
+        key = null;  // we won't be caching the result
+      }
     }
 
 
@@ -1020,11 +1112,11 @@ public class SolrIndexSearcher extends I
 
     // check if we should try and use the filter cache
     boolean useFilterCache=false;
-    if ((cmd.getFlags() & (GET_SCORES|NO_CHECK_FILTERCACHE))==0 && useFilterForSortedQuery && cmd.getSort() != null && filterCache != null) {
+    if ((flags & (GET_SCORES|NO_CHECK_FILTERCACHE))==0 && useFilterForSortedQuery && cmd.getSort() != null && filterCache != null) {
       useFilterCache=true;
       SortField[] sfields = cmd.getSort().getSort();
       for (SortField sf : sfields) {
-        if (sf.getType() == SortField.SCORE) {
+        if (sf.getType() == SortField.Type.SCORE) {
           useFilterCache=false;
           break;
         }
@@ -1049,7 +1141,7 @@ public class SolrIndexSearcher extends I
     } else {
       // do it the normal way...
       cmd.setSupersetMaxDoc(supersetMaxDoc);
-      if ((cmd.getFlags() & GET_DOCSET)!=0) {
+      if ((flags & GET_DOCSET)!=0) {
         // this currently conflates returning the docset for the base query vs
         // the base query and all filters.
         DocSet qDocSet = getDocListAndSetNC(qr,cmd);
@@ -1059,8 +1151,10 @@ public class SolrIndexSearcher extends I
         getDocListNC(qr,cmd);
         //Parameters: cmd.getQuery(),theFilt,cmd.getSort(),0,supersetMaxDoc,cmd.getFlags(),cmd.getTimeAllowed(),responseHeader);
       }
-      superset = out.docList;
-      out.docList = superset.subset(cmd.getOffset(),cmd.getLen());
+      if (key != null) {
+        superset = out.docList;
+        out.docList = superset.subset(cmd.getOffset(),cmd.getLen());
+      }
     }
 
     // lastly, put the superset in the cache if the size is less than or equal
@@ -1073,9 +1167,6 @@ public class SolrIndexSearcher extends I
 
 
   private void getDocListNC(QueryResult qr,QueryCommand cmd) throws IOException {
-    //Parameters: cmd.getQuery(),theFilt,cmd.getSort(),0,supersetMaxDoc,cmd.getFlags(),cmd.getTimeAllowed(),responseHeader);
-    //Query query, DocSet filter, Sort lsort, int offset, int len, int flags, long timeAllowed, NamedList<Object> responseHeader
-    DocSet filter = cmd.getFilter()!=null ? cmd.getFilter() : getDocSet(cmd.getFilterList());
     final long timeAllowed = cmd.getTimeAllowed();
     int len = cmd.getSupersetMaxDoc();
     int last = len;
@@ -1091,7 +1182,8 @@ public class SolrIndexSearcher extends I
 
     Query query = QueryUtils.makeQueryable(cmd.getQuery());
 
-    final Filter luceneFilter = filter==null ? null : filter.getTopFilter();
+    ProcessedFilter pf = getProcessedFilter(cmd.getFilter(), cmd.getFilterList());
+    final Filter luceneFilter = pf.filter;
 
     // handle zero case...
     if (lastDocRequested<=0) {
@@ -1143,6 +1235,11 @@ public class SolrIndexSearcher extends I
       if( timeAllowed > 0 ) {
         collector = new TimeLimitingCollector(collector, timeAllowed);
       }
+      if (pf.postFilter != null) {
+        pf.postFilter.setLastDelegate(collector);
+        collector = pf.postFilter;
+      }
+
       try {
         super.search(query, luceneFilter, collector);
       }
@@ -1167,6 +1264,10 @@ public class SolrIndexSearcher extends I
       if( timeAllowed > 0 ) {
         collector = new TimeLimitingCollector(collector, timeAllowed);
       }
+      if (pf.postFilter != null) {
+        pf.postFilter.setLastDelegate(collector);
+        collector = pf.postFilter;
+      }
       try {
         super.search(query, luceneFilter, collector);
       }
@@ -1199,7 +1300,6 @@ public class SolrIndexSearcher extends I
   // be cached if desired.
   private DocSet getDocListAndSetNC(QueryResult qr,QueryCommand cmd) throws IOException {
     int len = cmd.getSupersetMaxDoc();
-    DocSet filter = cmd.getFilter()!=null ? cmd.getFilter() : getDocSet(cmd.getFilterList());
     int last = len;
     if (last < 0 || last > maxDoc()) last=maxDoc();
     final int lastDocRequested = last;
@@ -1214,11 +1314,12 @@ public class SolrIndexSearcher extends I
     int maxDoc = maxDoc();
     int smallSetSize = maxDoc>>6;
 
+    ProcessedFilter pf = getProcessedFilter(cmd.getFilter(), cmd.getFilterList());
+    final Filter luceneFilter = pf.filter;
+
     Query query = QueryUtils.makeQueryable(cmd.getQuery());
     final long timeAllowed = cmd.getTimeAllowed();
 
-    final Filter luceneFilter = filter==null ? null : filter.getTopFilter();
-
     // handle zero case...
     if (lastDocRequested<=0) {
       final float[] topscore = new float[] { Float.NEGATIVE_INFINITY };
@@ -1253,6 +1354,11 @@ public class SolrIndexSearcher extends I
        if( timeAllowed > 0 ) {
          collector = new TimeLimitingCollector(collector, timeAllowed);
        }
+      if (pf.postFilter != null) {
+        pf.postFilter.setLastDelegate(collector);
+        collector = pf.postFilter;
+      }
+
        try {
          super.search(query, luceneFilter, collector);
        }
@@ -1284,6 +1390,10 @@ public class SolrIndexSearcher extends I
       if( timeAllowed > 0 ) {
         collector = new TimeLimitingCollector(collector, timeAllowed );
       }
+      if (pf.postFilter != null) {
+        pf.postFilter.setLastDelegate(collector);
+        collector = pf.postFilter;
+      }
       try {
         super.search(query, luceneFilter, collector);
       }
@@ -1320,7 +1430,7 @@ public class SolrIndexSearcher extends I
 
     // TODO: currently we don't generate the DocSet for the base query,
     // but the QueryDocSet == CompleteDocSet if filter==null.
-    return filter==null ? qr.getDocSet() : null;
+    return pf.filter==null && pf.postFilter==null ? qr.getDocSet() : null;
   }
 
 
@@ -1617,7 +1727,7 @@ public class SolrIndexSearcher extends I
   public static class DocsEnumState {
     public String fieldName;  // currently interned for as long as lucene requires it
     public TermsEnum termsEnum;
-    public Bits deletedDocs;
+    public Bits liveDocs;
     public DocsEnum docsEnum;
 
     public int minSetSizeCached;
@@ -1733,6 +1843,11 @@ public class SolrIndexSearcher extends I
     return openTime;
   }
 
+  @Override
+  public Explanation explain(Query query, int doc) throws IOException {
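+    // unwrap Solr-level wrappers (e.g. WrappedQuery) so Lucene's explain()
+    // sees a plain Query rather than the ExtendedQuery shell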
+    return super.explain(QueryUtils.makeQueryable(query), doc);
+  }
+
   /////////////////////////////////////////////////////////////////////
   // SolrInfoMBean stuff: Statistics and Module Info
   /////////////////////////////////////////////////////////////////////
@@ -1933,3 +2048,133 @@ public class SolrIndexSearcher extends I
 }
 
 
+class FilterImpl extends Filter {
+  final DocSet filter;
+  final Filter topFilter;
+  final List<Weight> weights;
+
+  public FilterImpl(DocSet filter, List<Weight> weights) {
+    this.filter = filter;
+    this.weights = weights;
+    this.topFilter = filter == null ? null : filter.getTopFilter();
+  }
+
+  @Override
+  public DocIdSet getDocIdSet(AtomicReaderContext context) throws IOException {
+    DocIdSet sub = topFilter == null ? null : topFilter.getDocIdSet(context);
+    if (weights.size() == 0) return sub;
+    return new FilterSet(sub, context);
+  }
+
+  private class FilterSet extends DocIdSet {
+    DocIdSet docIdSet;
+    AtomicReaderContext context;
+
+    public FilterSet(DocIdSet docIdSet, AtomicReaderContext context) {
+      this.docIdSet = docIdSet;
+      this.context = context;
+    }
+
+    @Override
+    public DocIdSetIterator iterator() throws IOException {
+      List<DocIdSetIterator> iterators = new ArrayList<DocIdSetIterator>(weights.size()+1);
+      if (docIdSet != null) {
+        DocIdSetIterator iter = docIdSet.iterator();
+        if (iter == null) return null;
+        iterators.add(iter);
+      }
+      for (Weight w : weights) {
+        Scorer scorer = w.scorer(context, Weight.ScorerContext.def());
+        if (scorer == null) return null;
+        iterators.add(scorer);
+      }
+      if (iterators.size()==0) return null;
+      if (iterators.size()==1) return iterators.get(0);
+      if (iterators.size()==2) return new DualFilterIterator(iterators.get(0), iterators.get(1));
+      return new FilterIterator(iterators.toArray(new DocIdSetIterator[iterators.size()]));
+    }
+  }
+
+  private static class FilterIterator extends DocIdSetIterator {
+    final DocIdSetIterator[] iterators;
+    final DocIdSetIterator first;
+
+    public FilterIterator(DocIdSetIterator[] iterators) {
+      this.iterators = iterators;
+      this.first = iterators[0];
+    }
+
+    @Override
+    public int docID() {
+      return first.docID();
+    }
+
+    private int doNext(int doc) throws IOException {
+      int which=0;  // index of the iterator with the highest id
+      int i=1;
+      outer: for(;;) {
+        for (; i<iterators.length; i++) {
+          if (i == which) continue;
+          DocIdSetIterator iter = iterators[i];
+          int next = iter.advance(doc);
+          if (next != doc) {
+            doc = next;
+            which = i;
+            i = 0;
+            continue outer;
+          }
+        }
+        return doc;
+      }
+    }
+
+
+    @Override
+    public int nextDoc() throws IOException {
+      return doNext(first.nextDoc());
+    }
+
+    @Override
+    public int advance(int target) throws IOException {
+      return doNext(first.advance(target));
+    }
+  }
+
+  private static class DualFilterIterator extends DocIdSetIterator {
+    final DocIdSetIterator a;
+    final DocIdSetIterator b;
+
+    public DualFilterIterator(DocIdSetIterator a, DocIdSetIterator b) {
+      this.a = a;
+      this.b = b;
+    }
+
+    @Override
+    public int docID() {
+      return a.docID();
+    }
+
+    @Override
+    public int nextDoc() throws IOException {
+      int doc = a.nextDoc();
+      for(;;) {
+        int other = b.advance(doc);
+        if (other == doc) return doc;
+        doc = a.advance(other);
+        if (other == doc) return doc;
+      }
+    }
+
+    @Override
+    public int advance(int target) throws IOException {
+      int doc = a.advance(target);
+      for(;;) {
+        int other = b.advance(doc);
+        if (other == doc) return doc;
+        doc = a.advance(other);
+        if (other == doc) return doc;
+      }
+    }
+  }
+
+}
\ No newline at end of file
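
FilterImpl closes the loop on the non-cached (cost < 100) clauses: each one becomes a Weight whose Scorer is just another DocIdSetIterator, and the iterators are intersected by leapfrogging. DualFilterIterator is the two-way case: each side advances to the other's candidate until both land on the same document. For example, with iterators over {2, 5, 9} and {3, 5, 8, 9}, nextDoc() advances 2 -> 3 -> 5 and reports the match at 5, then 9 -> 9 for the final match. FilterIterator generalizes this to n iterators, restarting the scan whenever any iterator moves past the current candidate.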

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/SortSpec.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/SortSpec.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/SortSpec.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/SortSpec.java Thu Jul  7 09:03:58 2011
@@ -48,7 +48,7 @@ public class SortSpec 
   public static boolean includesScore(Sort sort) {
     if (sort==null) return true;
     for (SortField sf : sort.getSort()) {
-      if (sf.getType() == SortField.SCORE) return true;
+      if (sf.getType() == SortField.Type.SCORE) return true;
     }
     return false;
   }
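
SortField's untyped int constants become the SortField.Type enum on trunk; the same substitution appears in SolrIndexSearcher above and Sorting.java below. For score, the enum form matches how Lucene defines SortField.FIELD_SCORE:

    SortField byScore = new SortField(null, SortField.Type.SCORE);
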

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/Sorting.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/Sorting.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/Sorting.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/Sorting.java Thu Jul  7 09:03:58 2011
@@ -42,12 +42,12 @@ public class Sorting {
   public static SortField getStringSortField(String fieldName, boolean reverse, boolean nullLast, boolean nullFirst) {
     if (nullLast) {
       if (!reverse) return new SortField(fieldName, nullStringLastComparatorSource);
-      else return new SortField(fieldName, SortField.STRING, true);
+      else return new SortField(fieldName, SortField.Type.STRING, true);
     } else if (nullFirst) {
       if (reverse) return new SortField(fieldName, nullStringLastComparatorSource, true);
-      else return new SortField(fieldName, SortField.STRING, false);
+      else return new SortField(fieldName, SortField.Type.STRING, false);
     } else {
-      return new SortField(fieldName, SortField.STRING, reverse);
+      return new SortField(fieldName, SortField.Type.STRING, reverse);
     }
   }
 

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/SpatialFilterQParserPlugin.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/SpatialFilterQParserPlugin.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/SpatialFilterQParserPlugin.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/SpatialFilterQParserPlugin.java Thu Jul  7 09:03:58 2011
@@ -35,7 +35,7 @@ import org.apache.solr.request.SolrQuery
  * <ul>
  * <li>sfield - The field to filter on. Required.</li>
  * <li>pt - The point to use as a reference.  Must match the dimension of the field. Required.</li>
- * <li>d - The distance in km.  Requited.</li>
+ * <li>d - The distance in km.  Required.</li>
  * </ul>
  * The distance measure used currently depends on the FieldType.  LatLonType defaults to using haversine, PointType defaults to Euclidean (2-norm).
  *

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/ValueSourceParser.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/ValueSourceParser.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/ValueSourceParser.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/ValueSourceParser.java Thu Jul  7 09:03:58 2011
@@ -18,6 +18,13 @@ package org.apache.solr.search;
 
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.queries.function.BoostedQuery;
+import org.apache.lucene.queries.function.DocValues;
+import org.apache.lucene.queries.function.ValueSource;
+import org.apache.lucene.queries.function.docvalues.BoolDocValues;
+import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
+import org.apache.lucene.queries.function.docvalues.LongDocValues;
+import org.apache.lucene.queries.function.valuesource.*;
 import org.apache.lucene.queryParser.ParseException;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.IndexSearcher;
@@ -32,7 +39,6 @@ import org.apache.lucene.util.UnicodeUti
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.schema.*;
-import org.apache.solr.search.function.*;
 
 import org.apache.solr.search.function.distance.*;
 import org.apache.solr.util.plugin.NamedListInitializedPlugin;
@@ -534,6 +540,24 @@ public abstract class ValueSourceParser 
       }
     });
 
+    addParser("totaltermfreq", new ValueSourceParser() {
+      @Override
+      public ValueSource parse(FunctionQParser fp) throws ParseException {
+        TInfo tinfo = parseTerm(fp);
+        return new TotalTermFreqValueSource(tinfo.field, tinfo.val, tinfo.indexedField, tinfo.indexedBytes);
+      }
+    });
+    alias("totaltermfreq","ttf");
+
+    addParser("sumtotaltermfreq", new ValueSourceParser() {
+      @Override
+      public ValueSource parse(FunctionQParser fp) throws ParseException {
+        String field = fp.parseArg();
+        return new SumTotalTermFreqValueSource(field);
+      }
+    });
+    alias("sumtotaltermfreq","sttf");
+
     addParser("idf", new ValueSourceParser() {
       @Override
       public ValueSource parse(FunctionQParser fp) throws ParseException {
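
The two new parsers expose Lucene's total-term-frequency statistics as function queries: totaltermfreq(field,term) (alias ttf) is the number of occurrences of the term across the whole index, and sumtotaltermfreq(field) (alias sttf) sums that statistic over all terms in the field. A hedged request sketch with illustrative field and term:

    q={!func}ttf(text,'solr')
    sort=sttf(text) desc
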

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/FileFloatSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/FileFloatSource.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/FileFloatSource.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/FileFloatSource.java Thu Jul  7 09:03:58 2011
@@ -33,9 +33,11 @@ import org.apache.lucene.index.MultiFiel
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
 import org.apache.lucene.index.IndexReader.ReaderContext;
+import org.apache.lucene.queries.function.DocValues;
+import org.apache.lucene.queries.function.ValueSource;
+import org.apache.lucene.queries.function.docvalues.FloatDocValues;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.ReaderUtil;
-import org.apache.lucene.util.StringHelper;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.handler.RequestHandlerUtils;
@@ -75,9 +77,8 @@ public class FileFloatSource extends Val
 
   @Override
   public DocValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
-    int offset = 0;
+    final int off = readerContext.docBase;
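+    // the cached float array is indexed by top-level docid, so per-segment
+    // lookups are offset by the segment's docBase (previously hardcoded to 0)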
     ReaderContext topLevelContext = ReaderUtil.getTopLevelContext(readerContext);
-    final int off = offset;
 
     final float[] arr = getCachedFloats(topLevelContext.reader);
     return new FloatDocValues(this) {
@@ -224,7 +225,7 @@ public class FileFloatSource extends Val
 
     BufferedReader r = new BufferedReader(new InputStreamReader(is));
 
-    String idName = StringHelper.intern(ffs.keyField.getName());
+    String idName = ffs.keyField.getName();
     FieldType idType = ffs.keyField.getType();
 
     // warning: lucene's termEnum.skipTo() is not optimized... it simply does a next()
@@ -244,7 +245,7 @@ public class FileFloatSource extends Val
       DocsEnum docsEnum = null;
 
       // removing deleted docs shouldn't matter
-      // final Bits delDocs = MultiFields.getDeletedDocs(reader);
+      // final Bits liveDocs = MultiFields.getLiveDocs(reader);
 
       for (String line; (line=r.readLine())!=null;) {
         int delimIndex = line.indexOf(delimiter);
@@ -268,7 +269,7 @@ public class FileFloatSource extends Val
           continue;  // go to next line in file.. leave values as default.
         }
 
-        if (termsEnum.seek(internalKey, false) != TermsEnum.SeekStatus.FOUND) {
+        if (!termsEnum.seekExact(internalKey, false)) {
           if (notFoundCount<10) {  // collect first 10 not found for logging
             notFound.add(key);
           }
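
Exact-match positioning also changes shape on trunk: TermsEnum.seekExact(BytesRef, useCache) returns a plain boolean, replacing the comparison against TermsEnum.SeekStatus.FOUND. A minimal sketch, assuming a positioned enum:

    if (termsEnum.seekExact(new BytesRef("key"), false /* useCache */)) {
      // enum is now positioned exactly on "key"
    }
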

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/ValueSourceRangeFilter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/ValueSourceRangeFilter.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/ValueSourceRangeFilter.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/ValueSourceRangeFilter.java Thu Jul  7 09:03:58 2011
@@ -17,6 +17,7 @@
 
 package org.apache.solr.search.function;
 
+import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
@@ -49,6 +50,27 @@ public class ValueSourceRangeFilter exte
     this.includeUpper = upperVal != null && includeUpper;
   }
 
+  public ValueSource getValueSource() {
+    return valueSource;
+  }
+
+  public String getLowerVal() {
+    return lowerVal;
+  }
+
+  public String getUpperVal() {
+    return upperVal;
+  }
+
+  public boolean isIncludeLower() {
+    return includeLower;
+  }
+
+  public boolean isIncludeUpper() {
+    return includeUpper;
+  }
+
+
   @Override
   public DocIdSet getDocIdSet(final Map context, final AtomicReaderContext readerContext) throws IOException {
      return new DocIdSet() {

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/GeohashFunction.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/GeohashFunction.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/GeohashFunction.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/GeohashFunction.java Thu Jul  7 09:03:58 2011
@@ -16,9 +16,9 @@ package org.apache.solr.search.function.
  * limitations under the License.
  */
 
-import org.apache.solr.search.function.ValueSource;
-import org.apache.solr.search.function.DocValues;
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.queries.function.DocValues;
+import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.spatial.geohash.GeoHashUtils;
 
 import java.util.Map;

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/GeohashHaversineFunction.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/GeohashHaversineFunction.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/GeohashHaversineFunction.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/GeohashHaversineFunction.java Thu Jul  7 09:03:58 2011
@@ -17,10 +17,10 @@ package org.apache.solr.search.function.
  */
 
 
+import org.apache.lucene.queries.function.DocValues;
+import org.apache.lucene.queries.function.ValueSource;
+import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
 import org.apache.lucene.spatial.DistanceUtils;
-import org.apache.solr.search.function.DoubleDocValues;
-import org.apache.solr.search.function.ValueSource;
-import org.apache.solr.search.function.DocValues;
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.spatial.geohash.GeoHashUtils;

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java Thu Jul  7 09:03:58 2011
@@ -17,6 +17,13 @@ package org.apache.solr.search.function.
  */
 
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.queries.function.DocValues;
+import org.apache.lucene.queries.function.ValueSource;
+import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
+import org.apache.lucene.queries.function.valuesource.ConstNumberSource;
+import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource;
+import org.apache.lucene.queries.function.valuesource.MultiValueSource;
+import org.apache.lucene.queries.function.valuesource.VectorValueSource;
 import org.apache.lucene.queryParser.ParseException;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.spatial.DistanceUtils;

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/HaversineFunction.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/HaversineFunction.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/HaversineFunction.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/HaversineFunction.java Thu Jul  7 09:03:58 2011
@@ -17,13 +17,13 @@ package org.apache.solr.search.function.
  */
 
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.queries.function.DocValues;
+import org.apache.lucene.queries.function.ValueSource;
+import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
+import org.apache.lucene.queries.function.valuesource.MultiValueSource;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.spatial.DistanceUtils;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.search.function.DoubleDocValues;
-import org.apache.solr.search.function.MultiValueSource;
-import org.apache.solr.search.function.DocValues;
-import org.apache.solr.search.function.ValueSource;
 
 import java.io.IOException;
 import java.util.Map;

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/SquaredEuclideanFunction.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/SquaredEuclideanFunction.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/SquaredEuclideanFunction.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/SquaredEuclideanFunction.java Thu Jul  7 09:03:58 2011
@@ -16,9 +16,9 @@ package org.apache.solr.search.function.
  * limitations under the License.
  */
 
+import org.apache.lucene.queries.function.DocValues;
+import org.apache.lucene.queries.function.valuesource.MultiValueSource;
 import org.apache.lucene.spatial.DistanceUtils;
-import org.apache.solr.search.function.DocValues;
-import org.apache.solr.search.function.MultiValueSource;
 
 
 /**

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/StringDistanceFunction.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/StringDistanceFunction.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/StringDistanceFunction.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/StringDistanceFunction.java Thu Jul  7 09:03:58 2011
@@ -18,10 +18,10 @@ package org.apache.solr.search.function.
  */
 
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.queries.function.DocValues;
+import org.apache.lucene.queries.function.ValueSource;
+import org.apache.lucene.queries.function.docvalues.FloatDocValues;
 import org.apache.lucene.search.spell.StringDistance;
-import org.apache.solr.search.function.DocValues;
-import org.apache.solr.search.function.FloatDocValues;
-import org.apache.solr.search.function.ValueSource;
 
 import java.io.IOException;
 import java.util.Map;

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/VectorDistanceFunction.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/VectorDistanceFunction.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/VectorDistanceFunction.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/search/function/distance/VectorDistanceFunction.java Thu Jul  7 09:03:58 2011
@@ -17,13 +17,13 @@ package org.apache.solr.search.function.
  */
 
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.queries.function.DocValues;
+import org.apache.lucene.queries.function.ValueSource;
+import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
+import org.apache.lucene.queries.function.valuesource.MultiValueSource;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.spatial.DistanceUtils;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.search.function.DocValues;
-import org.apache.solr.search.function.DoubleDocValues;
-import org.apache.solr.search.function.MultiValueSource;
-import org.apache.solr.search.function.ValueSource;
 
 import java.io.IOException;
 import java.util.Map;

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/spelling/AbstractLuceneSpellChecker.java Thu Jul  7 09:03:58 2011
@@ -181,13 +181,13 @@ public abstract class AbstractLuceneSpel
       }
 
       if (options.extendedResults == true && reader != null && field != null) {
-        term = term.createTerm(tokenText);
+        term = new Term(field, tokenText);
         result.addFrequency(token, reader.docFreq(term));
         int countLimit = Math.min(options.count, suggestions.length);
         if(countLimit>0)
         {
 	        for (int i = 0; i < countLimit; i++) {
-	          term = term.createTerm(suggestions[i]);
+	          term = new Term(field, suggestions[i]);
 	          result.add(token, suggestions[i], reader.docFreq(term));
 	        }
         } else if(shardRequest) {

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/CommitUpdateCommand.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/CommitUpdateCommand.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/CommitUpdateCommand.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/CommitUpdateCommand.java Thu Jul  7 09:03:58 2011
@@ -24,9 +24,9 @@ import org.apache.solr.request.SolrQuery
  */
 public class CommitUpdateCommand extends UpdateCommand {
   public boolean optimize;
-  public boolean waitFlush;
   public boolean waitSearcher=true;
   public boolean expungeDeletes = false;
+  public boolean softCommit = false;
 
   /**
    * During optimize, optimize down to <= this many segments.  Must be >= 1
@@ -42,9 +42,9 @@ public class CommitUpdateCommand extends
   @Override
   public String toString() {
     return "commit(optimize="+optimize
-            +",waitFlush="+waitFlush
             +",waitSearcher="+waitSearcher
             +",expungeDeletes="+expungeDeletes
+            +",softCommit="+softCommit
             +')';
   }
 }
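
CommitUpdateCommand drops waitFlush and gains softCommit, the flag behind the near-real-time work in this branch: a soft commit reopens the searcher over recent updates without the durable, fsync-backed cost of a hard commit. A hedged sketch of issuing one (constructor signature assumed from this branch):

    CommitUpdateCommand cmd = new CommitUpdateCommand(false); // optimize=false
    cmd.softCommit = true;    // visibility without a durable hard commit
    cmd.waitSearcher = true;  // block until the new searcher is registered
    updateHandler.commit(cmd);
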

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/DirectUpdateHandler2.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/DirectUpdateHandler2.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/DirectUpdateHandler2.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/DirectUpdateHandler2.java Thu Jul  7 09:03:58 2011
@@ -32,35 +32,29 @@ import org.apache.lucene.search.TermQuer
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.store.Directory;
 
-import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ScheduledFuture;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.concurrent.atomic.AtomicLong;
 import java.io.IOException;
 import java.net.URL;
 
-import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.request.LocalSolrQueryRequest;
-import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.search.QParser;
-import org.apache.solr.search.QueryParsing;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.core.SolrCore;
+import org.apache.solr.core.SolrConfig.UpdateHandlerInfo;
+import org.apache.solr.search.SolrIndexSearcher;
 
 /**
+ *  TODO: add soft commitWithin support
+ * 
  * <code>DirectUpdateHandler2</code> implements an UpdateHandler where documents are added
  * directly to the main Lucene index as opposed to adding to a separate smaller index.
  */
 public class DirectUpdateHandler2 extends UpdateHandler {
+  protected IndexWriterProvider indexWriterProvider;
 
   // stats
   AtomicLong addCommands = new AtomicLong();
@@ -79,66 +73,61 @@ public class DirectUpdateHandler2 extend
   AtomicLong numErrorsCumulative = new AtomicLong();
 
   // tracks when auto-commit should occur
-  protected final CommitTracker tracker;
-
-  // iwCommit protects internal data and open/close of the IndexWriter and
-  // is a mutex. Any use of the index writer should be protected by iwAccess, 
-  // which admits multiple simultaneous acquisitions.  iwAccess is 
-  // mutually-exclusive with the iwCommit lock.
-  protected final Lock iwAccess, iwCommit;
-
-  protected IndexWriter writer;
+  protected final CommitTracker commitTracker;
+  protected final CommitTracker softCommitTracker;
 
   public DirectUpdateHandler2(SolrCore core) throws IOException {
     super(core);
-
-    // Pass fairness=true so commit request is not starved
-    // when add/updates are running hot (SOLR-2342):
-    ReadWriteLock rwl = new ReentrantReadWriteLock(true);
-    iwAccess = rwl.readLock();
-    iwCommit = rwl.writeLock();
-
-    tracker = new CommitTracker();
+   
+    indexWriterProvider = new DefaultIndexWriterProvider(core);
+    
+    UpdateHandlerInfo updateHandlerInfo = core.getSolrConfig()
+        .getUpdateHandlerInfo();
+    int docsUpperBound = updateHandlerInfo.autoCommmitMaxDocs; // getInt("updateHandler/autoCommit/maxDocs", -1);
+    int timeUpperBound = updateHandlerInfo.autoCommmitMaxTime; // getInt("updateHandler/autoCommit/maxTime", -1);
+    commitTracker = new CommitTracker(core, docsUpperBound, timeUpperBound, true, false);
+    
+    int softCommitDocsUpperBound = updateHandlerInfo.autoSoftCommmitMaxDocs; // getInt("updateHandler/autoSoftCommit/maxDocs", -1);
+    int softCommitTimeUpperBound = updateHandlerInfo.autoSoftCommmitMaxTime; // getInt("updateHandler/autoSoftCommit/maxTime", -1);
+    softCommitTracker = new CommitTracker(core, softCommitDocsUpperBound, softCommitTimeUpperBound, true, true);
   }
-
-  // must only be called when iwCommit lock held
-  private void deleteAll() throws IOException {
-    core.log.info(core.getLogId()+"REMOVING ALL DOCUMENTS FROM INDEX");
-    closeWriter();
-    writer = createMainIndexWriter("DirectUpdateHandler2", true);
-  }
-
-  // must only be called when iwCommit lock held
-  protected void openWriter() throws IOException {
-    if (writer==null) {
-      writer = createMainIndexWriter("DirectUpdateHandler2", false);
-    }
+  
+  public DirectUpdateHandler2(SolrCore core, UpdateHandler updateHandler) throws IOException {
+    super(core);
+    if (updateHandler instanceof DirectUpdateHandler2) {
+      this.indexWriterProvider = ((DirectUpdateHandler2)updateHandler).indexWriterProvider;
+    } else {
+      // the impl has changed, so we cannot use the old state - decref it
+      updateHandler.decref();
+      indexWriterProvider = new DefaultIndexWriterProvider(core);
+    }
+    
+    UpdateHandlerInfo updateHandlerInfo = core.getSolrConfig()
+        .getUpdateHandlerInfo();
+    int docsUpperBound = updateHandlerInfo.autoCommmitMaxDocs; // getInt("updateHandler/autoCommit/maxDocs", -1);
+    int timeUpperBound = updateHandlerInfo.autoCommmitMaxTime; // getInt("updateHandler/autoCommit/maxTime", -1);
+    commitTracker = new CommitTracker(core, docsUpperBound, timeUpperBound, true, false);
+    
+    int softCommitDocsUpperBound = updateHandlerInfo.autoSoftCommmitMaxDocs; // getInt("updateHandler/autoSoftCommit/maxDocs", -1);
+    int softCommitTimeUpperBound = updateHandlerInfo.autoSoftCommmitMaxTime; // getInt("updateHandler/autoSoftCommit/maxTime", -1);
+    softCommitTracker = new CommitTracker(core, softCommitDocsUpperBound, softCommitTimeUpperBound, true, true);
+    
   }
 
-  // must only be called when iwCommit lock held
-  protected void closeWriter() throws IOException {
-    try {
-      numDocsPending.set(0);
-      if (writer!=null) writer.close();
-    } finally {
-      // if an exception causes the writelock to not be
-      // released, we could try and delete it here
-      writer=null;
-    }
+  private void deleteAll() throws IOException {
+    SolrCore.log.info(core.getLogId()+"REMOVING ALL DOCUMENTS FROM INDEX");
+    indexWriterProvider.getIndexWriter().deleteAll();
   }
 
-  // must only be called when iwCommit lock held
   protected void rollbackWriter() throws IOException {
-    try {
-      numDocsPending.set(0);
-      if (writer!=null) writer.rollback();
-    } finally {
-      writer = null;
-    }
+    numDocsPending.set(0);
+    indexWriterProvider.rollbackIndexWriter();
+    
   }
 
   @Override
   public int addDoc(AddUpdateCommand cmd) throws IOException {
+    IndexWriter writer = indexWriterProvider.getIndexWriter();
     addCommands.incrementAndGet();
     addCommandsCumulative.incrementAndGet();
     int rc=-1;
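
DirectUpdateHandler2 now keeps two trackers: commitTracker drives hard auto-commits from the updateHandler/autoCommit/maxDocs|maxTime settings named in the inline comments, and softCommitTracker does the same for updateHandler/autoSoftCommit. The IndexWriter itself moves behind indexWriterProvider, replacing the iwAccess/iwCommit read-write lock pair; the new two-argument constructor lets a replacement handler inherit the provider (and thus the live writer) from the handler it supersedes, decref'ing the old state only when the implementation class has changed.
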
@@ -148,19 +137,18 @@ public class DirectUpdateHandler2 extend
       cmd.overwrite = false;
     }
 
-    iwAccess.lock();
-    try {
-
-      // We can't use iwCommit to protect internal data here, since it would
-      // block other addDoc calls.  Hence, we synchronize to protect internal
-      // state.  This is safe as all other state-changing operations are
-      // protected with iwCommit (which iwAccess excludes from this block).
-      synchronized (this) {
-        // adding document -- prep writer
-        openWriter();
-        tracker.addedDocument( cmd.commitWithin );
-      } // end synchronized block
 
+    try {
+      boolean triggered = commitTracker.addedDocument( cmd.commitWithin );
+    
+      if (!triggered) {
+        // no hard commit was triggered; let the soft commit tracker decide
+        softCommitTracker.addedDocument( cmd.commitWithin );
+      } else {
+        // a hard commit was triggered; still count this doc toward the next soft commit
+        softCommitTracker.docsSinceCommit++;
+      }
+      
       // this is the only unsynchronized code in the iwAccess block, which
       // should account for most of the time
 			Term updateTerm = null;
@@ -169,13 +157,13 @@ public class DirectUpdateHandler2 extend
         if (cmd.indexedId == null) {
           cmd.indexedId = getIndexedId(cmd.doc);
         }
-        Term idTerm = this.idTerm.createTerm(cmd.indexedId);
+        Term idTerm = new Term(idField.getName(), cmd.indexedId);
         boolean del = false;
         if (cmd.updateTerm == null) {
           updateTerm = idTerm;
         } else {
           del = true;
-        	updateTerm = cmd.updateTerm;
+          updateTerm = cmd.updateTerm;
         }
 
         writer.updateDocument(updateTerm, cmd.getLuceneDocument(schema));
@@ -192,7 +180,6 @@ public class DirectUpdateHandler2 extend
 
       rc = 1;
     } finally {
-      iwAccess.unlock();
       if (rc!=1) {
         numErrors.incrementAndGet();
         numErrorsCumulative.incrementAndGet();
@@ -211,16 +198,12 @@ public class DirectUpdateHandler2 extend
     deleteByIdCommands.incrementAndGet();
     deleteByIdCommandsCumulative.incrementAndGet();
 
-    iwCommit.lock();
-    try {
-      openWriter();
-      writer.deleteDocuments(idTerm.createTerm(idFieldType.toInternal(cmd.id)));
-    } finally {
-      iwCommit.unlock();
-    }
+    indexWriterProvider.getIndexWriter().deleteDocuments(new Term(idField.getName(), idFieldType.toInternal(cmd.id)));
 
-    if( tracker.timeUpperBound > 0 ) {
-      tracker.scheduleCommitWithin( tracker.timeUpperBound );
+    if (commitTracker.timeUpperBound > 0) {
+      commitTracker.scheduleCommitWithin(commitTracker.timeUpperBound);
+    } else if (softCommitTracker.timeUpperBound > 0) {
+      softCommitTracker.scheduleCommitWithin(softCommitTracker.timeUpperBound);
     }
   }
 
@@ -230,7 +213,6 @@ public class DirectUpdateHandler2 extend
   public void deleteByQuery(DeleteUpdateCommand cmd) throws IOException {
     deleteByQueryCommands.incrementAndGet();
     deleteByQueryCommandsCumulative.incrementAndGet();
-
     boolean madeIt=false;
     boolean delAll=false;
     try {
@@ -241,26 +223,23 @@ public class DirectUpdateHandler2 extend
       } catch (ParseException e) {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
       }
-
+      
       delAll = MatchAllDocsQuery.class == q.getClass();
-
-      iwCommit.lock();
-      try {
-        if (delAll) {
-          deleteAll();
-        } else {
-          openWriter();
-          writer.deleteDocuments(q);
-        }
-      } finally {
-        iwCommit.unlock();
+      
+      if (delAll) {
+        deleteAll();
+      } else {
+        indexWriterProvider.getIndexWriter().deleteDocuments(q);
       }
-
-      madeIt=true;
-
-      if( tracker.timeUpperBound > 0 ) {
-        tracker.scheduleCommitWithin( tracker.timeUpperBound );
+      
+      madeIt = true;
+      
+      if (commitTracker.timeUpperBound > 0) {
+        commitTracker.scheduleCommitWithin(commitTracker.timeUpperBound);
+      } else if (softCommitTracker.timeUpperBound > 0) {
+        softCommitTracker.scheduleCommitWithin(softCommitTracker.timeUpperBound);
       }
+      
     } finally {
       if (!madeIt) {
         numErrors.incrementAndGet();
@@ -274,42 +253,30 @@ public class DirectUpdateHandler2 extend
     mergeIndexesCommands.incrementAndGet();
     int rc = -1;
 
-    iwCommit.lock();
-    try {
-      log.info("start " + cmd);
-
-      IndexReader[] readers = cmd.readers;
-      if (readers != null && readers.length > 0) {
-        openWriter();
-        writer.addIndexes(readers);
-        rc = 1;
-      } else {
-        rc = 0;
-      }
-      log.info("end_mergeIndexes");
-    } finally {
-      iwCommit.unlock();
+    log.info("start " + cmd);
+    
+    IndexReader[] readers = cmd.readers;
+    if (readers != null && readers.length > 0) {
+      indexWriterProvider.getIndexWriter().addIndexes(readers);
+      rc = 1;
+    } else {
+      rc = 0;
     }
+    log.info("end_mergeIndexes");
 
-    if (rc == 1 && tracker.timeUpperBound > 0) {
-      tracker.scheduleCommitWithin(tracker.timeUpperBound);
+    // TODO: consider soft commit issues
+    if (rc == 1 && commitTracker.timeUpperBound > 0) {
+      commitTracker.scheduleCommitWithin(commitTracker.timeUpperBound);
+    } else if (rc == 1 && softCommitTracker.timeUpperBound > 0) {
+      softCommitTracker.scheduleCommitWithin(softCommitTracker.timeUpperBound);
     }
 
     return rc;
   }
 
-   public void forceOpenWriter() throws IOException  {
-    iwCommit.lock();
-    try {
-      openWriter();
-    } finally {
-      iwCommit.unlock();
-    }
-  }
-
   @Override
   public void commit(CommitUpdateCommand cmd) throws IOException {
-
+    IndexWriter writer = indexWriterProvider.getIndexWriter();
     if (cmd.optimize) {
       optimizeCommands.incrementAndGet();
     } else {
@@ -323,38 +290,50 @@ public class DirectUpdateHandler2 extend
     }
 
     boolean error=true;
-    iwCommit.lock();
     try {
       log.info("start "+cmd);
 
       if (cmd.optimize) {
-        openWriter();
         writer.optimize(cmd.maxOptimizeSegments);
       } else if (cmd.expungeDeletes) {
-        openWriter();
         writer.expungeDeletes();
       }
       
-      closeWriter();
+      if (!cmd.softCommit) {
+        writer.commit();
+        
+        callPostCommitCallbacks();
+      } else {
+        callPostSoftCommitCallbacks();
+      }
+
 
-      callPostCommitCallbacks();
       if (cmd.optimize) {
         callPostOptimizeCallbacks();
       }
+      
       // open a new searcher in the sync block to avoid opening it
       // after a deleteByQuery changed the index, or in between deletes
       // and adds of another commit being done.
-      core.getSearcher(true,false,waitSearcher);
+      if (cmd.softCommit) {
+        core.getSearcher(true,false,waitSearcher, true);
+      } else {
+        core.getSearcher(true,false,waitSearcher);
+      }
 
       // reset commit tracking
-      tracker.didCommit();
 
+      if (cmd.softCommit) {
+        softCommitTracker.didCommit();
+      } else {
+        commitTracker.didCommit();
+      }
+      
       log.info("end_commit_flush");
 
       error=false;
     }
     finally {
-      iwCommit.unlock();
       addCommands.set(0);
       deleteByIdCommands.set(0);
       deleteByQueryCommands.set(0);
@@ -374,16 +353,36 @@ public class DirectUpdateHandler2 extend
     }
   }
 
+  @Override
+  public SolrIndexSearcher reopenSearcher(SolrIndexSearcher previousSearcher) throws IOException {
+    
+    IndexReader currentReader = previousSearcher.getIndexReader();
+    IndexReader newReader = currentReader.reopen(indexWriterProvider.getIndexWriter(), true);
+
+    if (newReader == currentReader) {
+      currentReader.incRef();
+    }
+    
+    return new SolrIndexSearcher(core, schema, "main", newReader, true, true);
+  }
+  
+  @Override
+  public void newIndexWriter() throws IOException {
+    indexWriterProvider.newIndexWriter();
+  }
+  
   /**
    * @since Solr 1.4
    */
   @Override
   public void rollback(RollbackUpdateCommand cmd) throws IOException {
-
     rollbackCommands.incrementAndGet();
 
     boolean error=true;
-    iwCommit.lock();
+
     try {
       log.info("start "+cmd);
 
@@ -392,14 +391,14 @@ public class DirectUpdateHandler2 extend
       //callPostRollbackCallbacks();
 
       // reset commit tracking
-      tracker.didRollback();
-
+      commitTracker.didRollback();
+      softCommitTracker.didRollback();
+      
       log.info("end_rollback");
 
       error=false;
     }
     finally {
-      iwCommit.unlock();
       addCommandsCumulative.set(
           addCommandsCumulative.get() - addCommands.getAndSet( 0 ) );
       deleteByIdCommandsCumulative.set(
@@ -414,162 +413,16 @@ public class DirectUpdateHandler2 extend
   @Override
   public void close() throws IOException {
     log.info("closing " + this);
-    iwCommit.lock();
-    try{
-      // cancel any pending operations
-      if( tracker.pending != null ) {
-        tracker.pending.cancel( true );
-        tracker.pending = null;
-      }
-      tracker.scheduler.shutdown();
-      closeWriter();
-    } finally {
-      iwCommit.unlock();
-    }
+    
+    commitTracker.close();
+    softCommitTracker.close();
+
+    numDocsPending.set(0);
+    indexWriterProvider.decref();
+    
     log.info("closed " + this);
   }
 
-  /** Helper class for tracking autoCommit state.
-   *
-   * Note: This is purely an implementation detail of autoCommit and will
-   * definitely change in the future, so the interface should not be
-   * relied-upon
-   *
-   * Note: all access must be synchronized.
-   */
-  class CommitTracker implements Runnable
-  {
-    // scheduler delay for maxDoc-triggered autocommits
-    public final int DOC_COMMIT_DELAY_MS = 250;
-
-    // settings, not final so we can change them in testing
-    int docsUpperBound;
-    long timeUpperBound;
-
-    private final ScheduledExecutorService scheduler =
-       Executors.newScheduledThreadPool(1);
-    private ScheduledFuture pending;
-
-    // state
-    long docsSinceCommit;
-    int autoCommitCount = 0;
-    long lastAddedTime = -1;
-
-    public CommitTracker() {
-      docsSinceCommit = 0;
-      pending = null;
-
-      docsUpperBound = core.getSolrConfig().getUpdateHandlerInfo().autoCommmitMaxDocs;   //getInt("updateHandler/autoCommit/maxDocs", -1);
-      timeUpperBound = core.getSolrConfig().getUpdateHandlerInfo().autoCommmitMaxTime;    //getInt("updateHandler/autoCommit/maxTime", -1);
-
-      SolrCore.log.info("AutoCommit: " + this);
-    }
-
-    /** schedule individual commits */
-    public synchronized void scheduleCommitWithin(long commitMaxTime)
-    {
-      _scheduleCommitWithin( commitMaxTime );
-    }
-
-    private void _scheduleCommitWithin(long commitMaxTime)
-    {
-      // Check if there is a commit already scheduled for longer then this time
-      if( pending != null &&
-          pending.getDelay(TimeUnit.MILLISECONDS) >= commitMaxTime )
-      {
-        pending.cancel(false);
-        pending = null;
-      }
-
-      // schedule a new commit
-      if( pending == null ) {
-        pending = scheduler.schedule( this, commitMaxTime, TimeUnit.MILLISECONDS );
-      }
-    }
-
-    /** Indicate that documents have been added
-     */
-    public void addedDocument( int commitWithin ) {
-      docsSinceCommit++;
-      lastAddedTime = System.currentTimeMillis();
-      // maxDocs-triggered autoCommit
-      if( docsUpperBound > 0 && (docsSinceCommit > docsUpperBound) ) {
-        _scheduleCommitWithin( DOC_COMMIT_DELAY_MS );
-      }
-
-      // maxTime-triggered autoCommit
-      long ctime = (commitWithin>0) ? commitWithin : timeUpperBound;
-      if( ctime > 0 ) {
-        _scheduleCommitWithin( ctime );
-      }
-    }
-
-    /** Inform tracker that a commit has occurred, cancel any pending commits */
-    public void didCommit() {
-      if( pending != null ) {
-        pending.cancel(false);
-        pending = null; // let it start another one
-      }
-      docsSinceCommit = 0;
-    }
-
-    /** Inform tracker that a rollback has occurred, cancel any pending commits */
-    public void didRollback() {
-      if( pending != null ) {
-        pending.cancel(false);
-        pending = null; // let it start another one
-      }
-      docsSinceCommit = 0;
-    }
-
-    /** This is the worker part for the ScheduledFuture **/
-    public synchronized void run() {
-      long started = System.currentTimeMillis();
-      SolrQueryRequest req = new LocalSolrQueryRequest(core, new ModifiableSolrParams());
-      try {
-        CommitUpdateCommand command = new CommitUpdateCommand(req, false );
-        command.waitFlush = true;
-        command.waitSearcher = true;
-        //no need for command.maxOptimizeSegments = 1;  since it is not optimizing
-        commit( command );
-        autoCommitCount++;
-      }
-      catch (Exception e) {
-        log.error( "auto commit error..." );
-        e.printStackTrace();
-      }
-      finally {
-        pending = null;
-        req.close();
-      }
-
-      // check if docs have been submitted since the commit started
-      if( lastAddedTime > started ) {
-        if( docsUpperBound > 0 && docsSinceCommit > docsUpperBound ) {
-          pending = scheduler.schedule( this, 100, TimeUnit.MILLISECONDS );
-        }
-        else if( timeUpperBound > 0 ) {
-          pending = scheduler.schedule( this, timeUpperBound, TimeUnit.MILLISECONDS );
-        }
-      }
-    }
-
-    // to facilitate testing: blocks if called during commit
-    public synchronized int getCommitCount() { return autoCommitCount; }
-
-    @Override
-    public String toString() {
-      if(timeUpperBound > 0 || docsUpperBound > 0) {
-        return
-          (timeUpperBound > 0 ? ("if uncommited for " + timeUpperBound + "ms; ") : "") +
-          (docsUpperBound > 0 ? ("if " + docsUpperBound + " uncommited docs ") : "");
-
-      } else {
-        return "disabled";
-      }
-    }
-  }
-
 
   /////////////////////////////////////////////////////////////////////
   // SolrInfoMBean stuff: Statistics and Module Info
@@ -606,13 +459,20 @@ public class DirectUpdateHandler2 extend
   public NamedList getStatistics() {
     NamedList lst = new SimpleOrderedMap();
     lst.add("commits", commitCommands.get());
-    if (tracker.docsUpperBound > 0) {
-      lst.add("autocommit maxDocs", tracker.docsUpperBound);
+    if (commitTracker.docsUpperBound > 0) {
+      lst.add("autocommit maxDocs", commitTracker.docsUpperBound);
     }
-    if (tracker.timeUpperBound > 0) {
-      lst.add("autocommit maxTime", "" + tracker.timeUpperBound + "ms");
+    if (commitTracker.timeUpperBound > 0) {
+      lst.add("autocommit maxTime", "" + commitTracker.timeUpperBound + "ms");
     }
-    lst.add("autocommits", tracker.autoCommitCount);
+    lst.add("autocommits", commitTracker.autoCommitCount);
+    if (softCommitTracker.docsUpperBound > 0) {
+      lst.add("soft autocommit maxDocs", softCommitTracker.docsUpperBound);
+    }
+    if (softCommitTracker.timeUpperBound > 0) {
+      lst.add("soft autocommit maxTime", "" + softCommitTracker.timeUpperBound + "ms");
+    }
+    lst.add("soft autocommits", softCommitTracker.autoCommitCount);
     lst.add("optimizes", optimizeCommands.get());
     lst.add("rollbacks", rollbackCommands.get());
     lst.add("expungeDeletes", expungeDeleteCommands.get());
@@ -634,4 +494,22 @@ public class DirectUpdateHandler2 extend
   public String toString() {
     return "DirectUpdateHandler2" + getStatistics();
   }
+  
+  public IndexWriterProvider getIndexWriterProvider() {
+    return indexWriterProvider;
+  }
+
+  @Override
+  public void decref() {
+    try {
+      indexWriterProvider.decref();
+    } catch (IOException e) {
+      throw new SolrException(ErrorCode.SERVER_ERROR, "Error closing IndexWriterProvider", e, false);
+    }
+  }
+
+  @Override
+  public void incref() {
+    indexWriterProvider.incref();
+  }
 }
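
The constructor above adopts the previous handler's indexWriterProvider when
the implementation class is unchanged, and decrefs the old state otherwise.
Here is a minimal sketch of the reference-counting contract this implies; the
class and method bodies are illustrative, not the branch's actual
IndexWriterProvider:

    import java.io.IOException;

    // Illustrative only: a shared, reference-counted holder for writer state.
    class SharedWriterState {
      private int refCount = 1;        // the creating core holds the first reference
      private boolean released = false;

      synchronized void incref() {     // another core starts sharing this state
        if (released) throw new IllegalStateException("state already released");
        refCount++;
      }

      synchronized void decref() throws IOException {
        if (released || --refCount > 0) return;
        released = true;               // last owner releases the writer state
        // the underlying IndexWriter would be closed here
      }
    }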

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/SolrIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/SolrIndexWriter.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/SolrIndexWriter.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/SolrIndexWriter.java Thu Jul  7 09:03:58 2011
@@ -35,6 +35,7 @@ import java.io.PrintStream;
 import java.text.DateFormat;
 import java.util.Date;
 import java.util.Locale;
+import java.util.concurrent.atomic.AtomicLong;
 
 /**
  * An IndexWriter that is configured via Solr config mechanisms.
@@ -44,6 +45,9 @@ import java.util.Locale;
 
 public class SolrIndexWriter extends IndexWriter {
   private static Logger log = LoggerFactory.getLogger(SolrIndexWriter.class);
+  // These should *only* be used for debugging or monitoring purposes
+  public static final AtomicLong numOpens = new AtomicLong();
+  public static final AtomicLong numCloses = new AtomicLong();
 
   String name;
   private PrintStream infoStream;
@@ -90,6 +94,7 @@ public class SolrIndexWriter extends Ind
     this.name = name;
 
     setInfoStream(config);
+    numOpens.incrementAndGet();
   }
 
   private void setInfoStream(SolrIndexConfig config)
@@ -147,6 +152,7 @@ public class SolrIndexWriter extends Ind
       }
     } finally {
       isClosed = true;
+      numCloses.incrementAndGet();
     }
   }
 
@@ -163,6 +169,7 @@ public class SolrIndexWriter extends Ind
   protected void finalize() throws Throwable {
     try {
       if(!isClosed){
+        assert false : "SolrIndexWriter was not closed prior to finalize()";
         log.error("SolrIndexWriter was not closed prior to finalize(), indicates a bug -- POSSIBLE RESOURCE LEAK!!!");
         close();
       }
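
Because numOpens and numCloses are public AtomicLongs, a test harness can
cheaply verify that every writer opened during a run was also closed. A
possible check, assuming it runs after all SolrCores have been shut down:

    import org.apache.solr.update.SolrIndexWriter;

    final class WriterLeakCheck {
      // Throws if any SolrIndexWriter was opened but never closed.
      static void assertNoLeakedWriters() {
        long opens  = SolrIndexWriter.numOpens.get();
        long closes = SolrIndexWriter.numCloses.get();
        if (opens != closes) {
          throw new AssertionError("leaked " + (opens - closes) + " IndexWriter(s)");
        }
      }
    }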

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/UpdateHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/UpdateHandler.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/UpdateHandler.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/UpdateHandler.java Thu Jul  7 09:03:58 2011
@@ -54,11 +54,21 @@ public abstract class UpdateHandler impl
 
   protected final SchemaField idField;
   protected final FieldType idFieldType;
-  protected final Term idTerm; // prototype term to avoid interning fieldname
 
   protected Vector<SolrEventListener> commitCallbacks = new Vector<SolrEventListener>();
+  protected Vector<SolrEventListener> softCommitCallbacks = new Vector<SolrEventListener>();
   protected Vector<SolrEventListener> optimizeCallbacks = new Vector<SolrEventListener>();
 
+  /**
+   * Called when a SolrCore using this UpdateHandler is closed.
+   */
+  public abstract void decref();
+  
+  /**
+   * Called when this UpdateHandler is shared with another SolrCore.
+   */
+  public abstract void incref();
+
   private void parseEventListeners() {
     final Class<SolrEventListener> clazz = SolrEventListener.class;
     final String label = "Event Listener";
@@ -82,6 +92,12 @@ public abstract class UpdateHandler impl
     }
   }
 
+  protected void callPostSoftCommitCallbacks() {
+    for (SolrEventListener listener : softCommitCallbacks) {
+      listener.postSoftCommit();
+    }
+  }  
+  
   protected void callPostOptimizeCallbacks() {
     for (SolrEventListener listener : optimizeCallbacks) {
       listener.postCommit();
@@ -93,14 +109,9 @@ public abstract class UpdateHandler impl
     schema = core.getSchema();
     idField = schema.getUniqueKeyField();
     idFieldType = idField!=null ? idField.getType() : null;
-    idTerm = idField!=null ? new Term(idField.getName(),"") : null;
     parseEventListeners();
   }
 
-  protected SolrIndexWriter createMainIndexWriter(String name, boolean removeAllExisting) throws IOException {
-    return new SolrIndexWriter(name,core.getNewIndexDir(), core.getDirectoryFactory(), removeAllExisting, schema, core.getSolrConfig().mainIndexConfig, core.getDeletionPolicy(), core.getCodecProvider());
-  }
-
   protected final Term idTerm(String readableId) {
     // to correctly create the Term, the string needs to be run
     // through the Analyzer for that field.
@@ -128,6 +139,23 @@ public abstract class UpdateHandler impl
     if (f == null) return null;
     return idFieldType.storedToIndexed(f);
   }
+  
+  /**
+   * Allows the UpdateHandler to create the SolrIndexSearcher after it
+   * has issued a 'softCommit'. 
+   * 
+   * @param previousSearcher the searcher to reopen against the current IndexWriter
+   * @throws IOException if the underlying reader cannot be reopened
+   */
+  public abstract SolrIndexSearcher reopenSearcher(SolrIndexSearcher previousSearcher) throws IOException;
+  
+  /**
+   * Called when the IndexWriter should be opened again, e.g. when replication
+   * replaces all of the index files.
+   * 
+   * @throws IOException if a new IndexWriter cannot be opened
+   */
+  public abstract void newIndexWriter() throws IOException;
 
 
   public abstract int addDoc(AddUpdateCommand cmd) throws IOException;
@@ -188,6 +216,18 @@ public abstract class UpdateHandler impl
   {
     commitCallbacks.add( listener );
   }
+  
+  /**
+   * NOTE: this function is not thread safe.  However, it is safe to call within the
+   * <code>inform( SolrCore core )</code> function for <code>SolrCoreAware</code> classes.
+   * Outside <code>inform</code>, this could potentially throw a ConcurrentModificationException.
+   *
+   * @see SolrCoreAware
+   */
+  public void registerSoftCommitCallback( SolrEventListener listener )
+  {
+    softCommitCallbacks.add( listener );
+  }
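
A sketch of a SolrCoreAware component using the new hook from inside
inform(), where registration is documented as safe. The class name and
listener body are hypothetical, and the sketch assumes the postSoftCommit()
method added to SolrEventListener elsewhere in this branch:

    import org.apache.solr.common.util.NamedList;
    import org.apache.solr.core.SolrCore;
    import org.apache.solr.core.SolrEventListener;
    import org.apache.solr.search.SolrIndexSearcher;
    import org.apache.solr.util.plugin.SolrCoreAware;

    public class SoftCommitLogger implements SolrCoreAware {
      public void inform(SolrCore core) {
        core.getUpdateHandler().registerSoftCommitCallback(new SolrEventListener() {
          public void init(NamedList args) {}
          public void postCommit() {}
          public void postSoftCommit() {
            SolrCore.log.info("soft commit completed");
          }
          public void newSearcher(SolrIndexSearcher newSearcher,
                                  SolrIndexSearcher currentSearcher) {}
        });
      }
    }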
 
   /**
    * NOTE: this function is not thread safe.  However, it is safe to call within the

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/processor/SignatureUpdateProcessorFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/processor/SignatureUpdateProcessorFactory.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/processor/SignatureUpdateProcessorFactory.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/update/processor/SignatureUpdateProcessorFactory.java Thu Jul  7 09:03:58 2011
@@ -61,8 +61,6 @@ public class SignatureUpdateProcessorFac
 
       signatureField = params.get("signatureField", "signatureField");
 
-      signatureTerm = new Term(signatureField, "");
-
       signatureClass = params.get("signatureClass",
           "org.apache.solr.update.processor.Lookup3Signature");
       this.params = params;
@@ -173,7 +171,7 @@ public class SignatureUpdateProcessorFac
         doc.addField(signatureField, sigString);
 
         if (overwriteDupes) {
-          cmd.updateTerm = signatureTerm.createTerm(sigString);
+          cmd.updateTerm = new Term(signatureField, sigString);
         }
 
       }
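
The prototype-term pattern removed here existed to reuse an interned field
name across Term instances; with field-name interning gone from trunk Lucene,
constructing the Term directly is equivalent. A small before/after sketch
(the signature value passed in is made up):

    import org.apache.lucene.index.Term;

    class TermConstruction {
      // Before: Term proto = new Term("signatureField", "");
      //         Term t = proto.createTerm(sig);
      // After (this commit): construct the Term directly, no prototype needed.
      static Term signatureTerm(String sig) {
        return new Term("signatureField", sig);
      }
    }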

Modified: lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/util/SimplePostTool.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/util/SimplePostTool.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/util/SimplePostTool.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/java/org/apache/solr/util/SimplePostTool.java Thu Jul  7 09:03:58 2011
@@ -19,7 +19,6 @@ package org.apache.solr.util;
 
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.ByteArrayInputStream;
@@ -39,10 +38,10 @@ import java.net.URL;
  */
 public class SimplePostTool {
   public static final String DEFAULT_POST_URL = "http://localhost:8983/solr/update";
-  public static final String VERSION_OF_THIS_TOOL = "1.3";
-  private static final String SOLR_OK_RESPONSE_EXCERPT = "<int name=\"status\">0</int>";
+  public static final String VERSION_OF_THIS_TOOL = "1.4";
 
   private static final String DEFAULT_COMMIT = "yes";
+  private static final String DEFAULT_OPTIMIZE = "no";
   private static final String DEFAULT_OUT = "no";
 
   private static final String DEFAULT_DATA_TYPE = "application/xml";
@@ -64,23 +63,27 @@ public class SimplePostTool {
   public static void main(String[] args) {
     info("version " + VERSION_OF_THIS_TOOL);
 
-    if (0 < args.length && "-help".equals(args[0])) {
+    if (0 < args.length && ("-help".equals(args[0]) || "--help".equals(args[0]) || "-h".equals(args[0]))) {
       System.out.println
         ("This is a simple command line tool for POSTing raw data to a Solr\n"+
          "port.  Data can be read from files specified as commandline args,\n"+
          "as raw commandline arg strings, or via STDIN.\n"+
          "Examples:\n"+
-         "  java -Ddata=files -jar post.jar *.xml\n"+
+         "  java -jar post.jar *.xml\n"+
          "  java -Ddata=args  -jar post.jar '<delete><id>42</id></delete>'\n"+
          "  java -Ddata=stdin -jar post.jar < hd.xml\n"+
+         "  java -Durl=http://localhost:8983/solr/update/csv -Dtype=text/csv -jar post.jar *.csv\n"+
+         "  java -Durl=http://localhost:8983/solr/update/json -Dtype=application/json -jar post.jar *.json\n"+
+         "  java -Durl=http://localhost:8983/solr/update/extract?literal.id=a -Dtype=application/pdf -jar post.jar a.pdf\n"+
          "Other options controlled by System Properties include the Solr\n"+
          "URL to POST to, the Content-Type of the data, whether a commit\n"+
-         "should be executed, and whether the response should be written\n"+
-         "to STDOUT. These are the defaults for all System Properties...\n"+
+         "or optimize should be executed, and whether the response should\n"+
+         "be written to STDOUT. These are the defaults for all System Properties:\n"+
          "  -Ddata=" + DEFAULT_DATA_MODE + "\n"+
          "  -Dtype=" + DEFAULT_DATA_TYPE + "\n"+
          "  -Durl=" + DEFAULT_POST_URL + "\n"+
          "  -Dcommit=" + DEFAULT_COMMIT + "\n"+
+         "  -Doptimize=" + DEFAULT_OPTIMIZE + "\n"+
          "  -Dout=" + DEFAULT_OUT + "\n");
       return;
     }
@@ -100,7 +103,6 @@ public class SimplePostTool {
       fatal("System Property 'data' is not valid for this tool: " + mode);
     }
 
-    final String doOut = System.getProperty("out", DEFAULT_OUT);
     if ("yes".equals(System.getProperty("out", DEFAULT_OUT))) {
       out = System.out;
     }
@@ -109,14 +111,16 @@ public class SimplePostTool {
       if (DATA_MODE_FILES.equals(mode)) {
         if (0 < args.length) {
           info("POSTing files to " + u + "..");
-          final int posted = t.postFiles(args, 0, out);
+          t.postFiles(args, 0, out);
+        } else {
+          info("No files specified. (Use -h for help)");
         }
         
       } else if (DATA_MODE_ARGS.equals(mode)) {
         if (0 < args.length) {
           info("POSTing args to " + u + "..");
           for (String a : args) {
-            t.postData(t.stringToStream(a), null, out);
+            t.postData(SimplePostTool.stringToStream(a), null, out);
           }
         }
         
@@ -126,10 +130,15 @@ public class SimplePostTool {
       }
       if ("yes".equals(System.getProperty("commit",DEFAULT_COMMIT))) {
         info("COMMITting Solr index changes..");
-        t.commit(out);
+        t.commit();
+      }
+      if ("yes".equals(System.getProperty("optimize",DEFAULT_OPTIMIZE))) {
+        info("Performing an OPTIMIZE..");
+        t.optimize();
       }
     
     } catch(RuntimeException e) {
+      e.printStackTrace();
       fatal("RuntimeException " + e);
     }
   }
@@ -174,8 +183,19 @@ public class SimplePostTool {
   /**
    * Does a simple commit operation 
    */
-  public void commit(OutputStream output) {
-    postData(stringToStream("<commit/>"), null, output);
+  public void commit() {
+    doGet(appendParam(solrUrl.toString(), "commit=true"));
+  }
+
+  /**
+   * Does a simple optimize operation 
+   */
+  public void optimize() {
+    doGet(appendParam(solrUrl.toString(), "optimize=true"));
+  }
+
+  private String appendParam(String url, String param) {
+    return url + (url.indexOf('?')>0 ? "&" : "?") + param;
   }
 
   /**
@@ -201,6 +221,34 @@ public class SimplePostTool {
   }
 
   /**
+   * Performs a simple GET request on the given URL
+   * @param url the URL string to request
+   */
+  public void doGet(String url) {
+    try {
+      doGet(new URL(url));
+    } catch (MalformedURLException e) {
+      fatal("The specified URL "+url+" is not a valid URL. Please check it.");
+    }
+  }
+  
+  /**
+   * Performs a simple GET request on the given URL
+   * @param url the URL to request
+   */
+  public void doGet(URL url) {
+    try {
+      HttpURLConnection urlc = (HttpURLConnection) url.openConnection();
+      if (HttpURLConnection.HTTP_OK != urlc.getResponseCode()) {
+        fatal("Solr returned an error #" + urlc.getResponseCode() + 
+            " " + urlc.getResponseMessage());
+      }
+    } catch (IOException e) {
+      fatal("An error occurred while sending a request to "+url+". Please check that Solr is running.");
+    }
+  }
+
+  /**
   * Reads data from the data stream and posts it to Solr,
   * writes the response to output
    */
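
With commit and optimize reworked as GET requests, the operations that the
commit/optimize system properties trigger can also be invoked directly. A
short sketch; the URL-taking constructor mirrors how main() builds the tool
and is an assumption here:

    import java.net.URL;
    import org.apache.solr.util.SimplePostTool;

    class PostAndCommit {
      public static void main(String[] args) throws Exception {
        SimplePostTool tool = new SimplePostTool(new URL(SimplePostTool.DEFAULT_POST_URL));
        tool.commit();    // GET http://localhost:8983/solr/update?commit=true
        tool.optimize();  // GET http://localhost:8983/solr/update?optimize=true
      }
    }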

Modified: lucene/dev/branches/LUCENE2793/solr/src/site/src/documentation/content/xdocs/index.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/site/src/documentation/content/xdocs/index.xml?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/site/src/documentation/content/xdocs/index.xml (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/site/src/documentation/content/xdocs/index.xml Thu Jul  7 09:03:58 2011
@@ -66,6 +66,29 @@ customization is required.
     </section>
     <section id="news">
       <title>News</title>
+<section>
+   <title>May 2011 - Solr 3.2 Released</title>
+  <p>The Lucene PMC is pleased to announce the release of <a href="http://www.apache.org/dyn/closer.cgi/lucene/solr">Apache Solr 3.2</a>!
+  </p> 
+   <p>
+    Solr's version number was synced with Lucene following the Lucene/Solr merge,
+    so Solr 3.2 contains Lucene 3.2 and is the first release since Solr 3.1.
+   </p>
+  <p>
+   Solr 3.2 release highlights include:
+ </p>
+  <ul>
+    <li>Ability to specify overwrite and commitWithin as request parameters when using the JSON update format</li>
+    <li>TermQParserPlugin, useful when generating filter queries from terms returned from field faceting or the terms component.</li>
+    <li>DebugComponent now supports using a NamedList to model Explanation objects in its responses instead of Explanation.toString</li>
+    <li>Improvements to the UIMA and Carrot2 integrations</li>
+    <li>Bugfixes and improvements from Apache Lucene 3.2</li>
+  </ul>
+     
+  <p>See the <a href="http://svn.apache.org/repos/asf/lucene/dev/tags/lucene_solr_3_2/solr/CHANGES.txt">release notes</a> for a more complete list of all the new features, improvements, and bugfixes.
+ </p>
+
+</section>
             <section>
    <title>March 2011 - Solr 3.1 Released</title>
   <p>The Lucene PMC is pleased to announce the release of <a href="http://www.apache.org/dyn/closer.cgi/lucene/solr">Apache Solr 3.1</a>!
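
The TermQParserPlugin highlighted in the 3.2 announcement above targets
exactly that round trip: a value returned by field faceting goes straight
into a filter query with no query-syntax escaping. A SolrJ sketch; the field
name and value used are examples:

    import org.apache.solr.client.solrj.SolrQuery;

    class TermFilterExample {
      static SolrQuery facetDrillDown(String field, String rawFacetValue) {
        SolrQuery q = new SolrQuery("*:*");
        // {!term} treats the value as one verbatim term; no escaping required.
        q.addFilterQuery("{!term f=" + field + "}" + rawFacetValue);
        return q;
      }
    }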

Modified: lucene/dev/branches/LUCENE2793/solr/src/solrj/org/apache/solr/client/solrj/impl/StreamingUpdateSolrServer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE2793/solr/src/solrj/org/apache/solr/client/solrj/impl/StreamingUpdateSolrServer.java?rev=1143719&r1=1143718&r2=1143719&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE2793/solr/src/solrj/org/apache/solr/client/solrj/impl/StreamingUpdateSolrServer.java (original)
+++ lucene/dev/branches/LUCENE2793/solr/src/solrj/org/apache/solr/client/solrj/impl/StreamingUpdateSolrServer.java Thu Jul  7 09:03:58 2011
@@ -129,8 +129,7 @@ public class StreamingUpdateSolrServer e
                       if( fmt != null ) {
                         log.info( fmt );
                         writer.write( String.format( fmt, 
-                            params.getBool( UpdateParams.WAIT_SEARCHER, false )+"",
-                            params.getBool( UpdateParams.WAIT_FLUSH, false )+"") );
+                            params.getBool( UpdateParams.WAIT_SEARCHER, false )+"") );
                       }
                     }
                     


