lucene-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mikemcc...@apache.org
Subject svn commit: r1562403 [6/7] - in /lucene/dev/branches/lucene5376: ./ dev-tools/ dev-tools/idea/lucene/suggest/ dev-tools/idea/solr/core/src/java/ lucene/ lucene/analysis/ lucene/analysis/common/ lucene/analysis/common/src/java/org/apache/lucene/analysis...
Date Wed, 29 Jan 2014 11:14:56 GMT
Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/CursorPagingTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/CursorPagingTest.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/CursorPagingTest.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/CursorPagingTest.java Wed Jan 29 11:14:53 2014
@@ -120,6 +120,9 @@ public class CursorPagingTest extends So
     String cursorMark;
     SolrParams params = null;
     
+    final String intsort = "int" + (random().nextBoolean() ? "" : "_dv");
+    final String intmissingsort = defaultCodecSupportsMissingDocValues() ? intsort : "int";
+
     // trivial base case: ensure cursorMark against an empty index doesn't blow up
     cursorMark = CURSOR_MARK_START;
     params = params("q", "*:*", 
@@ -145,7 +148,7 @@ public class CursorPagingTest extends So
     assertU(adoc("id", "6", "str", "a", "float", "64.5", "int", "7"));
     assertU(adoc("id", "1", "str", "a", "float", "64.5", "int", "7"));
     assertU(adoc("id", "4", "str", "a", "float", "11.1", "int", "6"));
-    assertU(adoc("id", "3", "str", "a", "float", "11.1", "int", "3"));
+    assertU(adoc("id", "3", "str", "a", "float", "11.1")); // int is missing
     assertU(commit());
 
     // base case: ensure cursorMark that matches no docs doesn't blow up
@@ -241,7 +244,7 @@ public class CursorPagingTest extends So
                     "facet", "true",
                     "facet.field", "str",
                     "json.nl", "map",
-                    "sort", "int asc, id asc");
+                    "sort", intsort + " asc, id asc");
     cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark)
                               ,"/response/numFound==8"
                               ,"/response/start==0"
@@ -269,6 +272,66 @@ public class CursorPagingTest extends So
                               ,"/facet_counts/facet_fields/str=={'a':4,'b':1,'c':3}"
                               ));
 
+    // int missing first sort with dups, id tie breaker
+    cursorMark = CURSOR_MARK_START;
+    params = params("q", "-int:2001 -int:4055", 
+                    "rows","3",
+                    "fl", "id",
+                    "json.nl", "map",
+                    "sort", intmissingsort + "_first asc, id asc");
+    cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark)
+                              ,"/response/numFound==8"
+                              ,"/response/start==0"
+                              ,"/response/docs==[{'id':3},{'id':7},{'id':0}]"
+                              );
+    cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark)
+                              ,"/response/numFound==8"
+                              ,"/response/start==0"
+                              ,"/response/docs==[{'id':4},{'id':1},{'id':6}]"
+                              );
+    cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark)
+                              ,"/response/numFound==8"
+                              ,"/response/start==0"
+                              ,"/response/docs==[{'id':9},{'id':2}]"
+                              );
+    // no more, so no change to cursorMark, and no new docs
+    assertEquals(cursorMark,
+                 assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark)
+                              ,"/response/numFound==8" 
+                              ,"/response/start==0"
+                              ,"/response/docs==[]"
+                              ));
+
+    // int missing last sort with dups, id tie breaker
+    cursorMark = CURSOR_MARK_START;
+    params = params("q", "-int:2001 -int:4055", 
+                    "rows","3",
+                    "fl", "id",
+                    "json.nl", "map",
+                    "sort", intmissingsort + "_last asc, id asc");
+    cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark)
+                              ,"/response/numFound==8"
+                              ,"/response/start==0"
+                              ,"/response/docs==[{'id':7},{'id':0},{'id':4}]"
+                              );
+    cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark)
+                              ,"/response/numFound==8"
+                              ,"/response/start==0"
+                              ,"/response/docs==[{'id':1},{'id':6},{'id':9}]"
+                              );
+    cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark)
+                              ,"/response/numFound==8"
+                              ,"/response/start==0"
+                              ,"/response/docs==[{'id':2},{'id':3}]"
+                              );
+    // no more, so no change to cursorMark, and no new docs
+    assertEquals(cursorMark,
+                 assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark)
+                              ,"/response/numFound==8" 
+                              ,"/response/start==0"
+                              ,"/response/docs==[]"
+                              ));
+
     // string sort with dups, id tie breaker
     cursorMark = CURSOR_MARK_START;
     params = params("q", "*:*", 
@@ -298,7 +361,7 @@ public class CursorPagingTest extends So
     params = params("q", "*:*", 
                     "rows","2",
                     "fl", "id",
-                    "sort", "float asc, int desc, id desc");
+                    "sort", "float asc, "+intsort+" desc, id desc");
     cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark)
                               ,"/response/numFound==10"
                               ,"/response/start==0"
@@ -338,7 +401,7 @@ public class CursorPagingTest extends So
     params = params("q", "id:3 id:7", 
                     "rows","111",
                     "fl", "id",
-                    "sort", "int asc, id asc");
+                    "sort", intsort + " asc, id asc");
     cursorMark = assertCursor(req(params, CURSOR_MARK_PARAM, cursorMark)
                               ,"/response/numFound==2"
                               ,"/response/start==0"
@@ -367,7 +430,7 @@ public class CursorPagingTest extends So
     ids = assertFullWalkNoDups(9, params("q", "*:*", 
                                          "rows", "3",
                                          "fq", "-id:6",
-                                         "sort", "float desc, id asc, int asc"));
+                                         "sort", "float desc, id asc, "+intsort+" asc"));
     assertEquals(9, ids.size());
     assertFalse("matched on id:6 unexpectedly", ids.exists(6));
     ids = assertFullWalkNoDups(9, params("q", "float:[0 TO *] int:7 id:6", 
@@ -451,7 +514,7 @@ public class CursorPagingTest extends So
     assertU(adoc("id", "3", "str", "a", "float", "11.1", "int", "3"));
     assertU(commit());
 
-    final Collection<String> allFieldNames = getAllFieldNames();
+    final Collection<String> allFieldNames = getAllSortFieldNames();
 
     final SolrInfoMBean filterCacheStats 
       = h.getCore().getInfoRegistry().get("filterCache");
@@ -488,7 +551,7 @@ public class CursorPagingTest extends So
   /** randomized testing of a non-trivial number of docs using assertFullWalkNoDups 
    */
   public void testRandomSortsOnLargeIndex() throws Exception {
-    final Collection<String> allFieldNames = getAllFieldNames();
+    final Collection<String> allFieldNames = getAllSortFieldNames();
 
     final int initialDocs = _TestUtil.nextInt(random(),100,200);
     final int totalDocs = atLeast(5000);
@@ -555,16 +618,45 @@ public class CursorPagingTest extends So
   }
   
   /**
-   * a list of the fields in the schema - excluding _version_
+   * An immutable list of the fields in the schema that can be used for sorting,
+   * deterministically random order.
    */
-  private Collection<String> getAllFieldNames() {
+  private List<String> getAllSortFieldNames() {
+    return pruneAndDeterministicallySort
+      (h.getCore().getLatestSchema().getFields().keySet());
+  }
+
+  
+  /**
+   * <p>
+   * Given a list of field names in the schema, returns an immutable list in 
+   * deterministically random order with the following things removed:
+   * </p>
+   * <ul>
+   *  <li><code>_version_</code> is removed</li>
+   *  <li><code>dv_last</code> and <code>dv_first</code> fields are removed 
+   *      if the codec doesn't support them</li>
+   * </ul>
+   * @see #defaultCodecSupportsMissingDocValues
+   */
+  public static List<String> pruneAndDeterministicallySort(Collection<String> raw) {
+
+    final boolean prune_dv_missing = ! defaultCodecSupportsMissingDocValues();
+
     ArrayList<String> names = new ArrayList<String>(37);
-    for (String f : h.getCore().getLatestSchema().getFields().keySet()) {
-      if (! f.equals("_version_")) {
-        names.add(f);
+    for (String f : raw) {
+      if (f.equals("_version_")) {
+        continue;
+      }
+      if (prune_dv_missing && (f.endsWith("_dv_last") || f.endsWith("_dv_first")) ) {
+        continue;
       }
+      names.add(f);
     }
-    return Collections.<String>unmodifiableCollection(names);
+
+    Collections.sort(names);
+    Collections.shuffle(names,random());
+    return Collections.<String>unmodifiableList(names);
   }
 
   /**
@@ -625,9 +717,9 @@ public class CursorPagingTest extends So
     }
     assertU(commit());
 
-    Collection<String> allFieldNames = getAllFieldNames();
+    Collection<String> allFieldNames = getAllSortFieldNames();
     String[] fieldNames = new String[allFieldNames.size()];
-    getAllFieldNames().toArray(fieldNames);
+    allFieldNames.toArray(fieldNames);
     String f = fieldNames[_TestUtil.nextInt(random(), 0, fieldNames.length - 1)];
     String order = 0 == _TestUtil.nextInt(random(), 0, 1) ? " asc" : " desc";
     String sort = f + order + (f.equals("id") ? "" : ", id" + order);

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/analytics/AbstractAnalyticsStatsTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/analytics/AbstractAnalyticsStatsTest.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/analytics/AbstractAnalyticsStatsTest.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/analytics/AbstractAnalyticsStatsTest.java Wed Jan 29 11:14:53 2014
@@ -110,6 +110,7 @@ public class AbstractAnalyticsStatsTest 
         case DATE:    return val;
       }
     } catch (Exception e) {
+      e.printStackTrace();
       fail("Caught exception in getStatResult, xPath = " + sb.toString() + " \nraw data: " + rawResponse);
     }
     fail("Unknown type used in getStatResult");

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java Wed Jan 29 11:14:53 2014
@@ -21,12 +21,12 @@ import java.io.File;
 import java.io.IOException;
 import java.net.MalformedURLException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.Collections;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CompletionService;
 import java.util.concurrent.ExecutorCompletionService;
@@ -336,6 +336,24 @@ public class BasicDistributedZkTest exte
     query(false, new Object[] {"q", "id:[1 TO 5]", CommonParams.DEBUG, CommonParams.RESULTS});
     query(false, new Object[] {"q", "id:[1 TO 5]", CommonParams.DEBUG, CommonParams.QUERY});
 
+    // try commitWithin
+    long before = cloudClient.query(new SolrQuery("*:*")).getResults().getNumFound();
+    ModifiableSolrParams params = new ModifiableSolrParams();
+    params.set("commitWithin", 10);
+    add(cloudClient, params , getDoc("id", 300));
+    
+    long timeout = System.currentTimeMillis() + 15000;
+    while (cloudClient.query(new SolrQuery("*:*")).getResults().getNumFound() != before + 1) {
+      if (timeout <= System.currentTimeMillis()) {
+        fail("commitWithin did not work");
+      }
+      Thread.sleep(100);
+    }
+    
+    for (SolrServer client : clients) {
+      assertEquals("commitWithin did not work", before + 1, client.query(new SolrQuery("*:*")).getResults().getNumFound());
+    }
+    
     // TODO: This test currently fails because debug info is obtained only
     // on shards with matches.
     // query("q","matchesnothing","fl","*,score", "debugQuery", "true");

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java Wed Jan 29 11:14:53 2014
@@ -155,8 +155,13 @@ public class ChaosMonkeyNothingIsSafeTes
         if (RUN_LENGTH != -1) {
           runLength = RUN_LENGTH;
         } else {
-          int[] runTimes = new int[] {5000, 6000, 10000, 15000, 25000, 30000,
-              30000, 45000, 90000, 120000};
+          int[] runTimes;
+          if (TEST_NIGHTLY) {
+            runTimes = new int[] {5000, 6000, 10000, 15000, 25000, 30000,
+                30000, 45000, 90000, 120000};
+          } else {
+            runTimes = new int[] {145000, 240000, 300000};
+          }
           runLength = runTimes[random().nextInt(runTimes.length - 1)];
         }
         
@@ -199,7 +204,7 @@ public class ChaosMonkeyNothingIsSafeTes
       // we expect full throttle fails, but cloud client should not easily fail
       for (StopableThread indexThread : threads) {
         if (indexThread instanceof StopableIndexingThread && !(indexThread instanceof FullThrottleStopableIndexingThread)) {
-          assertEquals("There were expected update fails", 0, ((StopableIndexingThread) indexThread).getFails());
+          assertFalse("There were too many update fails - we expect it can happen, but shouldn't easily", ((StopableIndexingThread) indexThread).getFails() > 1);
         }
       }
       

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java Wed Jan 29 11:14:53 2014
@@ -362,10 +362,10 @@ public class CollectionsAPIDistributedZk
     String baseUrl = getBaseUrl((HttpSolrServer) clients.get(0));
     // now try to remove a collection when a couple of it's nodes are down
     if (secondConfigSet) {
-      createCollection(null, "halfdeletedcollection2", 3, 2, 6,
+      createCollection(null, "halfdeletedcollection2", 3, 3, 6,
           createNewSolrServer("", baseUrl), null, "conf2");
     } else {
-      createCollection(null, "halfdeletedcollection2", 3, 2, 6,
+      createCollection(null, "halfdeletedcollection2", 3, 3, 6,
           createNewSolrServer("", baseUrl), null);
     }
     
@@ -375,6 +375,11 @@ public class CollectionsAPIDistributedZk
     ChaosMonkey.stop(jettys.get(0));
     ChaosMonkey.stop(jettys.get(1));
     
+    // wait for leaders to settle out
+    for (int i = 1; i < 4; i++) {
+      cloudClient.getZkStateReader().getLeaderRetry("halfdeletedcollection2", "shard" + i, 15000);
+    }
+    
     baseUrl = getBaseUrl((HttpSolrServer) clients.get(2));
     
     // remove a collection
@@ -387,7 +392,7 @@ public class CollectionsAPIDistributedZk
     createNewSolrServer("", baseUrl).request(request);
     
     cloudClient.getZkStateReader().updateClusterState(true);
-    assertFalse(cloudClient.getZkStateReader().getClusterState().hasCollection("halfdeletedcollection2"));
+    assertFalse("Still found collection that should be gone", cloudClient.getZkStateReader().getClusterState().hasCollection("halfdeletedcollection2"));
     
   }
 

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java Wed Jan 29 11:14:53 2014
@@ -69,7 +69,6 @@ import org.junit.Ignore;
  * Tests the Custom Sharding API.
  */
 @Slow
-@Ignore("I am broken since SOLR-5492")
 public class CustomCollectionTest extends AbstractFullDistribZkTestBase {
 
   private static final String DEFAULT_COLLECTION = "collection1";
@@ -247,16 +246,6 @@ public class CustomCollectionTest extend
     assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound());
     assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound());
 
-    // test shards.info with _route_ param
-    QueryResponse resp = collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a").setParam(ShardParams.SHARDS_INFO, true));
-    NamedList<?> sinfo = (NamedList<?>) resp.getResponse().get(ShardParams.SHARDS_INFO);
-    assertNotNull("missing shard info", sinfo);
-    for (Map.Entry<String,?> entry : sinfo) {
-      NamedList<?> info = (NamedList<?>) entry.getValue();
-      assertTrue("Expected to find numFound in the up shard info",info.get("numFound") != null);
-      assertTrue("Expected to find shardAddress in the up shard info",info.get("shardAddress") != null);
-    }
-
     collectionClient.deleteByQuery("*:*");
     collectionClient.commit(true,true);
     assertEquals(0, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java Wed Jan 29 11:14:53 2014
@@ -29,6 +29,7 @@ import org.apache.solr.common.SolrDocume
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.SolrInputField;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.params.CommonParams;
@@ -38,7 +39,11 @@ import static org.apache.solr.common.par
 import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START;
 import org.apache.solr.search.CursorMark; //jdoc
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.IOException;
+import java.util.List;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -54,6 +59,8 @@ import java.util.Map;
 @Slow
 public class DistribCursorPagingTest extends AbstractFullDistribZkTestBase {
 
+  public static Logger log = LoggerFactory.getLogger(DistribCursorPagingTest.class);
+
   public DistribCursorPagingTest() {
     System.setProperty("solr.test.useFilterForSortedQuery", Boolean.toString(random().nextBoolean()));
     configString = CursorPagingTest.TEST_SOLRCONFIG_NAME;
@@ -142,7 +149,10 @@ public class DistribCursorPagingTest ext
     String cursorMark = CURSOR_MARK_START;
     SolrParams params = null;
     QueryResponse rsp = null;
-    
+
+    final String intsort = "int" + (random().nextBoolean() ? "" : "_dv");
+    final String intmissingsort = defaultCodecSupportsMissingDocValues() ? intsort : "int";
+
     // trivial base case: ensure cursorMark against an empty index doesn't blow up
     cursorMark = CURSOR_MARK_START;
     params = params("q", "*:*", 
@@ -166,7 +176,7 @@ public class DistribCursorPagingTest ext
     indexDoc(sdoc("id", "6", "str", "a", "float", "64.5", "int", "7"));
     indexDoc(sdoc("id", "1", "str", "a", "float", "64.5", "int", "7"));
     indexDoc(sdoc("id", "4", "str", "a", "float", "11.1", "int", "6"));
-    indexDoc(sdoc("id", "3", "str", "a", "float", "11.1", "int", "3"));
+    indexDoc(sdoc("id", "3", "str", "a", "float", "11.1")); // int is missing
     commit();
 
     // base case: ensure cursorMark that matches no docs doesn't blow up
@@ -241,7 +251,7 @@ public class DistribCursorPagingTest ext
                     "facet", "true",
                     "facet.field", "str",
                     "json.nl", "map",
-                    "sort", "int asc, id asc");
+                    "sort", intsort + " asc, id asc");
     rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
     assertNumFound(8, rsp);
     assertStartsAt(0, rsp);
@@ -275,6 +285,70 @@ public class DistribCursorPagingTest ext
     assertEquals("no more docs, but cursorMark has changed", 
                  cursorMark, assertHashNextCursorMark(rsp));
   
+    // int missing first sort with dups, id tie breaker
+    cursorMark = CURSOR_MARK_START;
+    params = params("q", "-int:2001 -int:4055", 
+                    "rows","3",
+                    "fl", "id",
+                    "json.nl", "map",
+                    "sort", intmissingsort + "_first asc, id asc");
+    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
+    assertNumFound(8, rsp);
+    assertStartsAt(0, rsp);
+    assertDocList(rsp, 3, 7, 0);
+    cursorMark = assertHashNextCursorMark(rsp);
+    //
+    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
+    assertNumFound(8, rsp);
+    assertStartsAt(0, rsp);
+    assertDocList(rsp, 4, 1, 6);
+    cursorMark = assertHashNextCursorMark(rsp);
+    //
+    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
+    assertNumFound(8, rsp);
+    assertStartsAt(0, rsp);
+    assertDocList(rsp, 9, 2);
+    cursorMark = assertHashNextCursorMark(rsp);
+    //
+    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
+    assertNumFound(8, rsp);
+    assertStartsAt(0, rsp);
+    assertDocList(rsp);
+    assertEquals("no more docs, but cursorMark has changed", 
+                 cursorMark, assertHashNextCursorMark(rsp));
+
+    // int missing last sort with dups, id tie breaker
+    cursorMark = CURSOR_MARK_START;
+    params = params("q", "-int:2001 -int:4055", 
+                    "rows","3",
+                    "fl", "id",
+                    "json.nl", "map",
+                    "sort", intmissingsort + "_last asc, id asc");
+    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
+    assertNumFound(8, rsp);
+    assertStartsAt(0, rsp);
+    assertDocList(rsp, 7, 0, 4);
+    cursorMark = assertHashNextCursorMark(rsp);
+    //
+    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
+    assertNumFound(8, rsp);
+    assertStartsAt(0, rsp);
+    assertDocList(rsp, 1, 6, 9);
+    cursorMark = assertHashNextCursorMark(rsp);
+    //
+    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
+    assertNumFound(8, rsp);
+    assertStartsAt(0, rsp);
+    assertDocList(rsp, 2, 3);
+    cursorMark = assertHashNextCursorMark(rsp);
+    //
+    rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
+    assertNumFound(8, rsp);
+    assertStartsAt(0, rsp);
+    assertDocList(rsp);
+    assertEquals("no more docs, but cursorMark has changed", 
+                 cursorMark, assertHashNextCursorMark(rsp));
+
     // string sort with dups, id tie breaker
     cursorMark = CURSOR_MARK_START;
     params = params("q", "*:*", 
@@ -305,7 +379,7 @@ public class DistribCursorPagingTest ext
     params = params("q", "*:*", 
                     "rows","2",
                     "fl", "id",
-                    "sort", "float asc, int desc, id desc");
+                    "sort", "float asc, "+intsort+" desc, id desc");
     rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
     assertNumFound(10, rsp);
     assertStartsAt(0, rsp);
@@ -349,7 +423,7 @@ public class DistribCursorPagingTest ext
     params = params("q", "id:3 id:7", 
                     "rows","111",
                     "fl", "id",
-                    "sort", "int asc, id asc");
+                    "sort", intsort + " asc, id asc");
     rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
     assertNumFound(2, rsp);
     assertStartsAt(0, rsp);
@@ -442,35 +516,67 @@ public class DistribCursorPagingTest ext
   /** randomized testing of a non-trivial number of docs using assertFullWalkNoDups 
    */
   public void doRandomSortsOnLargeIndex() throws Exception {
-    final Collection<String> allFieldNames = getAllFieldNames();
+    final Collection<String> allFieldNames = getAllSortFieldNames();
 
-    final int initialDocs = _TestUtil.nextInt(random(),100,200);
+    final int numInitialDocs = _TestUtil.nextInt(random(),100,200);
     final int totalDocs = atLeast(5000);
 
     // start with a smallish number of documents, and test that we can do a full walk using a 
     // sort on *every* field in the schema...
 
-    for (int i = 1; i <= initialDocs; i++) {
+    List<SolrInputDocument> initialDocs = new ArrayList<SolrInputDocument>();
+    for (int i = 1; i <= numInitialDocs; i++) {
       SolrInputDocument doc = CursorPagingTest.buildRandomDocument(i);
+      initialDocs.add(doc);
       indexDoc(doc);
     }
     commit();
 
+    log.info("SOLR-5652: Beginning Loop over smallish num of docs");
+    final boolean SOLR_5652 = true;
+
     for (String f : allFieldNames) {
       for (String order : new String[] {" asc", " desc"}) {
         String sort = f + order + ("id".equals(f) ? "" : ", id" + order);
         String rows = "" + _TestUtil.nextInt(random(),13,50);
-        SentinelIntSet ids = assertFullWalkNoDups(initialDocs, 
+        SentinelIntSet ids = assertFullWalkNoDups(SOLR_5652,
+                                                  numInitialDocs,
                                                   params("q", "*:*",
-                                                         "fl","id",
+                                                         "fl","id,"+f,
                                                          "rows",rows,
                                                          "sort",sort));
-        assertEquals(initialDocs, ids.size());
+        if (numInitialDocs != ids.size()) {
+          StringBuilder message = new StringBuilder
+              ("Expected " + numInitialDocs + " docs but got " + ids.size() + ". ");
+          message.append("sort=");
+          message.append(sort);
+          message.append(". ");
+          if (ids.size() < numInitialDocs) {
+            message.append("Missing doc(s): ");
+            for (SolrInputDocument doc : initialDocs) {
+              int id = ((Integer)doc.get("id").getValue()).intValue();
+              if ( ! ids.exists(id)) {
+                QueryResponse rsp = cloudClient.query(params("q", "id:" + id,
+                                                             "rows", "1"));
+                if (0 == rsp.getResults().size()) {
+                  message.append("<NOT RETRIEVABLE>:");
+                  message.append(doc.values());
+                } else {
+                  message.append(rsp.getResults().get(0).getFieldValueMap().toString());
+                }
+                message.append("; ");
+              }
+            }
+          }
+          fail(message.toString());
+        }
       }
     }
 
+    log.info("SOLR-5652: Ending Loop over smallish num of docs");
+
     // now add a lot more docs, and test a handful of randomized multi-level sorts
-    for (int i = initialDocs+1; i <= totalDocs; i++) {
+    for (int i = numInitialDocs+1; i <= totalDocs; i++) {
       SolrInputDocument doc = CursorPagingTest.buildRandomDocument(i);
       indexDoc(doc);
     }
@@ -484,7 +590,7 @@ public class DistribCursorPagingTest ext
       final boolean matchAll = random().nextBoolean();
       final String q = matchAll ? "*:*" : CursorPagingTest.buildRandomQuery();
 
-      SentinelIntSet ids = assertFullWalkNoDups(totalDocs, 
+      SentinelIntSet ids = assertFullWalkNoDups(totalDocs,
                                                 params("q", q,
                                                        "fl",fl,
                                                        "rows",rows,
@@ -498,21 +604,21 @@ public class DistribCursorPagingTest ext
   }
   
   /**
-   * Asks the LukeRequestHandler on the control client for a list of the fields in the schema - excluding _version_
+   * Asks the LukeRequestHandler on the control client for a list of the fields in the 
+   * schema and then prunes that list down to just the fields that can be used for sorting,
+   * and returns them as an immutable list in a deterministically random order.
    */
-  private Collection<String> getAllFieldNames() throws SolrServerException, IOException {
+  private List<String> getAllSortFieldNames() throws SolrServerException, IOException {
     LukeRequest req = new LukeRequest("/admin/luke");
     req.setShowSchema(true); 
     NamedList<Object> rsp = controlClient.request(req);
     NamedList<Object> fields = (NamedList) ((NamedList)rsp.get("schema")).get("fields");
     ArrayList<String> names = new ArrayList<String>(fields.size());
     for (Map.Entry<String,Object> item : fields) {
-      String f = item.getKey();
-      if (! f.equals("_version_")) {
-        names.add(item.getKey());
-      }
+      names.add(item.getKey());
     }
-    return Collections.<String>unmodifiableCollection(names);
+    
+    return CursorPagingTest.pruneAndDeterministicallySort(names);
   }
 
   /**
@@ -604,11 +710,19 @@ public class DistribCursorPagingTest ext
    * </p>
    */
   public SentinelIntSet assertFullWalkNoDups(int maxSize, SolrParams params) throws Exception {
+    return assertFullWalkNoDups(false, maxSize, params);
+  }
+
+  /** :TODO: refactor method into two arg version once SOLR-5652 is resolved */
+  private SentinelIntSet assertFullWalkNoDups(final boolean verbose, 
+                                              final int maxSize, 
+                                              final SolrParams params) throws Exception {
     SentinelIntSet ids = new SentinelIntSet(maxSize, -1);
     String cursorMark = CURSOR_MARK_START;
     int docsOnThisPage = Integer.MAX_VALUE;
     while (0 < docsOnThisPage) {
-      QueryResponse rsp = cloudClient.query(p(params, CURSOR_MARK_PARAM, cursorMark));
+      final SolrParams p = p(params, CURSOR_MARK_PARAM, cursorMark);
+      QueryResponse rsp = cloudClient.query(p);
       String nextCursorMark = assertHashNextCursorMark(rsp);
       SolrDocumentList docs = extractDocList(rsp);
       docsOnThisPage = docs.size();
@@ -621,12 +735,24 @@ public class DistribCursorPagingTest ext
         assertEquals("no more docs, but "+CURSOR_MARK_NEXT+" isn't same",
                      cursorMark, nextCursorMark);
       }
+
+      if (verbose) { // SOLR-5652
+        // SolrDocument is a bit more verbose than we need
+        StringBuilder s = new StringBuilder();
+        for (SolrDocument doc : docs) {
+          s.append(doc.getFieldValuesMap().toString());
+          s.append("; ");
+        }
+        log.info("SOLR-5652: ({}) gave us these docs: {}", p, s);
+      }
+
       for (SolrDocument doc : docs) {
         int id = ((Integer)doc.get("id")).intValue();
         if (ids.exists(id)) {
-          String msg = "walk already seen: " + id;
+          String msg = "(" + p + ") walk already seen: " + id;
           try {
-            queryAndCompareShards(params("q","id:"+id));
+            queryAndCompareShards(params("distrib","false",
+                                         "q","id:"+id));
           } catch (AssertionError ae) {
             throw new AssertionError(msg + ", found shard inconsistency that would explain it...", ae);
           }

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java Wed Jan 29 11:14:53 2014
@@ -18,6 +18,8 @@ package org.apache.solr.cloud;
  */
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrQuery;
@@ -25,6 +27,7 @@ import org.apache.solr.client.solrj.Solr
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrServer;
 import org.apache.solr.client.solrj.impl.HttpSolrServer;
+import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
@@ -34,11 +37,11 @@ import org.apache.solr.common.cloud.Solr
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.CollectionParams.CollectionAction;
 import org.apache.solr.update.VersionInfo;
 import org.apache.solr.update.processor.DistributedUpdateProcessor;
 import org.apache.zookeeper.CreateMode;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 
 /**
  * Super basic testing, no shard restarting or anything.
@@ -49,12 +52,14 @@ public class FullSolrCloudDistribCmdsTes
   
   @BeforeClass
   public static void beforeSuperClass() {
+    schemaString = "schema15.xml";      // we need a string id
   }
   
   public FullSolrCloudDistribCmdsTest() {
     super();
-    shardCount = 4;
-    sliceCount = 2;
+    fixShardCount = true;
+    shardCount = 6;
+    sliceCount = 3;
   }
   
   @Override
@@ -124,21 +129,28 @@ public class FullSolrCloudDistribCmdsTes
     
     docId = testIndexQueryDeleteHierarchical(docId);
     
-    testIndexingWithSuss();
+    docId = testIndexingDocPerRequestWithHttpSolrServer(docId);
+    
+    testIndexingWithSuss(docId);
     
     // TODO: testOptimisticUpdate(results);
     
     testDeleteByQueryDistrib();
     
-    testThatCantForwardToLeaderFails();
+    docId = testThatCantForwardToLeaderFails(docId);
+    
+    
+    docId = testIndexingBatchPerRequestWithHttpSolrServer(docId);
   }
 
-  private void testThatCantForwardToLeaderFails() throws Exception {
+  private long testThatCantForwardToLeaderFails(long docId) throws Exception {
     ZkStateReader zkStateReader = cloudClient.getZkStateReader();
     ZkNodeProps props = zkStateReader.getLeaderRetry(DEFAULT_COLLECTION, "shard1");
     
     chaosMonkey.stopShard("shard1");
-
+    
+    Thread.sleep(1000);
+    
     // fake that the leader is still advertised
     String leaderPath = ZkStateReader.getShardLeadersPath(DEFAULT_COLLECTION, "shard1");
     SolrZkClient zkClient = new SolrZkClient(zkServer.getZkAddress(), 10000);
@@ -146,9 +158,9 @@ public class FullSolrCloudDistribCmdsTes
     try {
       zkClient.makePath(leaderPath, ZkStateReader.toJSON(props),
           CreateMode.EPHEMERAL, true);
-      for (int i = 200; i < 210; i++) {
+      for (int i = 0; i < 200; i++) {
         try {
-          index_specific(cloudClient, id, i);
+          index_specific(shardToJetty.get("shard2").get(0).client.solrClient, id, docId++);
         } catch (SolrException e) {
           // expected
           fails++;
@@ -162,8 +174,9 @@ public class FullSolrCloudDistribCmdsTes
     } finally {
       zkClient.close();
     }
-    
+
     assertTrue("A whole shard is down - some of these should fail", fails > 0);
+    return docId;
   }
 
   private long addTwoDocsInOneRequest(long docId) throws
@@ -171,14 +184,8 @@ public class FullSolrCloudDistribCmdsTes
     QueryResponse results;
     UpdateRequest uReq;
     uReq = new UpdateRequest();
-    //uReq.setParam(UpdateParams.UPDATE_CHAIN, DISTRIB_UPDATE_CHAIN);
-    SolrInputDocument doc1 = new SolrInputDocument();
-
-    addFields(doc1, "id", docId++);
-    uReq.add(doc1);
-    SolrInputDocument doc2 = new SolrInputDocument();
-    addFields(doc2, "id", docId++);
-    uReq.add(doc2);
+    docId = addDoc(docId, uReq);
+    docId = addDoc(docId, uReq);
     
     uReq.process(cloudClient);
     uReq.process(controlClient);
@@ -307,23 +314,146 @@ public class FullSolrCloudDistribCmdsTes
     return docId;
   }
   
-  private void testIndexingWithSuss() throws Exception {
+  
+  private long testIndexingDocPerRequestWithHttpSolrServer(long docId) throws Exception {
+    int docs = random().nextInt(TEST_NIGHTLY ? 4013 : 97) + 1;
+    for (int i = 0; i < docs; i++) {
+      UpdateRequest uReq;
+      uReq = new UpdateRequest();
+      docId = addDoc(docId, uReq);
+      
+      uReq.process(cloudClient);
+      uReq.process(controlClient);
+      
+    }
+    commit();
+    
+    checkShardConsistency();
+    assertDocCounts(VERBOSE);
+    
+    return docId++;
+  }
+  
+  private long testIndexingBatchPerRequestWithHttpSolrServer(long docId) throws Exception {
+    
+    // remove collection
+    ModifiableSolrParams params = new ModifiableSolrParams();
+    params.set("action", CollectionAction.DELETE.toString());
+    params.set("name", "collection1");
+    QueryRequest request = new QueryRequest(params);
+    request.setPath("/admin/collections");
+    
+  
+    cloudClient.request(request);
+    
+    controlClient.deleteByQuery("*:*");
+    controlClient.commit();
+    
+    // sometimes we use an oversharded collection
+    createCollection(null, "collection2", 7, 3, 100000, cloudClient, null, "conf1");
+    cloudClient.setDefaultCollection("collection2");
+    waitForRecoveriesToFinish("collection2", false);
+    
+    class IndexThread extends Thread {
+      Integer name;
+      
+      public IndexThread(Integer name) {
+        this.name = name;
+      }
+      
+      @Override
+      public void run() {
+        int rnds = random().nextInt(TEST_NIGHTLY ? 25 : 3) + 1;
+        for (int i = 0; i < rnds; i++) {
+          UpdateRequest uReq;
+          uReq = new UpdateRequest();
+          int cnt = random().nextInt(TEST_NIGHTLY ? 3313 : 350) + 1;
+          for (int j = 0; j <cnt; j++) {
+            addDoc("thread" + name + "_" + i + "_" + j, uReq);
+          }
+          
+          try {
+            uReq.process(cloudClient);
+            uReq.process(controlClient);
+          } catch (SolrServerException e) {
+            throw new RuntimeException(e);
+          } catch (IOException e) {
+            throw new RuntimeException(e);
+          }
+
+          
+        }
+      }
+    };
+    List<Thread> threads = new ArrayList<Thread>();
+
+    int nthreads = random().nextInt(TEST_NIGHTLY ? 4 : 2) + 1;
+    for (int i = 0; i < nthreads; i++) {
+      IndexThread thread = new IndexThread(i);
+      threads.add(thread);
+      thread.start();
+    }
+    
+    for (Thread thread : threads) {
+      thread.join();
+    }
+    
+    commit();
+    
+    waitForRecoveriesToFinish("collection2", false);
+    
+    printLayout();
+    
+    SolrQuery query = new SolrQuery("*:*");
+    long controlCount = controlClient.query(query).getResults()
+        .getNumFound();
+    long cloudCount = cloudClient.query(query).getResults().getNumFound();
+
+    
+    compareResults(controlCount, cloudCount);
+    
+    assertEquals("Control does not match cloud", controlCount, cloudCount);
+    System.out.println("DOCS:" + controlCount);
+
+    return docId;
+  }
+
+  private long addDoc(long docId, UpdateRequest uReq) {
+    addDoc(Long.toString(docId++), uReq);
+    return docId;
+  }
+  
+  private long addDoc(String docId, UpdateRequest uReq) {
+    SolrInputDocument doc1 = new SolrInputDocument();
+    
+    uReq.add(doc1);
+    addFields(doc1, "id", docId, "text_t", "some text so that it not's negligent work to parse this doc, even though it's still a pretty short doc");
+    return -1;
+  }
+  
+  private long testIndexingWithSuss(long docId) throws Exception {
     ConcurrentUpdateSolrServer suss = new ConcurrentUpdateSolrServer(
-        ((HttpSolrServer) clients.get(0)).getBaseURL(), 3, 1);
+        ((HttpSolrServer) clients.get(0)).getBaseURL(), 10, 2);
+    QueryResponse results = query(cloudClient);
+    long beforeCount = results.getResults().getNumFound();
+    int cnt = TEST_NIGHTLY ? 2933 : 313;
     try {
       suss.setConnectionTimeout(15000);
-      suss.setSoTimeout(30000);
-      for (int i = 100; i < 150; i++) {
-        index_specific(suss, id, i);
+      for (int i = 0; i < cnt; i++) {
+        index_specific(suss, id, docId++, "text_t", "some text so that it not's negligent work to parse this doc, even though it's still a pretty short doc");
       }
       suss.blockUntilFinished();
       
       commit();
-      
+
       checkShardConsistency();
+      assertDocCounts(VERBOSE);
     } finally {
       suss.shutdown();
     }
+    results = query(cloudClient);
+    assertEquals(beforeCount + cnt, results.getResults().getNumFound());
+    return docId;
   }
   
   private void testOptimisticUpdate(QueryResponse results) throws Exception {
@@ -375,5 +505,9 @@ public class FullSolrCloudDistribCmdsTes
   public void tearDown() throws Exception {
     super.tearDown();
   }
+  
+  protected SolrInputDocument addRandFields(SolrInputDocument sdoc) {
+    return sdoc;
+  }
 
 }

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java Wed Jan 29 11:14:53 2014
@@ -21,6 +21,7 @@ import org.apache.solr.client.solrj.Solr
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CloudSolrServer;
 import org.apache.solr.client.solrj.request.QueryRequest;
+import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.common.params.SolrParams;
@@ -29,6 +30,7 @@ import org.apache.zookeeper.data.Stat;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -42,9 +44,10 @@ import java.util.Set;
 import static org.apache.solr.cloud.OverseerCollectionProcessor.MAX_SHARDS_PER_NODE;
 import static org.apache.solr.cloud.OverseerCollectionProcessor.NUM_SLICES;
 import static org.apache.solr.cloud.OverseerCollectionProcessor.REPLICATION_FACTOR;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.getSortedNodeNames;
 import static org.apache.solr.common.cloud.ZkNodeProps.makeMap;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction;
-
+@Ignore("needs to restart the OverSeer")
 public class OverseerRolesTest  extends AbstractFullDistribZkTestBase{
   private CloudSolrServer client;
 
@@ -93,11 +96,10 @@ public class OverseerRolesTest  extends 
     createCollection(collectionName, client);
 
     waitForRecoveriesToFinish(collectionName, false);
-    Set<String> nodes = client.getZkStateReader().getClusterState().getLiveNodes();
+    List<String> l = OverseerCollectionProcessor.getSortedNodeNames(client.getZkStateReader().getZkClient()) ;
 
-    ArrayList<String> l = new ArrayList<>(nodes);
     log.info("All nodes {}", l);
-    String currentLeader = getLeaderNode(client);
+    String currentLeader = OverseerCollectionProcessor.getLeaderNode(client.getZkStateReader().getZkClient());
     log.info("Current leader {} ", currentLeader);
     l.remove(currentLeader);
 
@@ -106,17 +108,22 @@ public class OverseerRolesTest  extends 
     log.info("overseerDesignate {}",overseerDesignate);
     setOverseerRole(CollectionAction.ADDROLE,overseerDesignate);
 
-    long timeout = System.currentTimeMillis()+10000;
+    long timeout = System.currentTimeMillis()+15000;
 
     boolean leaderchanged = false;
     for(;System.currentTimeMillis() < timeout;){
-      if(getLeaderNode(client).equals(overseerDesignate)){
+      if(overseerDesignate.equals(OverseerCollectionProcessor.getLeaderNode(client.getZkStateReader().getZkClient()))){
         log.info("overseer designate is the new overseer");
         leaderchanged =true;
         break;
       }
       Thread.sleep(100);
     }
+    if(!leaderchanged){
+      log.warn("expected {}, current order {}",
+          overseerDesignate,
+          getSortedNodeNames(client.getZkStateReader().getZkClient())+ " ldr :"+ OverseerCollectionProcessor.getLeaderNode(client.getZkStateReader().getZkClient()) );
+    }
     assertTrue("could not set the new overseer",leaderchanged);
 
 
@@ -135,20 +142,15 @@ public class OverseerRolesTest  extends 
     timeout = System.currentTimeMillis()+10000;
     leaderchanged = false;
     for(;System.currentTimeMillis() < timeout;){
-      log.info(" count {}", System.currentTimeMillis());
-      List<String> seqs = client.getZkStateReader().getZkClient().getChildren("/overseer_elect/election", null, true);
-      LeaderElector.sortSeqs(seqs);
-
-      log.info("seqs : {} ",seqs);
-//
-      if(LeaderElector.getNodeName(seqs.get(1)).equals(anotherOverseer)){
+      List<String> sortedNodeNames = getSortedNodeNames(client.getZkStateReader().getZkClient());
+      if(sortedNodeNames.get(1) .equals(anotherOverseer) || sortedNodeNames.get(0).equals(anotherOverseer)){
         leaderchanged =true;
         break;
       }
       Thread.sleep(100);
     }
 
-    assertTrue("New overseer not the frontrunner", leaderchanged);
+    assertTrue("New overseer not the frontrunner : "+ getSortedNodeNames(client.getZkStateReader().getZkClient()) + " expected : "+ anotherOverseer, leaderchanged);
 
 
     client.shutdown();
@@ -156,7 +158,7 @@ public class OverseerRolesTest  extends 
 
   }
 
-  private void setOverseerRole(CollectionAction action, String overseerDesignate) throws SolrServerException, IOException {
+  private void setOverseerRole(CollectionAction action, String overseerDesignate) throws Exception, IOException {
     log.info("Adding overseer designate {} ", overseerDesignate);
     Map m = makeMap(
         "action", action.toString().toLowerCase(Locale.ROOT),
@@ -168,15 +170,6 @@ public class OverseerRolesTest  extends 
     client.request(request);
   }
 
-  private String getLeaderNode(CloudSolrServer client) throws KeeperException, InterruptedException {
-    Map m = (Map) ZkStateReader.fromJSON(client.getZkStateReader().getZkClient().getData("/overseer_elect/leader", null, new Stat(), true));
-    String s = (String) m.get("id");
-//    log.info("leader-id {}",s);
-    String nodeName = LeaderElector.getNodeName(s);
-//    log.info("Leader {}", nodeName);
-    return nodeName;
-  }
-
   protected void createCollection(String COLL_NAME, CloudSolrServer client) throws Exception {
     int replicationFactor = 2;
     int numShards = 4;

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java Wed Jan 29 11:14:53 2014
@@ -4,12 +4,16 @@ import java.io.File;
 import java.io.IOException;
 import java.net.URI;
 import java.util.Locale;
+import java.util.Map;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrTestCaseJ4;
-import org.junit.Assert;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -31,6 +35,8 @@ import org.junit.Assert;
 public class HdfsTestUtil {
   
   private static Locale savedLocale;
+  
+  private static Map<MiniDFSCluster,Timer> timers = new ConcurrentHashMap<MiniDFSCluster,Timer>();
 
   public static MiniDFSCluster setupClass(String dataDir) throws Exception {
     LuceneTestCase.assumeFalse("HDFS tests were disabled by -Dtests.disableHdfs",
@@ -58,7 +64,22 @@ public class HdfsTestUtil {
     
     System.setProperty("solr.hdfs.home", "/solr_hdfs_home");
     
-    MiniDFSCluster dfsCluster = new MiniDFSCluster(conf, dataNodes, true, null);
+    final MiniDFSCluster dfsCluster = new MiniDFSCluster(conf, dataNodes, true, null);
+    dfsCluster.waitActive();
+    
+    NameNodeAdapter.enterSafeMode(dfsCluster.getNameNode(), false);
+    
+    int rnd = LuceneTestCase.random().nextInt(10000);
+    Timer timer = new Timer();
+    timer.schedule(new TimerTask() {
+      
+      @Override
+      public void run() {
+        NameNodeAdapter.leaveSafeMode(dfsCluster.getNameNode());
+      }
+    }, rnd);
+    
+    timers.put(dfsCluster, timer);
     
     SolrTestCaseJ4.useFactory("org.apache.solr.core.HdfsDirectoryFactory");
     
@@ -72,6 +93,7 @@ public class HdfsTestUtil {
     System.clearProperty("test.cache.data");
     System.clearProperty("solr.hdfs.home");
     if (dfsCluster != null) {
+      timers.remove(dfsCluster);
       dfsCluster.shutdown();
     }
     

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java Wed Jan 29 11:14:53 2014
@@ -23,11 +23,15 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Timer;
+import java.util.TimerTask;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServer;
@@ -35,6 +39,7 @@ import org.apache.solr.client.solrj.Solr
 import org.apache.solr.client.solrj.impl.HttpSolrServer;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.cloud.BasicDistributedZkTest;
+import org.apache.solr.cloud.ChaosMonkey;
 import org.apache.solr.common.params.CollectionParams.CollectionAction;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
@@ -52,6 +57,9 @@ public class StressHdfsTest extends Basi
   private static final String DELETE_DATA_DIR_COLLECTION = "delete_data_dir";
   private static MiniDFSCluster dfsCluster;
   
+
+  private boolean testRestartIntoSafeMode;
+  
   @BeforeClass
   public static void setupClass() throws Exception {
 
@@ -67,7 +75,6 @@ public class StressHdfsTest extends Basi
     System.clearProperty("solr.hdfs.home");
     dfsCluster = null;
   }
-
   
   @Override
   protected String getDataDir(String dataDir) throws IOException {
@@ -77,7 +84,8 @@ public class StressHdfsTest extends Basi
   public StressHdfsTest() {
     super();
     sliceCount = 1;
-    shardCount = TEST_NIGHTLY ? 13 : random().nextInt(3) + 1;
+    shardCount = TEST_NIGHTLY ? 7 : random().nextInt(2) + 1;
+    testRestartIntoSafeMode = random().nextBoolean();
   }
   
   protected String getSolrXml() {
@@ -90,6 +98,31 @@ public class StressHdfsTest extends Basi
     for (int i = 0; i < cnt; i++) {
       createAndDeleteCollection();
     }
+
+    if (testRestartIntoSafeMode) {
+      createCollection(DELETE_DATA_DIR_COLLECTION, 1, 1, 1);
+      
+      waitForRecoveriesToFinish(DELETE_DATA_DIR_COLLECTION, false);
+      
+      ChaosMonkey.stop(jettys.get(0));
+      
+      // enter safe mode and restart a node
+      NameNodeAdapter.enterSafeMode(dfsCluster.getNameNode(), false);
+      
+      int rnd = LuceneTestCase.random().nextInt(10000);
+      Timer timer = new Timer();
+      timer.schedule(new TimerTask() {
+        
+        @Override
+        public void run() {
+          NameNodeAdapter.leaveSafeMode(dfsCluster.getNameNode());
+        }
+      }, rnd);
+      
+      ChaosMonkey.start(jettys.get(0));
+      
+      waitForRecoveriesToFinish(DELETE_DATA_DIR_COLLECTION, false);
+    }
   }
 
   private void createAndDeleteCollection() throws SolrServerException,
@@ -97,18 +130,29 @@ public class StressHdfsTest extends Basi
       URISyntaxException {
     
     boolean overshard = random().nextBoolean();
+    int rep;
+    int nShards;
+    int maxReplicasPerNode;
     if (overshard) {
-      createCollection(DELETE_DATA_DIR_COLLECTION, shardCount * 2, 1, 2);
+      nShards = shardCount * 2;
+      maxReplicasPerNode = 8;
+      rep = 1;
     } else {
-      int rep = shardCount / 2;
-      if (rep == 0) rep = 1;
-      createCollection(DELETE_DATA_DIR_COLLECTION, rep, 2, 1);
+      nShards = shardCount / 2;
+      maxReplicasPerNode = 1;
+      rep = 2;
+      if (nShards == 0) nShards = 1;
     }
+    
+    createCollection(DELETE_DATA_DIR_COLLECTION, nShards, rep, maxReplicasPerNode);
 
     waitForRecoveriesToFinish(DELETE_DATA_DIR_COLLECTION, false);
     cloudClient.setDefaultCollection(DELETE_DATA_DIR_COLLECTION);
     cloudClient.getZkStateReader().updateClusterState(true);
     
+    for (int i = 1; i < nShards + 1; i++) {
+      cloudClient.getZkStateReader().getLeaderRetry(DELETE_DATA_DIR_COLLECTION, "shard" + i, 30000);
+    }
     
     // collect the data dirs
     List<String> dataDirs = new ArrayList<String>();

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/highlight/TestPostingsSolrHighlighter.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/highlight/TestPostingsSolrHighlighter.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/highlight/TestPostingsSolrHighlighter.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/highlight/TestPostingsSolrHighlighter.java Wed Jan 29 11:14:53 2014
@@ -155,4 +155,12 @@ public class TestPostingsSolrHighlighter
         req("q", "text:document", "sort", "id asc", "hl", "true", "hl.encoder", "html"),
         "//lst[@name='highlighting']/lst[@name='103']/arr[@name='text']/str='<em>Document</em>&#32;one&#32;has&#32;a&#32;first&#32;&lt;i&gt;sentence&lt;&#x2F;i&gt;&#46;'");
   }
+  
+  public void testWildcard() {
+    assertQ("simplest test", 
+        req("q", "text:doc*ment", "sort", "id asc", "hl", "true", "hl.highlightMultiTerm", "true"),
+        "count(//lst[@name='highlighting']/*)=2",
+        "//lst[@name='highlighting']/lst[@name='101']/arr[@name='text']/str='<em>document</em> one'",
+        "//lst[@name='highlighting']/lst[@name='102']/arr[@name='text']/str='second <em>document</em>'");
+  }
 }

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java Wed Jan 29 11:14:53 2014
@@ -286,5 +286,13 @@ public class TestCollapseQParserPlugin e
     params.add("facet.mincount", "1");
     assertQ(req(params), "*[count(//doc)=1]", "*[count(//lst[@name='facet_fields']/lst[@name='test_ti']/int)=2]");
 
+    // SOLR-5230 - ensure CollapsingFieldValueCollector.finish() is called
+    params = new ModifiableSolrParams();
+    params.add("q", "*:*");
+    params.add("fq", "{!collapse field=group_s}");
+    params.add("group", "true");
+    params.add("group.field", "id");
+    assertQ(req(params), "*[count(//doc)=2]");
+
   }
 }

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/search/TestRecoveryHdfs.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/search/TestRecoveryHdfs.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/search/TestRecoveryHdfs.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/search/TestRecoveryHdfs.java Wed Jan 29 11:14:53 2014
@@ -90,8 +90,8 @@ public class TestRecoveryHdfs extends So
       throw new RuntimeException(e);
     }
     
-    hdfsDataDir = hdfsUri + "/solr/shard1";
-    System.setProperty("solr.data.dir", hdfsUri + "/solr/shard1");
+    //hdfsDataDir = hdfsUri + "/solr/shard1";
+    // System.setProperty("solr.data.dir", hdfsUri + "/solr/shard1");
     System.setProperty("solr.ulog.dir", hdfsUri + "/solr/shard1");
     
     initCore("solrconfig-tlog.xml","schema15.xml");

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java Wed Jan 29 11:14:53 2014
@@ -21,12 +21,22 @@ import com.carrotsearch.randomizedtestin
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
 import com.spatial4j.core.context.SpatialContext;
 import com.spatial4j.core.distance.DistanceUtils;
+import com.spatial4j.core.shape.Point;
+import com.spatial4j.core.shape.Rectangle;
+import com.spatial4j.core.shape.impl.RectangleImpl;
+import org.apache.lucene.spatial.SpatialStrategy;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.schema.AbstractSpatialFieldType;
+import org.apache.solr.schema.FieldType;
+import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.util.SpatialUtils;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import java.text.ParseException;
 import java.util.Arrays;
 
 /**
@@ -151,20 +161,20 @@ public class TestSolr4Spatial extends So
 
     assertQ(req(
         "fl", "id," + fieldName, "q", "*:*", "rows", "1000",
-        "fq", "{!field f="+fieldName+"}Intersects(Circle(89.9,-130 d=9))"),
+        "fq", "{!geofilt sfield="+fieldName+" pt="+IN+" d=9}"),
         "//result/doc/*[@name='" + fieldName + "']//text()='" + OUT + "'");
   }
 
   @Test
-  public void checkQueryEmptyIndex() {
+  public void checkQueryEmptyIndex() throws ParseException {
     checkHits(fieldName, "0,0", 100, 0);//doesn't error
   }
 
-  private void checkHits(String fieldName, String pt, double distKM, int count, int ... docIds) {
+  private void checkHits(String fieldName, String pt, double distKM, int count, int ... docIds) throws ParseException {
     checkHits(fieldName, true, pt, distKM, count, docIds);
   }
 
-  private void checkHits(String fieldName, boolean exact, String ptStr, double distKM, int count, int ... docIds) {
+  private void checkHits(String fieldName, boolean exact, String ptStr, double distKM, int count, int ... docIds) throws ParseException {
     String [] tests = new String[docIds != null && docIds.length > 0 ? docIds.length + 1 : 1];
     //test for presence of required ids first
     int i = 0;
@@ -177,20 +187,23 @@ public class TestSolr4Spatial extends So
     // that there may be a more specific detailed id to investigate.
     tests[i++] = "*[count(//doc)=" + count + "]";
 
-    //Test using the Solr 4 syntax
+    //Test using the Lucene spatial syntax
     {
       //never actually need the score but lets test
       String score = new String[]{null, "none","distance","recipDistance"}[random().nextInt(4)];
 
       double distDEG = DistanceUtils.dist2Degrees(distKM, DistanceUtils.EARTH_MEAN_RADIUS_KM);
-      String circleStr = "Circle(" + ptStr.replaceAll(" ", "") + " d=" + distDEG + ")";
+      Point point = SpatialUtils.parsePoint(ptStr, SpatialContext.GEO);
+      String circleStr = "BUFFER(POINT(" + point.getX()+" "+point.getY()+")," + distDEG + ")";
       String shapeStr;
       if (exact) {
         shapeStr = circleStr;
       } else {//bbox
         //the GEO is an assumption
         SpatialContext ctx = SpatialContext.GEO;
-        shapeStr = ctx.toString( ctx.readShape(circleStr).getBoundingBox() );
+        Rectangle bbox = ctx.readShapeFromWkt(circleStr).getBoundingBox();
+        shapeStr = "ENVELOPE(" + bbox.getMinX() + ", " + bbox.getMaxX() +
+            ", " + bbox.getMaxY() + ", " + bbox.getMinY() + ")";
       }
 
       //FYI default distErrPct=0.025 works with the tests in this file
@@ -200,7 +213,7 @@ public class TestSolr4Spatial extends So
               + "}Intersects(" + shapeStr + ")"),
           tests);
     }
-    //Test using the Solr 3 syntax
+    //Test using geofilt
     {
       assertQ(req(
           "fl", "id", "q", "*:*", "rows", "1000",
@@ -219,8 +232,8 @@ public class TestSolr4Spatial extends So
 
     String score = random().nextBoolean() ? "none" : "distance";//never actually need the score but lets test
     assertQ(req(
-        "fl", "id", "q","*:*", "rows", "1000",
-        "fq", "{! score="+score+" df="+fieldName+"}[32,-80 TO 33,-79]"),//lower-left to upper-right
+        "fl", "id", "q","*:*", "rows", "1000",    // testing quotes in range too
+        "fq", "{! score="+score+" df="+fieldName+"}[32,-80 TO \"33 , -79\"]"),//lower-left to upper-right
 
         "//result/doc/*[@name='id'][.='" + docId + "']",
         "*[count(//doc)=" + count + "]");
@@ -234,8 +247,9 @@ public class TestSolr4Spatial extends So
     assertU(commit());
 
     //test absence of score=distance means it doesn't score
+
     assertJQ(req(
-        "q", fieldName +":\"Intersects(Circle(3,4 d=9))\"",
+        "q", radiusQuery(3, 4, 9, null, null),
         "fl","id,score")
         , 1e-9
         , "/response/docs/[0]/score==1.0"
@@ -244,7 +258,7 @@ public class TestSolr4Spatial extends So
 
     //score by distance
     assertJQ(req(
-        "q", "{! score=distance}"+fieldName +":\"Intersects(Circle(3,4 d=9))\"",
+        "q", radiusQuery(3, 4, 9, "distance", null),
         "fl","id,score",
         "sort","score asc")//want ascending due to increasing distance
         , 1e-3
@@ -255,7 +269,7 @@ public class TestSolr4Spatial extends So
     );
     //score by recipDistance
     assertJQ(req(
-        "q", "{! score=recipDistance}"+fieldName +":\"Intersects(Circle(3,4 d=9))\"",
+        "q", radiusQuery(3, 4, 9, "recipDistance", null),
         "fl","id,score",
         "sort","score desc")//want descending
         , 1e-3
@@ -268,7 +282,7 @@ public class TestSolr4Spatial extends So
     //score by distance and don't filter
     assertJQ(req(
         //circle radius is small and shouldn't match either, but we disable filtering
-        "q", "{! score=distance filter=false}"+fieldName +":\"Intersects(Circle(3,4 d=0.000001))\"",
+        "q", radiusQuery(3, 4, 0.000001, "distance", "false"),
         "fl","id,score",
         "sort","score asc")//want ascending due to increasing distance
         , 1e-3
@@ -280,7 +294,7 @@ public class TestSolr4Spatial extends So
 
     //query again with the query point closer to #101, and check the new ordering
     assertJQ(req(
-        "q", "{! score=distance}"+fieldName +":\"Intersects(Circle(4,0 d=9))\"",
+        "q", radiusQuery(4, 0, 9, "distance", null),
         "fl","id,score",
         "sort","score asc")//want ascending due to increasing distance
         , 1e-4
@@ -293,7 +307,7 @@ public class TestSolr4Spatial extends So
         "q","-id:999",//exclude that doc
         "fl","id,score",
         "sort","query($sortQuery) asc", //want ascending due to increasing distance
-        "sortQuery", "{! score=distance}"+fieldName +":\"Intersects(Circle(3,4 d=9))\"" )
+        "sortQuery", radiusQuery(3, 4, 9, "distance", null))
         , 1e-4
         , "/response/docs/[0]/id=='100'"
         , "/response/docs/[1]/id=='101'"  );
@@ -303,12 +317,28 @@ public class TestSolr4Spatial extends So
         "q","-id:999",//exclude that doc
         "fl","id,score",
         "sort","query($sortQuery) asc", //want ascending due to increasing distance
-        "sortQuery", "{! score=distance}"+fieldName +":\"Intersects(Circle(4,0 d=9))\"" )
+        "sortQuery", radiusQuery(4, 0, 9, "distance", null))
         , 1e-4
         , "/response/docs/[0]/id=='101'"
         , "/response/docs/[1]/id=='100'"  );
   }
 
+  private String radiusQuery(double lat, double lon, double dDEG, String score, String filter) {
+    //Choose between the Solr/Geofilt syntax, and the Lucene spatial module syntax
+    if (random().nextBoolean()) {
+      return "{!geofilt " +
+          "sfield=" + fieldName + " "
+          + (score != null ? "score="+score : "") + " "
+          + (filter != null ? "filter="+filter : "") + " "
+          + "pt=" + lat + "," + lon + " d=" + (dDEG * DistanceUtils.DEG_TO_KM) + "}";
+    } else {
+      return "{! "
+          + (score != null ? "score="+score : "") + " "
+          + (filter != null ? "filter="+filter : "") + " "
+          + "}" + fieldName + ":\"Intersects(BUFFER(POINT(" + lon + " " + lat + ")," + dDEG + "))\"";
+    }
+  }
+
   @Test
   public void testSortMultiVal() throws Exception {
     RandomizedTest.assumeFalse("Multivalue not supported for this field", fieldName.equals("pointvector"));
@@ -318,7 +348,7 @@ public class TestSolr4Spatial extends So
     assertU(commit());
 
     assertJQ(req(
-        "q", "{! score=distance}"+fieldName +":\"Intersects(Circle(3,4 d=9))\"",
+        "q", radiusQuery(3, 4, 9, "distance", null),
         "fl","id,score",
         "sort","score asc")//want ascending due to increasing distance
         , 1e-4
@@ -327,4 +357,31 @@ public class TestSolr4Spatial extends So
     );
   }
 
+  @Test
+  public void solr4OldShapeSyntax() throws Exception {
+    assumeFalse("Mostly just valid for prefix-tree", fieldName.equals("pointvector"));
+
+    //we also test that the old syntax is parsed in worldBounds in the schema
+    {
+      IndexSchema schema = h.getCore().getLatestSchema();
+      AbstractSpatialFieldType type = (AbstractSpatialFieldType) schema.getFieldTypeByName("stqpt_u_oldworldbounds");
+      SpatialContext ctx = type.getStrategy("foo").getSpatialContext();
+      assertEquals(new RectangleImpl(0, 1000, 0, 1000, ctx), ctx.getWorldBounds());
+    }
+
+    //syntax supported in Solr 4 but not beyond
+    //   See Spatial4j LegacyShapeReadWriterFormat
+    String rect = "-74.093 41.042 -69.347 44.558";//minX minY maxX maxY
+    String circ = "Circle(4.56,1.23 d=0.0710)";
+
+    //show we can index this (without an error)
+    assertU(adoc("id", "rect", fieldName, rect));
+    assertU(adoc("id", "circ", fieldName, circ));
+    assertU(commit());
+
+    //only testing no error
+    assertJQ(req("q", "{!field f=" + fieldName + "}Intersects(" + rect + ")"));
+    assertJQ(req("q", "{!field f=" + fieldName + "}Intersects(" + circ + ")"));
+  }
+
 }

Modified: lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/store/hdfs/HdfsLockFactoryTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/store/hdfs/HdfsLockFactoryTest.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/store/hdfs/HdfsLockFactoryTest.java (original)
+++ lucene/dev/branches/lucene5376/solr/core/src/test/org/apache/solr/store/hdfs/HdfsLockFactoryTest.java Wed Jan 29 11:14:53 2014
@@ -75,7 +75,7 @@ public class HdfsLockFactoryTest extends
     assertTrue("We could not get the lock when it should be available", success);
     success = lock.obtain();
     assertFalse("We got the lock but it should be unavailble", success);
-    lock.release();
+    lock.close();
     success = lock.obtain();
     assertTrue("We could not get the lock when it should be available", success);
     success = lock.obtain();

Modified: lucene/dev/branches/lucene5376/solr/example/example-DIH/solr/db/conf/db-data-config.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/example/example-DIH/solr/db/conf/db-data-config.xml?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/example/example-DIH/solr/db/conf/db-data-config.xml (original)
+++ lucene/dev/branches/lucene5376/solr/example/example-DIH/solr/db/conf/db-data-config.xml Wed Jan 29 11:14:53 2014
@@ -20,7 +20,7 @@
                         query="select DESCRIPTION from category where ID = '${item_category.CATEGORY_ID}'"
                         deltaQuery="select ID from category where last_modified > '${dataimporter.last_index_time}'"
                         parentDeltaQuery="select ITEM_ID, CATEGORY_ID from item_category where CATEGORY_ID=${category.ID}">
-                    <field column="description" name="cat" />
+                    <field column="DESCRIPTION" name="cat" />
                 </entity>
             </entity>
         </entity>

Modified: lucene/dev/branches/lucene5376/solr/licenses/spatial4j-NOTICE.txt
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/licenses/spatial4j-NOTICE.txt?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/licenses/spatial4j-NOTICE.txt (original)
+++ lucene/dev/branches/lucene5376/solr/licenses/spatial4j-NOTICE.txt Wed Jan 29 11:14:53 2014
@@ -1,5 +1,5 @@
-Apache Commons Lang
-Copyright 2001-2008 The Apache Software Foundation
+Spatial4j
+Copyright 2012-2014 The Apache Software Foundation
 
 This product includes software developed by
 The Apache Software Foundation (http://www.apache.org/).

Modified: lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrServer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrServer.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrServer.java (original)
+++ lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrServer.java Wed Jan 29 11:14:53 2014
@@ -255,16 +255,8 @@ public class ConcurrentUpdateSolrServer 
         }
         handleError(e);
       } finally {
-
-        // remove it from the list of running things unless we are the last
-        // runner and the queue is full...
-        // in which case, the next queue.put() would block and there would be no
-        // runners to handle it.
-        // This case has been further handled by using offer instead of put, and
-        // using a retry loop
-        // to avoid blocking forever (see request()).
         synchronized (runners) {
-          if (runners.size() == 1 && queue.remainingCapacity() == 0) {
+          if (runners.size() == 1 && !queue.isEmpty()) {
             // keep this runner alive
             scheduler.execute(this);
           } else {
@@ -394,6 +386,10 @@ public class ConcurrentUpdateSolrServer 
           runner.runnerLock.lock();
           runner.runnerLock.unlock();
         } else if (!queue.isEmpty()) {
+          // failsafe - should not be necessary, but a good
+          // precaution to ensure blockUntilFinished guarantees
+          // all updates are emptied from the queue regardless of
+          // any bugs around starting or retaining runners
           Runner r = new Runner();
           runners.add(r);
           scheduler.execute(r);

Modified: lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java (original)
+++ lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/client/solrj/request/JavaBinUpdateRequestCodec.java Wed Jan 29 11:14:53 2014
@@ -25,7 +25,6 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Set;
 
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.params.ModifiableSolrParams;
@@ -60,9 +59,6 @@ public class JavaBinUpdateRequestCodec {
     }
     Iterator<SolrInputDocument> docIter = null;
 
-    if (updateRequest.getDocuments() != null) {
-      docIter = updateRequest.getDocuments().iterator();
-    }
     if(updateRequest.getDocIterator() != null){
       docIter = updateRequest.getDocIterator();
     }
@@ -70,10 +66,19 @@ public class JavaBinUpdateRequestCodec {
     Map<SolrInputDocument,Map<String,Object>> docMap = updateRequest.getDocumentsMap();
 
     nl.add("params", params);// 0: params
-    nl.add("delByIdMap", updateRequest.getDeleteByIdMap());
+    if (updateRequest.getDeleteByIdMap() != null) {
+      nl.add("delByIdMap", updateRequest.getDeleteByIdMap());
+    }
     nl.add("delByQ", updateRequest.getDeleteQuery());
-    nl.add("docs", docIter);
-    nl.add("docsMap", docMap);
+
+    if (docMap != null) {
+      nl.add("docsMap", docMap.entrySet().iterator());
+    } else {
+      if (updateRequest.getDocuments() != null) {
+        docIter = updateRequest.getDocuments().iterator();
+      }
+      nl.add("docs", docIter);
+    }
     JavaBinCodec codec = new JavaBinCodec();
     codec.marshal(nl, os);
   }
@@ -92,7 +97,7 @@ public class JavaBinUpdateRequestCodec {
   public UpdateRequest unmarshal(InputStream is, final StreamingUpdateHandler handler) throws IOException {
     final UpdateRequest updateRequest = new UpdateRequest();
     List<List<NamedList>> doclist;
-    Map<SolrInputDocument,Map<String,Object>>  docMap;
+    List<Entry<SolrInputDocument,Map<Object,Object>>>  docMap;
     List<String> delById;
     Map<String,Map<String,Object>> delByIdMap;
     List<String> delByQ;
@@ -132,9 +137,11 @@ public class JavaBinUpdateRequestCodec {
       }
 
       private List readOuterMostDocIterator(DataInputInputStream fis) throws IOException {
-        NamedList params = (NamedList) namedList[0].getVal(0);
+        NamedList params = (NamedList) namedList[0].get("params");
         updateRequest.setParams(new ModifiableSolrParams(SolrParams.toSolrParams(params)));
         if (handler == null) return super.readIterator(fis);
+        Integer commitWithin = null;
+        Boolean overwrite = null;
         while (true) {
           Object o = readVal(fis);
           if (o == END_OBJ) break;
@@ -144,16 +151,24 @@ public class JavaBinUpdateRequestCodec {
           } else if (o instanceof NamedList)  {
             UpdateRequest req = new UpdateRequest();
             req.setParams(new ModifiableSolrParams(SolrParams.toSolrParams((NamedList) o)));
-            handler.update(null, req);
+            handler.update(null, req, null, null);
+          } else if (o instanceof Map.Entry){
+            sdoc = (SolrInputDocument) ((Map.Entry) o).getKey();
+            Map p = (Map) ((Map.Entry) o).getValue();
+            if (p != null) {
+              commitWithin = (Integer) p.get(UpdateRequest.COMMIT_WITHIN);
+              overwrite = (Boolean) p.get(UpdateRequest.OVERWRITE);
+            }
           } else  {
+          
             sdoc = (SolrInputDocument) o;
           }
-          handler.update(sdoc, updateRequest);
+          handler.update(sdoc, updateRequest, commitWithin, overwrite);
         }
         return Collections.EMPTY_LIST;
       }
-    };
 
+    };
 
     codec.unmarshal(is);
     
@@ -161,6 +176,7 @@ public class JavaBinUpdateRequestCodec {
     // must be loaded now
     if(updateRequest.getParams()==null) {
       NamedList params = (NamedList) namedList[0].get("params");
+      System.out.println("unmarchal params:" + params);
       if(params!=null) {
         updateRequest.setParams(new ModifiableSolrParams(SolrParams.toSolrParams(params)));
       }
@@ -169,32 +185,12 @@ public class JavaBinUpdateRequestCodec {
     delByIdMap = (Map<String,Map<String,Object>>) namedList[0].get("delByIdMap");
     delByQ = (List<String>) namedList[0].get("delByQ");
     doclist = (List) namedList[0].get("docs");
-    docMap =  (Map<SolrInputDocument,Map<String,Object>>) namedList[0].get("docsMap");
+    docMap =  (List<Entry<SolrInputDocument,Map<Object,Object>>>) namedList[0].get("docsMap");
+    
+
+    // we don't add any docs, because they were already processed
+    // deletes are handled later, and must be passed back on the UpdateRequest
     
-    if (doclist != null && !doclist.isEmpty()) {
-      List<SolrInputDocument> solrInputDocs = new ArrayList<SolrInputDocument>();
-      for (Object o : doclist) {
-        if (o instanceof List) {
-          solrInputDocs.add(listToSolrInputDocument((List<NamedList>)o));
-        } else  {
-          solrInputDocs.add((SolrInputDocument)o);
-        }
-      }
-      updateRequest.add(solrInputDocs);
-    }
-    if (docMap != null && !docMap.isEmpty()) {
-      Set<Entry<SolrInputDocument,Map<String,Object>>> entries = docMap.entrySet();
-      for (Entry<SolrInputDocument,Map<String,Object>> entry : entries) {
-        Map<String,Object> map = entry.getValue();
-        Boolean overwrite = null;
-        Integer commitWithin = null;
-        if (map != null) {
-          overwrite = (Boolean) map.get(UpdateRequest.OVERWRITE);
-          commitWithin = (Integer) map.get(UpdateRequest.COMMIT_WITHIN);
-        }
-        updateRequest.add(entry.getKey(), commitWithin, overwrite);
-      }
-    }
     if (delById != null) {
       for (String s : delById) {
         updateRequest.deleteById(s);
@@ -204,7 +200,7 @@ public class JavaBinUpdateRequestCodec {
       for (Map.Entry<String,Map<String,Object>> entry : delByIdMap.entrySet()) {
         Map<String,Object> params = entry.getValue();
         if (params != null) {
-          Long version = (Long) params.get("ver");
+          Long version = (Long) params.get(UpdateRequest.VER);
           updateRequest.deleteById(entry.getKey(), version);
         } else {
           updateRequest.deleteById(entry.getKey());
@@ -217,8 +213,8 @@ public class JavaBinUpdateRequestCodec {
         updateRequest.deleteByQuery(s);
       }
     }
+    
     return updateRequest;
-
   }
 
   private SolrInputDocument listToSolrInputDocument(List<NamedList> namedList) {
@@ -242,6 +238,6 @@ public class JavaBinUpdateRequestCodec {
   }
 
   public static interface StreamingUpdateHandler {
-    public void update(SolrInputDocument document, UpdateRequest req);
+    public void update(SolrInputDocument document, UpdateRequest req, Integer commitWithin, Boolean override);
   }
 }

Modified: lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java (original)
+++ lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java Wed Jan 29 11:14:53 2014
@@ -47,7 +47,7 @@ import org.apache.solr.common.util.XML;
  */
 public class UpdateRequest extends AbstractUpdateRequest {
   
-  private static final String VER = "ver";
+  public static final String VER = "ver";
   public static final String OVERWRITE = "ow";
   public static final String COMMIT_WITHIN = "cw";
   private Map<SolrInputDocument,Map<String,Object>> documents = null;

Modified: lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/common/cloud/ConnectionManager.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/common/cloud/ConnectionManager.java?rev=1562403&r1=1562402&r2=1562403&view=diff
==============================================================================
--- lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/common/cloud/ConnectionManager.java (original)
+++ lucene/dev/branches/lucene5376/solr/solrj/src/java/org/apache/solr/common/cloud/ConnectionManager.java Wed Jan 29 11:14:53 2014
@@ -63,23 +63,17 @@ public class ConnectionManager implement
   }
   
   private synchronized void connected() {
-    if (disconnectedTimer != null) {
-      disconnectedTimer.cancel();
-      disconnectedTimer = null;
-    }
+    cancelTimer();
     connected = true;
     likelyExpired = false;
     notifyAll();
   }
 
   private synchronized void disconnected() {
-    if (disconnectedTimer != null) {
-      disconnectedTimer.cancel();
-      disconnectedTimer = null;
-    }
+    cancelTimer();
     if (!isClosed) {
-      disconnectedTimer = new Timer(true);
-      disconnectedTimer.schedule(new TimerTask() {
+      Timer newDcTimer = new Timer(true);
+      newDcTimer.schedule(new TimerTask() {
         
         @Override
         public void run() {
@@ -87,11 +81,35 @@ public class ConnectionManager implement
         }
         
       }, (long) (client.getZkClientTimeout() * 0.90));
+      if (isClosed) {
+        // we might have been closed after passing the isClosed check
+        // above and before starting the new timer
+        newDcTimer.cancel();
+      } else {
+        disconnectedTimer = newDcTimer;
+        if (isClosed) {
+          // now handle the case where close() was called after the
+          // isClosed check above but before disconnectedTimer was set -
+          // if close() instead happens after this isClosed check, it
+          // will handle stopping the timer itself
+          cancelTimer();
+        }
+      }
     }
     connected = false;
     notifyAll();
   }
 
+  private void cancelTimer() {
+    try {
+      this.disconnectedTimer.cancel();
+    } catch (NullPointerException e) {
+      // fine
+    } finally {
+      this.disconnectedTimer = null;
+    }
+  }
+
   @Override
   public void process(WatchedEvent event) {
     if (log.isInfoEnabled()) {
@@ -111,10 +129,10 @@ public class ConnectionManager implement
       clientConnected.countDown();
       connectionStrategy.connected();
     } else if (state == KeeperState.Expired) {
-      if (disconnectedTimer != null) {
-        disconnectedTimer.cancel();
-        disconnectedTimer = null;
-      }
+      // we don't call disconnected because there
+      // is no need to start the timer - if we are expired
+      // likelyExpired can just be set to true
+      cancelTimer();
       
       connected = false;
       likelyExpired = true;
@@ -187,17 +205,11 @@ public class ConnectionManager implement
   }
   
   // we use a volatile rather than sync
-  // to avoid deadlock on shutdown
+  // to avoid possible deadlock on shutdown
   public void close() {
     this.isClosed = true;
     this.likelyExpired = true;
-    try {
-      this.disconnectedTimer.cancel();
-    } catch (NullPointerException e) {
-      // fine
-    } finally {
-      this.disconnectedTimer = null;
-    }
+    cancelTimer();
   }
   
   public boolean isLikelyExpired() {



Mime
View raw message