incubator-connectors-commits mailing list archives

From kwri...@apache.org
Subject svn commit: r1195471 [4/5] - in /incubator/lcf/trunk: ./ framework/agents/src/main/java/org/apache/manifoldcf/agents/incrementalingest/ framework/agents/src/main/java/org/apache/manifoldcf/agents/outputconnection/ framework/core/src/main/java/org/apach...
Date Mon, 31 Oct 2011 14:26:36 GMT
Modified: incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/JobQueue.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/JobQueue.java?rev=1195471&r1=1195470&r2=1195471&view=diff
==============================================================================
--- incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/JobQueue.java (original)
+++ incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/JobQueue.java Mon Oct 31 14:26:33 2011
@@ -203,11 +203,11 @@ public class JobQueue extends org.apache
       // Handle indexes
       IndexDescription uniqueIndex = new IndexDescription(true,new String[]{docHashField,jobIDField});
       IndexDescription jobStatusIndex = new IndexDescription(false,new String[]{jobIDField,statusField});
-      IndexDescription jobSeedIndex = new IndexDescription(false,new String[]{jobIDField,isSeedField});
-      IndexDescription jobHashStatusIndex = new IndexDescription(false,new String[]{docHashField,statusField});
+      IndexDescription jobSeedIndex = new IndexDescription(false,new String[]{isSeedField,jobIDField});
+      IndexDescription failTimeIndex = new IndexDescription(false,new String[]{failTimeField,jobIDField});
       IndexDescription actionTimeStatusIndex = new IndexDescription(false,new String[]{statusField,checkActionField,checkTimeField});
-      IndexDescription prioritysetStatusIndex = new IndexDescription(false,new String[]{prioritySetField,statusField,checkActionField});
-      IndexDescription docpriorityIndex = new IndexDescription(false,new String[]{docPriorityField});
+      IndexDescription prioritysetStatusIndex = new IndexDescription(false,new String[]{statusField,prioritySetField});
+      IndexDescription docpriorityIndex = new IndexDescription(false,new String[]{docPriorityField,statusField,checkActionField,checkTimeField});
 
       // Get rid of unused indexes
       Map indexes = getTableIndexes(null,null);
@@ -223,8 +223,8 @@ public class JobQueue extends org.apache
           jobStatusIndex = null;
         else if (jobSeedIndex != null && id.equals(jobSeedIndex))
           jobSeedIndex = null;
-        else if (jobHashStatusIndex != null && id.equals(jobHashStatusIndex))
-          jobHashStatusIndex = null;
+        else if (failTimeIndex != null && id.equals(failTimeIndex))
+          failTimeIndex = null;
         else if (actionTimeStatusIndex != null && id.equals(actionTimeStatusIndex))
           actionTimeStatusIndex = null;
         else if (prioritysetStatusIndex != null && id.equals(prioritysetStatusIndex))
@@ -244,9 +244,9 @@ public class JobQueue extends org.apache
       if (jobSeedIndex != null)
         performAddIndex(null,jobSeedIndex);
 
-      if (jobHashStatusIndex != null)
-        performAddIndex(null,jobHashStatusIndex);
-
+      if (failTimeIndex != null)
+        performAddIndex(null,failTimeIndex);
+      
       if (actionTimeStatusIndex != null)
         performAddIndex(null,actionTimeStatusIndex);
 
@@ -312,42 +312,52 @@ public class JobQueue extends org.apache
     HashMap map = new HashMap();
     map.put(statusField,statusToString(STATUS_PENDING));
     ArrayList list = new ArrayList();
-    list.add(statusToString(STATUS_ACTIVE));
-    list.add(statusToString(STATUS_ACTIVENEEDRESCAN));
-    performUpdate(map,"WHERE "+statusField+" IN (?,?)",list,null);
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new MultiClause(statusField,new Object[]{
+        statusToString(STATUS_ACTIVE),
+        statusToString(STATUS_ACTIVENEEDRESCAN)})});
+    performUpdate(map,"WHERE "+query,list,null);
 
     // Map ACTIVEPURGATORY to PENDINGPURGATORY
     map.put(statusField,statusToString(STATUS_PENDINGPURGATORY));
     list.clear();
-    list.add(statusToString(STATUS_ACTIVEPURGATORY));
-    list.add(statusToString(STATUS_ACTIVENEEDRESCANPURGATORY));
-    performUpdate(map,"WHERE "+statusField+" IN (?,?)",list,null);
+    query = buildConjunctionClause(list,new ClauseDescription[]{
+      new MultiClause(statusField,new Object[]{
+        statusToString(STATUS_ACTIVEPURGATORY),
+        statusToString(STATUS_ACTIVENEEDRESCANPURGATORY)})});
+    performUpdate(map,"WHERE "+query,list,null);
 
     // Map BEINGDELETED to ELIGIBLEFORDELETE
     map.put(statusField,statusToString(STATUS_ELIGIBLEFORDELETE));
     map.put(checkTimeField,new Long(0L));
     list.clear();
-    list.add(statusToString(STATUS_BEINGDELETED));
-    performUpdate(map,"WHERE "+statusField+"=?",list,null);
+    query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(statusField,statusToString(STATUS_BEINGDELETED))});
+    performUpdate(map,"WHERE "+query,list,null);
 
     // Map BEINGCLEANED to PURGATORY
     map.put(statusField,statusToString(STATUS_PURGATORY));
     map.put(checkTimeField,new Long(0L));
     list.clear();
-    list.add(statusToString(STATUS_BEINGCLEANED));
-    performUpdate(map,"WHERE "+statusField+"=?",list,null);
+    query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(statusField,statusToString(STATUS_BEINGCLEANED))});
+    performUpdate(map,"WHERE "+query,list,null);
 
     // Map newseed fields to seed
     map.put(isSeedField,seedstatusToString(SEEDSTATUS_SEED));
     list.clear();
-    list.add(seedstatusToString(SEEDSTATUS_NEWSEED));
-    performUpdate(map,"WHERE "+isSeedField+"=?",list,null);
+    query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(isSeedField,seedstatusToString(SEEDSTATUS_NEWSEED))});
+    performUpdate(map,"WHERE "+query,list,null);
 
     // Clear out all failtime fields (since we obviously haven't been retrying whilst we were not
     // running)
     map = new HashMap();
     map.put(failTimeField,null);
-    performUpdate(map,"WHERE "+failTimeField+" IS NOT NULL",null,null);
+    list.clear();
+    query = buildConjunctionClause(list,new ClauseDescription[]{
+      new NullCheckClause(failTimeField,false)});
+    performUpdate(map,"WHERE "+query,list,null);
     // Reindex the jobqueue table, since we've probably made lots of bad tuples doing the above operations.
     reindexTable();
     unconditionallyAnalyzeTables();
@@ -361,11 +371,13 @@ public class JobQueue extends org.apache
   public void clearFailTimes(Long jobID)
     throws ManifoldCFException
   {
-    ArrayList list = new ArrayList();
-    list.add(jobID);
     Map map = new HashMap();
     map.put(failTimeField,null);
-    performUpdate(map,"WHERE "+jobIDField+"=? AND "+failTimeField+" IS NOT NULL",list,null);
+    ArrayList list = new ArrayList();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new NullCheckClause(failTimeField,false),
+      new UnitaryClause(jobIDField,jobID)});
+    performUpdate(map,"WHERE "+query,list,null);
   }
 
   /** Reset as part of restoring document worker threads.
@@ -380,16 +392,20 @@ public class JobQueue extends org.apache
     HashMap map = new HashMap();
     map.put(statusField,statusToString(STATUS_PENDING));
     ArrayList list = new ArrayList();
-    list.add(statusToString(STATUS_ACTIVE));
-    list.add(statusToString(STATUS_ACTIVENEEDRESCAN));
-    performUpdate(map,"WHERE "+statusField+" IN (?,?)",list,null);
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new MultiClause(statusField,new Object[]{
+        statusToString(STATUS_ACTIVE),
+        statusToString(STATUS_ACTIVENEEDRESCAN)})});
+    performUpdate(map,"WHERE "+query,list,null);
 
     // Map ACTIVEPURGATORY to PENDINGPURGATORY
     map.put(statusField,statusToString(STATUS_PENDINGPURGATORY));
     list.clear();
-    list.add(statusToString(STATUS_ACTIVEPURGATORY));
-    list.add(statusToString(STATUS_ACTIVENEEDRESCANPURGATORY));
-    performUpdate(map,"WHERE "+statusField+" IN (?,?)",list,null);
+    query = buildConjunctionClause(list,new ClauseDescription[]{
+      new MultiClause(statusField,new Object[]{
+        statusToString(STATUS_ACTIVEPURGATORY),
+        statusToString(STATUS_ACTIVENEEDRESCANPURGATORY)})});
+    performUpdate(map,"WHERE "+query,list,null);
   }
 
   /** Reset doc delete worker status.
@@ -398,13 +414,13 @@ public class JobQueue extends org.apache
     throws ManifoldCFException
   {
     HashMap map = new HashMap();
-    ArrayList list = new ArrayList();
     // Map BEINGDELETED to ELIGIBLEFORDELETE
     map.put(statusField,statusToString(STATUS_ELIGIBLEFORDELETE));
     map.put(checkTimeField,new Long(0L));
-    list.clear();
-    list.add(statusToString(STATUS_BEINGDELETED));
-    performUpdate(map,"WHERE "+statusField+"=?",list,null);
+    ArrayList list = new ArrayList();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(statusField,statusToString(STATUS_BEINGDELETED))});
+    performUpdate(map,"WHERE "+query,list,null);
   }
 
   /** Reset doc cleaning worker status.
@@ -413,13 +429,13 @@ public class JobQueue extends org.apache
     throws ManifoldCFException
   {
     HashMap map = new HashMap();
-    ArrayList list = new ArrayList();
     // Map BEINGCLEANED to PURGATORY
     map.put(statusField,statusToString(STATUS_PURGATORY));
     map.put(checkTimeField,new Long(0L));
-    list.clear();
-    list.add(statusToString(STATUS_BEINGCLEANED));
-    performUpdate(map,"WHERE "+statusField+"=?",list,null);
+    ArrayList list = new ArrayList();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(statusField,statusToString(STATUS_BEINGCLEANED))});
+    performUpdate(map,"WHERE "+query,list,null);
   }
 
   /** Prepare for a job delete pass.  This will not be called
@@ -438,7 +454,11 @@ public class JobQueue extends org.apache
     list.add(statusToString(STATUS_PENDING));
     // Clean out prereqevents table first
     prereqEventManager.deleteRows(getTableName()+" t0","t0."+idField,"t0."+jobIDField+"=? AND t0."+statusField+"=?",list);
-    performDelete("WHERE "+jobIDField+"=? AND "+statusField+"=?",list,null);
+    list.clear();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(jobIDField,jobID),
+      new UnitaryClause(statusField,statusToString(STATUS_PENDING))});
+    performDelete("WHERE "+query,list,null);
 
     // Turn PENDINGPURGATORY, PURGATORY, COMPLETED into ELIGIBLEFORDELETE.
     HashMap map = new HashMap();
@@ -448,11 +468,13 @@ public class JobQueue extends org.apache
     map.put(failTimeField,null);
     map.put(failCountField,null);
     list.clear();
-    list.add(jobID);
-    list.add(statusToString(STATUS_PENDINGPURGATORY));
-    list.add(statusToString(STATUS_COMPLETE));
-    list.add(statusToString(STATUS_PURGATORY));
-    performUpdate(map,"WHERE "+jobIDField+"=? AND "+statusField+" IN (?,?,?)",list,null);
+    query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(jobIDField,jobID),
+      new MultiClause(statusField,new Object[]{
+        statusToString(STATUS_PENDINGPURGATORY),
+        statusToString(STATUS_COMPLETE),
+        statusToString(STATUS_PURGATORY)})});
+    performUpdate(map,"WHERE "+query,list,null);
 
     // Not accurate, but best we can do without overhead
     noteModifications(0,2,0);
@@ -478,7 +500,11 @@ public class JobQueue extends org.apache
     list.add(statusToString(STATUS_PENDING));
     // Clean out prereqevents table first
     prereqEventManager.deleteRows(getTableName()+" t0","t0."+idField,"t0."+jobIDField+"=? AND t0."+statusField+"=?",list);
-    performDelete("WHERE "+jobIDField+"=? AND "+statusField+"=?",list,null);
+    list.clear();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(jobIDField,jobID),
+      new UnitaryClause(statusField,statusToString(STATUS_PENDING))});
+    performDelete("WHERE "+query,list,null);
 
     // Turn PENDINGPURGATORY, COMPLETED into PURGATORY.
     HashMap map = new HashMap();
@@ -493,10 +519,12 @@ public class JobQueue extends org.apache
     // without for now.
 
     list.clear();
-    list.add(jobID);
-    list.add(statusToString(STATUS_PENDINGPURGATORY));
-    list.add(statusToString(STATUS_COMPLETE));
-    performUpdate(map,"WHERE "+jobIDField+"=? AND "+statusField+" IN (?,?)",list,null);
+    query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(jobIDField,jobID),
+      new MultiClause(statusField,new Object[]{  
+        statusToString(STATUS_PENDINGPURGATORY),
+        statusToString(STATUS_COMPLETE)})});
+    performUpdate(map,"WHERE "+query,list,null);
 
     // Not accurate, but best we can do without overhead
     noteModifications(0,2,0);
@@ -528,9 +556,10 @@ public class JobQueue extends org.apache
     map.put(failTimeField,null);
     map.put(failCountField,null);
     ArrayList list = new ArrayList();
-    list.add(jobID);
-    list.add(statusToString(STATUS_COMPLETE));
-    performUpdate(map,"WHERE "+jobIDField+"=? AND "+statusField+"=?",list,null);
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(jobIDField,jobID),
+      new UnitaryClause(statusField,statusToString(STATUS_COMPLETE))});
+    performUpdate(map,"WHERE "+query,list,null);
     noteModifications(0,1,0);
     // Do an analyze, otherwise our plans are going to be crap right off the bat
     unconditionallyAnalyzeTables();
@@ -545,17 +574,13 @@ public class JobQueue extends org.apache
     throws ManifoldCFException
   {
     ArrayList list = new ArrayList();
-    StringBuilder sb = new StringBuilder();
     int i = 0;
     while (i < identifiers.length)
     {
-      if (i > 0)
-        sb.append(',');
-      sb.append('?');
       list.add(identifiers[i].getID());
       i++;
     }
-    doDeletes(list,sb.toString());
+    doDeletes(list);
     noteModifications(0,0,identifiers.length);
   }
 
@@ -564,13 +589,15 @@ public class JobQueue extends org.apache
     throws ManifoldCFException
   {
     ArrayList list = new ArrayList();
-    list.add(jobID);
-    list.add(statusToString(STATUS_ACTIVE));
-    list.add(statusToString(STATUS_ACTIVEPURGATORY));
-    list.add(statusToString(STATUS_ACTIVENEEDRESCAN));
-    list.add(statusToString(STATUS_ACTIVENEEDRESCANPURGATORY));
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(jobIDField,jobID),
+      new MultiClause(statusField,new Object[]{
+        statusToString(STATUS_ACTIVE),
+        statusToString(STATUS_ACTIVEPURGATORY),
+        statusToString(STATUS_ACTIVENEEDRESCAN),
+        statusToString(STATUS_ACTIVENEEDRESCANPURGATORY)})});
     IResultSet set = performQuery("SELECT "+docHashField+" FROM "+getTableName()+
-      " WHERE "+jobIDField+"=? AND "+statusField+" IN (?,?,?,?) "+constructOffsetLimitClause(0,1),list,null,null,1);
+      " WHERE "+query+" "+constructOffsetLimitClause(0,1),list,null,null,1);
     return set.getRowCount() > 0;
   }
 
@@ -584,7 +611,10 @@ public class JobQueue extends org.apache
     list.add(jobID);
     // Clean out prereqevents table first
     prereqEventManager.deleteRows(getTableName()+" t0","t0."+idField,"t0."+jobIDField+"=?",list);
-    performDelete("WHERE "+jobIDField+"=?",list,null);
+    list.clear();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(jobIDField,jobID)});
+    performDelete("WHERE "+query,list,null);
     noteModifications(0,0,1);
   }
 
@@ -596,8 +626,9 @@ public class JobQueue extends org.apache
     map.put(prioritySetField,new Long(currentTime));
     map.put(docPriorityField,new Double(priority));
     ArrayList list = new ArrayList();
-    list.add(rowID);
-    performUpdate(map,"WHERE "+idField+"=?",list,null);
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(idField,rowID)});
+    performUpdate(map,"WHERE "+query,list,null);
     noteModifications(0,1,0);
   }
 
@@ -638,8 +669,9 @@ public class JobQueue extends org.apache
     //map.put(docPriorityField,new Double(1.0));
     //map.put(prioritySetField,new Long(0L));
     ArrayList list = new ArrayList();
-    list.add(recID);
-    performUpdate(map,"WHERE "+idField+"=?",list,null);
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(idField,recID)});
+    performUpdate(map,"WHERE "+query,list,null);
   }
 
   /** Set the status to active on a record, leaving alone priority or check time.
@@ -662,11 +694,12 @@ public class JobQueue extends org.apache
       throw new ManifoldCFException("Unexpected status value for jobqueue record "+id.toString()+"; got "+Integer.toString(currentStatus));
     }
 
-    ArrayList list = new ArrayList();
-    list.add(id);
     HashMap map = new HashMap();
     map.put(statusField,statusToString(newStatus));
-    performUpdate(map,"WHERE "+idField+"=?",list,null);
+    ArrayList list = new ArrayList();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(idField,id)});
+    performUpdate(map,"WHERE "+query,list,null);
     noteModifications(0,1,0);
   }
 
@@ -679,8 +712,6 @@ public class JobQueue extends org.apache
     Long checkTime, int action, long failTime, int failCount)
     throws ManifoldCFException
   {
-    ArrayList list = new ArrayList();
-    list.add(id);
     HashMap map = new HashMap();
     map.put(statusField,statusToString(status));
     map.put(checkTimeField,checkTime);
@@ -695,7 +726,10 @@ public class JobQueue extends org.apache
       map.put(failCountField,new Long(failCount));
     // This does not need to set docPriorityField, because we want to preserve whatever
     // priority was in place from before.
-    performUpdate(map,"WHERE "+idField+"=?",list,null);
+    ArrayList list = new ArrayList();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(idField,id)});
+    performUpdate(map,"WHERE "+query,list,null);
     noteModifications(0,1,0);
   }
 
@@ -704,11 +738,12 @@ public class JobQueue extends org.apache
   public void setDeletingStatus(Long id)
     throws ManifoldCFException
   {
-    ArrayList list = new ArrayList();
-    list.add(id);
     HashMap map = new HashMap();
     map.put(statusField,statusToString(STATUS_BEINGDELETED));
-    performUpdate(map,"WHERE "+idField+"=?",list,null);
+    ArrayList list = new ArrayList();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(idField,id)});
+    performUpdate(map,"WHERE "+query,list,null);
     noteModifications(0,1,0);
   }
 
@@ -723,8 +758,10 @@ public class JobQueue extends org.apache
     map.put(failTimeField,null);
     map.put(failCountField,null);
     ArrayList list = new ArrayList();
-    list.add(id);
-    performUpdate(map,"WHERE "+idField+"=?",list,null);
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(idField,id)});
+    performUpdate(map,"WHERE "+query,list,null);
+    noteModifications(0,1,0);
   }
 
   /** Set the status of a document to "being cleaned".
@@ -732,11 +769,12 @@ public class JobQueue extends org.apache
   public void setCleaningStatus(Long id)
     throws ManifoldCFException
   {
-    ArrayList list = new ArrayList();
-    list.add(id);
     HashMap map = new HashMap();
     map.put(statusField,statusToString(STATUS_BEINGCLEANED));
-    performUpdate(map,"WHERE "+idField+"=?",list,null);
+    ArrayList list = new ArrayList();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(idField,id)});
+    performUpdate(map,"WHERE "+query,list,null);
     noteModifications(0,1,0);
   }
 
@@ -751,8 +789,10 @@ public class JobQueue extends org.apache
     map.put(failTimeField,null);
     map.put(failCountField,null);
     ArrayList list = new ArrayList();
-    list.add(id);
-    performUpdate(map,"WHERE "+idField+"=?",list,null);
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(idField,id)});
+    performUpdate(map,"WHERE "+query,list,null);
+    noteModifications(0,1,0);
   }
 
   /** Remove multiple records entirely.
@@ -762,39 +802,44 @@ public class JobQueue extends org.apache
     throws ManifoldCFException
   {
     // Delete in chunks
-    int maxInClause = getMaxInClause();
+    int maxClause = maxClauseDoDeletes();
     ArrayList list = new ArrayList();
-    StringBuilder sb = new StringBuilder();
     int j = 0;
     int i = 0;
     while (i < ids.length)
     {
-      if (j == maxInClause)
+      if (j == maxClause)
       {
-        doDeletes(list,sb.toString());
+        doDeletes(list);
         list.clear();
-        sb.setLength(0);
         j = 0;
       }
-      if (j > 0)
-        sb.append(',');
-      sb.append('?');
       list.add(ids[i++]);
       j++;
     }
     if (j > 0)
-      doDeletes(list,sb.toString());
+      doDeletes(list);
     noteModifications(0,0,ids.length);
   }
 
+  /** Calculate the number of deletes we can do at once.
+  */
+  protected int maxClauseDoDeletes()
+  {
+    return findConjunctionClauseMax(new ClauseDescription[]{});
+  }
+    
   /** Do a batch of deletes.
   */
-  protected void doDeletes(ArrayList list, String queryPart)
+  protected void doDeletes(ArrayList list)
     throws ManifoldCFException
   {
     // Clean out prereqevents table first
-    prereqEventManager.deleteRows(queryPart,list);
-    performDelete("WHERE "+idField+" IN("+queryPart+")",list,null);
+    prereqEventManager.deleteRows(list);
+    ArrayList newList = new ArrayList();
+    String query = buildConjunctionClause(newList,new ClauseDescription[]{
+      new MultiClause(idField,list)});
+    performDelete("WHERE "+query,newList,null);
   }
 
   /** Remove a record entirely.
@@ -886,11 +931,12 @@ public class JobQueue extends org.apache
 
     }
     map.put(isSeedField,seedstatusToString(SEEDSTATUS_NEWSEED));
-    ArrayList list = new ArrayList();
-    list.add(recordID);
     // Delete any existing prereqevent entries first
     prereqEventManager.deleteRows(recordID);
-    performUpdate(map,"WHERE "+idField+"=?",list,null);
+    ArrayList list = new ArrayList();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(idField,recordID)});
+    performUpdate(map,"WHERE "+query,list,null);
     // Insert prereqevent entries, if any
     prereqEventManager.addRows(recordID,prereqEvents);
     noteModifications(0,1,0);
@@ -951,12 +997,10 @@ public class JobQueue extends org.apache
 
     HashMap idMap = new HashMap();
     int k = 0;
-    int maxInClause = getMaxInClause();
 
     // To avoid deadlock, use 1 instead of something larger.  The docIDs are presumed to come in in sorted order.
     int maxClause = 1;
 
-    StringBuilder sb = new StringBuilder();
     ArrayList list = new ArrayList();
     j = 0;
     while (j < docIDHashes.length)
@@ -965,56 +1009,56 @@ public class JobQueue extends org.apache
 
       if (k == maxClause)
       {
-        processRemainingDocuments(idMap,sb.toString(),list,inSet);
-        sb.setLength(0);
+        processRemainingDocuments(idMap,jobID,list,inSet);
         k = 0;
         list.clear();
       }
 
-      if (k > 0)
-        sb.append(" OR");
-      sb.append("(").append(jobIDField).append("=? AND ").append(docHashField).append("=? AND ").append(isSeedField)
-        .append("=?)");
-      list.add(jobID);
       list.add(docIDHash);
-      list.add(seedstatusToString(SEEDSTATUS_SEED));
       k++;
     }
     if (k > 0)
-      processRemainingDocuments(idMap,sb.toString(),list,inSet);
+      processRemainingDocuments(idMap,jobID,list,inSet);
 
     // We have a set of id's.  Process those in bulk.
     k = 0;
-    sb.setLength(0);
     list.clear();
+    maxClause = maxClauseUpdateRemainingDocuments();
     Iterator idValues = idMap.keySet().iterator();
     while (idValues.hasNext())
     {
-      if (k == maxInClause)
+      if (k == maxClause)
       {
-        updateRemainingDocuments(sb.toString(),list);
-        sb.setLength(0);
+        updateRemainingDocuments(list);
         k = 0;
         list.clear();
       }
-      Long idValue = (Long)idValues.next();
-      if (k > 0)
-        sb.append(",");
-      sb.append("?");
-      list.add(idValue);
+      list.add(idValues.next());
       k++;
     }
     if (k > 0)
-      updateRemainingDocuments(sb.toString(),list);
+      updateRemainingDocuments(list);
     noteModifications(0,docIDHashes.length,0);
   }
 
+  /** Calculate max */
+  protected int maxClauseProcessRemainingDocuments(Long jobID)
+  {
+    return findConjunctionClauseMax(new ClauseDescription[]{
+      new UnitaryClause(jobIDField,jobID)});
+  }
+  
   /** Process the specified set of documents. */
-  protected void processRemainingDocuments(Map idMap, String query, ArrayList list, Map inSet)
+  protected void processRemainingDocuments(Map idMap, Long jobID, ArrayList list, Map inSet)
     throws ManifoldCFException
   {
+    ArrayList newList = new ArrayList();
+    String query = buildConjunctionClause(newList,new ClauseDescription[]{
+      new MultiClause(docHashField,list),
+      new UnitaryClause(jobIDField,jobID)});
+    newList.add(seedstatusToString(SEEDSTATUS_SEED));
     IResultSet set = performQuery("SELECT "+idField+","+docHashField+" FROM "+getTableName()+
-      " WHERE "+query+" FOR UPDATE",list,null,null);
+      " WHERE "+query+" AND "+isSeedField+"=? FOR UPDATE",newList,null,null);
     int i = 0;
     while (i < set.getRowCount())
     {
@@ -1028,13 +1072,22 @@ public class JobQueue extends org.apache
     }
   }
 
+  /** Get the maximum count */
+  protected int maxClauseUpdateRemainingDocuments()
+  {
+    return findConjunctionClauseMax(new ClauseDescription[]{});
+  }
+  
   /** Update the specified set of documents to be "NEWSEED" */
-  protected void updateRemainingDocuments(String query, ArrayList list)
+  protected void updateRemainingDocuments(ArrayList list)
     throws ManifoldCFException
   {
     HashMap map = new HashMap();
     map.put(isSeedField,seedstatusToString(SEEDSTATUS_NEWSEED));
-    performUpdate(map,"WHERE "+idField+" IN("+query+")",list,null);
+    ArrayList newList = new ArrayList();
+    String query = buildConjunctionClause(newList,new ClauseDescription[]{
+      new MultiClause(idField,list)});
+    performUpdate(map,"WHERE "+query,newList,null);
   }
 
   /** Complete the initial set of documents.  This method converts the seeding statuses for the
@@ -1048,19 +1101,22 @@ public class JobQueue extends org.apache
     throws ManifoldCFException
   {
     ArrayList list = new ArrayList();
+    String query;
     HashMap map = new HashMap();
     if (!isPartial)
     {
-      list.add(jobID);
-      list.add(seedstatusToString(SEEDSTATUS_SEED));
+      query = buildConjunctionClause(list,new ClauseDescription[]{
+        new UnitaryClause(isSeedField,seedstatusToString(SEEDSTATUS_SEED)),
+        new UnitaryClause(jobIDField,jobID)});
       map.put(isSeedField,seedstatusToString(SEEDSTATUS_NOTSEED));
-      performUpdate(map,"WHERE "+jobIDField+"=? AND "+isSeedField+"=?",list,null);
+      performUpdate(map,"WHERE "+query,list,null);
       list.clear();
     }
-    list.add(jobID);
-    list.add(seedstatusToString(SEEDSTATUS_NEWSEED));
+    query =  buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(isSeedField,seedstatusToString(SEEDSTATUS_SEED)),
+      new UnitaryClause(jobIDField,jobID)});
     map.put(isSeedField,seedstatusToString(SEEDSTATUS_SEED));
-    performUpdate(map,"WHERE "+jobIDField+"=? AND "+isSeedField+"=?",list,null);
+    performUpdate(map,"WHERE "+query,list,null);
   }
 
   /** Get all the current seeds.
@@ -1072,9 +1128,10 @@ public class JobQueue extends org.apache
     throws ManifoldCFException
   {
     ArrayList list = new ArrayList();
-    list.add(jobID);
-    list.add(seedstatusToString(SEEDSTATUS_SEED));
-    IResultSet set = performQuery("SELECT "+docHashField+" FROM "+getTableName()+" WHERE "+jobIDField+"=? AND "+isSeedField+"=?",
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(isSeedField,seedstatusToString(SEEDSTATUS_SEED)),
+      new UnitaryClause(jobIDField,jobID)});
+    IResultSet set = performQuery("SELECT "+docHashField+" FROM "+getTableName()+" WHERE "+query,
       list,null,null);
     String[] rval = new String[set.getRowCount()];
     int i = 0;
@@ -1206,10 +1263,11 @@ public class JobQueue extends org.apache
     default:
       return rval;
     }
-    ArrayList list = new ArrayList();
-    list.add(recordID);
     prereqEventManager.deleteRows(recordID);
-    performUpdate(map,"WHERE "+idField+"=?",list,null);
+    ArrayList list = new ArrayList();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(idField,recordID)});
+    performUpdate(map,"WHERE "+query,list,null);
     prereqEventManager.addRows(recordID,prereqEvents);
     noteModifications(0,1,0);
     return rval;

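Most of the JobQueue hunks above apply one refactoring: hand-assembled WHERE fragments such as "WHERE "+statusField+" IN (?,?)" become calls to the framework's clause builder, which appends the bind parameters to the supplied list as a side effect and returns the matching SQL text. A minimal sketch of the idiom, using only the classes and signatures visible in this diff (the method name is hypothetical; the fields and status constants are the ones JobQueue already uses):

    // Hypothetical illustration only; mirrors the performUpdate call sites above.
    protected void demoClauseBuilder(Long jobID)
      throws ManifoldCFException
    {
      HashMap map = new HashMap();
      map.put(statusField,statusToString(STATUS_PENDING));
      ArrayList list = new ArrayList();                      // receives the bind parameters
      String query = buildConjunctionClause(list,new ClauseDescription[]{
        new UnitaryClause(jobIDField,jobID),                 // jobIDField=?
        new MultiClause(statusField,new Object[]{            // statusField IN (?,?)
          statusToString(STATUS_ACTIVE),
          statusToString(STATUS_ACTIVENEEDRESCAN)}),
        new NullCheckClause(failTimeField,false)});          // failTimeField IS NOT NULL
      performUpdate(map,"WHERE "+query,list,null);           // list already lines up with the "?" slots
    }

As used in these hunks, UnitaryClause contributes one parameter, MultiClause one per value, and NullCheckClause none, so the list handed to performUpdate always matches the placeholders the builder generated.
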
Modified: incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/Jobs.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/Jobs.java?rev=1195471&r1=1195470&r2=1195471&view=diff
==============================================================================
--- incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/Jobs.java (original)
+++ incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/Jobs.java Mon Oct 31 14:26:33 2011
@@ -285,7 +285,9 @@ public class Jobs extends org.apache.man
       hopFilterManager.install(getTableName(),idField);
 
       // Index management
-      IndexDescription statusIndex = new IndexDescription(false,new String[]{statusField});
+      IndexDescription statusIndex = new IndexDescription(false,new String[]{statusField,idField,priorityField});
+      IndexDescription connectionIndex = new IndexDescription(false,new String[]{connectionNameField});
+      IndexDescription outputIndex = new IndexDescription(false,new String[]{outputNameField});
 
       // Get rid of indexes that shouldn't be there
       Map indexes = getTableIndexes(null,null);
@@ -297,6 +299,10 @@ public class Jobs extends org.apache.man
 
         if (statusIndex != null && id.equals(statusIndex))
           statusIndex = null;
+        else if (connectionIndex != null && id.equals(connectionIndex))
+          connectionIndex = null;
+        else if (outputIndex != null && id.equals(outputIndex))
+          outputIndex = null;
         else if (indexName.indexOf("_pkey") == -1)
           // This index shouldn't be here; drop it
           performRemoveIndex(indexName);
@@ -305,6 +311,10 @@ public class Jobs extends org.apache.man
       // Add the ones we didn't find
       if (statusIndex != null)
         performAddIndex(null,statusIndex);
+      if (connectionIndex != null)
+        performAddIndex(null,connectionIndex);
+      if (outputIndex != null)
+        performAddIndex(null,outputIndex);
 
       break;
 
@@ -374,28 +384,23 @@ public class Jobs extends org.apache.man
     beginTransaction();
     try
     {
-      StringBuilder sb = new StringBuilder();
       ArrayList params = new ArrayList();
       int j = 0;
-      int maxIn = getMaxInClause();
+      int maxIn = scheduleManager.maxClauseGetRowsAlternate();
       Iterator iter = uniqueIDs.keySet().iterator();
       while (iter.hasNext())
       {
         if (j == maxIn)
         {
-          scheduleManager.getRowsAlternate(returnValues,sb.toString(),params);
-          sb.setLength(0);
+          scheduleManager.getRowsAlternate(returnValues,params);
           params.clear();
           j = 0;
         }
-        if (j > 0)
-          sb.append(',');
-        sb.append('?');
-        params.add((Long)iter.next());
+        params.add(iter.next());
         j++;
       }
       if (j > 0)
-        scheduleManager.getRowsAlternate(returnValues,sb.toString(),params);
+        scheduleManager.getRowsAlternate(returnValues,params);
     }
     catch (Error e)
     {

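Read as a whole, the chunking loop above keeps its familiar shape but sizes each batch from the callee (scheduleManager.maxClauseGetRowsAlternate(), itself backed by findConjunctionClauseMax) and drops the StringBuilder, since getRowsAlternate now builds its own IN clause from the id list. Reassembled here from the + and context lines, so treat it as a sketch rather than a verbatim listing:

    ArrayList params = new ArrayList();
    int j = 0;
    int maxIn = scheduleManager.maxClauseGetRowsAlternate();    // chunk size now comes from the callee
    Iterator iter = uniqueIDs.keySet().iterator();
    while (iter.hasNext())
    {
      if (j == maxIn)
      {
        scheduleManager.getRowsAlternate(returnValues,params);  // flush one full chunk
        params.clear();
        j = 0;
      }
      params.add(iter.next());
      j++;
    }
    if (j > 0)
      scheduleManager.getRowsAlternate(returnValues,params);    // flush the remainder
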
Modified: incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/PrereqEventManager.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/PrereqEventManager.java?rev=1195471&r1=1195470&r2=1195471&view=diff
==============================================================================
--- incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/PrereqEventManager.java (original)
+++ incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/PrereqEventManager.java Mon Oct 31 14:26:33 2011
@@ -133,20 +133,33 @@ public class PrereqEventManager extends 
     throws ManifoldCFException
   {
     StringBuilder sb = new StringBuilder();
-    sb.append("WHERE EXISTS(SELECT 'x' FROM ").append(parentTableName).append(" WHERE ").append(joinField).append("=")
-      .append(getTableName()).append(".").append(ownerField);
+    ArrayList newList = new ArrayList();
+    
+    sb.append("WHERE EXISTS(SELECT 'x' FROM ").append(parentTableName).append(" WHERE ")
+      .append(buildConjunctionClause(newList,new ClauseDescription[]{
+        new JoinClause(joinField,getTableName() + "." + ownerField)}));
+
     if (parentCriteria != null)
+    {
       sb.append(" AND ").append(parentCriteria);
+      if (list != null)
+        newList.addAll(list);
+    }
+    
     sb.append(")");
-    performDelete(sb.toString(),list,null);
+    
+    performDelete(sb.toString(),newList,null);
     noteModifications(0,0,1);
   }
 
   /** Delete specified rows, as directly specified without a join. */
-  public void deleteRows(String ownerQueryPart, ArrayList list)
+  public void deleteRows(ArrayList list)
     throws ManifoldCFException
   {
-    performDelete("WHERE "+ownerField+" IN("+ownerQueryPart+")",list,null);
+    ArrayList newList = new ArrayList();
+    String query = buildConjunctionClause(newList,new ClauseDescription[]{
+      new MultiClause(ownerField,list)});
+    performDelete("WHERE "+query,newList,null);
     noteModifications(0,0,1);
   }
 
@@ -155,8 +168,9 @@ public class PrereqEventManager extends 
     throws ManifoldCFException
   {
     ArrayList list = new ArrayList();
-    list.add(recordID);
-    performDelete(" WHERE "+ownerField+"=?",list,null);
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(ownerField,recordID)});
+    performDelete(" WHERE "+query,list,null);
     noteModifications(0,0,1);
   }
 

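The join-based delete above introduces JoinClause, which, as used here, correlates two columns in the generated SQL rather than binding a value; newList therefore starts out empty and only picks up the parameters that arrive with parentCriteria. A small illustration of that distinction, with hypothetical table aliases and values where they are not taken from this diff:

    // Hypothetical illustration; "t0"/"t1" aliases and the "P" value are made up.
    ArrayList params = new ArrayList();
    String clause = buildConjunctionClause(params,new ClauseDescription[]{
      new JoinClause("t0."+ownerField,"t1."+idField),   // column=column: SQL text, no "?" (inferred from the hunk above)
      new UnitaryClause("t0."+eventNameField,"P")});    // column=value: appends "?" and one parameter
    // clause reads roughly "t0.ownerid=t1.id AND t0.eventname=?",
    // and params now holds just the single value "P".
    performDelete("WHERE "+clause,params,null);
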
Modified: incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/ScheduleManager.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/ScheduleManager.java?rev=1195471&r1=1195470&r2=1195471&view=diff
==============================================================================
--- incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/ScheduleManager.java (original)
+++ incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/ScheduleManager.java Mon Oct 31 14:26:33 2011
@@ -177,15 +177,25 @@ public class ScheduleManager extends org
     }
   }
 
+  /** Get the max clauses that can be used with getRowsAlternate.
+  */
+  public int maxClauseGetRowsAlternate()
+  {
+    return findConjunctionClauseMax(new ClauseDescription[]{});
+  }
+    
   /** Fill in a set of schedules corresponding to a set of owner id's.
   *@param returnValues is a map keyed by ownerID, with a value that is an ArrayList of ScheduleRecord objects.
-  *@param ownerIDList is the list of owner id's.
   *@param ownerIDParams is the corresponding set of owner id parameters.
   */
-  public void getRowsAlternate(Map returnValues, String ownerIDList, ArrayList ownerIDParams)
+  public void getRowsAlternate(Map returnValues, ArrayList ownerIDParams)
     throws ManifoldCFException
   {
-    IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+ownerIDField+" IN ("+ownerIDList+") ORDER BY "+ordinalField+" ASC",ownerIDParams,
+    ArrayList list = new ArrayList();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new MultiClause(ownerIDField,ownerIDParams)});
+      
+    IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+query+" ORDER BY "+ordinalField+" ASC",list,
       null,null);
     int i = 0;
     while (i < set.getRowCount())
@@ -264,8 +274,10 @@ public class ScheduleManager extends org
     throws ManifoldCFException
   {
     ArrayList list = new ArrayList();
-    list.add(ownerID);
-    performDelete("WHERE "+ownerIDField+"=?",list,null);
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(ownerIDField,ownerID)});
+      
+    performDelete("WHERE "+query,list,null);
   }
 
   /** Go from string to enumerated value.

Modified: incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/repository/RepositoryConnectionManager.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/repository/RepositoryConnectionManager.java?rev=1195471&r1=1195470&r2=1195471&view=diff
==============================================================================
--- incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/repository/RepositoryConnectionManager.java (original)
+++ incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/repository/RepositoryConnectionManager.java Mon Oct 31 14:26:33 2011
@@ -121,8 +121,34 @@ public class RepositoryConnectionManager
       historyManager.install(getTableName(),nameField);
       throttleSpecManager.install(getTableName(),nameField);
 
-      // Index management goes here.
-
+      // Index management
+      IndexDescription authorityIndex = new IndexDescription(false,new String[]{authorityNameField});
+      IndexDescription classIndex = new IndexDescription(false,new String[]{classNameField});
+      
+      // Get rid of indexes that shouldn't be there
+      Map indexes = getTableIndexes(null,null);
+      Iterator iter = indexes.keySet().iterator();
+      while (iter.hasNext())
+      {
+        String indexName = (String)iter.next();
+        IndexDescription id = (IndexDescription)indexes.get(indexName);
+
+        if (authorityIndex != null && id.equals(authorityIndex))
+          authorityIndex = null;
+        else if (classIndex != null && id.equals(classIndex))
+          classIndex = null;
+        else if (indexName.indexOf("_pkey") == -1)
+          // This index shouldn't be here; drop it
+          performRemoveIndex(indexName);
+      }
+
+      // Add the ones we didn't find
+      if (authorityIndex != null)
+        performAddIndex(null,authorityIndex);
+
+      if (classIndex != null)
+        performAddIndex(null,classIndex);
+      
       break;
     }
   }
@@ -320,6 +346,7 @@ public class RepositoryConnectionManager
     while (true)
     {
       // Catch deadlock condition
+      long sleepAmt = 0L;
       try
       {
         ICacheHandle ch = cacheManager.enterCache(null,cacheKeys,getTransactionID());
@@ -334,9 +361,10 @@ public class RepositoryConnectionManager
             boolean isNew = object.getIsNew();
             // See whether the instance exists
             ArrayList params = new ArrayList();
-            params.add(object.getName());
+            String query = buildConjunctionClause(params,new ClauseDescription[]{
+              new UnitaryClause(nameField,object.getName())});
             IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+
-              nameField+"=? FOR UPDATE",params,null,null);
+              query+" FOR UPDATE",params,null,null);
             HashMap values = new HashMap();
             values.put(descriptionField,object.getDescription());
             values.put(classNameField,object.getClassName());
@@ -360,8 +388,9 @@ public class RepositoryConnectionManager
               
               // Update
               params.clear();
-              params.add(object.getName());
-              performUpdate(values," WHERE "+nameField+"=?",params,null);
+              query = buildConjunctionClause(params,new ClauseDescription[]{
+                new UnitaryClause(nameField,object.getName())});
+              performUpdate(values," WHERE "+query,params,null);
               throttleSpecManager.deleteRows(object.getName());
             }
             else
@@ -414,14 +443,11 @@ public class RepositoryConnectionManager
         // Is this a deadlock exception?  If so, we want to try again.
         if (e.getErrorCode() != ManifoldCFException.DATABASE_TRANSACTION_ABORT)
           throw e;
-        try
-        {
-          ManifoldCF.sleep((long)(random.nextDouble() * 60000.0 + 500.0));
-        }
-        catch (InterruptedException e2)
-        {
-          throw new ManifoldCFException(e2.getMessage(),e2,ManifoldCFException.INTERRUPTED);
-        }
+        sleepAmt = getSleepAmt();
+      }
+      finally
+      {
+        sleepFor(sleepAmt);
       }
     }
   }
@@ -453,8 +479,9 @@ public class RepositoryConnectionManager
         throttleSpecManager.deleteRows(name);
         historyManager.deleteOwner(name,null);
         ArrayList params = new ArrayList();
-        params.add(name);
-        performDelete("WHERE "+nameField+"=?",params,null);
+        String query = buildConjunctionClause(params,new ClauseDescription[]{
+          new UnitaryClause(nameField,name)});
+        performDelete("WHERE "+query,params,null);
         cacheManager.invalidateKeys(ch);
       }
       catch (ManifoldCFException e)
@@ -490,8 +517,9 @@ public class RepositoryConnectionManager
     ssb.add(getRepositoryConnectionsKey());
     StringSet localCacheKeys = new StringSet(ssb);
     ArrayList params = new ArrayList();
-    params.add(authorityName);
-    IResultSet set = performQuery("SELECT "+nameField+" FROM "+getTableName()+" WHERE "+authorityNameField+"=?",params,
+    String query = buildConjunctionClause(params,new ClauseDescription[]{
+      new UnitaryClause(authorityNameField,authorityName)});
+    IResultSet set = performQuery("SELECT "+nameField+" FROM "+getTableName()+" WHERE "+query,params,
       localCacheKeys,null);
     return set.getRowCount() > 0;
   }
@@ -507,8 +535,9 @@ public class RepositoryConnectionManager
     ssb.add(getRepositoryConnectionsKey());
     StringSet localCacheKeys = new StringSet(ssb);
     ArrayList params = new ArrayList();
-    params.add(className);
-    IResultSet set = performQuery("SELECT "+nameField+" FROM "+getTableName()+" WHERE "+classNameField+"=?",params,
+    String query = buildConjunctionClause(params,new ClauseDescription[]{
+      new UnitaryClause(classNameField,className)});
+    IResultSet set = performQuery("SELECT "+nameField+" FROM "+getTableName()+" WHERE "+query,params,
       localCacheKeys,null);
     String[] rval = new String[set.getRowCount()];
     int i = 0;
@@ -536,8 +565,9 @@ public class RepositoryConnectionManager
       ssb.add(getRepositoryConnectionKey(name));
       StringSet localCacheKeys = new StringSet(ssb);
       ArrayList params = new ArrayList();
-      params.add(name);
-      IResultSet set = performQuery("SELECT "+classNameField+" FROM "+getTableName()+" WHERE "+nameField+"=?",params,
+      String query = buildConjunctionClause(params,new ClauseDescription[]{
+        new UnitaryClause(nameField,name)});
+      IResultSet set = performQuery("SELECT "+classNameField+" FROM "+getTableName()+" WHERE "+query,params,
         localCacheKeys,null);
       if (set.getRowCount() == 0)
         throw new ManifoldCFException("No such connection: '"+name+"'");
@@ -770,28 +800,23 @@ public class RepositoryConnectionManager
     try
     {
       i = 0;
-      StringBuilder sb = new StringBuilder();
       ArrayList params = new ArrayList();
       int j = 0;
-      int maxIn = getMaxInClause();
+      int maxIn = maxClauseGetRepositoryConnectionsChunk();
       while (i < connectionNames.length)
       {
         if (j == maxIn)
         {
-          getRepositoryConnectionsChunk(rval,returnIndex,sb.toString(),params);
-          sb.setLength(0);
+          getRepositoryConnectionsChunk(rval,returnIndex,params);
           params.clear();
           j = 0;
         }
-        if (j > 0)
-          sb.append(',');
-        sb.append('?');
         params.add(connectionNames[i]);
         i++;
         j++;
       }
       if (j > 0)
-        getRepositoryConnectionsChunk(rval,returnIndex,sb.toString(),params);
+        getRepositoryConnectionsChunk(rval,returnIndex,params);
       return rval;
     }
     catch (Error e)
@@ -810,18 +835,27 @@ public class RepositoryConnectionManager
     }
   }
 
+  /** Calculate how many repository connections to get at once.
+  */
+  protected int maxClauseGetRepositoryConnectionsChunk()
+  {
+    return Math.min(findConjunctionClauseMax(new ClauseDescription[]{}),
+      throttleSpecManager.maxClauseGetRows());
+  }
+  
   /** Read a chunk of repository connections.
   *@param rval is the place to put the read policies.
   *@param returnIndex is a map from the object id (resource id) and the rval index.
-  *@param idList is the list of id's.
   *@param params is the set of parameters.
   */
-  protected void getRepositoryConnectionsChunk(RepositoryConnection[] rval, Map returnIndex, String idList, ArrayList params)
+  protected void getRepositoryConnectionsChunk(RepositoryConnection[] rval, Map returnIndex, ArrayList params)
     throws ManifoldCFException
   {
-    IResultSet set;
-    set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+
-      nameField+" IN ("+idList+")",params,null,null);
+    ArrayList list = new ArrayList();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new MultiClause(nameField,params)});
+    IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+
+      query,list,null,null);
     int i = 0;
     while (i < set.getRowCount())
     {
@@ -842,7 +876,7 @@ public class RepositoryConnectionManager
     }
 
     // Do throttle part
-    throttleSpecManager.getRows(rval,returnIndex,idList,params);
+    throttleSpecManager.getRows(rval,returnIndex,params);
   }
 
   // The cached instance will be a RepositoryConnection.  The cached version will be duplicated when it is returned

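Besides the clause-builder conversions, the hunks above rework the deadlock-retry loop: the inline random sleep is gone, and the wait now runs in a finally block sized by getSleepAmt()/sleepFor(), helpers that are not defined in this diff and so are presumably inherited from the base table class. The resulting shape, reassembled from the + and context lines with the transactional body and its success exit elided:

    while (true)
    {
      // Catch deadlock condition
      long sleepAmt = 0L;                   // stays 0 unless a deadlock is detected below
      try
      {
        // ... cache handling and the FOR UPDATE / insert-or-update work shown above,
        // ending with whatever success exit the full method uses (not visible in these hunks)
      }
      catch (ManifoldCFException e)         // catch type inferred from the e.getErrorCode() check
      {
        if (e.getErrorCode() != ManifoldCFException.DATABASE_TRANSACTION_ABORT)
          throw e;                          // only transaction aborts are retried
        sleepAmt = getSleepAmt();
      }
      finally
      {
        sleepFor(sleepAmt);                 // runs on every path, including the rethrow
      }
    }
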
Modified: incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/repository/RepositoryHistoryManager.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/repository/RepositoryHistoryManager.java?rev=1195471&r1=1195470&r2=1195471&view=diff
==============================================================================
--- incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/repository/RepositoryHistoryManager.java (original)
+++ incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/repository/RepositoryHistoryManager.java Mon Oct 31 14:26:33 2011
@@ -165,8 +165,9 @@ public class RepositoryHistoryManager ex
     throws ManifoldCFException
   {
     ArrayList params = new ArrayList();
-    params.add(owner);
-    performDelete("WHERE "+ownerNameField+"=?",params,invKeys);
+    String query = buildConjunctionClause(params,new ClauseDescription[]{
+      new UnitaryClause(ownerNameField,owner)});
+    performDelete("WHERE "+query,params,invKeys);
   }
 
   /** Add row to table, and reanalyze if necessary.
@@ -691,10 +692,6 @@ public class RepositoryHistoryManager ex
         sb.append(field);
 	// Always make it DESC order...
 	sb.append(" DESC");
-        //if (j == 0)
-	//  sb.append(" DESC");
-        //else
-        //  sb.append(" ASC");
         i++;
       }
       j++;

Modified: incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/repository/ThrottleSpecManager.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/repository/ThrottleSpecManager.java?rev=1195471&r1=1195470&r2=1195471&view=diff
==============================================================================
--- incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/repository/ThrottleSpecManager.java (original)
+++ incubator/lcf/trunk/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/repository/ThrottleSpecManager.java Mon Oct 31 14:26:33 2011
@@ -137,21 +137,31 @@ public class ThrottleSpecManager extends
     throws ManifoldCFException
   {
     ArrayList list = new ArrayList();
-    list.add(name);
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(ownerNameField,name)});
     return performQuery("SELECT "+descriptionField+" AS description,"+matchField+" AS match,"+throttleField+" AS value FROM "+
-      getTableName()+" WHERE "+ownerNameField+"=?",list,null,null);
+      getTableName()+" WHERE "+query,list,null,null);
   }
 
+  /** Calculate the maximum number of clauses we can use with getRows.
+  */
+  public int maxClauseGetRows()
+  {
+    return findConjunctionClauseMax(new ClauseDescription[]{});
+  }
+    
   /** Fill in a set of throttles corresponding to a set of connection names.
   *@param connections is the set of connections to fill in.
   *@param indexMap maps the connection name to the index in the connections array.
-  *@param ownerNameList is the list of connection names.
   *@param ownerNameParams is the corresponding set of connection name parameters.
   */
-  public void getRows(IRepositoryConnection[] connections, Map indexMap, String ownerNameList, ArrayList ownerNameParams)
+  public void getRows(IRepositoryConnection[] connections, Map indexMap, ArrayList ownerNameParams)
     throws ManifoldCFException
   {
-    IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+ownerNameField+" IN ("+ownerNameList+")",ownerNameParams,
+    ArrayList list = new ArrayList();
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new MultiClause(ownerNameField,ownerNameParams)});
+    IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+query,list,
       null,null);
     int i = 0;
     while (i < set.getRowCount())
@@ -217,8 +227,9 @@ public class ThrottleSpecManager extends
     throws ManifoldCFException
   {
     ArrayList list = new ArrayList();
-    list.add(owner);
-    performDelete("WHERE "+ownerNameField+"=?",list,null);
+    String query = buildConjunctionClause(list,new ClauseDescription[]{
+      new UnitaryClause(ownerNameField,owner)});
+    performDelete("WHERE "+query,list,null);
   }
 
 }

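The new maxClauseGetRows() above is what RepositoryConnectionManager.maxClauseGetRepositoryConnectionsChunk(), earlier in this commit, folds into its chunk size: each batch of connection names feeds both the main connections query and the dependent throttle-spec lookup, so the batch has to fit whichever of the two IN clauses allows fewer parameters. The relevant lines, gathered from the two files for reference:

    // RepositoryConnectionManager (from the hunk earlier in this commit)
    protected int maxClauseGetRepositoryConnectionsChunk()
    {
      return Math.min(findConjunctionClauseMax(new ClauseDescription[]{}),
        throttleSpecManager.maxClauseGetRows());
    }

    // ThrottleSpecManager (from the hunk above)
    public int maxClauseGetRows()
    {
      return findConjunctionClauseMax(new ClauseDescription[]{});
    }
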
Added: incubator/lcf/trunk/loadtests/rss/src/test/java/org/apache/manifoldcf/rss_loadtests/BaseDerby.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/loadtests/rss/src/test/java/org/apache/manifoldcf/rss_loadtests/BaseDerby.java?rev=1195471&view=auto
==============================================================================
--- incubator/lcf/trunk/loadtests/rss/src/test/java/org/apache/manifoldcf/rss_loadtests/BaseDerby.java (added)
+++ incubator/lcf/trunk/loadtests/rss/src/test/java/org/apache/manifoldcf/rss_loadtests/BaseDerby.java Mon Oct 31 14:26:33 2011
@@ -0,0 +1,461 @@
+/* $Id$ */
+
+/**
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements. See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.manifoldcf.rss_loadtests;
+
+import org.apache.manifoldcf.core.interfaces.*;
+import org.apache.manifoldcf.agents.interfaces.*;
+import org.apache.manifoldcf.crawler.interfaces.*;
+import org.apache.manifoldcf.crawler.system.ManifoldCF;
+
+import java.io.*;
+import java.util.*;
+import org.junit.*;
+
+import org.mortbay.jetty.Handler;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.Connector;
+import org.mortbay.jetty.webapp.WebAppContext;
+import org.mortbay.jetty.servlet.Context;
+import org.mortbay.jetty.servlet.FilterHolder;
+import org.mortbay.log.Logger;
+
+import org.apache.commons.httpclient.*;
+import org.apache.commons.httpclient.methods.*;
+
+/** Tests that run the "agents daemon" should be derived from this */
+public class BaseDerby extends org.apache.manifoldcf.crawler.tests.ConnectorBase
+{
+  public static final String agentShutdownSignal = "agent-process";
+  public static final int testPort = 8346;
+  
+  protected DaemonThread daemonThread = null;
+  protected Server server = null;
+
+  protected String[] getConnectorNames()
+  {
+    return new String[]{"File Connector"};
+  }
+  
+  protected String[] getConnectorClasses()
+  {
+    return new String[]{"org.apache.manifoldcf.crawler.connectors.rss.RSSConnector"};
+  }
+  
+  protected String[] getOutputNames()
+  {
+    return new String[]{"Null Output"};
+  }
+  
+  protected String[] getOutputClasses()
+  {
+    return new String[]{"org.apache.manifoldcf.agents.output.nullconnector.NullConnector"};
+  }
+  
+  // API support
+  
+  // These methods allow communication with the ManifoldCF api webapp, via the locally-instantiated jetty
+  
+  /** Construct a command url.
+  */
+  protected String makeAPIURL(String command)
+  {
+    return "http://localhost:"+Integer.toString(testPort)+"/mcf-api-service/json/"+command;
+  }
+  
+  /** Perform an json API GET operation.
+  *@param apiURL is the operation.
+  *@param expectedResponse is the expected response code.
+  *@return the json response.
+  */
+  protected String performAPIGetOperation(String apiURL, int expectedResponse)
+    throws Exception
+  {
+    HttpClient client = new HttpClient();
+    GetMethod method = new GetMethod(apiURL);
+    int response = client.executeMethod(method);
+    byte[] responseData = method.getResponseBody();
+    String responseString = new String(responseData,"utf-8");
+    if (response != expectedResponse)
+      throw new Exception("API http error; expected "+Integer.toString(expectedResponse)+", saw "+Integer.toString(response)+": "+responseString);
+    // We presume that the data is utf-8, since that's what the API uses throughout.
+    return responseString;
+  }
+
+  /** Perform an json API DELETE operation.
+  *@param apiURL is the operation.
+  *@param expectedResponse is the expected response code.
+  *@return the json response.
+  */
+  protected String performAPIDeleteOperation(String apiURL, int expectedResponse)
+    throws Exception
+  {
+    HttpClient client = new HttpClient();
+    DeleteMethod method = new DeleteMethod(apiURL);
+    int response = client.executeMethod(method);
+    byte[] responseData = method.getResponseBody();
+    String responseString = new String(responseData,"utf-8");
+    if (response != expectedResponse)
+      throw new Exception("API http error; expected "+Integer.toString(expectedResponse)+", saw "+Integer.toString(response)+": "+responseString);
+    // We presume that the data is utf-8, since that's what the API uses throughout.
+    return responseString;
+  }
+
+  /** Perform a JSON API PUT operation.
+  *@param apiURL is the operation URL.
+  *@param expectedResponse is the expected response code.
+  *@param input is the input JSON.
+  *@return the JSON response.
+  */
+  protected String performAPIPutOperation(String apiURL, int expectedResponse, String input)
+    throws Exception
+  {
+    HttpClient client = new HttpClient();
+    PutMethod method = new PutMethod(apiURL);
+    method.setRequestHeader("Content-type", "text/plain; charset=UTF-8");
+    method.setRequestBody(input);
+    int response = client.executeMethod(method);
+    byte[] responseData = method.getResponseBody();
+    String responseString = new String(responseData,"utf-8");
+    if (response != expectedResponse)
+      throw new Exception("API http error; expected "+Integer.toString(expectedResponse)+", saw "+Integer.toString(response)+": "+responseString);
+    // We presume that the data is utf-8, since that's what the API uses throughout.
+    return responseString;
+  }
+
+  /** Perform a JSON API POST operation.
+  *@param apiURL is the operation URL.
+  *@param expectedResponse is the expected response code.
+  *@param input is the input JSON.
+  *@return the JSON response.
+  */
+  protected String performAPIPostOperation(String apiURL, int expectedResponse, String input)
+    throws Exception
+  {
+    HttpClient client = new HttpClient();
+    PostMethod method = new PostMethod(apiURL);
+    method.setRequestHeader("Content-type", "text/plain; charset=UTF-8");
+    method.setRequestBody(input);
+    int response = client.executeMethod(method);
+    byte[] responseData = method.getResponseBody();
+    String responseString = new String(responseData,"utf-8");
+    if (response != expectedResponse)
+      throw new Exception("API http error; expected "+Integer.toString(expectedResponse)+", saw "+Integer.toString(response)+": "+responseString);
+    // We presume that the data is utf-8, since that's what the API uses throughout.
+    return responseString;
+  }
+
+  /** Perform a JSON GET API operation, using Configuration structures to represent the JSON.
+  * This is mostly for testing convenience.
+  */
+  protected Configuration performAPIGetOperationViaNodes(String command, int expectedResponse)
+    throws Exception
+  {
+    String result = performAPIGetOperation(makeAPIURL(command),expectedResponse);
+    Configuration cfg = new Configuration();
+    cfg.fromJSON(result);
+    return cfg;
+  }
+
+  /** Perform a JSON DELETE API operation, using Configuration structures to represent the JSON.
+  * This is mostly for testing convenience.
+  */
+  protected Configuration performAPIDeleteOperationViaNodes(String command, int expectedResponse)
+    throws Exception
+  {
+    String result = performAPIDeleteOperation(makeAPIURL(command),expectedResponse);
+    Configuration cfg = new Configuration();
+    cfg.fromJSON(result);
+    return cfg;
+  }
+
+  /** Perform a JSON PUT API operation, using Configuration structures to represent the JSON.
+  * This is mostly for testing convenience.
+  */
+  protected Configuration performAPIPutOperationViaNodes(String command, int expectedResponse, Configuration argument)
+    throws Exception
+  {
+    String argumentJson;
+    if (argument != null)
+      argumentJson = argument.toJSON();
+    else
+      argumentJson = null;
+    
+    String result = performAPIPutOperation(makeAPIURL(command),expectedResponse,argumentJson);
+    Configuration cfg = new Configuration();
+    cfg.fromJSON(result);
+    return cfg;
+  }
+
+  /** Perform a JSON POST API operation, using Configuration structures to represent the JSON.
+  * This is mostly for testing convenience.
+  */
+  protected Configuration performAPIPostOperationViaNodes(String command, int expectedResponse, Configuration argument)
+    throws Exception
+  {
+    String argumentJson;
+    if (argument != null)
+      argumentJson = argument.toJSON();
+    else
+      argumentJson = null;
+    
+    String result = performAPIPostOperation(makeAPIURL(command),expectedResponse,argumentJson);
+    Configuration cfg = new Configuration();
+    cfg.fromJSON(result);
+    return cfg;
+  }
+
+  // Setup/teardown
+  
+  @Before
+  public void setUp()
+    throws Exception
+  {
+    super.setUp();
+    // Start jetty
+    server = new Server( testPort );    
+    server.setStopAtShutdown( true );
+
+    
+    String crawlerWarPath = "../../framework/dist/web/war/mcf-crawler-ui.war";
+    String authorityserviceWarPath = "../../framework/dist/web/war/mcf-authority-service.war";
+    String apiWarPath = "../../framework/dist/web/war/mcf-api-service.war";
+
+    if (System.getProperty("crawlerWarPath") != null)
+    	crawlerWarPath = System.getProperty("crawlerWarPath");
+    if (System.getProperty("authorityserviceWarPath") != null)
+    	authorityserviceWarPath = System.getProperty("authorityserviceWarPath");
+    if (System.getProperty("apiWarPath") != null)
+    	apiWarPath = System.getProperty("apiWarPath");
+    
+    // Initialize the servlets
+    WebAppContext lcfCrawlerUI = new WebAppContext(crawlerWarPath,"/mcf-crawler-ui");
+    // This will cause jetty to ignore all of the framework and jdbc jars in the war, which is what we want.
+    lcfCrawlerUI.setParentLoaderPriority(true);
+    server.addHandler(lcfCrawlerUI);
+    WebAppContext lcfAuthorityService = new WebAppContext(authorityserviceWarPath,"/mcf-authority-service");
+    // This will cause jetty to ignore all of the framework and jdbc jars in the war, which is what we want.
+    lcfAuthorityService.setParentLoaderPriority(true);
+    server.addHandler(lcfAuthorityService);
+    WebAppContext lcfApi = new WebAppContext(apiWarPath,"/mcf-api-service");
+    lcfApi.setParentLoaderPriority(true);
+    server.addHandler(lcfApi);
+    server.start();
+
+    // If all worked, then we can start the daemon.
+    // Clear the agents shutdown signal.
+    IThreadContext tc = ThreadContextFactory.make();
+    ILockManager lockManager = LockManagerFactory.make(tc);
+    lockManager.clearGlobalFlag(agentShutdownSignal);
+
+    daemonThread = new DaemonThread();
+    daemonThread.start();
+  }
+  
+  @After
+  public void cleanUp()
+    throws Exception
+  {
+    initialize();
+    if (isInitialized())
+    {
+      Exception currentException = null;
+      IThreadContext tc = ThreadContextFactory.make();
+
+      // Delete all jobs (and wait for them to go away)
+      if (daemonThread != null)
+      {
+        IJobManager jobManager = JobManagerFactory.make(tc);
+        
+        // Get a list of the current active jobs
+        IJobDescription[] jobs = jobManager.getAllJobs();
+        int i = 0;
+        while (i < jobs.length)
+        {
+          IJobDescription desc = jobs[i++];
+          // Abort this job, if it is running
+          try
+          {
+            jobManager.manualAbort(desc.getID());
+          }
+          catch (ManifoldCFException e)
+          {
+            // This generally means that the job was not running
+          }
+        }
+        i = 0;
+        while (i < jobs.length)
+        {
+          IJobDescription desc = jobs[i++];
+          // Wait for this job to stop
+          while (true)
+          {
+            JobStatus status = jobManager.getStatus(desc.getID());
+            if (status != null)
+            {
+              int statusValue = status.getStatus();
+              switch (statusValue)
+              {
+              case JobStatus.JOBSTATUS_NOTYETRUN:
+              case JobStatus.JOBSTATUS_COMPLETED:
+              case JobStatus.JOBSTATUS_ERROR:
+                break;
+              default:
+                ManifoldCF.sleep(10000);
+                continue;
+              }
+            }
+            break;
+          }
+        }
+
+        // Now, delete them all
+        i = 0;
+        while (i < jobs.length)
+        {
+          IJobDescription desc = jobs[i++];
+          try
+          {
+            jobManager.deleteJob(desc.getID());
+          }
+          catch (ManifoldCFException e)
+          {
+            // This usually means that the job is already being deleted
+          }
+        }
+
+        i = 0;
+        while (i < jobs.length)
+        {
+          IJobDescription desc = jobs[i++];
+          // Wait for this job to disappear
+          while (true)
+          {
+            JobStatus status = jobManager.getStatus(desc.getID());
+            if (status != null)
+            {
+              ManifoldCF.sleep(10000);
+              continue;
+            }
+            break;
+          }
+        }
+
+        // Shut down daemon
+        ILockManager lockManager = LockManagerFactory.make(tc);
+        lockManager.setGlobalFlag(agentShutdownSignal);
+      
+        // Wait for daemon thread to exit.
+        while (true)
+        {
+          if (daemonThread.isAlive())
+          {
+            Thread.sleep(1000L);
+            continue;
+          }
+          break;
+        }
+
+        Exception e = daemonThread.getDaemonException();
+        if (e != null)
+          currentException = e;
+      }
+      
+      if (server != null)
+      {
+        server.stop();
+        server.join();
+        server = null;
+      }
+      
+      // Clean up everything else
+      try
+      {
+        super.cleanUp();
+      }
+      catch (Exception e)
+      {
+        if (currentException == null)
+          currentException = e;
+      }
+      if (currentException != null)
+        throw currentException;
+    }
+  }
+  
+  protected static class DaemonThread extends Thread
+  {
+    protected Exception daemonException = null;
+    
+    public DaemonThread()
+    {
+      setName("Daemon thread");
+    }
+    
+    public void run()
+    {
+      IThreadContext tc = ThreadContextFactory.make();
+      // Now, start the agents daemon and wait for the shutdown signal.  On shutdown we have to do the cleanup
+      // ourselves, because the JVM isn't going away.
+      try
+      {
+        ILockManager lockManager = LockManagerFactory.make(tc);
+        while (true)
+        {
+          // Any shutdown signal yet?
+          if (lockManager.checkGlobalFlag(agentShutdownSignal))
+            break;
+            
+          // Start whatever agents need to be started
+          ManifoldCF.startAgents(tc);
+
+          try
+          {
+            ManifoldCF.sleep(5000);
+          }
+          catch (InterruptedException e)
+          {
+            break;
+          }
+        }
+      }
+      catch (ManifoldCFException e)
+      {
+        daemonException = e;
+      }
+      finally
+      {
+        try
+        {
+          ManifoldCF.stopAgents(tc);
+        }
+        catch (ManifoldCFException e)
+        {
+          daemonException = e;
+        }
+      }
+    }
+    
+    public Exception getDaemonException()
+    {
+      return daemonException;
+    }
+    
+  }
+
+}
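
A derived load test is expected to drive the crawler through the JSON helpers above rather than through the UI webapp. The sketch below is a minimal, hypothetical example of that calling pattern only: the test class name, the node types ("job", "description"), the command paths ("jobs", "jobstatuses"), and the expected HTTP status codes are illustrative assumptions, not part of this commit.

package org.apache.manifoldcf.rss_loadtests;

import org.apache.manifoldcf.core.interfaces.Configuration;
import org.apache.manifoldcf.core.interfaces.ConfigurationNode;
import org.junit.Test;

/** Hypothetical example of using the BaseDerby JSON API helpers. */
public class ExampleRSSLoadTest extends BaseDerby
{
  @Test
  public void exerciseAPI()
    throws Exception
  {
    // Build a request as Configuration/ConfigurationNode structures; the node
    // types used here are placeholders, not the real job schema.
    Configuration requestObject = new Configuration();
    ConfigurationNode jobNode = new ConfigurationNode("job");
    ConfigurationNode description = new ConfigurationNode("description");
    description.setValue("RSS load test job");
    jobNode.addChild(jobNode.getChildCount(),description);
    requestObject.addChild(requestObject.getChildCount(),jobNode);

    // POST it to the API webapp that setUp() started on testPort; the command
    // path and the expected status code are assumptions for illustration.
    Configuration createResult = performAPIPostOperationViaNodes("jobs",201,requestObject);

    // Read state back the same way.
    Configuration statuses = performAPIGetOperationViaNodes("jobstatuses",200);
  }
}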

Propchange: incubator/lcf/trunk/loadtests/rss/src/test/java/org/apache/manifoldcf/rss_loadtests/BaseDerby.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lcf/trunk/loadtests/rss/src/test/java/org/apache/manifoldcf/rss_loadtests/BaseDerby.java
------------------------------------------------------------------------------
    svn:keywords = Id

Added: incubator/lcf/trunk/loadtests/rss/src/test/java/org/apache/manifoldcf/rss_loadtests/BaseHSQLDB.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/loadtests/rss/src/test/java/org/apache/manifoldcf/rss_loadtests/BaseHSQLDB.java?rev=1195471&view=auto
==============================================================================
--- incubator/lcf/trunk/loadtests/rss/src/test/java/org/apache/manifoldcf/rss_loadtests/BaseHSQLDB.java (added)
+++ incubator/lcf/trunk/loadtests/rss/src/test/java/org/apache/manifoldcf/rss_loadtests/BaseHSQLDB.java Mon Oct 31 14:26:33 2011
@@ -0,0 +1,461 @@
+/* $Id$ */
+
+/**
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements. See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.manifoldcf.rss_loadtests;
+
+import org.apache.manifoldcf.core.interfaces.*;
+import org.apache.manifoldcf.agents.interfaces.*;
+import org.apache.manifoldcf.crawler.interfaces.*;
+import org.apache.manifoldcf.crawler.system.ManifoldCF;
+
+import java.io.*;
+import java.util.*;
+import org.junit.*;
+
+import org.mortbay.jetty.Handler;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.Connector;
+import org.mortbay.jetty.webapp.WebAppContext;
+import org.mortbay.jetty.servlet.Context;
+import org.mortbay.jetty.servlet.FilterHolder;
+import org.mortbay.log.Logger;
+
+import org.apache.commons.httpclient.*;
+import org.apache.commons.httpclient.methods.*;
+
+/** Tests that run the "agents daemon" should be derived from this class. */
+public class BaseHSQLDB extends org.apache.manifoldcf.crawler.tests.ConnectorBaseHSQLDB
+{
+  public static final String agentShutdownSignal = "agent-process";
+  public static final int testPort = 8346;
+  
+  protected DaemonThread daemonThread = null;
+  protected Server server = null;
+
+  protected String[] getConnectorNames()
+  {
+    return new String[]{"File Connector"};
+  }
+  
+  protected String[] getConnectorClasses()
+  {
+    return new String[]{"org.apache.manifoldcf.crawler.connectors.rss.RSSConnector"};
+  }
+  
+  protected String[] getOutputNames()
+  {
+    return new String[]{"Null Output"};
+  }
+  
+  protected String[] getOutputClasses()
+  {
+    return new String[]{"org.apache.manifoldcf.agents.output.nullconnector.NullConnector"};
+  }
+  
+  // API support
+  
+  // These methods allow communication with the ManifoldCF API webapp, via the locally-instantiated Jetty server.
+  
+  /** Construct a command URL.
+  */
+  protected String makeAPIURL(String command)
+  {
+    return "http://localhost:"+Integer.toString(testPort)+"/mcf-api-service/json/"+command;
+  }
+  
+  /** Perform a JSON API GET operation.
+  *@param apiURL is the operation URL.
+  *@param expectedResponse is the expected response code.
+  *@return the JSON response.
+  */
+  protected String performAPIGetOperation(String apiURL, int expectedResponse)
+    throws Exception
+  {
+    HttpClient client = new HttpClient();
+    GetMethod method = new GetMethod(apiURL);
+    int response = client.executeMethod(method);
+    byte[] responseData = method.getResponseBody();
+    String responseString = new String(responseData,"utf-8");
+    if (response != expectedResponse)
+      throw new Exception("API http error; expected "+Integer.toString(expectedResponse)+", saw "+Integer.toString(response)+": "+responseString);
+    // We presume that the data is utf-8, since that's what the API uses throughout.
+    return responseString;
+  }
+
+  /** Perform a JSON API DELETE operation.
+  *@param apiURL is the operation URL.
+  *@param expectedResponse is the expected response code.
+  *@return the JSON response.
+  */
+  protected String performAPIDeleteOperation(String apiURL, int expectedResponse)
+    throws Exception
+  {
+    HttpClient client = new HttpClient();
+    DeleteMethod method = new DeleteMethod(apiURL);
+    int response = client.executeMethod(method);
+    byte[] responseData = method.getResponseBody();
+    String responseString = new String(responseData,"utf-8");
+    if (response != expectedResponse)
+      throw new Exception("API http error; expected "+Integer.toString(expectedResponse)+", saw "+Integer.toString(response)+": "+responseString);
+    // We presume that the data is utf-8, since that's what the API uses throughout.
+    return responseString;
+  }
+
+  /** Perform a JSON API PUT operation.
+  *@param apiURL is the operation URL.
+  *@param expectedResponse is the expected response code.
+  *@param input is the input JSON.
+  *@return the JSON response.
+  */
+  protected String performAPIPutOperation(String apiURL, int expectedResponse, String input)
+    throws Exception
+  {
+    HttpClient client = new HttpClient();
+    PutMethod method = new PutMethod(apiURL);
+    method.setRequestHeader("Content-type", "text/plain; charset=UTF-8");
+    method.setRequestBody(input);
+    int response = client.executeMethod(method);
+    byte[] responseData = method.getResponseBody();
+    String responseString = new String(responseData,"utf-8");
+    if (response != expectedResponse)
+      throw new Exception("API http error; expected "+Integer.toString(expectedResponse)+", saw "+Integer.toString(response)+": "+responseString);
+    // We presume that the data is utf-8, since that's what the API uses throughout.
+    return responseString;
+  }
+
+  /** Perform a JSON API POST operation.
+  *@param apiURL is the operation URL.
+  *@param expectedResponse is the expected response code.
+  *@param input is the input JSON.
+  *@return the JSON response.
+  */
+  protected String performAPIPostOperation(String apiURL, int expectedResponse, String input)
+    throws Exception
+  {
+    HttpClient client = new HttpClient();
+    PostMethod method = new PostMethod(apiURL);
+    method.setRequestHeader("Content-type", "text/plain; charset=UTF-8");
+    method.setRequestBody(input);
+    int response = client.executeMethod(method);
+    byte[] responseData = method.getResponseBody();
+    String responseString = new String(responseData,"utf-8");
+    if (response != expectedResponse)
+      throw new Exception("API http error; expected "+Integer.toString(expectedResponse)+", saw "+Integer.toString(response)+": "+responseString);
+    // We presume that the data is utf-8, since that's what the API uses throughout.
+    return responseString;
+  }
+
+  /** Perform a JSON GET API operation, using Configuration structures to represent the JSON.
+  * This is mostly for testing convenience.
+  */
+  protected Configuration performAPIGetOperationViaNodes(String command, int expectedResponse)
+    throws Exception
+  {
+    String result = performAPIGetOperation(makeAPIURL(command),expectedResponse);
+    Configuration cfg = new Configuration();
+    cfg.fromJSON(result);
+    return cfg;
+  }
+
+  /** Perform a JSON DELETE API operation, using Configuration structures to represent the JSON.
+  * This is mostly for testing convenience.
+  */
+  protected Configuration performAPIDeleteOperationViaNodes(String command, int expectedResponse)
+    throws Exception
+  {
+    String result = performAPIDeleteOperation(makeAPIURL(command),expectedResponse);
+    Configuration cfg = new Configuration();
+    cfg.fromJSON(result);
+    return cfg;
+  }
+
+  /** Perform a JSON PUT API operation, using Configuration structures to represent the JSON.
+  * This is mostly for testing convenience.
+  */
+  protected Configuration performAPIPutOperationViaNodes(String command, int expectedResponse, Configuration argument)
+    throws Exception
+  {
+    String argumentJson;
+    if (argument != null)
+      argumentJson = argument.toJSON();
+    else
+      argumentJson = null;
+    
+    String result = performAPIPutOperation(makeAPIURL(command),expectedResponse,argumentJson);
+    Configuration cfg = new Configuration();
+    cfg.fromJSON(result);
+    return cfg;
+  }
+
+  /** Perform a JSON POST API operation, using Configuration structures to represent the JSON.
+  * This is mostly for testing convenience.
+  */
+  protected Configuration performAPIPostOperationViaNodes(String command, int expectedResponse, Configuration argument)
+    throws Exception
+  {
+    String argumentJson;
+    if (argument != null)
+      argumentJson = argument.toJSON();
+    else
+      argumentJson = null;
+    
+    String result = performAPIPostOperation(makeAPIURL(command),expectedResponse,argumentJson);
+    Configuration cfg = new Configuration();
+    cfg.fromJSON(result);
+    return cfg;
+  }
+
+  // Setup/teardown
+  
+  @Before
+  public void setUp()
+    throws Exception
+  {
+    super.setUp();
+    // Start jetty
+    server = new Server( testPort );    
+    server.setStopAtShutdown( true );
+
+    
+    String crawlerWarPath = "../../framework/dist/web/war/mcf-crawler-ui.war";
+    String authorityserviceWarPath = "../../framework/dist/web/war/mcf-authority-service.war";
+    String apiWarPath = "../../framework/dist/web/war/mcf-api-service.war";
+
+    if (System.getProperty("crawlerWarPath") != null)
+    	crawlerWarPath = System.getProperty("crawlerWarPath");
+    if (System.getProperty("authorityserviceWarPath") != null)
+    	authorityserviceWarPath = System.getProperty("authorityserviceWarPath");
+    if (System.getProperty("apiWarPath") != null)
+    	apiWarPath = System.getProperty("apiWarPath");
+    
+    // Initialize the servlets
+    WebAppContext lcfCrawlerUI = new WebAppContext(crawlerWarPath,"/mcf-crawler-ui");
+    // This will cause jetty to ignore all of the framework and jdbc jars in the war, which is what we want.
+    lcfCrawlerUI.setParentLoaderPriority(true);
+    server.addHandler(lcfCrawlerUI);
+    WebAppContext lcfAuthorityService = new WebAppContext(authorityserviceWarPath,"/mcf-authority-service");
+    // This will cause jetty to ignore all of the framework and jdbc jars in the war, which is what we want.
+    lcfAuthorityService.setParentLoaderPriority(true);
+    server.addHandler(lcfAuthorityService);
+    WebAppContext lcfApi = new WebAppContext(apiWarPath,"/mcf-api-service");
+    lcfApi.setParentLoaderPriority(true);
+    server.addHandler(lcfApi);
+    server.start();
+
+    // If all worked, then we can start the daemon.
+    // Clear the agents shutdown signal.
+    IThreadContext tc = ThreadContextFactory.make();
+    ILockManager lockManager = LockManagerFactory.make(tc);
+    lockManager.clearGlobalFlag(agentShutdownSignal);
+
+    daemonThread = new DaemonThread();
+    daemonThread.start();
+  }
+  
+  @After
+  public void cleanUp()
+    throws Exception
+  {
+    initialize();
+    if (isInitialized())
+    {
+      Exception currentException = null;
+      IThreadContext tc = ThreadContextFactory.make();
+
+      // Delete all jobs (and wait for them to go away)
+      if (daemonThread != null)
+      {
+        IJobManager jobManager = JobManagerFactory.make(tc);
+        
+        // Get a list of the current active jobs
+        IJobDescription[] jobs = jobManager.getAllJobs();
+        int i = 0;
+        while (i < jobs.length)
+        {
+          IJobDescription desc = jobs[i++];
+          // Abort this job, if it is running
+          try
+          {
+            jobManager.manualAbort(desc.getID());
+          }
+          catch (ManifoldCFException e)
+          {
+            // This generally means that the job was not running
+          }
+        }
+        i = 0;
+        while (i < jobs.length)
+        {
+          IJobDescription desc = jobs[i++];
+          // Wait for this job to stop
+          while (true)
+          {
+            JobStatus status = jobManager.getStatus(desc.getID());
+            if (status != null)
+            {
+              int statusValue = status.getStatus();
+              switch (statusValue)
+              {
+              case JobStatus.JOBSTATUS_NOTYETRUN:
+              case JobStatus.JOBSTATUS_COMPLETED:
+              case JobStatus.JOBSTATUS_ERROR:
+                break;
+              default:
+                ManifoldCF.sleep(10000);
+                continue;
+              }
+            }
+            break;
+          }
+        }
+
+        // Now, delete them all
+        i = 0;
+        while (i < jobs.length)
+        {
+          IJobDescription desc = jobs[i++];
+          try
+          {
+            jobManager.deleteJob(desc.getID());
+          }
+          catch (ManifoldCFException e)
+          {
+            // This usually means that the job is already being deleted
+          }
+        }
+
+        i = 0;
+        while (i < jobs.length)
+        {
+          IJobDescription desc = jobs[i++];
+          // Wait for this job to disappear
+          while (true)
+          {
+            JobStatus status = jobManager.getStatus(desc.getID());
+            if (status != null)
+            {
+              ManifoldCF.sleep(10000);
+              continue;
+            }
+            break;
+          }
+        }
+
+        // Shut down daemon
+        ILockManager lockManager = LockManagerFactory.make(tc);
+        lockManager.setGlobalFlag(agentShutdownSignal);
+      
+        // Wait for daemon thread to exit.
+        while (true)
+        {
+          if (daemonThread.isAlive())
+          {
+            Thread.sleep(1000L);
+            continue;
+          }
+          break;
+        }
+
+        Exception e = daemonThread.getDaemonException();
+        if (e != null)
+          currentException = e;
+      }
+      
+      if (server != null)
+      {
+        server.stop();
+        server.join();
+        server = null;
+      }
+      
+      // Clean up everything else
+      try
+      {
+        super.cleanUp();
+      }
+      catch (Exception e)
+      {
+        if (currentException == null)
+          currentException = e;
+      }
+      if (currentException != null)
+        throw currentException;
+    }
+  }
+  
+  protected static class DaemonThread extends Thread
+  {
+    protected Exception daemonException = null;
+    
+    public DaemonThread()
+    {
+      setName("Daemon thread");
+    }
+    
+    public void run()
+    {
+      IThreadContext tc = ThreadContextFactory.make();
+      // Now, start the agents daemon and wait for the shutdown signal.  On shutdown we have to do the cleanup
+      // ourselves, because the JVM isn't going away.
+      try
+      {
+        ILockManager lockManager = LockManagerFactory.make(tc);
+        while (true)
+        {
+          // Any shutdown signal yet?
+          if (lockManager.checkGlobalFlag(agentShutdownSignal))
+            break;
+            
+          // Start whatever agents need to be started
+          ManifoldCF.startAgents(tc);
+
+          try
+          {
+            ManifoldCF.sleep(5000);
+          }
+          catch (InterruptedException e)
+          {
+            break;
+          }
+        }
+      }
+      catch (ManifoldCFException e)
+      {
+        daemonException = e;
+      }
+      finally
+      {
+        try
+        {
+          ManifoldCF.stopAgents(tc);
+        }
+        catch (ManifoldCFException e)
+        {
+          daemonException = e;
+        }
+      }
+    }
+    
+    public Exception getDaemonException()
+    {
+      return daemonException;
+    }
+    
+  }
+
+}
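
Both base classes resolve the three web-app war locations from system properties before falling back to the ../../framework/dist layout, so a derived test (or the invoking build) can point them at locally built wars. The following minimal sketch shows one way to do that from Java; the subclass name and file paths are placeholders, and only the property names come from the setUp() code above.

package org.apache.manifoldcf.rss_loadtests;

/** Hypothetical subclass showing how the war paths consulted in setUp()
* can be overridden; the locations below are placeholders. */
public class LocalWarsLoadTest extends BaseHSQLDB
{
  static
  {
    // Property names match the System.getProperty() calls in setUp().
    System.setProperty("crawlerWarPath","/opt/mcf/web/war/mcf-crawler-ui.war");
    System.setProperty("authorityserviceWarPath","/opt/mcf/web/war/mcf-authority-service.war");
    System.setProperty("apiWarPath","/opt/mcf/web/war/mcf-api-service.war");
  }
}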

Propchange: incubator/lcf/trunk/loadtests/rss/src/test/java/org/apache/manifoldcf/rss_loadtests/BaseHSQLDB.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: incubator/lcf/trunk/loadtests/rss/src/test/java/org/apache/manifoldcf/rss_loadtests/BaseHSQLDB.java
------------------------------------------------------------------------------
    svn:keywords = Id


