incubator-connectors-commits mailing list archives

From kwri...@apache.org
Subject svn commit: r911418 [7/8] - in /incubator/lcf/trunk/modules: connectors/webcrawler/connector/org/apache/lcf/crawler/connectors/webcrawler/ framework/agents/org/apache/lcf/agents/agentmanager/ framework/agents/org/apache/lcf/agents/incrementalingest/ fr...
Date Thu, 18 Feb 2010 14:31:31 GMT
Modified: incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/HopDeleteDeps.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/HopDeleteDeps.java?rev=911418&r1=911417&r2=911418&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/HopDeleteDeps.java (original)
+++ incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/HopDeleteDeps.java Thu Feb 18 14:31:31 2010
@@ -29,336 +29,326 @@
 */
 public class HopDeleteDeps extends org.apache.lcf.core.database.BaseTable
 {
-	public static final String _rcsid = "@(#)$Id$";
+        public static final String _rcsid = "@(#)$Id$";
 
-	// Field names
-	public static final String jobIDField = "jobid";
-	public static final String ownerIDField = "ownerid";
-	public static final String linkTypeField = "linktype";
-	public static final String parentIDHashField = "parentidhash";
-	public static final String childIDHashField = "childidhash";
-
-	/** Counter for kicking off analyze */
-	protected static AnalyzeTracker tracker = new AnalyzeTracker();
-	/** Counter for kicking off reindex */
-	protected static AnalyzeTracker reindexTracker = new AnalyzeTracker();
-	
-	// Number of events before reindex occurs
-	protected static final long REINDEX_COUNT = 250000L;
-
-	/** Constructor.
-	*@param database is the database handle.
-	*/
-	public HopDeleteDeps(IDBInterface database)
-		throws LCFException
-	{
-		super(database,"hopdeletedeps");
-	}
-
-	/** Install or upgrade.
-	*/
-	public void install(String jobsTable, String jobsColumn, String hopCountTable, String idColumn)
-		throws LCFException
-	{
-		beginTransaction();
-		try
-		{
-			Map existing = getTableSchema(null,null);
-			if (existing == null)
-			{
-				HashMap map = new HashMap();
-				map.put(jobIDField,new ColumnDescription("BIGINT",false,false,jobsTable,jobsColumn,false));
-				map.put(ownerIDField,new ColumnDescription("BIGINT",false,false,hopCountTable,idColumn,false));
-				map.put(linkTypeField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
-				map.put(parentIDHashField,new ColumnDescription("VARCHAR(40)",false,false,null,null,false));
-				map.put(childIDHashField,new ColumnDescription("VARCHAR(40)",false,true,null,null,false));
-
-				performCreate(map,null);
-
-				// Build indexes
-				ArrayList list = new ArrayList();
-				list.add(jobIDField);
-				addTableIndex(false,list);
-				
-				list.clear();
-				list.add(ownerIDField);
-				list.add(linkTypeField);
-				list.add(parentIDHashField);
-				list.add(childIDHashField);
-				addTableIndex(true,list);
-				
-				list.clear();
-				list.add(ownerIDField);
-				addTableIndex(false,list);
-				
-				list.clear();
-				list.add(jobIDField);
-				list.add(childIDHashField);
-				addTableIndex(false,list);
-			}
-			else
-			{
-				ColumnDescription cd;
-				
-				// Get rid of unused columns
-				cd = (ColumnDescription)existing.get("childid");
-				if (cd != null)
-				{
-					// Remove childid and parentid columns
-					ArrayList list = new ArrayList();
-					list.add("childid");
-					list.add("parentid");
-					performAlter(null,null,list,null);
-				}
-				
-				// No index modifications required!
-			}
-			return;
-		}
-		catch (LCFException e)
-		{
-			signalRollback();
-			throw e;
-		}
-		catch (Error e)
-		{
-			signalRollback();
-			throw e;
-		}
-		finally
-		{
-			endTransaction();
-		}
-	}
-
-	/** Uninstall.
-	*/
-	public void deinstall()
-		throws LCFException
-	{
-		performDrop(null);
-	}
-
-	/** Analyze job tables that need analysis.
-	*/
-	public void analyzeTables()
-		throws LCFException
-	{
-		long startTime = System.currentTimeMillis();
-		Logging.perf.debug("Beginning to analyze hopdeletedeps table");
-		analyzeTable();
-		Logging.perf.debug("Done analyzing hopdeletedeps table in "+new Long(System.currentTimeMillis()-startTime)+" ms");
-	}
-
-	/** Delete a job. */
-	public void deleteJob(Long jobID)
-		throws LCFException
-	{
-		ArrayList list = new ArrayList();
-		list.add(jobID);
-		performDelete("WHERE "+jobIDField+"=?",list,null);
-		// Log one event - it may not be enough, but it's the best we can do without overhead
-		reindexTracker.noteInsert();
-	}
-
-	/** Remove rows that correspond to specific hopcount records.
-	*/
-	public void removeMarkedRows(String parentTable, String parentIDHashField, String query, ArrayList queryList)
-		throws LCFException
-	{
-		// This didn't perform very well.
-		//performDelete("WHERE EXISTS(SELECT 'x' FROM "+parentTable+" t0 WHERE t0."+parentIDField+"="+ownerIDField+
-		//	" AND t0."+markField+"=?)",list,null);
-		performDelete("WHERE "+ownerIDField+" IN(SELECT "+parentIDHashField+" FROM "+parentTable+" WHERE "+query+")",
-			queryList,null);
-		// Log one event - it may not be enough, but it's the best we can do without overhead
-		reindexTracker.noteInsert();
-	}
-
-	/** Delete rows related to specified owners.  The list of
-	* specified owners does not exceed the maximum database in-clause
-	* size.
-	*/
-	public void deleteOwnerRows(Long[] ownerIDs)
-		throws LCFException
-	{
-		StringBuffer sb = new StringBuffer("WHERE ");
-		sb.append(ownerIDField).append(" IN(");
-		ArrayList list = new ArrayList();
-		int i = 0;
-		while (i < ownerIDs.length)
-		{
-			if (i > 0)
-				sb.append(",");
-			sb.append("?");
-			list.add(ownerIDs[i++]);
-		}
-		sb.append(")");
-		performDelete(sb.toString(),list,null);
-		reindexTracker.noteInsert(ownerIDs.length);
-	}
-
-	/** Get the delete dependencies for an owner.
-	*@return the links
-	*/
-	public DeleteDependency[] getDeleteDependencies(Long ownerID)
-		throws LCFException
-	{
-		ArrayList list = new ArrayList();
-		list.add(ownerID);
-		IResultSet set = performQuery("SELECT "+linkTypeField+", "+parentIDHashField+", "+
-			childIDHashField+" FROM "+getTableName()+" WHERE "+ownerIDField+"=?",list,null,null);
-		DeleteDependency[] rval = new DeleteDependency[set.getRowCount()];
-		int i = 0;
-		while (i < rval.length)
-		{
-			IResultRow row = set.getRow(i);
-			rval[i] = new DeleteDependency((String)row.getValue(linkTypeField),
-				(String)row.getValue(parentIDHashField),
-				(String)row.getValue(childIDHashField));
-			i++;
-		}
-		return rval;
-	}
-
-	/** Delete a dependency */
-	public void deleteDependency(Long ownerID, DeleteDependency dd)
-		throws LCFException
-	{
-		ArrayList list = new ArrayList();
-		StringBuffer sb = new StringBuffer("WHERE ");
-		sb.append(ownerIDField).append("=? AND ");
-		list.add(ownerID);
-		if (dd.getLinkType().length() > 0)
-		{
-			sb.append(linkTypeField).append("=? AND ");
-			list.add(dd.getLinkType());
-		}
-		else
-			sb.append(linkTypeField).append(" IS NULL AND ");
-		sb.append(parentIDHashField).append("=? AND ");
-		list.add(dd.getParentIDHash());
-		if (dd.getChildIDHash().length() > 0)
-		{
-			sb.append(childIDHashField).append("=?");
-			list.add(dd.getChildIDHash());
-		}
-		else
-			sb.append(childIDHashField).append(" IS NULL");
-		performDelete(sb.toString(),list,null);
-		reindexTracker.noteInsert();
-	}
-
-	/** Write a delete dependency.
-	*/
-	public void writeDependency(Long ownerID, Long jobID, DeleteDependency dd)
-		throws LCFException
-	{
-		HashMap map = new HashMap();
-		map.put(jobIDField,jobID);
-		map.put(ownerIDField,ownerID);
-		if (dd.getLinkType().length() > 0)
-			map.put(linkTypeField,dd.getLinkType());
-		map.put(parentIDHashField,dd.getParentIDHash());
-		if (dd.getChildIDHash().length() > 0)
-		{
-			map.put(childIDHashField,dd.getChildIDHash());
-		}
-		performInsert(map,null);
-		tracker.noteInsert();
-	}
-
-
-	/** Conditionally do analyze operation.
-	*/
-	public void conditionallyAnalyzeTables()
-		throws LCFException
-	{
-		if (tracker.checkAnalyze())
-		{
-			try
-			{
-				// Do the analyze
-				analyzeTable();
-				// Get the size of the table
-			}
-			finally
-			{
-				// For this table, we base the wait time on the number of rows in it.
-				// Simply reanalyze every n inserts
-				tracker.doAnalyze(60000L);
-			}
-		}
-		if (reindexTracker.checkAnalyze())
-		{
-			try
-			{
-				// Do the reindex
-				reindexTable();
-				// Get the size of the table
-			}
-			finally
-			{
-				// For this table, we base the wait time on the number of rows in it.
-				// Simply reanalyze every n inserts
-				reindexTracker.doAnalyze(REINDEX_COUNT);
-			}
-		}
-
-	}
-
-
-	/** Analyze tracker class.
-	*/
-	protected static class AnalyzeTracker
-	{
-		// Number of records to insert before we need to analyze again.
-		// After start, we wait 1000 before analyzing the first time.
-		protected long recordCount = 1000L;
-		protected boolean busy = false;
-		
-		/** Constructor.
-		*/
-		public AnalyzeTracker()
-		{
-
-		}
-
-		/** Note an analyze.
-		*/
-		public synchronized void doAnalyze(long repeatCount)
-		{
-			recordCount = repeatCount;
-			busy = false;
-		}
-
-		public synchronized void noteInsert(int count)
-		{
-			if (recordCount >= (long)count)
-				recordCount -= (long)count;
-			else
-				recordCount = 0L;
-		}
-
-		/** Note an insert */
-		public synchronized void noteInsert()
-		{
-			if (recordCount > 0L)
-				recordCount--;
-		}
-		
-		/** Prepare to insert/delete a record, and see if analyze is required.
-		*/
-		public synchronized boolean checkAnalyze()
-		{
-			if (busy)
-				return false;
-			busy = (recordCount == 0L);
-			return busy;
-		}
+        // Field names
+        public static final String jobIDField = "jobid";
+        public static final String ownerIDField = "ownerid";
+        public static final String linkTypeField = "linktype";
+        public static final String parentIDHashField = "parentidhash";
+        public static final String childIDHashField = "childidhash";
+
+        /** Counter for kicking off analyze */
+        protected static AnalyzeTracker tracker = new AnalyzeTracker();
+        /** Counter for kicking off reindex */
+        protected static AnalyzeTracker reindexTracker = new AnalyzeTracker();
+        
+        // Number of events before reindex occurs
+        protected static final long REINDEX_COUNT = 250000L;
+
+        /** Constructor.
+        *@param database is the database handle.
+        */
+        public HopDeleteDeps(IDBInterface database)
+                throws LCFException
+        {
+                super(database,"hopdeletedeps");
+        }
+
+        /** Install or upgrade.
+        */
+        public void install(String jobsTable, String jobsColumn, String hopCountTable, String idColumn)
+                throws LCFException
+        {
+                // Standard practice: outer retry loop
+                while (true)
+                {
+                        Map existing = getTableSchema(null,null);
+                        if (existing == null)
+                        {
+                                HashMap map = new HashMap();
+                                map.put(jobIDField,new ColumnDescription("BIGINT",false,false,jobsTable,jobsColumn,false));
+                                map.put(ownerIDField,new ColumnDescription("BIGINT",false,false,hopCountTable,idColumn,false));
+                                map.put(linkTypeField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
+                                map.put(parentIDHashField,new ColumnDescription("VARCHAR(40)",false,false,null,null,false));
+                                map.put(childIDHashField,new ColumnDescription("VARCHAR(40)",false,true,null,null,false));
+
+                                performCreate(map,null);
+                        }
+                        else
+                        {
+                                // Upgrade code goes here, if needed.
+                        }
+                        
+                        // Index management
+                        IndexDescription ownerIndex = new IndexDescription(false,new String[]{ownerIDField});
+                        IndexDescription jobIndex = new IndexDescription(false,new String[]{jobIDField});
+                        IndexDescription completeIndex = new IndexDescription(true,new String[]{ownerIDField,linkTypeField,parentIDHashField,childIDHashField});
+                        IndexDescription jobChildIndex = new IndexDescription(false,new String[]{jobIDField,childIDHashField});
+                        
+                        // Get rid of indexes that shouldn't be there
+                        Map indexes = getTableIndexes(null,null);
+                        Iterator iter = indexes.keySet().iterator();
+                        while (iter.hasNext())
+                        {
+                                String indexName = (String)iter.next();
+                                IndexDescription id = (IndexDescription)indexes.get(indexName);
+                            
+                                if (ownerIndex != null && id.equals(ownerIndex))
+                                        ownerIndex = null;
+                                else if (jobIndex != null && id.equals(jobIndex))
+                                        jobIndex = null;
+                                else if (completeIndex != null && id.equals(completeIndex))
+                                        completeIndex = null;
+                                else if (jobChildIndex != null && id.equals(jobChildIndex))
+                                        jobChildIndex = null;
+                                else if (indexName.indexOf("_pkey") == -1)
+                                        // This index shouldn't be here; drop it
+                                        performRemoveIndex(indexName);
+                        }
+
+                        // Add the ones we didn't find
+                        if (ownerIndex != null)
+                                performAddIndex(null,ownerIndex);
+                        if (jobIndex != null)
+                                performAddIndex(null,jobIndex);
+                        if (completeIndex != null)
+                                performAddIndex(null,completeIndex);
+                        if (jobChildIndex != null)
+                                performAddIndex(null,jobChildIndex);
+                                
+                        break;
+                }
+        }
+
+        /** Uninstall.
+        */
+        public void deinstall()
+                throws LCFException
+        {
+                performDrop(null);
+        }
+
+        /** Analyze job tables that need analysis.
+        */
+        public void analyzeTables()
+                throws LCFException
+        {
+                long startTime = System.currentTimeMillis();
+                Logging.perf.debug("Beginning to analyze hopdeletedeps table");
+                analyzeTable();
+                Logging.perf.debug("Done analyzing hopdeletedeps table in "+new Long(System.currentTimeMillis()-startTime)+" ms");
+        }
+
+        /** Delete a job. */
+        public void deleteJob(Long jobID)
+                throws LCFException
+        {
+                ArrayList list = new ArrayList();
+                list.add(jobID);
+                performDelete("WHERE "+jobIDField+"=?",list,null);
+                // Log one event - it may not be enough, but it's the best we can do without overhead
+                reindexTracker.noteInsert();
+        }
+
+        /** Remove rows that correspond to specific hopcount records.
+        */
+        public void removeMarkedRows(String parentTable, String parentIDHashField, String query, ArrayList queryList)
+                throws LCFException
+        {
+                // This didn't perform very well.
+                //performDelete("WHERE EXISTS(SELECT 'x' FROM "+parentTable+" t0 WHERE t0."+parentIDField+"="+ownerIDField+
+                //	" AND t0."+markField+"=?)",list,null);
+                performDelete("WHERE "+ownerIDField+" IN(SELECT "+parentIDHashField+" FROM "+parentTable+" WHERE "+query+")",
+                        queryList,null);
+                // Log one event - it may not be enough, but it's the best we can do without overhead
+                reindexTracker.noteInsert();
+        }
+
+        /** Delete rows related to specified owners.  The list of
+        * specified owners does not exceed the maximum database in-clause
+        * size.
+        */
+        public void deleteOwnerRows(Long[] ownerIDs)
+                throws LCFException
+        {
+                StringBuffer sb = new StringBuffer("WHERE ");
+                sb.append(ownerIDField).append(" IN(");
+                ArrayList list = new ArrayList();
+                int i = 0;
+                while (i < ownerIDs.length)
+                {
+                        if (i > 0)
+                                sb.append(",");
+                        sb.append("?");
+                        list.add(ownerIDs[i++]);
+                }
+                sb.append(")");
+                performDelete(sb.toString(),list,null);
+                reindexTracker.noteInsert(ownerIDs.length);
+        }
+
+        /** Get the delete dependencies for an owner.
+        *@return the links
+        */
+        public DeleteDependency[] getDeleteDependencies(Long ownerID)
+                throws LCFException
+        {
+                ArrayList list = new ArrayList();
+                list.add(ownerID);
+                IResultSet set = performQuery("SELECT "+linkTypeField+", "+parentIDHashField+", "+
+                        childIDHashField+" FROM "+getTableName()+" WHERE "+ownerIDField+"=?",list,null,null);
+                DeleteDependency[] rval = new DeleteDependency[set.getRowCount()];
+                int i = 0;
+                while (i < rval.length)
+                {
+                        IResultRow row = set.getRow(i);
+                        rval[i] = new DeleteDependency((String)row.getValue(linkTypeField),
+                                (String)row.getValue(parentIDHashField),
+                                (String)row.getValue(childIDHashField));
+                        i++;
+                }
+                return rval;
+        }
+
+        /** Delete a dependency */
+        public void deleteDependency(Long ownerID, DeleteDependency dd)
+                throws LCFException
+        {
+                ArrayList list = new ArrayList();
+                StringBuffer sb = new StringBuffer("WHERE ");
+                sb.append(ownerIDField).append("=? AND ");
+                list.add(ownerID);
+                if (dd.getLinkType().length() > 0)
+                {
+                        sb.append(linkTypeField).append("=? AND ");
+                        list.add(dd.getLinkType());
+                }
+                else
+                        sb.append(linkTypeField).append(" IS NULL AND ");
+                sb.append(parentIDHashField).append("=? AND ");
+                list.add(dd.getParentIDHash());
+                if (dd.getChildIDHash().length() > 0)
+                {
+                        sb.append(childIDHashField).append("=?");
+                        list.add(dd.getChildIDHash());
+                }
+                else
+                        sb.append(childIDHashField).append(" IS NULL");
+                performDelete(sb.toString(),list,null);
+                reindexTracker.noteInsert();
+        }
+
+        /** Write a delete dependency.
+        */
+        public void writeDependency(Long ownerID, Long jobID, DeleteDependency dd)
+                throws LCFException
+        {
+                HashMap map = new HashMap();
+                map.put(jobIDField,jobID);
+                map.put(ownerIDField,ownerID);
+                if (dd.getLinkType().length() > 0)
+                        map.put(linkTypeField,dd.getLinkType());
+                map.put(parentIDHashField,dd.getParentIDHash());
+                if (dd.getChildIDHash().length() > 0)
+                {
+                        map.put(childIDHashField,dd.getChildIDHash());
+                }
+                performInsert(map,null);
+                tracker.noteInsert();
+        }
+
+
+        /** Conditionally do analyze operation.
+        */
+        public void conditionallyAnalyzeTables()
+                throws LCFException
+        {
+                if (tracker.checkAnalyze())
+                {
+                        try
+                        {
+                                // Do the analyze
+                                analyzeTable();
+                                // Get the size of the table
+                        }
+                        finally
+                        {
+                                // For this table, we base the wait time on the number of rows in it.
+                                // Simply reanalyze every n inserts
+                                tracker.doAnalyze(60000L);
+                        }
+                }
+                if (reindexTracker.checkAnalyze())
+                {
+                        try
+                        {
+                                // Do the reindex
+                                reindexTable();
+                                // Get the size of the table
+                        }
+                        finally
+                        {
+                                // For this table, we base the wait time on the number of rows in it.
+                                // Simply reanalyze every n inserts
+                                reindexTracker.doAnalyze(REINDEX_COUNT);
+                        }
+                }
+
+        }
+
+
+        /** Analyze tracker class.
+        */
+        protected static class AnalyzeTracker
+        {
+                // Number of records to insert before we need to analyze again.
+                // After start, we wait 1000 before analyzing the first time.
+                protected long recordCount = 1000L;
+                protected boolean busy = false;
+                
+                /** Constructor.
+                */
+                public AnalyzeTracker()
+                {
+
+                }
+
+                /** Note an analyze.
+                */
+                public synchronized void doAnalyze(long repeatCount)
+                {
+                        recordCount = repeatCount;
+                        busy = false;
+                }
+
+                public synchronized void noteInsert(int count)
+                {
+                        if (recordCount >= (long)count)
+                                recordCount -= (long)count;
+                        else
+                                recordCount = 0L;
+                }
+
+                /** Note an insert */
+                public synchronized void noteInsert()
+                {
+                        if (recordCount > 0L)
+                                recordCount--;
+                }
+                
+                /** Prepare to insert/delete a record, and see if analyze is required.
+                */
+                public synchronized boolean checkAnalyze()
+                {
+                        if (busy)
+                                return false;
+                        busy = (recordCount == 0L);
+                        return busy;
+                }
 
 
-	}
+        }
 
 
 

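The index-management block that replaces the old addTableIndex() calls above follows the same shape in every install() method touched by this revision: describe the indexes the table should have, walk the indexes it actually has, and reconcile the two. The sketch below restates that idiom in isolation; it is not part of the commit, and it assumes only the BaseTable methods (getTableIndexes, performAddIndex, performRemoveIndex) and the IndexDescription constructor already visible in the diff.

        /** Reconcile one desired index against whatever currently exists on the table.
        * A condensed sketch of the idiom used above, assuming the BaseTable API shown in this diff.
        */
        protected void reconcileSingleIndex(boolean unique, String[] desiredColumns)
                throws LCFException
        {
                IndexDescription desired = new IndexDescription(unique, desiredColumns);

                // Walk the indexes that exist right now.
                Map indexes = getTableIndexes(null, null);
                Iterator iter = indexes.keySet().iterator();
                while (iter.hasNext())
                {
                        String indexName = (String)iter.next();
                        IndexDescription id = (IndexDescription)indexes.get(indexName);
                        if (desired != null && id.equals(desired))
                                desired = null;                  // already present; keep it
                        else if (indexName.indexOf("_pkey") == -1)
                                performRemoveIndex(indexName);   // stray index; drop it (never the primary key)
                }

                // Whatever is still non-null was missing, so create it.
                if (desired != null)
                        performAddIndex(null, desired);
        }

Because this reconciliation only creates what is missing and drops what should not be there, the surrounding while (true) { ... break; } loop can simply re-run the whole install() body if an upgrade step ever needs a retry, without the explicit beginTransaction/signalRollback/endTransaction handling the old code carried.
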
Modified: incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/HopFilterManager.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/HopFilterManager.java?rev=911418&r1=911417&r2=911418&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/HopFilterManager.java (original)
+++ incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/HopFilterManager.java Thu Feb 18 14:31:31 2010
@@ -28,171 +28,180 @@
 */
 public class HopFilterManager extends org.apache.lcf.core.database.BaseTable
 {
-	public static final String _rcsid = "@(#)$Id$";
+        public static final String _rcsid = "@(#)$Id$";
 
-	// Schema
-	public final static String ownerIDField = "ownerid";
-	public final static String linkTypeField = "linktype";
-	public final static String maxHopsField = "maxhops";
-
-	/** Constructor.
-	*@param threadContext is the thread context.
-	*@param database is the database instance.
-	*/
-	public HopFilterManager(IThreadContext threadContext, IDBInterface database)
-		throws LCFException
-	{
-		super(database,"jobhopfilters");
-	}
-
-	/** Install or upgrade.
-	*@param ownerTable is the name of the table that owns this one.
-	*@param owningTablePrimaryKey is the primary key of the owning table.
-	*/
-	public void install(String ownerTable, String owningTablePrimaryKey)
-		throws LCFException
-	{
-		beginTransaction();
-		try
-		{
-			Map existing = getTableSchema(null,null);
-			if (existing == null)
-			{
-				HashMap map = new HashMap();
-				map.put(ownerIDField,new ColumnDescription("BIGINT",false,false,ownerTable,owningTablePrimaryKey,false));
-				// Null link types are NOT allowed here.  The restrictions can only be made on a real link type.
-				map.put(linkTypeField,new ColumnDescription("VARCHAR(255)",false,false,null,null,false));
-				map.put(maxHopsField,new ColumnDescription("BIGINT",false,false,null,null,false));
-				performCreate(map,null);
-
-				ArrayList list = new ArrayList();
-				list.add(ownerIDField);
-				list.add(linkTypeField);
-				addTableIndex(true,list);
-			}
-		}
-		catch (LCFException e)
-		{
-			signalRollback();
-			throw e;
-		}
-		catch (Error e)
-		{
-			signalRollback();
-			throw e;
-		}
-		finally
-		{
-			endTransaction();
-		}
-	}
-
-	/** Uninstall.
-	*/
-	public void deinstall()
-		throws LCFException
-	{
-		performDrop(null);
-	}
-
-	/** Read rows for a given owner id.
-	*@param id is the owner id.
-	*@return a map of link type to max hop count (as a Long).
-	*/
-	public Map readRows(Long id)
-		throws LCFException
-	{
-		ArrayList list = new ArrayList();
-		list.add(id);
-		IResultSet set = performQuery("SELECT "+linkTypeField+","+maxHopsField+" FROM "+getTableName()+" WHERE "+ownerIDField+"=?",list,
-			null,null);
-		Map rval = new HashMap();
-		if (set.getRowCount() == 0)
-			return rval;
-		int i = 0;
-		while (i < set.getRowCount())
-		{
-			IResultRow row = set.getRow(i);
-			String linkType = (String)row.getValue(linkTypeField);
-			Long max = (Long)row.getValue(maxHopsField);
-			rval.put(linkType,max);
-			i++;
-		}
-		return rval;
-	}
-
-	/** Fill in a set of filters corresponding to a set of owner id's.
-	*@param returnValues is a map keyed by ownerID, with value of JobDescription.
-	*@param ownerIDList is the list of owner id's.
-	*@param ownerIDParams is the corresponding set of owner id parameters.
-	*/
-	public void getRows(Map returnValues, String ownerIDList, ArrayList ownerIDParams)
-		throws LCFException
-	{
-		IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+ownerIDField+" IN ("+ownerIDList+")",ownerIDParams,
-			null,null);
-		int i = 0;
-		while (i < set.getRowCount())
-		{
-			IResultRow row = set.getRow(i);
-			Long ownerID = (Long)row.getValue(ownerIDField);
-			String linkType = (String)row.getValue(linkTypeField);
-			Long maxHops = (Long)row.getValue(maxHopsField);
-			((JobDescription)returnValues.get(ownerID)).addHopCountFilter(linkType,maxHops);
-			i++;
-		}
-	}
-
-	/** Write a filter list into the database.
-	*@param ownerID is the owning identifier.
-	*@param list is the job description to write hopcount filters for.
-	*/
-	public void writeRows(Long ownerID, IJobDescription list)
-		throws LCFException
-	{
-		beginTransaction();
-		try
-		{
-			int i = 0;
-			HashMap map = new HashMap();
-			Map filters = list.getHopCountFilters();
-			Iterator iter = filters.keySet().iterator();
-			while (iter.hasNext())
-			{
-				String linkType = (String)iter.next();
-				Long maxHops = (Long)filters.get(linkType);
-				map.clear();
-				map.put(linkTypeField,linkType);
-				map.put(maxHopsField,maxHops);
-				map.put(ownerIDField,ownerID);
-				performInsert(map,null);
-			}
-		}
-		catch (LCFException e)
-		{
-			signalRollback();
-			throw e;
-		}
-		catch (Error e)
-		{
-			signalRollback();
-			throw e;
-		}
-		finally
-		{
-			endTransaction();
-		}
-	}
-
-	/** Delete rows.
-	*@param ownerID is the owner whose rows to delete.
-	*/
-	public void deleteRows(Long ownerID)
-		throws LCFException
-	{
-		ArrayList list = new ArrayList();
-		list.add(ownerID);
-		performDelete("WHERE "+ownerIDField+"=?",list,null);
-	}
+        // Schema
+        public final static String ownerIDField = "ownerid";
+        public final static String linkTypeField = "linktype";
+        public final static String maxHopsField = "maxhops";
+
+        /** Constructor.
+        *@param threadContext is the thread context.
+        *@param database is the database instance.
+        */
+        public HopFilterManager(IThreadContext threadContext, IDBInterface database)
+                throws LCFException
+        {
+                super(database,"jobhopfilters");
+        }
+
+        /** Install or upgrade.
+        *@param ownerTable is the name of the table that owns this one.
+        *@param owningTablePrimaryKey is the primary key of the owning table.
+        */
+        public void install(String ownerTable, String owningTablePrimaryKey)
+                throws LCFException
+        {
+                // Standard practice: outer loop
+                while (true)
+                {
+                        Map existing = getTableSchema(null,null);
+                        if (existing == null)
+                        {
+                                HashMap map = new HashMap();
+                                map.put(ownerIDField,new ColumnDescription("BIGINT",false,false,ownerTable,owningTablePrimaryKey,false));
+                                // Null link types are NOT allowed here.  The restrictions can only be made on a real link type.
+                                map.put(linkTypeField,new ColumnDescription("VARCHAR(255)",false,false,null,null,false));
+                                map.put(maxHopsField,new ColumnDescription("BIGINT",false,false,null,null,false));
+                                performCreate(map,null);
+                        }
+                        else
+                        {
+                                // Upgrade code goes here, as needed
+                        }
+                        
+                        // Index management
+                        IndexDescription ownerIndex = new IndexDescription(true,new String[]{ownerIDField,linkTypeField});
+                        
+                        // Get rid of indexes that shouldn't be there
+                        Map indexes = getTableIndexes(null,null);
+                        Iterator iter = indexes.keySet().iterator();
+                        while (iter.hasNext())
+                        {
+                                String indexName = (String)iter.next();
+                                IndexDescription id = (IndexDescription)indexes.get(indexName);
+                            
+                                if (ownerIndex != null && id.equals(ownerIndex))
+                                        ownerIndex = null;
+                                else if (indexName.indexOf("_pkey") == -1)
+                                        // This index shouldn't be here; drop it
+                                        performRemoveIndex(indexName);
+                        }
+
+                        // Add the ones we didn't find
+                        if (ownerIndex != null)
+                                performAddIndex(null,ownerIndex);
+                        
+                        break;
+                }
+        }
+
+        /** Uninstall.
+        */
+        public void deinstall()
+                throws LCFException
+        {
+                performDrop(null);
+        }
+
+        /** Read rows for a given owner id.
+        *@param id is the owner id.
+        *@return a map of link type to max hop count (as a Long).
+        */
+        public Map readRows(Long id)
+                throws LCFException
+        {
+                ArrayList list = new ArrayList();
+                list.add(id);
+                IResultSet set = performQuery("SELECT "+linkTypeField+","+maxHopsField+" FROM "+getTableName()+" WHERE "+ownerIDField+"=?",list,
+                        null,null);
+                Map rval = new HashMap();
+                if (set.getRowCount() == 0)
+                        return rval;
+                int i = 0;
+                while (i < set.getRowCount())
+                {
+                        IResultRow row = set.getRow(i);
+                        String linkType = (String)row.getValue(linkTypeField);
+                        Long max = (Long)row.getValue(maxHopsField);
+                        rval.put(linkType,max);
+                        i++;
+                }
+                return rval;
+        }
+
+        /** Fill in a set of filters corresponding to a set of owner id's.
+        *@param returnValues is a map keyed by ownerID, with value of JobDescription.
+        *@param ownerIDList is the list of owner id's.
+        *@param ownerIDParams is the corresponding set of owner id parameters.
+        */
+        public void getRows(Map returnValues, String ownerIDList, ArrayList ownerIDParams)
+                throws LCFException
+        {
+                IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+ownerIDField+" IN ("+ownerIDList+")",ownerIDParams,
+                        null,null);
+                int i = 0;
+                while (i < set.getRowCount())
+                {
+                        IResultRow row = set.getRow(i);
+                        Long ownerID = (Long)row.getValue(ownerIDField);
+                        String linkType = (String)row.getValue(linkTypeField);
+                        Long maxHops = (Long)row.getValue(maxHopsField);
+                        ((JobDescription)returnValues.get(ownerID)).addHopCountFilter(linkType,maxHops);
+                        i++;
+                }
+        }
+
+        /** Write a filter list into the database.
+        *@param ownerID is the owning identifier.
+        *@param list is the job description to write hopcount filters for.
+        */
+        public void writeRows(Long ownerID, IJobDescription list)
+                throws LCFException
+        {
+                beginTransaction();
+                try
+                {
+                        int i = 0;
+                        HashMap map = new HashMap();
+                        Map filters = list.getHopCountFilters();
+                        Iterator iter = filters.keySet().iterator();
+                        while (iter.hasNext())
+                        {
+                                String linkType = (String)iter.next();
+                                Long maxHops = (Long)filters.get(linkType);
+                                map.clear();
+                                map.put(linkTypeField,linkType);
+                                map.put(maxHopsField,maxHops);
+                                map.put(ownerIDField,ownerID);
+                                performInsert(map,null);
+                        }
+                }
+                catch (LCFException e)
+                {
+                        signalRollback();
+                        throw e;
+                }
+                catch (Error e)
+                {
+                        signalRollback();
+                        throw e;
+                }
+                finally
+                {
+                        endTransaction();
+                }
+        }
+
+        /** Delete rows.
+        *@param ownerID is the owner whose rows to delete.
+        */
+        public void deleteRows(Long ownerID)
+                throws LCFException
+        {
+                ArrayList list = new ArrayList();
+                list.add(ownerID);
+                performDelete("WHERE "+ownerIDField+"=?",list,null);
+        }
 
 }

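HopFilterManager.readRows() returns the hop-count filters for one owning job as a map from link type to maximum hop count (a Long). The fragment below is a hypothetical usage sketch, not code from this commit; the HopFilterManager constructor and readRows() signature are taken from the diff above, while the surrounding IThreadContext/IDBInterface setup is assumed to be supplied by the framework.

        /** Hypothetical caller: print the hop-count filters configured for one job.
        * The thread context and database handle are assumed to come from the framework.
        */
        public static void printHopFilters(IThreadContext threadContext, IDBInterface database, Long jobID)
                throws LCFException
        {
                HopFilterManager manager = new HopFilterManager(threadContext, database);
                Map filters = manager.readRows(jobID);
                Iterator iter = filters.keySet().iterator();
                while (iter.hasNext())
                {
                        String linkType = (String)iter.next();
                        Long maxHops = (Long)filters.get(linkType);
                        System.out.println(linkType + ": at most " + maxHops + " hops");
                }
        }
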
Modified: incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/JobQueue.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/JobQueue.java?rev=911418&r1=911417&r2=911418&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/JobQueue.java (original)
+++ incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/JobQueue.java Thu Feb 18 14:31:31 2010
@@ -147,95 +147,36 @@
         public void install(String jobsTable, String jobsColumn)
                 throws LCFException
         {
-                // It is possible that we will fail to properly create the unique index, so structure the code to retry after fixup should this occur...
+                // Standard practice to use outer loop to allow retry in case of upgrade.
                 while (true)
                 {
                         // Handle schema
-                        beginTransaction();
-                        try
+                        Map existing = getTableSchema(null,null);
+                        if (existing == null)
                         {
-                                Map existing = getTableSchema(null,null);
-                                if (existing == null)
-                                {
-                                        HashMap map = new HashMap();
-                                        map.put(idField,new ColumnDescription("BIGINT",true,false,null,null,false));
-                                        map.put(jobIDField,new ColumnDescription("BIGINT",false,false,jobsTable,jobsColumn,false));
-                                        // this is the local document identifier.
-                                        map.put(docHashField,new ColumnDescription("VARCHAR(40)",false,false,null,null,false));
-                                        map.put(docIDField,new ColumnDescription("LONGTEXT",false,false,null,null,false));
-                                        map.put(checkTimeField,new ColumnDescription("BIGINT",false,true,null,null,false));
-                                        map.put(failTimeField,new ColumnDescription("BIGINT",false,true,null,null,false));
-                                        map.put(failCountField,new ColumnDescription("BIGINT",false,true,null,null,false));
-                                        map.put(statusField,new ColumnDescription("CHAR(1)",false,false,null,null,false));
-                                        map.put(isSeedField,new ColumnDescription("CHAR(1)",false,true,null,null,false));
-                                        map.put(docPriorityField,new ColumnDescription("FLOAT",false,true,null,null,false));
-                                        map.put(prioritySetField,new ColumnDescription("BIGINT",false,true,null,null,false));
-                                        map.put(checkActionField,new ColumnDescription("CHAR(1)",false,true,null,null,false));
-                                        performCreate(map,null);
-                                }
-                                else
-                                {
-                                        
-                                        if (existing.get(failTimeField) == null)
-                                        {
-                                                // Add the fail time field to the table
-                                                HashMap map = new HashMap();
-                                                map.put(failTimeField,new ColumnDescription("BIGINT",false,true,null,null,false));
-                                                map.put(failCountField,new ColumnDescription("BIGINT",false,true,null,null,false));
-                                                performAlter(map,null,null,null);
-                                        }
-                                        if (existing.get(isSeedField) == null)
-                                        {
-                                                // Add the isSeed field to the table
-                                                HashMap map = new HashMap();
-                                                map.put(isSeedField,new ColumnDescription("CHAR(1)",false,true,null,null,false));
-                                                performAlter(map,null,null,null);
-                                        }
-                                        
-                                        if (existing.get(docPriorityField) == null)
-                                        {
-                                                // Add document priority
-                                                HashMap map = new HashMap();
-                                                map.put(docPriorityField,new ColumnDescription("FLOAT",false,true,null,null,false));
-                                                map.put(prioritySetField,new ColumnDescription("BIGINT",false,true,null,null,false));
-                                                performAlter(map,null,null,null);
-                                                map.clear();
-                                                map.put(docPriorityField,new Double(1.0));
-                                                map.put(prioritySetField,new Long(0L));
-                                                performUpdate(map,"",null,null);
-                                        }
-                                        
-                                        if (existing.get(checkActionField) == null)
-                                        {
-                                                HashMap map = new HashMap();
-                                                map.put(checkActionField,new ColumnDescription("CHAR(1)",false,true,null,null,false));
-                                                performAlter(map,null,null,null);
-                                                
-                                        }
-                                        
-                                        if (existing.get("priority") != null)
-                                        {
-                                                // Get rid of old priority column.
-                                                ArrayList list = new ArrayList();
-                                                list.add("priority");
-                                                performAlter(null,null,list,null);
-                                        }
-                                }
-                        }
-                        catch (LCFException e)
-                        {
-                                signalRollback();
-                                throw e;
+                                HashMap map = new HashMap();
+                                map.put(idField,new ColumnDescription("BIGINT",true,false,null,null,false));
+                                map.put(jobIDField,new ColumnDescription("BIGINT",false,false,jobsTable,jobsColumn,false));
+                                // this is the local document identifier.
+                                map.put(docHashField,new ColumnDescription("VARCHAR(40)",false,false,null,null,false));
+                                map.put(docIDField,new ColumnDescription("LONGTEXT",false,false,null,null,false));
+                                map.put(checkTimeField,new ColumnDescription("BIGINT",false,true,null,null,false));
+                                map.put(failTimeField,new ColumnDescription("BIGINT",false,true,null,null,false));
+                                map.put(failCountField,new ColumnDescription("BIGINT",false,true,null,null,false));
+                                map.put(statusField,new ColumnDescription("CHAR(1)",false,false,null,null,false));
+                                map.put(isSeedField,new ColumnDescription("CHAR(1)",false,true,null,null,false));
+                                map.put(docPriorityField,new ColumnDescription("FLOAT",false,true,null,null,false));
+                                map.put(prioritySetField,new ColumnDescription("BIGINT",false,true,null,null,false));
+                                map.put(checkActionField,new ColumnDescription("CHAR(1)",false,true,null,null,false));
+                                performCreate(map,null);
                         }
-                        catch (Error e)
-                        {
-                                signalRollback();
-                                throw e;
-                        }
-                        finally
+                        else
                         {
-                                endTransaction();
+                                // Upgrade code goes here, if needed
                         }
+
+                        // Secondary table installation
+                        prereqEventManager.install(getTableName(),idField);
                         
                         // Handle indexes
                         IndexDescription uniqueIndex = new IndexDescription(true,new String[]{docHashField,jobIDField});
@@ -300,98 +241,13 @@
                                 performAddIndex(null,docpriorityIndex);
 
                         if (uniqueIndex != null)
-                        {
-                                // This is the primary unique constraint on the table.  Only one row allowed per given docid and jobid.
-                                // This create can fail
-                                try
-                                {
-                                        performAddIndex(null,uniqueIndex);
-                                }
-                                catch (LCFException e)
-                                {
-                                        if (e.getMessage().indexOf("could not create unique index") == -1)
-                                                throw e;
-                                        removeDuplicates();
-                                        continue;
-                                }
-                        }
+                                performAddIndex(null,uniqueIndex);
 
-                        // Secondary table installation
-                        prereqEventManager.install(getTableName(),idField);
 
                         break;
                 }
         }
         
-        /** Remove duplicates, as part of upgrade */
-        protected void removeDuplicates()
-                throws LCFException
-        {
-                // If we get here it means we could not create the unique index on this table.
-                // We therefore need to remove duplicate rows, finish the job of creating the index, and try again.
-                        
-                Logging.jobs.warn("Jobqueue has duplicate jobid,dochash pairs!  Cleaning up...");
-                
-                // First, create a temporary non-unique index that we intend to remove at the end of this process.  We need this index in order to be able to
-                // order retrieval of rows by the proposed key order.
-                performAddIndex("temp_index_jobqueue",new IndexDescription(false,new String[]{jobIDField,docHashField}));
-                        
-                // The fastest way to eliminate duplicates is to read rows in sorted order, and delete those that are duplicates.  The index created above
-                // will be used and will guarantee that we don't use excessive postgresql server memory.  A client-side filter will be used to eliminate results
-                // that are not duplicates, which should prevent unbounded client memory usage as well.
-                
-                // Count the rows first
-                IResultSet countSet = performQuery("SELECT COUNT(*) AS countvar FROM "+getTableName(),null,null,null);
-                IResultRow countRow = countSet.getRow(0);
-                int count;
-                try
-                {
-                        count = Integer.parseInt(countRow.getValue("countvar").toString());
-                }
-                catch (NumberFormatException e)
-                {
-                        throw new LCFException(e.getMessage(),e);
-                }
-
-                // Now, amass a list of duplicates
-                ArrayList duplicateList = new ArrayList();
-                DuplicateFinder duplicateFinder = new DuplicateFinder();
-                int j = 0;
-                while (j < count)
-                {
-
-                        IResultSet resultSet = getDBInterface().performQuery("SELECT "+idField+","+jobIDField+","+docHashField+" FROM "+getTableName()+
-                                " ORDER BY "+jobIDField+" ASC,"+docHashField+" ASC OFFSET "+Integer.toString(j)+" LIMIT 10000",null,null,null,-1,new DuplicateFinder());
-                        
-                        int i = 0;
-                        while (i < resultSet.getRowCount())
-                        {
-                                IResultRow row = resultSet.getRow(i++);
-                                Long id = (Long)row.getValue(idField);
-                                Logging.jobs.warn("Duplicate entry detected in jobqueue table, id="+id);
-                                duplicateList.add(id);
-                        }
-                        
-                        j += 10000;
-                }
-                
-                // Go through the duplicatelist, and remove the duplicates
-                j = 0;
-                while (j < duplicateList.size())
-                {
-			Long id = (Long)duplicateList.get(j++);
-
-                        ArrayList list = new ArrayList();
-                        list.add(id);
-                        performDelete("WHERE "+idField+"=?",list,null);
-                }
-                        
-                // Drop the temporary index
-                performRemoveIndex("temp_index_jobqueue");
-                        
-                Logging.jobs.warn("Cleanup of jobqueue duplicate jobid,docid pairs completed.");
-        }
-
         /** Analyze job tables due to major event */
         public void unconditionallyAnalyzeTables()
                 throws LCFException

Modified: incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/Jobs.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/Jobs.java?rev=911418&r1=911417&r2=911418&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/Jobs.java (original)
+++ incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/Jobs.java Thu Feb 18 14:31:31 2010
@@ -194,8 +194,8 @@
         public void install(String outputTableName, String outputNameField, String connectionTableName, String connectionNameField)
                 throws LCFException
         {
-                beginTransaction();
-                try
+                // Standard practice: Have a loop around everything, in case upgrade needs it.
+                while (true)
                 {
                         Map existing = getTableSchema(null,null);
                         if (existing == null)
@@ -223,100 +223,40 @@
                                 map.put(reseedTimeField,new ColumnDescription("BIGINT",false,true,null,null,false));
                                 map.put(hopcountModeField,new ColumnDescription("CHAR(1)",false,true,null,null,false));
                                 performCreate(map,null);
-
-                                // Set up index
-                                ArrayList list = new ArrayList();
-                                list.add(statusField);
-                                addTableIndex(false,list);
                         }
                         else
                         {
-                                // Need to add proper upgrade for: this.outputNameField
-                                // MHL
-                                // Here's a temporary bit o' code that will work ONLY if there are no actual jobs defined...
-                                if (existing.get(this.outputNameField) == null)
-                                {
-                                        // Upgrade: add on output name field
-                                        HashMap map = new HashMap();
-                                        map.put(this.outputNameField,new ColumnDescription("VARCHAR(32)",false,false,outputTableName,outputNameField,false));
-                                        performAlter(map,null,null,null);
-                                }
-                                
-                                if (existing.get(outputSpecField) == null)
-                                {
-                                        // Upgrade: add on document spec field
-                                        HashMap map = new HashMap();
-                                        map.put(outputSpecField,new ColumnDescription("LONGTEXT",false,true,null,null,false));
-                                        performAlter(map,null,null,null);
-                                }
-                                if (existing.get("documenttemplate") != null)
-                                {
-                                        // Upgrade; remove document template field
-                                        ArrayList list = new ArrayList();
-                                        list.add("documenttemplate");
-                                        performAlter(null,null,list,null);
-                                }
-                                if (existing.get(reseedIntervalField) == null)
-                                {
-                                        // Upgrade; add on reseed interval and time fields
-                                        HashMap map = new HashMap();
-                                        map.put(reseedIntervalField,new ColumnDescription("BIGINT",false,true,null,null,false));
-                                        map.put(reseedTimeField,new ColumnDescription("BIGINT",false,true,null,null,false));
-                                        performAlter(map,null,null,null);
-                                }
-                                if (existing.get(hopcountModeField) == null)
-                                {
-                                        HashMap map = new HashMap();
-                                        map.put(hopcountModeField,new ColumnDescription("CHAR(1)",false,true,null,null,false));
-                                        performAlter(map,null,null,null);
-                                }
-                                if (existing.get(expirationField) == null)
-                                {
-                                        HashMap map = new HashMap();
-                                        map.put(expirationField,new ColumnDescription("BIGINT",false,true,null,null,false));
-                                        performAlter(map,null,null,null);
-                                }
-                                ColumnDescription cd = (ColumnDescription)existing.get(intervalField);
-                                if (cd.getIsNull() == false)
-                                {
-                                        HashMap map = new HashMap();
-                                        map.put(intervalField,new ColumnDescription("BIGINT",false,true,null,null,false));
-                                        performAlter(null,map,null,null);
-                                }
-                                if (existing.get("crawltype") != null)
-                                {
-                                        // Upgrade: get rid of crawltype field
-                                        ArrayList list = new ArrayList();
-                                        list.add("crawltype");
-                                        performAlter(null,null,list,null);
-                                }
-                                if (existing.get("throttle") != null)
-                                {
-                                        ArrayList list = new ArrayList();
-                                        list.add("throttle");
-                                        performAlter(null,null,list,null);
-                                }
+                                // Do any needed upgrades
                         }
 
-                        // If the table "jobcollections" is there, delete it.
-                        // MHL
-                        
+                        // Handle related tables
                         scheduleManager.install(getTableName(),idField);
                         hopFilterManager.install(getTableName(),idField);
-                }
-                catch (LCFException e)
-                {
-                        signalRollback();
-                        throw e;
-                }
-                catch (Error e)
-                {
-                        signalRollback();
-                        throw e;
-                }
-                finally
-                {
-                        endTransaction();
+
+                        // Index management
+                        IndexDescription statusIndex = new IndexDescription(false,new String[]{statusField});
+                        
+                        // Get rid of indexes that shouldn't be there
+                        Map indexes = getTableIndexes(null,null);
+                        Iterator iter = indexes.keySet().iterator();
+                        while (iter.hasNext())
+                        {
+                                String indexName = (String)iter.next();
+                                IndexDescription id = (IndexDescription)indexes.get(indexName);
+                            
+                                if (statusIndex != null && id.equals(statusIndex))
+                                        statusIndex = null;
+                                else if (indexName.indexOf("_pkey") == -1)
+                                        // This index shouldn't be here; drop it
+                                        performRemoveIndex(indexName);
+                        }
+
+                        // Add the ones we didn't find
+                        if (statusIndex != null)
+                                performAddIndex(null,statusIndex);
+
+                        break;
+
                 }
         }
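The hunk above settles Jobs.install() into the shape this commit applies across the framework tables: the old column-by-column upgrade ALTERs and the enclosing transaction are dropped, and the desired indexes are instead reconciled against whatever the database currently reports, removing any unexpected non-primary-key index and adding whatever is still missing. The following stand-alone sketch illustrates only that reconcile step; it models indexes as plain column lists rather than the LCF IndexDescription/BaseTable classes, and every name in it (desired, dropIndex, createIndex, and so on) is illustrative, not project API.

import java.util.*;

// Illustrative sketch of the drop-unexpected / add-missing index reconciliation
// pattern introduced in this commit.  Indexes are modeled as column-name lists
// keyed by index name; dropIndex/createIndex stand in for the real
// performRemoveIndex/performAddIndex calls and just print what they would do.
public class IndexReconcileSketch
{
  public static void main(String[] args)
  {
    // Indexes we want the table to end up with (hypothetical names/columns).
    Map<String,List<String>> desired = new HashMap<String,List<String>>();
    desired.put("status_idx", Arrays.asList("status"));

    // Indexes the database currently reports (one stale, one primary key).
    Map<String,List<String>> existing = new HashMap<String,List<String>>();
    existing.put("jobs_pkey", Arrays.asList("id"));
    existing.put("old_priority_idx", Arrays.asList("priority"));

    reconcile(desired, existing);
  }

  static void reconcile(Map<String,List<String>> desired, Map<String,List<String>> existing)
  {
    Map<String,List<String>> toAdd = new HashMap<String,List<String>>(desired);
    for (Map.Entry<String,List<String>> e : existing.entrySet())
    {
      String name = e.getKey();
      String match = findMatch(toAdd, e.getValue());
      if (match != null)
        toAdd.remove(match);           // already present; nothing to do
      else if (name.indexOf("_pkey") == -1)
        dropIndex(name);               // unexpected non-primary-key index
    }
    for (Map.Entry<String,List<String>> e : toAdd.entrySet())
      createIndex(e.getKey(), e.getValue());   // still missing; create it
  }

  static String findMatch(Map<String,List<String>> wanted, List<String> columns)
  {
    for (Map.Entry<String,List<String>> e : wanted.entrySet())
      if (e.getValue().equals(columns))
        return e.getKey();
    return null;
  }

  static void dropIndex(String name)
  {
    System.out.println("DROP INDEX " + name);
  }

  static void createIndex(String name, List<String> columns)
  {
    System.out.println("CREATE INDEX " + name + " ON (" + columns + ")");
  }
}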
 

Modified: incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/PrereqEventManager.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/PrereqEventManager.java?rev=911418&r1=911417&r2=911418&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/PrereqEventManager.java (original)
+++ incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/PrereqEventManager.java Thu Feb 18 14:31:31 2010
@@ -60,8 +60,8 @@
         public void install(String ownerTableName, String ownerColumn)
                 throws LCFException
         {
-                beginTransaction();
-                try
+                // Standard practice: Outer loop for upgrade support.
+                while (true)
                 {
                         Map existing = getTableSchema(null,null);
                         if (existing == null)
@@ -70,30 +70,35 @@
                                 map.put(ownerField,new ColumnDescription("BIGINT",false,false,ownerTableName,ownerColumn,false));
                                 map.put(eventNameField,new ColumnDescription("VARCHAR(255)",false,false,null,null,false));
                                 performCreate(map,null);
-                            
-                                // Indexes
-                                ArrayList list = new ArrayList();
-                                list.add(ownerField);
-                                addTableIndex(false,list);
                         }
                         else
                         {
-                                // No upgrade is possible since this table has just been introduced.
+                                // Schema upgrade goes here, when needed.
                         }
-                }
-                catch (LCFException e)
-                {
-                        signalRollback();
-                        throw e;
-                }
-                catch (Error e)
-                {
-                        signalRollback();
-                        throw e;
-                }
-                finally
-                {
-                        endTransaction();
+                        
+                        // Index management
+                        IndexDescription ownerIndex = new IndexDescription(false,new String[]{ownerField});
+                        
+                        // Get rid of indexes that shouldn't be there
+                        Map indexes = getTableIndexes(null,null);
+                        Iterator iter = indexes.keySet().iterator();
+                        while (iter.hasNext())
+                        {
+                                String indexName = (String)iter.next();
+                                IndexDescription id = (IndexDescription)indexes.get(indexName);
+                            
+                                if (ownerIndex != null && id.equals(ownerIndex))
+                                        ownerIndex = null;
+                                else if (indexName.indexOf("_pkey") == -1)
+                                        // This index shouldn't be here; drop it
+                                        performRemoveIndex(indexName);
+                        }
+
+                        // Add the ones we didn't find
+                        if (ownerIndex != null)
+                                performAddIndex(null,ownerIndex);
+
+                        break;
                 }
         }
 

Modified: incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/ScheduleManager.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/ScheduleManager.java?rev=911418&r1=911417&r2=911418&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/ScheduleManager.java (original)
+++ incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/ScheduleManager.java Thu Feb 18 14:31:31 2010
@@ -28,264 +28,274 @@
 */
 public class ScheduleManager extends org.apache.lcf.core.database.BaseTable
 {
-	public static final String _rcsid = "@(#)$Id$";
+        public static final String _rcsid = "@(#)$Id$";
 
-	// Schema
-	public final static String ownerIDField = "ownerid";
-	public final static String ordinalField = "ordinal";
-	public final static String dayOfWeekField = "dayofweek";
-	public final static String dayOfMonthField = "dayofmonth";
-	public final static String monthOfYearField = "monthofyear";
-	public final static String yearField = "year";
-	public final static String hourOfDayField = "hourofday";
-	public final static String minutesOfHourField = "minutesofhour";
-	public final static String timezoneField = "timezone";
-	public final static String windowDurationField = "windowlength";
-
-
-	/** Constructor.
-	*@param threadContext is the thread context.
-	*@param database is the database instance.
-	*/
-	public ScheduleManager(IThreadContext threadContext, IDBInterface database)
-		throws LCFException
-	{
-		super(database,"schedules");
-	}
-
-	/** Install or upgrade.
-	*@param ownerTable is the name of the table that owns this one.
-	*@param owningTablePrimaryKey is the primary key of the owning table.
-	*/
-	public void install(String ownerTable, String owningTablePrimaryKey)
-		throws LCFException
-	{
-		beginTransaction();
-		try
-		{
-			Map existing = getTableSchema(null,null);
-			if (existing == null)
-			{
-				HashMap map = new HashMap();
-				map.put(ownerIDField,new ColumnDescription("BIGINT",false,false,ownerTable,owningTablePrimaryKey,false));
-				map.put(ordinalField,new ColumnDescription("BIGINT",false,false,null,null,false));
-				map.put(dayOfWeekField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
-				map.put(dayOfMonthField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
-				map.put(monthOfYearField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
-				map.put(yearField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
-				map.put(hourOfDayField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
-				map.put(minutesOfHourField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
-				map.put(timezoneField,new ColumnDescription("VARCHAR(32)",false,true,null,null,false));
-				map.put(windowDurationField,new ColumnDescription("BIGINT",false,true,null,null,false));
-				performCreate(map,null);
-
-				ArrayList list = new ArrayList();
-				list.add(ownerIDField);
-				addTableIndex(false,list);
-			}
-		}
-		catch (LCFException e)
-		{
-			signalRollback();
-			throw e;
-		}
-		catch (Error e)
-		{
-			signalRollback();
-			throw e;
-		}
-		finally
-		{
-			endTransaction();
-		}
-	}
-
-	/** Uninstall.
-	*/
-	public void deinstall()
-		throws LCFException
-	{
-		performDrop(null);
-	}
-
-	/** Fill in a set of schedules corresponding to a set of owner id's.
-	*@param returnValues is a map keyed by ownerID, with value of JobDescription.
-	*@param ownerIDList is the list of owner id's.
-	*@param ownerIDParams is the corresponding set of owner id parameters.
-	*/
-	public void getRows(Map returnValues, String ownerIDList, ArrayList ownerIDParams)
-		throws LCFException
-	{
-		IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+ownerIDField+" IN ("+ownerIDList+") ORDER BY "+ordinalField+" ASC",ownerIDParams,
-			null,null);
-		int i = 0;
-		while (i < set.getRowCount())
-		{
-			IResultRow row = set.getRow(i);
-			Long ownerID = (Long)row.getValue(ownerIDField);
-			ScheduleRecord sr = new ScheduleRecord(stringToEnumeratedValue((String)row.getValue(dayOfWeekField)),
-				stringToEnumeratedValue((String)row.getValue(monthOfYearField)),
-				stringToEnumeratedValue((String)row.getValue(dayOfMonthField)),
-				stringToEnumeratedValue((String)row.getValue(yearField)),
-				stringToEnumeratedValue((String)row.getValue(hourOfDayField)),
-				stringToEnumeratedValue((String)row.getValue(minutesOfHourField)),
-				(String)row.getValue(timezoneField),
-				(Long)row.getValue(windowDurationField));
-			((JobDescription)returnValues.get(ownerID)).addScheduleRecord(sr);
-			i++;
-		}
-	}
-
-	/** Fill in a set of schedules corresponding to a set of owner id's.
-	*@param returnValues is a map keyed by ownerID, with a value that is an ArrayList of ScheduleRecord objects.
-	*@param ownerIDList is the list of owner id's.
-	*@param ownerIDParams is the corresponding set of owner id parameters.
-	*/
-	public void getRowsAlternate(Map returnValues, String ownerIDList, ArrayList ownerIDParams)
-		throws LCFException
-	{
-		IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+ownerIDField+" IN ("+ownerIDList+") ORDER BY "+ordinalField+" ASC",ownerIDParams,
-			null,null);
-		int i = 0;
-		while (i < set.getRowCount())
-		{
-			IResultRow row = set.getRow(i);
-			Long ownerID = (Long)row.getValue(ownerIDField);
-			ScheduleRecord sr = new ScheduleRecord(stringToEnumeratedValue((String)row.getValue(dayOfWeekField)),
-				stringToEnumeratedValue((String)row.getValue(monthOfYearField)),
-				stringToEnumeratedValue((String)row.getValue(dayOfMonthField)),
-				stringToEnumeratedValue((String)row.getValue(yearField)),
-				stringToEnumeratedValue((String)row.getValue(hourOfDayField)),
-				stringToEnumeratedValue((String)row.getValue(minutesOfHourField)),
-				(String)row.getValue(timezoneField),
-				(Long)row.getValue(windowDurationField));
-			ArrayList theList = (ArrayList)returnValues.get(ownerID);
-			if (theList == null)
-			{
-				theList = new ArrayList();
-				returnValues.put(ownerID,theList);
-			}
-			theList.add(sr);
-			i++;
-		}
-	}
-
-	/** Write a schedule list into the database.
-	*@param ownerID is the owning identifier.
-	*@param schedule is the schedule list.
-	*/
-	public void writeRows(Long ownerID, IJobDescription list)
-		throws LCFException
-	{
-		beginTransaction();
-		try
-		{
-			int i = 0;
-			HashMap map = new HashMap();
-			while (i < list.getScheduleRecordCount())
-			{
-				ScheduleRecord record = list.getScheduleRecord(i);
-				map.clear();
-				map.put(dayOfWeekField,enumeratedValueToString(record.getDayOfWeek()));
-				map.put(monthOfYearField,enumeratedValueToString(record.getMonthOfYear()));
-				map.put(dayOfMonthField,enumeratedValueToString(record.getDayOfMonth()));
-				map.put(yearField,enumeratedValueToString(record.getYear()));
-				map.put(hourOfDayField,enumeratedValueToString(record.getHourOfDay()));
-				map.put(minutesOfHourField,enumeratedValueToString(record.getMinutesOfHour()));
-				map.put(timezoneField,record.getTimezone());
-				map.put(windowDurationField,record.getDuration());
-				map.put(ownerIDField,ownerID);
-				map.put(ordinalField,new Long((long)i));
-				performInsert(map,null);
-				i++;
-			}
-		}
-		catch (LCFException e)
-		{
-			signalRollback();
-			throw e;
-		}
-		catch (Error e)
-		{
-			signalRollback();
-			throw e;
-		}
-		finally
-		{
-			endTransaction();
-		}
-	}
-
-	/** Delete rows.
-	*@param ownerID is the owner whose rows to delete.
-	*/
-	public void deleteRows(Long ownerID)
-		throws LCFException
-	{
-		ArrayList list = new ArrayList();
-		list.add(ownerID);
-		performDelete("WHERE "+ownerIDField+"=?",list,null);
-	}
-
-	/** Go from string to enumerated value.
-	*@param value is the input.
-	*@return the enumerated value.
-	*/
-	public static EnumeratedValues stringToEnumeratedValue(String value)
-		throws LCFException
-	{
-	    if (value == null)
-		return null;
-	    try
-	    {
-		ArrayList valStore = new ArrayList();
-		if (!value.equals("*"))
-		{
-			int curpos = 0;
-			while (true)
-			{
-				int newpos = value.indexOf(",",curpos);
-				if (newpos == -1)
-				{
-					valStore.add(new Integer(value.substring(curpos)));
-					break;
-				}
-				valStore.add(new Integer(value.substring(curpos,newpos)));
-				curpos = newpos+1;
-			}
-		}
-		return new EnumeratedValues(valStore);
-	    }
-	    catch (NumberFormatException e)
-	    {
-		throw new LCFException("Bad number: '"+value+"'",e);
-	    }
-
-	}
-
-	/** Go from enumerated value to string.
-	*@param values is the enumerated value.
-	*@return the string value.
-	*/
-	public static String enumeratedValueToString(EnumeratedValues values)
-	{
-		if (values == null)
-			return null;
-		if (values.size() == 0)
-			return "*";
-		StringBuffer rval = new StringBuffer();
-		Iterator iter = values.getValues();
-		boolean first = true;
-		while (iter.hasNext())
-		{
-			if (first)
-				first = false;
-			else
-				rval.append(',');
-			rval.append(((Integer)iter.next()).toString());
-		}
-		return rval.toString();
-	}
+        // Schema
+        public final static String ownerIDField = "ownerid";
+        public final static String ordinalField = "ordinal";
+        public final static String dayOfWeekField = "dayofweek";
+        public final static String dayOfMonthField = "dayofmonth";
+        public final static String monthOfYearField = "monthofyear";
+        public final static String yearField = "year";
+        public final static String hourOfDayField = "hourofday";
+        public final static String minutesOfHourField = "minutesofhour";
+        public final static String timezoneField = "timezone";
+        public final static String windowDurationField = "windowlength";
+
+
+        /** Constructor.
+        *@param threadContext is the thread context.
+        *@param database is the database instance.
+        */
+        public ScheduleManager(IThreadContext threadContext, IDBInterface database)
+                throws LCFException
+        {
+                super(database,"schedules");
+        }
+
+        /** Install or upgrade.
+        *@param ownerTable is the name of the table that owns this one.
+        *@param owningTablePrimaryKey is the primary key of the owning table.
+        */
+        public void install(String ownerTable, String owningTablePrimaryKey)
+                throws LCFException
+        {
+                // Standard practice: Outer loop to support upgrades
+                while (true)
+                {
+                        Map existing = getTableSchema(null,null);
+                        if (existing == null)
+                        {
+                                HashMap map = new HashMap();
+                                map.put(ownerIDField,new ColumnDescription("BIGINT",false,false,ownerTable,owningTablePrimaryKey,false));
+                                map.put(ordinalField,new ColumnDescription("BIGINT",false,false,null,null,false));
+                                map.put(dayOfWeekField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
+                                map.put(dayOfMonthField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
+                                map.put(monthOfYearField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
+                                map.put(yearField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
+                                map.put(hourOfDayField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
+                                map.put(minutesOfHourField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
+                                map.put(timezoneField,new ColumnDescription("VARCHAR(32)",false,true,null,null,false));
+                                map.put(windowDurationField,new ColumnDescription("BIGINT",false,true,null,null,false));
+                                performCreate(map,null);
+                        }
+                        else
+                        {
+                                // Upgrade code goes here, if needed.
+                        }
+                        
+                        // Index management
+                        IndexDescription ownerIndex = new IndexDescription(false,new String[]{ownerIDField});
+                        
+                        // Get rid of indexes that shouldn't be there
+                        Map indexes = getTableIndexes(null,null);
+                        Iterator iter = indexes.keySet().iterator();
+                        while (iter.hasNext())
+                        {
+                                String indexName = (String)iter.next();
+                                IndexDescription id = (IndexDescription)indexes.get(indexName);
+                            
+                                if (ownerIndex != null && id.equals(ownerIndex))
+                                        ownerIndex = null;
+                                else if (indexName.indexOf("_pkey") == -1)
+                                        // This index shouldn't be here; drop it
+                                        performRemoveIndex(indexName);
+                        }
+
+                        // Add the ones we didn't find
+                        if (ownerIndex != null)
+                                performAddIndex(null,ownerIndex);
+
+                        break;
+                }
+        }
+
+        /** Uninstall.
+        */
+        public void deinstall()
+                throws LCFException
+        {
+                performDrop(null);
+        }
+
+        /** Fill in a set of schedules corresponding to a set of owner id's.
+        *@param returnValues is a map keyed by ownerID, with value of JobDescription.
+        *@param ownerIDList is the list of owner id's.
+        *@param ownerIDParams is the corresponding set of owner id parameters.
+        */
+        public void getRows(Map returnValues, String ownerIDList, ArrayList ownerIDParams)
+                throws LCFException
+        {
+                IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+ownerIDField+" IN ("+ownerIDList+") ORDER BY "+ordinalField+" ASC",ownerIDParams,
+                        null,null);
+                int i = 0;
+                while (i < set.getRowCount())
+                {
+                        IResultRow row = set.getRow(i);
+                        Long ownerID = (Long)row.getValue(ownerIDField);
+                        ScheduleRecord sr = new ScheduleRecord(stringToEnumeratedValue((String)row.getValue(dayOfWeekField)),
+                                stringToEnumeratedValue((String)row.getValue(monthOfYearField)),
+                                stringToEnumeratedValue((String)row.getValue(dayOfMonthField)),
+                                stringToEnumeratedValue((String)row.getValue(yearField)),
+                                stringToEnumeratedValue((String)row.getValue(hourOfDayField)),
+                                stringToEnumeratedValue((String)row.getValue(minutesOfHourField)),
+                                (String)row.getValue(timezoneField),
+                                (Long)row.getValue(windowDurationField));
+                        ((JobDescription)returnValues.get(ownerID)).addScheduleRecord(sr);
+                        i++;
+                }
+        }
+
+        /** Fill in a set of schedules corresponding to a set of owner id's.
+        *@param returnValues is a map keyed by ownerID, with a value that is an ArrayList of ScheduleRecord objects.
+        *@param ownerIDList is the list of owner id's.
+        *@param ownerIDParams is the corresponding set of owner id parameters.
+        */
+        public void getRowsAlternate(Map returnValues, String ownerIDList, ArrayList ownerIDParams)
+                throws LCFException
+        {
+                IResultSet set = performQuery("SELECT * FROM "+getTableName()+" WHERE "+ownerIDField+" IN ("+ownerIDList+") ORDER BY "+ordinalField+" ASC",ownerIDParams,
+                        null,null);
+                int i = 0;
+                while (i < set.getRowCount())
+                {
+                        IResultRow row = set.getRow(i);
+                        Long ownerID = (Long)row.getValue(ownerIDField);
+                        ScheduleRecord sr = new ScheduleRecord(stringToEnumeratedValue((String)row.getValue(dayOfWeekField)),
+                                stringToEnumeratedValue((String)row.getValue(monthOfYearField)),
+                                stringToEnumeratedValue((String)row.getValue(dayOfMonthField)),
+                                stringToEnumeratedValue((String)row.getValue(yearField)),
+                                stringToEnumeratedValue((String)row.getValue(hourOfDayField)),
+                                stringToEnumeratedValue((String)row.getValue(minutesOfHourField)),
+                                (String)row.getValue(timezoneField),
+                                (Long)row.getValue(windowDurationField));
+                        ArrayList theList = (ArrayList)returnValues.get(ownerID);
+                        if (theList == null)
+                        {
+                                theList = new ArrayList();
+                                returnValues.put(ownerID,theList);
+                        }
+                        theList.add(sr);
+                        i++;
+                }
+        }
+
+        /** Write a schedule list into the database.
+        *@param ownerID is the owning identifier.
+        *@param schedule is the schedule list.
+        */
+        public void writeRows(Long ownerID, IJobDescription list)
+                throws LCFException
+        {
+                beginTransaction();
+                try
+                {
+                        int i = 0;
+                        HashMap map = new HashMap();
+                        while (i < list.getScheduleRecordCount())
+                        {
+                                ScheduleRecord record = list.getScheduleRecord(i);
+                                map.clear();
+                                map.put(dayOfWeekField,enumeratedValueToString(record.getDayOfWeek()));
+                                map.put(monthOfYearField,enumeratedValueToString(record.getMonthOfYear()));
+                                map.put(dayOfMonthField,enumeratedValueToString(record.getDayOfMonth()));
+                                map.put(yearField,enumeratedValueToString(record.getYear()));
+                                map.put(hourOfDayField,enumeratedValueToString(record.getHourOfDay()));
+                                map.put(minutesOfHourField,enumeratedValueToString(record.getMinutesOfHour()));
+                                map.put(timezoneField,record.getTimezone());
+                                map.put(windowDurationField,record.getDuration());
+                                map.put(ownerIDField,ownerID);
+                                map.put(ordinalField,new Long((long)i));
+                                performInsert(map,null);
+                                i++;
+                        }
+                }
+                catch (LCFException e)
+                {
+                        signalRollback();
+                        throw e;
+                }
+                catch (Error e)
+                {
+                        signalRollback();
+                        throw e;
+                }
+                finally
+                {
+                        endTransaction();
+                }
+        }
+
+        /** Delete rows.
+        *@param ownerID is the owner whose rows to delete.
+        */
+        public void deleteRows(Long ownerID)
+                throws LCFException
+        {
+                ArrayList list = new ArrayList();
+                list.add(ownerID);
+                performDelete("WHERE "+ownerIDField+"=?",list,null);
+        }
+
+        /** Go from string to enumerated value.
+        *@param value is the input.
+        *@return the enumerated value.
+        */
+        public static EnumeratedValues stringToEnumeratedValue(String value)
+                throws LCFException
+        {
+            if (value == null)
+                return null;
+            try
+            {
+                ArrayList valStore = new ArrayList();
+                if (!value.equals("*"))
+                {
+                        int curpos = 0;
+                        while (true)
+                        {
+                                int newpos = value.indexOf(",",curpos);
+                                if (newpos == -1)
+                                {
+                                        valStore.add(new Integer(value.substring(curpos)));
+                                        break;
+                                }
+                                valStore.add(new Integer(value.substring(curpos,newpos)));
+                                curpos = newpos+1;
+                        }
+                }
+                return new EnumeratedValues(valStore);
+            }
+            catch (NumberFormatException e)
+            {
+                throw new LCFException("Bad number: '"+value+"'",e);
+            }
+
+        }
+
+        /** Go from enumerated value to string.
+        *@param values is the enumerated value.
+        *@return the string value.
+        */
+        public static String enumeratedValueToString(EnumeratedValues values)
+        {
+                if (values == null)
+                        return null;
+                if (values.size() == 0)
+                        return "*";
+                StringBuffer rval = new StringBuffer();
+                Iterator iter = values.getValues();
+                boolean first = true;
+                while (iter.hasNext())
+                {
+                        if (first)
+                                first = false;
+                        else
+                                rval.append(',');
+                        rval.append(((Integer)iter.next()).toString());
+                }
+                return rval.toString();
+        }
 
-							
+                                                        
 }
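As the new ScheduleManager code above shows, each schedule component (day of week, month of year, hour of day, and so on) is persisted as a string: null when the component is unset, "*" when any value is allowed, and otherwise a comma-separated list of integers (see stringToEnumeratedValue and enumeratedValueToString). Below is a small self-contained sketch of that round trip using plain Integer lists instead of the LCF EnumeratedValues class; the class and helper names are illustrative only.

import java.util.*;

// Stand-alone sketch of the schedule field encoding used above:
//   null  -> component not set
//   "*"   -> any value allowed (empty restriction set)
//   "1,3" -> only the listed integer values allowed
public class ScheduleFieldCodec
{
  public static void main(String[] args)
  {
    System.out.println(parse(null));                       // null
    System.out.println(parse("*"));                        // []
    System.out.println(parse("1,3,5"));                    // [1, 3, 5]
    System.out.println(format(null));                      // null
    System.out.println(format(new ArrayList<Integer>()));  // *
    System.out.println(format(Arrays.asList(1, 3, 5)));    // 1,3,5
  }

  static List<Integer> parse(String value)
  {
    if (value == null)
      return null;
    List<Integer> result = new ArrayList<Integer>();
    if (!value.equals("*"))
    {
      for (String piece : value.split(","))
        result.add(Integer.valueOf(piece.trim()));
    }
    return result;
  }

  static String format(List<Integer> values)
  {
    if (values == null)
      return null;
    if (values.isEmpty())
      return "*";
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < values.size(); i++)
    {
      if (i > 0)
        sb.append(',');
      sb.append(values.get(i));
    }
    return sb.toString();
  }
}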

Modified: incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/repository/RepositoryConnectionManager.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/repository/RepositoryConnectionManager.java?rev=911418&r1=911417&r2=911418&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/repository/RepositoryConnectionManager.java (original)
+++ incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/repository/RepositoryConnectionManager.java Thu Feb 18 14:31:31 2010
@@ -78,8 +78,8 @@
 		// First, get the authority manager table name and name column
 		IAuthorityConnectionManager authMgr = AuthorityConnectionManagerFactory.make(threadContext);
 
-		beginTransaction();
-		try
+		// Always use a loop, and no transaction, as we may need to retry due to upgrade
+		while (true)
 		{
 			Map existing = getTableSchema(null,null);
 			if (existing == null)
@@ -97,60 +97,16 @@
 			}
 			else
 			{
-				if (existing.get(configField) == null)
-				{
-					// Add the configField column, and transfer data into it from the old configuration table
-					HashMap map = new HashMap();
-					map.put(configField,new ColumnDescription("LONGTEXT",false,true,null,null,false));
-					performAlter(map,null,null,null);
-					IResultSet set = getDBInterface().performQuery("SELECT * FROM repoconfigs",null,null,null);
-					int i = 0;
-					Map xmlMap = new HashMap();
-					while (i < set.getRowCount())
-					{
-						IResultRow row = set.getRow(i++);
-						String owner = (String)row.getValue("owner");
-						String name = (String)row.getValue("name");
-						String value = (String)row.getValue("value");
-						ConfigParams cp = (ConfigParams)xmlMap.get(owner);
-						if (cp == null)
-						{
-							cp = new ConfigParams();
-							xmlMap.put(owner,cp);
-						}
-						cp.setParameter(name,value);
-					}
-					getDBInterface().performDrop("repoconfigs",null);
-					Iterator iter = xmlMap.keySet().iterator();
-					while (iter.hasNext())
-					{
-						String owner = (String)iter.next();
-						ConfigParams cp = (ConfigParams)xmlMap.get(owner);
-						map = new HashMap();
-						ArrayList list = new ArrayList();
-						list.add(owner);
-						map.put(configField,cp.toXML());
-						performUpdate(map,"WHERE "+nameField+"=?",list,null);
-					}
-				}
+				// Upgrade code would go here, if needed.
 			}
 
+			// Install dependent tables.
 			historyManager.install(getTableName(),nameField);
 			throttleSpecManager.install(getTableName(),nameField);
-		}
-		catch (LCFException e)
-		{
-			signalRollback();
-			throw e;
-		}
-		catch (Error e)
-		{
-			signalRollback();
-			throw e;
-		}
-		finally
-		{
-			endTransaction();
+			
+			// Index management goes here.
+			
+			break;
 		}
 	}
 


