Posted to commits@manifoldcf.apache.org by kw...@apache.org on 2010/06/03 03:20:46 UTC

svn commit: r950850 - in /incubator/lcf/trunk/modules/framework: core/org/apache/lcf/core/database/ core/org/apache/lcf/core/interfaces/ pull-agent/org/apache/lcf/crawler/jobs/ pull-agent/org/apache/lcf/crawler/repository/

Author: kwright
Date: Thu Jun  3 01:20:45 2010
New Revision: 950850

URL: http://svn.apache.org/viewvc?rev=950850&view=rev
Log:
More Derby work.  Abstract away the SQL LIMIT construct, and establish a base path property for Derby databases.
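
In practice, callers stop embedding " LIMIT n" directly in their SQL; they ask the IDBInterface for the dialect-appropriate clause and also pass the limit to performQuery(), so a database whose clause is empty (Derby, below) still has its results truncated at the driver layer.  A minimal sketch of the calling pattern, with illustrative table and field names that are not part of this commit:

    int n = 100;
    IResultSet set = database.performQuery(
      "SELECT id FROM sometable WHERE status=? "+database.constructLimitClause(n),
      params,null,null,n,null);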

Modified:
    incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/BaseTable.java
    incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfaceDerby.java
    incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfaceMySQL.java
    incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfacePostgreSQL.java
    incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/Database.java
    incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/interfaces/IDBInterface.java
    incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/HopCount.java
    incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/IntrinsicLink.java
    incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/JobManager.java
    incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/JobQueue.java
    incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/Jobs.java
    incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/repository/RepositoryHistoryManager.java

Modified: incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/BaseTable.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/BaseTable.java?rev=950850&r1=950849&r2=950850&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/BaseTable.java (original)
+++ incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/BaseTable.java Thu Jun  3 01:20:45 2010
@@ -229,6 +229,21 @@ public class BaseTable
     return dbInterface.performQuery(query,params,cacheKeys,queryClass);
   }
 
+  /** Perform a general "data fetch" query, with the ability to provide a limit.
+  *@param query is the query string.
+  *@param params are the parameterized values, if needed.
+  *@param cacheKeys are the cache keys, if needed (null if no cache desired).
+  *@param queryClass is the LRU class name against which this query would be cached,
+  * or null if no LRU behavior desired.
+  *@param resultLimit is the maximum number of results desired.
+  *@return a resultset.
+  */
+  protected IResultSet performQuery(String query, ArrayList params, StringSet cacheKeys, String queryClass, int resultLimit)
+    throws LCFException
+  {
+    return dbInterface.performQuery(query,params,cacheKeys,queryClass,resultLimit,null);
+  }
+
   /** Begin a database transaction.  This method call MUST be paired with an endTransaction() call,
   * or database handles will be lost.  If the transaction should be rolled back, then signalRollback() should
   * be called before the transaction is ended.
@@ -268,6 +283,15 @@ public class BaseTable
     return CacheKeyFactory.makeTableKey(null,tableName,dbInterface.getDatabaseName());
   }
 
+  /** Construct a limit clause.
+  * This method constructs a limit clause in the proper manner for the database in question.
+  *@param limit is the limit number.
+  *@return the proper clause, with no padding spaces on either side.
+  */
+  public String constructLimitClause(int limit)
+  {
+    return dbInterface.constructLimitClause(limit);
+  }
 
   /** Quote a sql string.
   * This method quotes a sql string in the proper manner for the database in question.
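
Both BaseTable helpers simply delegate to the underlying IDBInterface.  A table subclass can then express a bounded fetch without knowing which database it runs on; for example (hypothetical subclass code, not part of this commit):

    ArrayList list = new ArrayList();
    list.add(jobID);
    IResultSet set = performQuery(
      "SELECT "+idField+" FROM "+getTableName()+" WHERE "+jobIDField+"=? "+constructLimitClause(25),
      list,null,null,25);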

Modified: incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfaceDerby.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfaceDerby.java?rev=950850&r1=950849&r2=950850&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfaceDerby.java (original)
+++ incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfaceDerby.java Thu Jun  3 01:20:45 2010
@@ -19,6 +19,7 @@
 package org.apache.lcf.core.database;
 
 import org.apache.lcf.core.interfaces.*;
+import org.apache.lcf.core.system.LCF;
 import org.apache.lcf.core.system.Logging;
 import java.util.*;
 import java.io.*;
@@ -30,6 +31,8 @@ public class DBInterfaceDerby extends Da
   protected final static String _url = "jdbc:derby:";
   protected final static String _driver = "org.apache.derby.jdbc.EmbeddedDriver";
   
+  public final static String databasePathProperty = "org.apache.lcf.derbydatabasepath";
+  
   protected String userName;
   protected String password;
   
@@ -38,10 +41,22 @@ public class DBInterfaceDerby extends Da
   // So, once we enter the serializable realm, STOP any additional transactions from doing anything at all.
   protected int serializableDepth = 0;
 
+  protected static String getFullDatabasePath(String databaseName)
+    throws LCFException
+  {
+    String path = LCF.getProperty(databasePathProperty);
+    if (path == null)
+      throw new LCFException("Derby database requires '"+databasePathProperty+"' property, containing a full path");
+    path = path.replace("\\","/");
+    if (!path.endsWith("/"))
+      path = path + "/";
+    return path + databaseName;
+  }
+  
   public DBInterfaceDerby(IThreadContext tc, String databaseName, String userName, String password)
     throws LCFException
   {
-    super(tc,_url+((databaseName==null)?"default":databaseName)+";create=true;user="+userName+";password="+password,_driver,((databaseName==null)?"default":databaseName),userName,password);
+    super(tc,_url+getFullDatabasePath((databaseName==null)?"default":databaseName)+";create=true;user="+userName+";password="+password,_driver,getFullDatabasePath((databaseName==null)?"default":databaseName),userName,password);
     cacheKey = CacheKeyFactory.makeDatabaseKey(this.databaseName);
     this.userName = userName;
     this.password = password;
@@ -756,6 +771,16 @@ public class DBInterfaceDerby extends Da
     }
   }
 
+  /** Construct a limit clause.
+  * This method constructs a limit clause in the proper manner for the database in question.
+  *@param limit is the limit number.
+  *@return the proper clause, with no padding spaces on either side.
+  */
+  public String constructLimitClause(int limit)
+  {
+    return "";
+  }
+
   /** Quote a sql string.
   * This method quotes a sql string in the proper manner for the database in question.
   *@param string is the input string.
@@ -978,6 +1003,11 @@ public class DBInterfaceDerby extends Da
       throw new LCFException("Transaction nesting error!");
   }
 
+  /** Abstract method for mapping a column name from resultset */
+  protected String mapColumnName(String rawColumnName)
+  {
+    return rawColumnName.toLowerCase();
+  }
 
 }
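
Two Derby-specific details stand out.  First, constructLimitClause() returns an empty string, so on Derby result truncation relies entirely on the resultLimit passed to performQuery() rather than on SQL syntax.  Second, the new org.apache.lcf.derbydatabasepath property must contain a full directory path; getFullDatabasePath() normalizes backslashes and a missing trailing slash before appending the database name.  A standalone sketch of that normalization, using a hypothetical property value (the real code obtains the value via LCF.getProperty()):

    String path = "C:\\lcf\\databases";     // hypothetical property value
    path = path.replace("\\","/");          // backslashes become forward slashes
    if (!path.endsWith("/"))
      path = path + "/";                    // guarantee a trailing slash
    String full = path + "dbname";          // "C:/lcf/databases/dbname"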
 

Modified: incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfaceMySQL.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfaceMySQL.java?rev=950850&r1=950849&r2=950850&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfaceMySQL.java (original)
+++ incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfaceMySQL.java Thu Jun  3 01:20:45 2010
@@ -574,6 +574,16 @@ public class DBInterfaceMySQL extends Da
     return executeQuery(query,params,cacheKeys,null,queryClass,true,maxResults,resultSpec,returnLimit);
   }
 
+  /** Construct a limit clause.
+  * This method constructs a limit clause in the proper manner for the database in question.
+  *@param limit is the limit number.
+  *@return the proper clause, with no padding spaces on either side.
+  */
+  public String constructLimitClause(int limit)
+  {
+    return "LIMIT "+Integer.toString(limit);
+  }
+
   /** Quote a sql string.
   * This method quotes a sql string in the proper manner for the database in question.
   *@param string is the input string.

Modified: incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfacePostgreSQL.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfacePostgreSQL.java?rev=950850&r1=950849&r2=950850&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfacePostgreSQL.java (original)
+++ incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/DBInterfacePostgreSQL.java Thu Jun  3 01:20:45 2010
@@ -801,6 +801,16 @@ public class DBInterfacePostgreSQL exten
     }
   }
 
+  /** Construct a limit clause.
+  * This method constructs a limit clause in the proper manner for the database in question.
+  *@param limit is the limit number.
+  *@return the proper clause, with no padding spaces on either side.
+  */
+  public String constructLimitClause(int limit)
+  {
+    return "LIMIT "+Integer.toString(limit);
+  }
+
   /** Quote a sql string.
   * This method quotes a sql string in the proper manner for the database in question.
   *@param string is the input string.

Modified: incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/Database.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/Database.java?rev=950850&r1=950849&r2=950850&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/Database.java (original)
+++ incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/database/Database.java Thu Jun  3 01:20:45 2010
@@ -104,6 +104,12 @@ public class Database
   {
   }
   
+  /** Abstract method for mapping a column name from resultset */
+  protected String mapColumnName(String rawColumnName)
+  {
+    return rawColumnName;
+  }
+  
   /** Execute arbitrary database query, and optionally cache the result.  Cached results are
   * returned for this operation if they are valid and appropriate.  Note that any cached results
   * returned were only guaranteed to be pertinent at the time the cached result was obtained; the
@@ -612,7 +618,7 @@ public class Database
             resultCols = new String[colcount];
             for (int i = 0; i < colcount; i++)
             {
-              resultCols[i] = rsmd.getColumnName(i+1);
+              resultCols[i] = mapColumnName(rsmd.getColumnName(i+1));
             }
           }
 

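The base class passes column names through unchanged; the Derby override above lower-cases them.  Derby folds unquoted identifiers to upper case, so ResultSetMetaData.getColumnName() would otherwise hand back keys like "JOBID" that no caller looks up.  Roughly:

    // Derby:   mapColumnName("JOBID") -> "jobid"   (matches the framework's lower-case field names)
    // Default: mapColumnName("jobid") -> "jobid"   (unchanged)
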
Modified: incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/interfaces/IDBInterface.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/interfaces/IDBInterface.java?rev=950850&r1=950849&r2=950850&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/interfaces/IDBInterface.java (original)
+++ incubator/lcf/trunk/modules/framework/core/org/apache/lcf/core/interfaces/IDBInterface.java Thu Jun  3 01:20:45 2010
@@ -273,6 +273,13 @@ public interface IDBInterface
     int maxResults, ResultSpecification resultSpec, ILimitChecker returnLimit)
     throws LCFException;
 
+  /** Construct a limit clause.
+  * This method constructs a limit clause in the proper manner for the database in question.
+  *@param limit is the limit number.
+  *@return the proper clause, with no padding spaces on either side.
+  */
+  public String constructLimitClause(int limit);
+  
   /** Quote a sql string.
   * This method quotes a sql string in the proper manner for the database in question.
   *@param string is the input string.

Modified: incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/HopCount.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/HopCount.java?rev=950850&r1=950849&r2=950850&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/HopCount.java (original)
+++ incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/HopCount.java Thu Jun  3 01:20:45 2010
@@ -678,7 +678,7 @@ public class HopCount extends org.apache
     list.add(jobID);
     list.add(markToString(MARK_QUEUED));
     IResultSet set = performQuery("SELECT "+linkTypeField+","+parentIDHashField+" FROM "+
-      getTableName()+" WHERE "+jobIDField+"=? AND "+markForDeathField+"=?"+" LIMIT 200 FOR UPDATE",list,null,null);
+      getTableName()+" WHERE "+jobIDField+"=? AND "+markForDeathField+"=? "+constructLimitClause(200)+" FOR UPDATE",list,null,null,200);
 
     // No more entries == we are done
     if (set.getRowCount() == 0)
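
With the clause factored out, the same statement renders differently per database (assembled from the constructLimitClause() implementations above; placeholders stand in for the actual column names):

    PostgreSQL/MySQL:  SELECT ... WHERE <jobid>=? AND <markfordeath>=? LIMIT 200 FOR UPDATE
    Derby:             SELECT ... WHERE <jobid>=? AND <markfordeath>=? FOR UPDATE

On Derby the 200-row cap is enforced through the new resultLimit argument instead of the SQL text.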

Modified: incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/IntrinsicLink.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/IntrinsicLink.java?rev=950850&r1=950849&r2=950850&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/IntrinsicLink.java (original)
+++ incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/IntrinsicLink.java Thu Jun  3 01:20:45 2010
@@ -84,48 +84,20 @@ public class IntrinsicLink extends org.a
     while (true)
     {
       // Schema
-      beginTransaction();
-      try
+      Map existing = getTableSchema(null,null);
+      if (existing == null)
       {
-        Map existing = getTableSchema(null,null);
-        if (existing == null)
-        {
-          HashMap map = new HashMap();
-          map.put(jobIDField,new ColumnDescription("BIGINT",false,false,jobsTable,jobsColumn,false));
-          map.put(linkTypeField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
-          map.put(parentIDHashField,new ColumnDescription("VARCHAR(40)",false,false,null,null,false));
-          map.put(childIDHashField,new ColumnDescription("VARCHAR(40)",false,true,null,null,false));
-          map.put(newField,new ColumnDescription("CHAR(1)",false,true,null,null,false));
-          performCreate(map,null);
-        }
-        else
-        {
-          ColumnDescription cd;
-
-          // If there exists a childid field, remove parent and child id fields, and do the index manipulation we need.
-          cd = (ColumnDescription)existing.get("childid");
-          if (cd != null)
-          {
-            ArrayList list = new ArrayList();
-            list.add("childid");
-            list.add("parentid");
-            performAlter(null,null,list,null);
-          }
-        }
-      }
-      catch (LCFException e)
-      {
-        signalRollback();
-        throw e;
-      }
-      catch (Error e)
-      {
-        signalRollback();
-        throw e;
+        HashMap map = new HashMap();
+        map.put(jobIDField,new ColumnDescription("BIGINT",false,false,jobsTable,jobsColumn,false));
+        map.put(linkTypeField,new ColumnDescription("VARCHAR(255)",false,true,null,null,false));
+        map.put(parentIDHashField,new ColumnDescription("VARCHAR(40)",false,false,null,null,false));
+        map.put(childIDHashField,new ColumnDescription("VARCHAR(40)",false,true,null,null,false));
+        map.put(newField,new ColumnDescription("CHAR(1)",false,true,null,null,false));
+        performCreate(map,null);
       }
-      finally
+      else
       {
-        endTransaction();
+        // Perform upgrade, if needed.
       }
 
       // Indexes
@@ -159,138 +131,13 @@ public class IntrinsicLink extends org.a
 
       // Create the indexes we are still missing
       if (uniqueIndex != null)
-      {
-        // This create may fail
-        try
-        {
-          performAddIndex(null,uniqueIndex);
-        }
-        catch (LCFException e)
-        {
-          if (e.getMessage().indexOf("could not create unique index") == -1)
-            throw e;
-          removeDuplicates();
-          // Go back around
-          continue;
-        }
-      }
-
+        performAddIndex(null,uniqueIndex);
+      
       // All done
       break;
     }
   }
 
-  /** Remove duplicates, as part of upgrade */
-  protected void removeDuplicates()
-    throws LCFException
-  {
-    // If we get here, it's because we couldn't create the necessary unique table constraint.  Fix this by removing undesired duplicates.
-
-    Logging.jobs.warn("Found duplicate keys in the intrinsiclink table - correcting...");
-
-    // First, create a temporary non-unique index that we intend to remove at the end of this process.  We need this index in order to be able to
-    // order retrieval of rows by the proposed key order.
-    performAddIndex("temp_index_intrinsiclink",new IndexDescription(false,new String[]{jobIDField,linkTypeField,parentIDHashField,childIDHashField}));
-
-    // The fastest way to eliminate duplicates is to read rows in sorted order, and delete those that are duplicates.  The index created above
-    // will be used and will guarantee that we don't use excessive postgresql server memory.  A client-side filter will be used to eliminate results
-    // that are not duplicates, which should prevent unbounded client memory usage as well.
-
-    // Count the rows first
-    IResultSet countSet = performQuery("SELECT COUNT(*) AS countvar FROM "+getTableName(),null,null,null);
-    IResultRow countRow = countSet.getRow(0);
-    int count;
-    try
-    {
-      count = Integer.parseInt(countRow.getValue("countvar").toString());
-    }
-    catch (NumberFormatException e)
-    {
-      throw new LCFException(e.getMessage(),e);
-    }
-
-    // Now, amass a list of duplicates
-    ArrayList duplicateList = new ArrayList();
-    DuplicateFinder duplicateFinder = new DuplicateFinder();
-    int j = 0;
-    while (j < count)
-    {
-      IResultSet resultSet = getDBInterface().performQuery("SELECT "+jobIDField+","+linkTypeField+","+parentIDHashField+","+childIDHashField+","+newField+" FROM "+getTableName()+
-        " ORDER BY "+jobIDField+" ASC,"+linkTypeField+" ASC,"+parentIDHashField+" ASC,"+childIDHashField+" ASC OFFSET "+Integer.toString(j)+" LIMIT 10000",null,null,null,-1,duplicateFinder);
-
-      int i = 0;
-      while (i < resultSet.getRowCount())
-      {
-        IResultRow row = resultSet.getRow(i++);
-        Long jobID = (Long)row.getValue(jobIDField);
-        String linkType = (String)row.getValue(linkTypeField);
-        String parentIDHash = (String)row.getValue(parentIDHashField);
-        String childIDHash = (String)row.getValue(childIDHashField);
-        String newValue = (String)row.getValue(newField);
-
-        Logging.jobs.warn("Duplicate intrinsiclink row detected: job "+jobID.toString()+", linktype = '"+linkType+"', parentIDHash = "+((parentIDHash==null)?"None":parentIDHash)+", childIDHash = "+((childIDHash==null)?"None":childIDHash));
-        HashMap map = new HashMap();
-        map.put(jobIDField,jobID);
-        map.put(linkTypeField,linkType);
-        if (parentIDHash!=null)
-          map.put(parentIDHashField,parentIDHash);
-        if (childIDHash!=null)
-          map.put(childIDHashField,childIDHash);
-        if (newValue != null)
-          map.put(newField,newValue);
-        duplicateList.add(map);
-      }
-
-      j += 10000;
-    }
-
-    // Go through the duplicatelist, and remove the duplicates
-    j = 0;
-    while (j < duplicateList.size())
-    {
-      HashMap map = (HashMap)duplicateList.get(j++);
-
-      beginTransaction();
-      try
-      {
-        // Since there's no row ID, all we can do is delete all rows that match, and then create a (single) row that is the one we want to retain.
-        ArrayList list = new ArrayList();
-        list.add(map.get(jobIDField));
-        list.add(map.get(linkTypeField));
-        String parentIDHash = (String)map.get(parentIDHashField);
-        String childIDHash = (String)map.get(childIDHashField);
-        if (parentIDHash != null)
-          list.add(parentIDHash);
-        if (childIDHash != null)
-          list.add(childIDHash);
-        performDelete("WHERE "+jobIDField+"=? AND "+linkTypeField+"=? AND "+parentIDHashField+((parentIDHash!=null)?"=?":" IS NULL")+
-          " AND "+childIDHashField+((childIDHash!=null)?"=?":" IS NULL"),list,null);
-
-        performInsert(map,null);
-      }
-      catch (LCFException e)
-      {
-        signalRollback();
-        throw e;
-      }
-      catch (Error e)
-      {
-        signalRollback();
-        throw e;
-      }
-      finally
-      {
-        endTransaction();
-      }
-    }
-
-    // Remove the temporary index
-    performRemoveIndex("temp_index_intrinsiclink");
-
-    Logging.jobs.warn("Done cleaning out duplicate rows in the intrinsiclink table.");
-
-  }
-
   /** Uninstall.
   */
   public void deinstall()

Modified: incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/JobManager.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/JobManager.java?rev=950850&r1=950849&r2=950850&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/JobManager.java (original)
+++ incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/JobManager.java Thu Jun  3 01:20:45 2010
@@ -844,7 +844,7 @@ public class JobManager implements IJobM
           database.quoteSQLString(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN))+","+
           database.quoteSQLString(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY))+","+
           database.quoteSQLString(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED))+
-          ")) LIMIT "+Integer.toString(maxCount),
+          ")) "+database.constructLimitClause(maxCount),
           null,null,null,maxCount,null);
 
         if (Logging.perf.isDebugEnabled())
@@ -1108,10 +1108,10 @@ public class JobManager implements IJobM
       .append(jobQueue.prioritySetField).append("<? AND ").append(jobQueue.statusField).append(" IN(")
       .append(database.quoteSQLString(jobQueue.statusToString(JobQueue.STATUS_COMPLETE))).append(",")
       .append(database.quoteSQLString(jobQueue.statusToString(JobQueue.STATUS_PURGATORY))).append(")")
-      .append(" LIMIT ").append(Integer.toString(n));
+      .append(" ").append(database.constructLimitClause(n));
     list.add(new Long(currentTime));
 
-    IResultSet set = database.performQuery(sb.toString(),list,null,null);
+    IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null);
 
     DocumentDescription[] rval = new DocumentDescription[set.getRowCount()];
 
@@ -1170,13 +1170,13 @@ public class JobManager implements IJobM
       .append(database.quoteSQLString(JobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY))).append(") AND (")
       .append(jobQueue.checkActionField).append(" IS NULL OR ")
       .append(jobQueue.checkActionField).append("=").append(database.quoteSQLString(jobQueue.actionToString(JobQueue.ACTION_RESCAN)))
-      .append(") LIMIT ").append(Integer.toString(n));
+      .append(") ").append(database.constructLimitClause(n));
     list.add(new Long(currentTime));
 
     // Analyze jobqueue tables unconditionally, since it's become much more sensitive in 8.3 than it used to be.
     jobQueue.unconditionallyAnalyzeTables();
 
-    IResultSet set = database.performQuery(sb.toString(),list,null,null);
+    IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null);
 
     DocumentDescription[] rval = new DocumentDescription[set.getRowCount()];
 
@@ -1334,7 +1334,7 @@ public class JobManager implements IJobM
       .append(database.quoteSQLString(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY))).append(",")
       .append(database.quoteSQLString(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)))
       .append("))");
-    sb.append(" LIMIT ").append(Integer.toString(n));
+    sb.append(" ").append(database.constructLimitClause(n));
 
     // Analyze jobqueue tables unconditionally, since it's become much more sensitive in 8.3 than it used to be.
     jobQueue.unconditionallyAnalyzeTables();
@@ -1356,7 +1356,7 @@ public class JobManager implements IJobM
       database.beginTransaction();
       try
       {
-        IResultSet set = database.performQuery(sb.toString(),list,null,null,-1,null);
+        IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null);
 
         if (Logging.perf.isDebugEnabled())
           Logging.perf.debug(" Expiring "+Integer.toString(set.getRowCount())+" documents");
@@ -1721,14 +1721,14 @@ public class JobManager implements IJobM
       .append(jobQueue.checkActionField).append("=?")
       .append(") AND (")
       .append(jobQueue.statusField).append("=? OR ").append(jobQueue.statusField).append("=?)")
-      .append(" ORDER BY ").append(jobQueue.docPriorityField).append(" ASC LIMIT 1");
+      .append(" ORDER BY ").append(jobQueue.docPriorityField).append(" ASC ").append(database.constructLimitClause(1));
 
     list.add(currentTimeValue);
     list.add(jobQueue.actionToString(JobQueue.ACTION_RESCAN));
     list.add(jobQueue.statusToString(JobQueue.STATUS_PENDING));
     list.add(jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY));
 
-    IResultSet set = database.performQuery(sb.toString(),list,null,null);
+    IResultSet set = database.performQuery(sb.toString(),list,null,null,1,null);
     if (set.getRowCount() > 0)
     {
       IResultRow row = set.getRow(0);
@@ -1836,7 +1836,8 @@ public class JobManager implements IJobM
       }
 
       // Now we can tack the limit onto the query.  Before this point, remainingDocuments would be crap
-      sb.append(" LIMIT ").append(vList.getRemainingDocuments());
+      int limitValue = vList.getRemainingDocuments();
+      sb.append(database.constructLimitClause(limitValue));
 
       if (Logging.perf.isDebugEnabled())
       {
@@ -5068,7 +5069,7 @@ public class JobManager implements IJobM
             "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) OR "+
             "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) OR "+
             "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) OR "+
-            "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) LIMIT 1",list,null,null);
+            "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) "+database.constructLimitClause(1),list,null,null,1,null);
 
           if (confirmSet.getRowCount() > 0)
             continue;
@@ -5209,7 +5210,7 @@ public class JobManager implements IJobM
             "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) OR "+
             "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) OR "+
             "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) OR "+
-            "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) LIMIT 1",list,null,null);
+            "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) "+database.constructLimitClause(1),list,null,null,1,null);
 
           if (confirmSet.getRowCount() > 0)
             continue;
@@ -5297,7 +5298,7 @@ public class JobManager implements IJobM
             "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) OR "+
             "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) OR "+
             "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) OR "+
-            "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) LIMIT 1",list,null,null);
+            "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) "+database.constructLimitClause(1),list,null,null,1,null);
 
           if (confirmSet.getRowCount() > 0)
             continue;
@@ -5396,7 +5397,7 @@ public class JobManager implements IJobM
           IResultSet confirmSet = database.performQuery("SELECT "+jobQueue.idField+" FROM "+
             jobQueue.getTableName()+" WHERE "+
             "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) OR "+
-            "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) LIMIT 1",list,null,null);
+            "("+jobQueue.jobIDField+"=? AND "+jobQueue.statusField+"=?) "+database.constructLimitClause(1),list,null,null,1,null);
 
           if (confirmSet.getRowCount() > 0)
             continue;
@@ -5776,7 +5777,7 @@ public class JobManager implements IJobM
     addCriteria(sb,"t0.",connectionName,filterCriteria,true);
     addOrdering(sb,new String[]{"identifier","job","state","status","scheduled","action","retrycount","retrylimit"},sortOrder);
     addLimits(sb,startRow,rowCount);
-    return database.performQuery(sb.toString(),null,null,null);
+    return database.performQuery(sb.toString(),null,null,null,rowCount,null);
   }
 
   /** Run a 'queue status' report.
@@ -5884,7 +5885,7 @@ public class JobManager implements IJobM
     sb.append(" GROUP BY idbucket");
     addOrdering(sb,new String[]{"idbucket","inactive","processing","expiring","deleting","processready","expireready","processwaiting","expirewaiting","waitingforever"},sortOrder);
     addLimits(sb,startRow,rowCount);
-    return database.performQuery(sb.toString(),null,null,null);
+    return database.performQuery(sb.toString(),null,null,null,rowCount,null);
   }
 
   // Protected methods for report generation
@@ -6139,7 +6140,7 @@ public class JobManager implements IJobM
   */
   protected void addLimits(StringBuffer sb, int startRow, int maxRowCount)
   {
-    sb.append(" LIMIT ").append(Integer.toString(maxRowCount)).append(" OFFSET ").append(Integer.toString(startRow));
+    sb.append(" ").append(database.constructLimitClause(maxRowCount)).append(" OFFSET ").append(Integer.toString(startRow));
   }
 
 

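After this change addLimits() still appends the OFFSET portion literally; only the limit keyword comes from the dialect.  Assembled purely from the code above, a call with maxRowCount=50 and startRow=100 would yield:

    PostgreSQL/MySQL:  ... LIMIT 50 OFFSET 100
    Derby:             ...  OFFSET 100

with the 50-row cap on Derby coming from the resultLimit argument now passed to performQuery() by the report methods.
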
Modified: incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/JobQueue.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/JobQueue.java?rev=950850&r1=950849&r2=950850&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/JobQueue.java (original)
+++ incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/JobQueue.java Thu Jun  3 01:20:45 2010
@@ -528,7 +528,7 @@ public class JobQueue extends org.apache
       quoteSQLString(statusToString(STATUS_ACTIVE))+","+
       quoteSQLString(statusToString(STATUS_ACTIVEPURGATORY))+","+
       quoteSQLString(statusToString(STATUS_ACTIVENEEDRESCAN))+","+
-      quoteSQLString(statusToString(STATUS_ACTIVENEEDRESCANPURGATORY))+") LIMIT 1",list,null,null);
+      quoteSQLString(statusToString(STATUS_ACTIVENEEDRESCANPURGATORY))+") "+constructLimitClause(1),list,null,null,1);
     return set.getRowCount() > 0;
   }
 

Modified: incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/Jobs.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/Jobs.java?rev=950850&r1=950849&r2=950850&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/Jobs.java (original)
+++ incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/jobs/Jobs.java Thu Jun  3 01:20:45 2010
@@ -1753,8 +1753,8 @@ public class Jobs extends org.apache.lcf
   {
     IResultSet set = performQuery("SELECT "+idField+" FROM "+getTableName()+" WHERE "+
       statusField+" IN ("+quoteSQLString(statusToString(STATUS_READYFORDELETE))+","+
-      quoteSQLString(statusToString(STATUS_SHUTTINGDOWN))+") LIMIT 1",
-      null,new StringSet(getJobStatusKey()),null);
+      quoteSQLString(statusToString(STATUS_SHUTTINGDOWN))+") "+constructLimitClause(1),
+      null,new StringSet(getJobStatusKey()),null,1);
     return set.getRowCount() > 0;
   }
 
@@ -1772,7 +1772,7 @@ public class Jobs extends org.apache.lcf
       statusField+" IN ("+
       quoteSQLString(statusToString(STATUS_ACTIVE)) + "," +
       quoteSQLString(statusToString(STATUS_ACTIVESEEDING)) +
-      ") LIMIT 1",null,new StringSet(getJobStatusKey()),null);
+      ") "+constructLimitClause(1),null,new StringSet(getJobStatusKey()),null,1);
     return set.getRowCount() > 0;
   }
 

Modified: incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/repository/RepositoryHistoryManager.java
URL: http://svn.apache.org/viewvc/incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/repository/RepositoryHistoryManager.java?rev=950850&r1=950849&r2=950850&view=diff
==============================================================================
--- incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/repository/RepositoryHistoryManager.java (original)
+++ incubator/lcf/trunk/modules/framework/pull-agent/org/apache/lcf/crawler/repository/RepositoryHistoryManager.java Thu Jun  3 01:20:45 2010
@@ -266,7 +266,7 @@ public class RepositoryHistoryManager ex
     addCriteria(sb,"",connectionName,criteria,false);
     addOrdering(sb,new String[]{"starttime","activity","elapsedtime","resultcode","resultdesc","bytes","identifier"},sort);
     addLimits(sb,startRow,maxRowCount);
-    return performQuery(sb.toString(),null,null,null);
+    return performQuery(sb.toString(),null,null,null,maxRowCount);
   }
 
   /** Count the number of rows specified by a given set of criteria.  This can be used to make decisions
@@ -374,7 +374,7 @@ public class RepositoryHistoryManager ex
     sb.append(" GROUP BY bucket,windowstart,windowend) t2 ORDER BY bucket ASC, activitycount DESC) t3) t4");
     addOrdering(sb,new String[]{"activitycount","starttime","endtime","idbucket"},sort);
     addLimits(sb,startRow,maxRowCount);
-    return performQuery(sb.toString(),null,null,null);
+    return performQuery(sb.toString(),null,null,null,maxRowCount);
   }
 
 
@@ -463,7 +463,7 @@ public class RepositoryHistoryManager ex
 
     addOrdering(sb,new String[]{"bytecount","starttime","endtime","idbucket"},sort);
     addLimits(sb,startRow,maxRowCount);
-    return performQuery(sb.toString(),null,null,null);
+    return performQuery(sb.toString(),null,null,null,maxRowCount);
   }
 
   /** Get a bucketed history of different result code/identifier combinations.
@@ -490,7 +490,7 @@ public class RepositoryHistoryManager ex
     sb.append(" GROUP BY resultcodebucket,idbucket");
     addOrdering(sb,new String[]{"eventcount","resultcodebucket","idbucket"},sort);
     addLimits(sb,startRow,maxRowCount);
-    return performQuery(sb.toString(),null,null,null);
+    return performQuery(sb.toString(),null,null,null,maxRowCount);
   }
 
   /** Turn a bucket description into a return column.
@@ -644,7 +644,7 @@ public class RepositoryHistoryManager ex
   */
   protected void addLimits(StringBuffer sb, int startRow, int maxRowCount)
   {
-    sb.append(" LIMIT ").append(Integer.toString(maxRowCount)).append(" OFFSET ").append(Integer.toString(startRow));
+    sb.append(" ").append(constructLimitClause(maxRowCount)).append(" OFFSET ").append(Integer.toString(startRow));
   }