Posted to commits@jackrabbit.apache.org by ju...@apache.org on 2009/09/08 18:09:45 UTC

svn commit: r812570 [10/24] - in /jackrabbit/sandbox/JCR-1456: ./ jackrabbit-api/ jackrabbit-api/src/main/appended-resources/ jackrabbit-api/src/main/appended-resources/META-INF/ jackrabbit-api/src/main/java/org/apache/jackrabbit/api/security/ jackrabb...

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ParentAxisQuery.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ParentAxisQuery.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ParentAxisQuery.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/ParentAxisQuery.java Tue Sep  8 16:09:28 2009
@@ -42,7 +42,7 @@
     /**
      * Default score is 1.0f.
      */
-    private static final Float DEFAULT_SCORE = new Float(1.0f);
+    private static final Float DEFAULT_SCORE = 1.0f;
 
     /**
      * The context query
@@ -241,7 +241,7 @@
          * key=[Integer] id of selected document from context query<br>
          * value=[Float] score for that document
          */
-        private final Map scores = new HashMap();
+        private final Map<Integer, Float> scores = new HashMap<Integer, Float>();
 
         /**
          * The next document id to return
@@ -286,11 +286,11 @@
          * {@inheritDoc}
          */
         public float score() throws IOException {
-            Float score = (Float) scores.get(new Integer(nextDoc));
+            Float score = scores.get(nextDoc);
             if (score == null) {
                 score = DEFAULT_SCORE;
             }
-            return score.floatValue();
+            return score;
         }
 
         /**
@@ -327,14 +327,14 @@
                             if (docs.length == 1) {
                                 // optimize single value
                                 hits.set(docs[0]);
-                                if (score != DEFAULT_SCORE.floatValue()) {
-                                    scores.put(new Integer(docs[0]), new Float(score));
+                                if (score != DEFAULT_SCORE) {
+                                    scores.put(docs[0], score);
                                 }
                             } else {
-                                for (int i = 0; i < docs.length; i++) {
-                                    hits.set(docs[i]);
-                                    if (score != DEFAULT_SCORE.floatValue()) {
-                                        scores.put(new Integer(docs[i]), new Float(score));
+                                for (int docNum : docs) {
+                                    hits.set(docNum);
+                                    if (score != DEFAULT_SCORE) {
+                                        scores.put(docNum, score);
                                     }
                                 }
                             }
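
The hunk above is a generics and autoboxing cleanup: the raw score map becomes a Map<Integer, Float>, and the explicit new Integer()/new Float() wrappers disappear. A minimal, self-contained sketch of the resulting pattern (class and method names are illustrative, not from the commit); note that score != DEFAULT_SCORE compares a primitive float against a Float, so the constant is auto-unboxed and the comparison stays numeric rather than by reference:

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative sketch, not part of the commit: a generified score map with a
    // boxed default, relying on autoboxing for lookups, inserts and returns.
    class ScoreMapSketch {

        private static final Float DEFAULT_SCORE = 1.0f;

        private final Map<Integer, Float> scores = new HashMap<Integer, Float>();

        void record(int doc, float score) {
            if (score != DEFAULT_SCORE) {       // Float operand is auto-unboxed
                scores.put(doc, score);         // int and float are auto-boxed
            }
        }

        float scoreOf(int doc) {
            Float score = scores.get(doc);      // doc is auto-boxed to Integer
            return score != null ? score : DEFAULT_SCORE;  // unboxed on return
        }
    }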

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PerQueryCache.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PerQueryCache.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PerQueryCache.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PerQueryCache.java Tue Sep  8 16:09:28 2009
@@ -30,7 +30,7 @@
     /**
      * The internal map of this <code>PerQueryCache</code>.
      */
-    private final Map map = new HashMap();
+    private final Map<Key, Object> map = new HashMap<Key, Object>();
 
     /**
      * Private constructor.
@@ -41,13 +41,13 @@
     /**
      * The per thread cache instance.
      */
-    private static final ThreadLocal CACHE = new ThreadLocal();
+    private static final ThreadLocal<PerQueryCache> CACHE = new ThreadLocal<PerQueryCache>();
 
     /**
      * @return <code>PerQueryCache</code> for the current thread.
      */
     static PerQueryCache getInstance() {
-        PerQueryCache cache = (PerQueryCache) CACHE.get();
+        PerQueryCache cache = CACHE.get();
         if (cache == null) {
             cache = new PerQueryCache();
             CACHE.set(cache);
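
For comparison, the same per-thread lazy initialization can be expressed by overriding ThreadLocal.initialValue(), which removes the null check from getInstance(). A small sketch under that assumption, with ThreadCacheSketch as a hypothetical stand-in for PerQueryCache:

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative alternative, not part of the commit: let the ThreadLocal
    // create the per-thread instance itself.
    class ThreadCacheSketch {

        private final Map<Object, Object> map = new HashMap<Object, Object>();

        private static final ThreadLocal<ThreadCacheSketch> CACHE =
                new ThreadLocal<ThreadCacheSketch>() {
                    @Override
                    protected ThreadCacheSketch initialValue() {
                        return new ThreadCacheSketch();
                    }
                };

        static ThreadCacheSketch getInstance() {
            return CACHE.get();   // never null: initialValue() supplies it lazily
        }
    }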

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PersistentIndex.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PersistentIndex.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PersistentIndex.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PersistentIndex.java Tue Sep  8 16:09:28 2009
@@ -19,6 +19,7 @@
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.index.IndexDeletionPolicy;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
@@ -43,6 +44,17 @@
     private IndexListener listener;
 
     /**
+     * The index deletion policy. Old index generations are deleted when they
+     * reach a certain age.
+     */
+    private final IndexDeletionPolicyImpl indexDelPolicy;
+
+    /**
+     * The current generation of this persistent index.
+     */
+    private long generation;
+
+    /**
      * Creates a new <code>PersistentIndex</code>.
      *
      * @param name the name of this index.
@@ -51,17 +63,21 @@
      * @param cache the document number cache
      * @param indexingQueue the indexing queue.
      * @param directoryManager the directory manager.
+     * @param generationMaxAge age in seconds after which an index generation is
+     *          deleted.
      * @throws IOException if an error occurs while opening / creating the
      *  index.
      */
     PersistentIndex(String name, Analyzer analyzer,
                     Similarity similarity, DocNumberCache cache,
                     IndexingQueue indexingQueue,
-                    DirectoryManager directoryManager)
+                    DirectoryManager directoryManager, long generationMaxAge)
             throws IOException {
         super(analyzer, similarity, directoryManager.getDirectory(name),
                 cache, indexingQueue);
         this.name = name;
+        this.indexDelPolicy = new IndexDeletionPolicyImpl(this,
+                generationMaxAge * 1000);
         if (isExisting()) {
             IndexMigration.migrate(this, directoryManager);
         }
@@ -79,6 +95,13 @@
     }
 
     /**
+     * @return the index deletion policy of this index.
+     */
+    protected IndexDeletionPolicy getIndexDeletionPolicy() {
+        return indexDelPolicy;
+    }
+
+    /**
      * Merges the provided indexes into this index. After this completes, the
      * index is optimized.
      * <p/>
@@ -108,10 +131,10 @@
         Directory dir = index.getDirectory();
         Directory dest = getDirectory();
         String[] files = dir.list();
-        for (int i = 0; i < files.length; i++) {
-            IndexInput in = dir.openInput(files[i]);
+        for (String file : files) {
+            IndexInput in = dir.openInput(file);
             try {
-                IndexOutput out = dest.createOutput(files[i]);
+                IndexOutput out = dest.createOutput(file);
                 try {
                     long remaining = in.length();
                     while (remaining > 0) {
@@ -127,6 +150,8 @@
                 in.close();
             }
         }
+        // refresh current generation
+        indexDelPolicy.readCurrentGeneration();
     }
 
     /**
@@ -169,4 +194,21 @@
     String getName() {
         return name;
     }
+
+    /**
+     * @return the current generation of this index.
+     */
+    long getCurrentGeneration() {
+        return generation;
+    }
+
+    /**
+     * Sets the current generation of this index. This method should only be
+     * called by {@link IndexDeletionPolicyImpl}.
+     *
+     * @param generation the current generation.
+     */
+    void setCurrentGeneration(long generation) {
+        this.generation = generation;
+    }
 }
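
The new generationMaxAge parameter is handed to IndexDeletionPolicyImpl in milliseconds (seconds * 1000). The policy implementation itself is not part of this hunk, but the Javadoc above states its contract: old index generations are deleted once they reach a certain age, while the current generation stays. A rough sketch of such a retention rule, using hypothetical Generation and prune() names rather than the real Lucene deletion-policy API:

    import java.util.Iterator;
    import java.util.List;

    // Hypothetical placeholder for an index generation with a creation timestamp.
    interface Generation {
        long getTimestamp();
        void delete();
    }

    // Illustrative sketch, not the commit's IndexDeletionPolicyImpl: keep the
    // newest generation and delete older ones once they exceed the maximum age.
    class AgeBasedRetentionSketch {

        private final long maxAgeMillis;

        AgeBasedRetentionSketch(long generationMaxAgeSeconds) {
            this.maxAgeMillis = generationMaxAgeSeconds * 1000;  // same conversion as above
        }

        // Assumes the list is ordered oldest to newest.
        void prune(List<Generation> generations) {
            long now = System.currentTimeMillis();
            for (Iterator<Generation> it = generations.iterator(); it.hasNext();) {
                Generation g = it.next();
                boolean newest = !it.hasNext();
                if (!newest && now - g.getTimestamp() > maxAgeMillis) {
                    g.delete();
                }
            }
        }
    }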

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PooledTextExtractor.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PooledTextExtractor.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PooledTextExtractor.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PooledTextExtractor.java Tue Sep  8 16:09:28 2009
@@ -20,9 +20,9 @@
 import org.slf4j.LoggerFactory;
 import org.slf4j.Logger;
 
+import java.io.IOException;
 import java.io.Reader;
 import java.io.InputStream;
-import java.io.IOException;
 
 import EDU.oswego.cs.dl.util.concurrent.PooledExecutor;
 import EDU.oswego.cs.dl.util.concurrent.Channel;
@@ -105,11 +105,17 @@
      * <p/>
      * This implementation returns an instance of {@link TextExtractorReader}.
      */
-    public Reader extractText(InputStream stream,
-                              String type,
-                              String encoding) throws IOException {
+    public Reader extractText(
+            InputStream stream, String type, String encoding)
+            throws IOException {
         TextExtractorJob job = new TextExtractorJob(extractor, stream, type, encoding);
-        return new TextExtractorReader(job, executor, timout);
+        try {
+            executor.execute(job);
+        } catch (InterruptedException e) {
+            log.warn("Failed to start a background text extraction task", e);
+            stream.close();
+        }
+        return new TextExtractorReader(job, timout);
     }
 
     /**
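
The extraction job is now submitted to the pool inside extractText() itself instead of being started lazily by the returned reader; if the job cannot be scheduled, the stream is closed because no background task will ever consume it. A rough analogue of that flow, expressed against java.util.concurrent rather than the Doug Lea PooledExecutor used here (class and method names are illustrative):

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.concurrent.Executor;
    import java.util.concurrent.RejectedExecutionException;

    // Illustrative analogue, not part of the commit: submit the extraction job
    // eagerly and release the stream if it cannot be scheduled.
    class EagerExtractionSketch {

        private final Executor executor;

        EagerExtractionSketch(Executor executor) {
            this.executor = executor;
        }

        void startExtraction(Runnable job, InputStream stream) throws IOException {
            try {
                executor.execute(job);
            } catch (RejectedExecutionException e) {
                stream.close();   // nobody will read the stream anymore
            }
        }
    }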

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PredicateDerefQuery.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PredicateDerefQuery.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PredicateDerefQuery.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PredicateDerefQuery.java Tue Sep  8 16:09:28 2009
@@ -79,7 +79,6 @@
      * Creates a new <code>DerefQuery</code> based on a <code>context</code>
      * query.
      *
-     * @param context the context for this query.
      * @param subQuery TODO
      * @param refProperty the name of the reference property.
      * @param nameTest a name test or <code>null</code> if any node is

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PropertiesSynonymProvider.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PropertiesSynonymProvider.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PropertiesSynonymProvider.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/PropertiesSynonymProvider.java Tue Sep  8 16:09:28 2009
@@ -16,16 +16,15 @@
  */
 package org.apache.jackrabbit.core.query.lucene;
 
-import org.slf4j.LoggerFactory;
-import org.slf4j.Logger;
-import org.apache.jackrabbit.core.fs.FileSystemResource;
-import org.apache.jackrabbit.core.fs.FileSystemException;
-
 import java.io.IOException;
-import java.util.Map;
 import java.util.HashMap;
+import java.util.Map;
 import java.util.Properties;
-import java.util.Iterator;
+
+import org.apache.jackrabbit.core.fs.FileSystemException;
+import org.apache.jackrabbit.core.fs.FileSystemResource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Implements a synonym provider based on a properties file. Each line in the
@@ -73,7 +72,7 @@
     /**
      * Contains the synonym mapping. Map&lt;String, String[]>
      */
-    private Map synonyms = new HashMap();
+    private Map<String, String[]> synonyms = new HashMap<String, String[]>();
 
     /**
      * {@inheritDoc}
@@ -101,7 +100,7 @@
         term = term.toLowerCase();
         String[] syns;
         synchronized (this) {
-            syns = (String[]) synonyms.get(term);
+            syns = synonyms.get(term);
         }
         if (syns == null) {
             syns = EMPTY_ARRAY;
@@ -144,16 +143,14 @@
      * @throws IOException if an error occurs while reading from the file system
      *                     resource.
      */
-    private static Map getSynonyms(FileSystemResource config) throws IOException {
+    private static Map<String, String[]> getSynonyms(FileSystemResource config) throws IOException {
         try {
-            Map synonyms = new HashMap();
+            Map<String, String[]> synonyms = new HashMap<String, String[]>();
             Properties props = new Properties();
             props.load(config.getInputStream());
-            Iterator it = props.entrySet().iterator();
-            while (it.hasNext()) {
-                Map.Entry e = (Map.Entry) it.next();
-                String key = (String) e.getKey();
-                String value = (String) e.getValue();
+            for (Map.Entry<Object, Object> entry : props.entrySet()) {
+                String key = (String) entry.getKey();
+                String value = (String) entry.getValue();
                 addSynonym(key, value, synonyms);
                 addSynonym(value, key, synonyms);
             }
@@ -170,9 +167,9 @@
      * @param synonym  synonym for <code>term</code>.
      * @param synonyms the Map containing the synonyms.
      */
-    private static void addSynonym(String term, String synonym, Map synonyms) {
+    private static void addSynonym(String term, String synonym, Map<String, String[]> synonyms) {
         term = term.toLowerCase();
-        String[] syns = (String[]) synonyms.get(term);
+        String[] syns = synonyms.get(term);
         if (syns == null) {
             syns = new String[]{synonym};
         } else {
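
As the getSynonyms()/addSynonym() code above shows, each property maps a term to a synonym, the mapping is registered in both directions, and lookups are lower-cased. A hypothetical synonym.properties illustrating the format (the entries are made up, not from the commit):

    # illustrative entries
    quick=fast
    car=automobile

With such a file, looking up "fast" yields {"quick"} and looking up "automobile" yields {"car"}, because addSynonym() is invoked once per direction.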

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/QueryHitsQuery.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/QueryHitsQuery.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/QueryHitsQuery.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/QueryHitsQuery.java Tue Sep  8 16:09:28 2009
@@ -171,7 +171,7 @@
          * Maps <code>Integer</code> document numbers to <code>Float</code>
          * scores.
          */
-        private final Map scores = new HashMap();
+        private final Map<Integer, Float> scores = new HashMap<Integer, Float>();
 
         /**
          * The current document number.
@@ -190,7 +190,7 @@
                 throws IOException {
             super(similarity);
             ScoreNode node;
-            Set sortedDocs = new TreeSet();
+            Set<Integer> sortedDocs = new TreeSet<Integer>();
             try {
                 while ((node = hits.nextScoreNode()) != null) {
                     String uuid = node.getNodeId().toString();
@@ -198,9 +198,9 @@
                     TermDocs tDocs = reader.termDocs(id);
                     try {
                         if (tDocs.next()) {
-                            Integer doc = new Integer(tDocs.doc());
+                            Integer doc = tDocs.doc();
                             sortedDocs.add(doc);
-                            scores.put(doc, new Float(node.getScore()));
+                            scores.put(doc, node.getScore());
                         }
                     } finally {
                         tDocs.close();
@@ -227,14 +227,14 @@
          * {@inheritDoc}
          */
         public int doc() {
-            return currentDoc.intValue();
+            return currentDoc;
         }
 
         /**
          * {@inheritDoc}
          */
         public float score() throws IOException {
-            return ((Float) scores.get(currentDoc)).floatValue();
+            return scores.get(currentDoc);
         }
 
         /**

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/QueryResultImpl.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/QueryResultImpl.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/QueryResultImpl.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/QueryResultImpl.java Tue Sep  8 16:09:28 2009
@@ -16,6 +16,19 @@
  */
 package org.apache.jackrabbit.core.query.lucene;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+
+import javax.jcr.ItemNotFoundException;
+import javax.jcr.NodeIterator;
+import javax.jcr.RepositoryException;
+import javax.jcr.query.QueryResult;
+import javax.jcr.query.RowIterator;
+
 import org.apache.jackrabbit.core.ItemManager;
 import org.apache.jackrabbit.core.SessionImpl;
 import org.apache.jackrabbit.core.security.AccessManager;
@@ -24,19 +37,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.jcr.ItemNotFoundException;
-import javax.jcr.NodeIterator;
-import javax.jcr.RepositoryException;
-import javax.jcr.UnsupportedRepositoryOperationException;
-import javax.jcr.query.QueryResult;
-import javax.jcr.query.RowIterator;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.NoSuchElementException;
-import java.util.Map;
-import java.util.LinkedHashMap;
-
 /**
  * Implements the <code>QueryResult</code> interface.
  */

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/RangeQuery.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/RangeQuery.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/RangeQuery.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/RangeQuery.java Tue Sep  8 16:09:28 2009
@@ -33,7 +33,6 @@
 import java.util.Map;
 import java.util.List;
 import java.util.ArrayList;
-import java.util.Iterator;
 import java.util.HashMap;
 import java.util.Set;
 
@@ -205,7 +204,7 @@
          */
         protected Scorer createScorer(IndexReader reader) {
             return new RangeQueryScorer(searcher.getSimilarity(), reader);
-        };
+        }
 
         /**
          * Returns this <code>RangeQuery</code>.
@@ -279,13 +278,14 @@
         /**
          * The map to store the results.
          */
-        private final Map resultMap;
+        private final Map<String, BitSet> resultMap;
 
         /**
          * Creates a new RangeQueryScorer.
          * @param similarity the similarity implementation.
          * @param reader the index reader to use.
          */
+        @SuppressWarnings({"unchecked"})
         RangeQueryScorer(Similarity similarity, IndexReader reader) {
             super(similarity);
             this.reader = reader;
@@ -302,14 +302,14 @@
             this.cacheKey = key.toString();
             // check cache
             PerQueryCache cache = PerQueryCache.getInstance();
-            Map m = (Map) cache.get(RangeQueryScorer.class, reader);
+            Map<String, BitSet> m = (Map<String, BitSet>) cache.get(RangeQueryScorer.class, reader);
             if (m == null) {
-                m = new HashMap();
+                m = new HashMap<String, BitSet>();
                 cache.put(RangeQueryScorer.class, reader, m);
             }
             resultMap = m;
 
-            BitSet result = (BitSet) resultMap.get(cacheKey);
+            BitSet result = resultMap.get(cacheKey);
             if (result == null) {
                 result = new BitSet(reader.maxDoc());
             } else {
@@ -380,7 +380,7 @@
             if (propNameLength > 0) {
                 namePrefix = lowerTerm.text().substring(0, propNameLength);
             }
-            List startTerms = new ArrayList(2);
+            List<Term> startTerms = new ArrayList<Term>(2);
 
             if (transform == TRANSFORM_NONE || lowerTerm.text().length() <= propNameLength) {
                 // use lowerTerm as is
@@ -397,25 +397,20 @@
                 startTerms.add(new Term(lowerTerm.field(), termText.toString()));
             }
 
-            Iterator it = startTerms.iterator();
-            while (it.hasNext()) {
-                Term startTerm = (Term) it.next();
-
+            for (Term startTerm : startTerms) {
                 TermEnum terms = reader.terms(startTerm);
                 try {
                     TermDocs docs = reader.termDocs();
                     try {
                         do {
                             Term term = terms.term();
-                            if (term != null
-                                    && term.field() == testField
-                                    && term.text().startsWith(namePrefix)) {
+                            if (term != null && term.field() == testField && term.text().startsWith(namePrefix)) {
                                 if (checkLower) {
                                     int compare = termCompare(term.text(), lowerTerm.text(), propNameLength);
                                     if (compare > 0 || compare == 0 && inclusive) {
                                         // do not check lower term anymore if no
                                         // transformation is done on the term enum
-                                        checkLower = transform == TRANSFORM_NONE ? false : true;
+                                        checkLower = transform != TRANSFORM_NONE;
                                     } else {
                                         // continue with next term
                                         continue;
@@ -452,7 +447,7 @@
                             } else {
                                 break;
                             }
-                        } while(terms.next());
+                        } while (terms.next());
                     } finally {
                         docs.close();
                     }

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/Recovery.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/Recovery.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/Recovery.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/Recovery.java Tue Sep  8 16:09:28 2009
@@ -21,7 +21,6 @@
 
 import java.util.Set;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.List;
 import java.io.IOException;
 
@@ -46,9 +45,9 @@
     private final RedoLog redoLog;
 
     /**
-     * The ids of the uncommitted transactions. Set of Integer objects.
+     * The ids of the uncommitted transactions. Set of Long objects.
      */
-    private final Set losers = new HashSet();
+    private final Set<Long> losers = new HashSet<Long>();
 
     /**
      * Creates a new Recovery instance.
@@ -92,23 +91,22 @@
      * @throws IOException if the recovery fails.
      */
     private void run() throws IOException {
-        List actions = redoLog.getActions();
+        List<MultiIndex.Action> actions = redoLog.getActions();
 
         // find loser transactions
-        for (Iterator it = actions.iterator(); it.hasNext();) {
-            MultiIndex.Action a = (MultiIndex.Action) it.next();
+        for (MultiIndex.Action a : actions) {
             if (a.getType() == MultiIndex.Action.TYPE_START) {
-                losers.add(new Long(a.getTransactionId()));
+                losers.add(a.getTransactionId());
             } else if (a.getType() == MultiIndex.Action.TYPE_COMMIT) {
-                losers.remove(new Long(a.getTransactionId()));
+                losers.remove(a.getTransactionId());
             }
         }
 
         // find last volatile commit without changes from a loser
         int lastSafeVolatileCommit = -1;
-        Set transactionIds = new HashSet();
+        Set<Long> transactionIds = new HashSet<Long>();
         for (int i = 0; i < actions.size(); i++) {
-            MultiIndex.Action a = (MultiIndex.Action) actions.get(i);
+            MultiIndex.Action a = actions.get(i);
             if (a.getType() == MultiIndex.Action.TYPE_COMMIT) {
                 transactionIds.clear();
             } else if (a.getType() == MultiIndex.Action.TYPE_VOLATILE_COMMIT) {
@@ -121,13 +119,13 @@
                     lastSafeVolatileCommit = i;
                 }
             } else {
-                transactionIds.add(new Long(a.getTransactionId()));
+                transactionIds.add(a.getTransactionId());
             }
         }
 
         // delete dirty indexes
         for (int i = lastSafeVolatileCommit + 1; i < actions.size(); i++) {
-            MultiIndex.Action a = (MultiIndex.Action) actions.get(i);
+            MultiIndex.Action a = actions.get(i);
             if (a.getType() == MultiIndex.Action.TYPE_CREATE_INDEX) {
                 a.undo(index);
             }
@@ -136,7 +134,7 @@
         // replay actions up to last safe volatile commit
         // ignore add node actions, they are included in volatile commits
         for (int i = 0; i < actions.size() && i <= lastSafeVolatileCommit; i++) {
-            MultiIndex.Action a = (MultiIndex.Action) actions.get(i);
+            MultiIndex.Action a = actions.get(i);
             switch (a.getType()) {
                 case MultiIndex.Action.TYPE_ADD_INDEX:
                 case MultiIndex.Action.TYPE_CREATE_INDEX:
@@ -156,7 +154,7 @@
 
         // now replay the rest until we encounter a loser transaction
         for (int i = lastSafeVolatileCommit + 1; i < actions.size(); i++) {
-            MultiIndex.Action a = (MultiIndex.Action) actions.get(i);
+            MultiIndex.Action a = actions.get(i);
             if (losers.contains(new Long(a.getTransactionId()))) {
                 break;
             } else {

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/RowIteratorImpl.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/RowIteratorImpl.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/RowIteratorImpl.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/RowIteratorImpl.java Tue Sep  8 16:09:28 2009
@@ -341,7 +341,7 @@
                     return valueFactory.createValue(p);
                 } else if (n.hasProperty(col.getPropertyName())) {
                     Property p = n.getProperty(col.getPropertyName());
-                    if (p.getDefinition().isMultiple()) {
+                    if (p.isMultiple()) {
                         // mvp values cannot be returned
                         return null;
                     } else {

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SearchIndex.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SearchIndex.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SearchIndex.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SearchIndex.java Tue Sep  8 16:09:28 2009
@@ -182,12 +182,12 @@
     /**
      * The path of the root node.
      */
-    private static final Path ROOT_PATH;
+    protected static final Path ROOT_PATH;
 
     /**
      * The path <code>/jcr:system</code>.
      */
-    private static final Path JCR_SYSTEM_PATH;
+    protected static final Path JCR_SYSTEM_PATH;
 
     static {
         ROOT_PATH = PATH_FACTORY.create(NameConstants.ROOT);
@@ -202,7 +202,7 @@
     /**
      * The actual index
      */
-    private MultiIndex index;
+    protected MultiIndex index;
 
     /**
      * The analyzer we use for indexing.
@@ -251,6 +251,13 @@
     private int volatileIdleTime = 3;
 
     /**
+     * The maximum age (in seconds) of the index history. The default value is
+     * zero. Which means, index commits are deleted as soon as they are not used
+     * anymore.
+     */
+    private long maxHistoryAge = 0;
+
+    /**
      * maxMergeDocs config parameter
      */
     private int maxMergeDocs = DEFAULT_MAX_MERGE_DOCS;
@@ -453,6 +460,16 @@
     private int indexMergerPoolSize = DEFAULT_INDEX_MERGER_POOL_SIZE;
 
     /**
+     * The name of the redo log factory class implementation.
+     */
+    private String redoLogFactoryClass = DefaultRedoLogFactory.class.getName();
+
+    /**
+     * The redo log factory.
+     */
+    private RedoLogFactory redoLogFactory;
+
+    /**
      * Indicates if this <code>SearchIndex</code> is closed and cannot be used
      * anymore.
      */
@@ -486,6 +503,7 @@
         extractor = createTextExtractor();
         synProvider = createSynonymProvider();
         directoryManager = createDirectoryManager();
+        redoLogFactory = createRedoLogFactory();
 
         if (context.getParentHandler() instanceof SearchIndex) {
             // use system namespace mappings
@@ -528,8 +546,7 @@
                 && (index.getRedoLogApplied() || forceConsistencyCheck)) {
             log.info("Running consistency check...");
             try {
-                ConsistencyCheck check = ConsistencyCheck.run(index,
-                        context.getItemStateManager());
+                ConsistencyCheck check = runConsistencyCheck();
                 if (autoRepair) {
                     check.repair(true);
                 } else {
@@ -746,6 +763,24 @@
     }
 
     /**
+     * Waits until all pending text extraction tasks have been processed
+     * and the updated index has been flushed to disk.
+     *
+     * @throws RepositoryException if the index update can not be written
+     */
+    public void flush() throws RepositoryException {
+        try {
+            index.getIndexingQueue().waitUntilEmpty();
+            index.flush();
+            // flush may have pushed nodes into the indexing queue
+            // -> wait again
+            index.getIndexingQueue().waitUntilEmpty();
+        } catch (IOException e) {
+            throw new RepositoryException("Failed to flush the index", e);
+        }
+    }
+
+    /**
      * Closes this <code>QueryHandler</code> and frees resources attached
      * to this handler.
      */
@@ -975,6 +1010,23 @@
     }
 
     /**
+     * @return the redo log factory for this search index.
+     */
+    public RedoLogFactory getRedoLogFactory() {
+        return redoLogFactory;
+    }
+
+    /**
+     * Runs a consistency check on this search index.
+     *
+     * @return the result of the consistency check.
+     * @throws IOException if an error occurs while running the check.
+     */
+    public ConsistencyCheck runConsistencyCheck() throws IOException {
+        return index.runConsistencyCheck();
+    }
+
+    /**
      * Returns an index reader for this search index. The caller of this method
      * is responsible for closing the index reader when he is finished using
      * it.
@@ -1171,12 +1223,34 @@
     }
 
     /**
+     * Creates a redo log factory based on {@link #getRedoLogFactoryClass()}.
+     *
+     * @return the redo log factory.
+     * @throws IOException if an error occurs while creating the factory.
+     */
+    protected RedoLogFactory createRedoLogFactory() throws IOException {
+        try {
+            Class<?> clazz = Class.forName(redoLogFactoryClass);
+            if (!RedoLogFactory.class.isAssignableFrom(clazz)) {
+                throw new IOException(redoLogFactoryClass +
+                        " is not a RedoLogFactory implementation");
+            }
+            return (RedoLogFactory) clazz.newInstance();
+        } catch (Exception e) {
+            IOException ex = new IOException();
+            ex.initCause(e);
+            throw ex;
+        }
+    }
+
+    /**
      * Creates a file system resource to the synonym provider configuration.
      *
      * @return a file system resource or <code>null</code> if no path was
      *         configured.
      * @throws FileSystemException if an exception occurs accessing the file
      *                             system.
+     * @throws IOException         if another exception occurs.
      */
     protected FileSystemResource createSynonymProviderConfigResource()
             throws FileSystemException, IOException {
@@ -1315,24 +1389,28 @@
                             String namePrefix = FieldNames.createNamedValue(getNamespaceMappings().translateName(propState.getName()), "");
                             NodeState parent = (NodeState) ism.getItemState(propState.getParentId());
                             Document aDoc = createDocument(parent, getNamespaceMappings(), getIndex().getIndexFormatVersion());
-                            // find the right fields to transfer
-                            Fieldable[] fields = aDoc.getFieldables(FieldNames.PROPERTIES);
-                            Token t = new Token();
-                            for (Fieldable field : fields) {
-                                // assume properties fields use SingleTokenStream
-                                t = field.tokenStreamValue().next(t);
-                                String value = new String(t.termBuffer(), 0, t.termLength());
-                                if (value.startsWith(namePrefix)) {
-                                    // extract value
-                                    value = value.substring(namePrefix.length());
-                                    // create new named value
-                                    Path p = getRelativePath(state, propState);
-                                    String path = getNamespaceMappings().translatePath(p);
-                                    value = FieldNames.createNamedValue(path, value);
-                                    t.setTermBuffer(value);
-                                    doc.add(new Field(field.name(), new SingletonTokenStream(t)));
-                                    doc.add(new Field(FieldNames.AGGREGATED_NODE_UUID, parent.getNodeId().toString(), Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
+                            try {
+                                // find the right fields to transfer
+                                Fieldable[] fields = aDoc.getFieldables(FieldNames.PROPERTIES);
+                                Token t = new Token();
+                                for (Fieldable field : fields) {
+                                    // assume properties fields use SingleTokenStream
+                                    t = field.tokenStreamValue().next(t);
+                                    String value = new String(t.termBuffer(), 0, t.termLength());
+                                    if (value.startsWith(namePrefix)) {
+                                        // extract value
+                                        value = value.substring(namePrefix.length());
+                                        // create new named value
+                                        Path p = getRelativePath(state, propState);
+                                        String path = getNamespaceMappings().translatePath(p);
+                                        value = FieldNames.createNamedValue(path, value);
+                                        t.setTermBuffer(value);
+                                        doc.add(new Field(field.name(), new SingletonTokenStream(t)));
+                                        doc.add(new Field(FieldNames.AGGREGATED_NODE_UUID, parent.getNodeId().toString(), Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
+                                    }
                                 }
+                            } finally {
+                                Util.disposeDocument(aDoc);
                             }
                         }
                     }
@@ -2196,6 +2274,41 @@
         this.indexMergerPoolSize = indexMergerPoolSize;
     }
 
+    /**
+     * @return the maximum age in seconds for outdated generations of
+     * {@link IndexInfos}.
+     */
+    public long getMaxHistoryAge() {
+        return maxHistoryAge;
+    }
+
+    /**
+     * Sets a new value for the maximum age in seconds for outdated generations
+     * of {@link IndexInfos}.
+     *
+     * @param maxHistoryAge age in seconds.
+     */
+    public void setMaxHistoryAge(long maxHistoryAge) {
+        this.maxHistoryAge = maxHistoryAge;
+    }
+
+    /**
+     * @return the name of the redo log factory class.
+     */
+    public String getRedoLogFactoryClass() {
+        return redoLogFactoryClass;
+    }
+
+    /**
+     * Sets the name of the redo log factory class. Must implement
+     * {@link RedoLogFactory}.
+     *
+     * @param className the name of the redo log factory class.
+     */
+    public void setRedoLogFactoryClass(String className) {
+        this.redoLogFactoryClass = className;
+    }
+
     //----------------------------< internal >----------------------------------
 
     /**
@@ -2204,7 +2317,7 @@
      *
      * @throws IOException if this <code>SearchIndex</code> had been closed.
      */
-    private void checkOpen() throws IOException {
+    protected void checkOpen() throws IOException {
         if (closed) {
             throw new IOException("query handler closed and cannot be used anymore.");
         }
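
createRedoLogFactory() above follows a common reflective-instantiation pattern: load the configured class name, check that it implements the expected interface, and wrap any reflection failure in an IOException via initCause() (the IOException(String, Throwable) constructor only arrived in Java 6). A self-contained sketch of that pattern, with a hypothetical FactoryInterface standing in for RedoLogFactory:

    import java.io.IOException;

    // Hypothetical stand-in for the RedoLogFactory interface.
    interface FactoryInterface {
    }

    // Illustrative sketch, not part of the commit: reflective instantiation of a
    // configured implementation class, with an interface check up front.
    class ReflectiveFactorySketch {

        static FactoryInterface create(String className) throws IOException {
            try {
                Class<?> clazz = Class.forName(className);
                if (!FactoryInterface.class.isAssignableFrom(clazz)) {
                    throw new IOException(
                            className + " is not a FactoryInterface implementation");
                }
                return (FactoryInterface) clazz.newInstance();
            } catch (IOException e) {
                throw e;   // the type-check failure above is already an IOException
            } catch (Exception e) {
                // pre-Java 6 idiom: no IOException(String, Throwable) constructor yet
                IOException ex = new IOException(e.getMessage());
                ex.initCause(e);
                throw ex;
            }
        }
    }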

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SharedFieldSortComparator.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SharedFieldSortComparator.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SharedFieldSortComparator.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SharedFieldSortComparator.java Tue Sep  8 16:09:28 2009
@@ -34,7 +34,6 @@
 import org.apache.jackrabbit.spi.commons.name.PathBuilder;
 import org.apache.jackrabbit.spi.commons.name.PathFactoryImpl;
 import org.apache.jackrabbit.spi.commons.conversion.IllegalNameException;
-import org.apache.jackrabbit.uuid.UUID;
 
 /**
  * Implements a <code>SortComparator</code> which knows how to sort on a lucene

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SimpleExcerptProvider.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SimpleExcerptProvider.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SimpleExcerptProvider.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SimpleExcerptProvider.java Tue Sep  8 16:09:28 2009
@@ -68,8 +68,8 @@
                     text.append(separator);
                     separator = " ... ";
                     InternalValue[] values = propState.getValues();
-                    for (int i = 0; i < values.length; i++) {
-                        text.append(values[i].toString());
+                    for (InternalValue value : values) {
+                        text.append(value.toString());
                     }
                 }
             }

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SortedLuceneQueryHits.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SortedLuceneQueryHits.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SortedLuceneQueryHits.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/SortedLuceneQueryHits.java Tue Sep  8 16:09:28 2009
@@ -22,7 +22,6 @@
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.index.IndexReader;
 import org.apache.jackrabbit.core.id.NodeId;
-import org.apache.jackrabbit.uuid.UUID;
 import org.slf4j.LoggerFactory;
 import org.slf4j.Logger;
 

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/TermDocsCache.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/TermDocsCache.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/TermDocsCache.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/TermDocsCache.java Tue Sep  8 16:09:28 2009
@@ -22,13 +22,12 @@
 import java.util.BitSet;
 import java.util.Arrays;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.TermDocs;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermEnum;
-import org.apache.commons.collections.map.LRUMap;
-import org.apache.commons.collections.map.LinkedMap;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -61,13 +60,19 @@
     /**
      * Map of {@link Term#text()} that are unknown to the underlying index.
      */
-    private final Map unknownValues = Collections.synchronizedMap(new LRUMap(100));
+    private final Map<String, String> unknownValues = Collections.synchronizedMap(new LinkedHashMap<String, String>() {
+        private static final long serialVersionUID = 1443679637070403838L;
+
+        protected boolean removeEldestEntry(Map.Entry<String, String> eldest) {
+            return size() > 100;
+        }
+    });
 
     /**
      * The cache of the {@link #CACHE_SIZE} most frequently requested TermDocs.
      * Maps term text <code>String</code> to {@link CacheEntry}.
      */
-    private final LinkedMap cache = new LinkedMap();
+    private final Map<String, CacheEntry> cache = new LinkedHashMap<String, CacheEntry>();
 
     /**
      * Creates a new cache for the given <code>reader</code> and
@@ -102,23 +107,23 @@
         // maintain cache
         CacheEntry entry;
         synchronized (cache) {
-            entry = (CacheEntry) cache.get(text);
+            entry = cache.get(text);
             if (entry == null) {
                 // check space
                 if (cache.size() >= CACHE_SIZE) {
                     // prune half of them and adjust the rest
-                    CacheEntry[] entries = (CacheEntry[]) cache.values().toArray(
+                    CacheEntry[] entries = cache.values().toArray(
                             new CacheEntry[cache.size()]);
                     Arrays.sort(entries);
                     int threshold = entries[CACHE_SIZE / 2].numAccessed;
-                    for (Iterator it = cache.entrySet().iterator(); it.hasNext(); ) {
-                        Map.Entry e = (Map.Entry) it.next();
-                        if (((CacheEntry) e.getValue()).numAccessed <= threshold) {
+                    for (Iterator<Map.Entry<String, CacheEntry>> it = cache.entrySet().iterator(); it.hasNext(); ) {
+                        Map.Entry<String, CacheEntry> e = it.next();
+                        if (e.getValue().numAccessed <= threshold) {
                             // prune
                             it.remove();
                         } else {
                             // adjust
-                            CacheEntry ce = (CacheEntry) e.getValue();
+                            CacheEntry ce = e.getValue();
                             ce.numAccessed = (int) Math.sqrt(ce.numAccessed);
                         }
                     }
@@ -135,8 +140,7 @@
         if (entry.numAccessed < 10) {
             if (log.isDebugEnabled()) {
                 log.debug("#{} TermDocs({},{})",
-                        new Object[]{new Integer(entry.numAccessed),
-                                field, text});
+                        new Object[]{entry.numAccessed, field, text});
             }
             return reader.termDocs(t);
         }
@@ -167,8 +171,7 @@
         } else {
             if (log.isDebugEnabled()) {
                 log.debug("CachedTermDocs({},{},{}/{})", new Object[]{
-                        field, text, new Integer(entry.bits.cardinality()),
-                        new Integer(reader.maxDoc())});
+                        field, text, entry.bits.cardinality(), reader.maxDoc()});
             }
             return new CachedTermDocs(entry.bits);
         }
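
For the unknownValues map, the commons-collections LRUMap is replaced with a LinkedHashMap whose removeEldestEntry() caps it at 100 entries, and the LinkedMap-based TermDocs cache becomes a generified LinkedHashMap with its own pruning logic. One detail worth noting: the default LinkedHashMap constructor keeps insertion order, so eviction is oldest-inserted-first; reproducing LRUMap's least-recently-accessed behavior would require accessOrder=true on the three-argument constructor. A small generic sketch of the bounded-map idiom (helper names are illustrative):

    import java.util.Collections;
    import java.util.LinkedHashMap;
    import java.util.Map;

    // Illustrative sketch, not part of the commit: a size-bounded map built on
    // LinkedHashMap.removeEldestEntry(), optionally with true LRU ordering.
    class BoundedMapSketch {

        static <K, V> Map<K, V> newBoundedMap(final int maxSize, boolean accessOrder) {
            // accessOrder=true evicts the least recently accessed entry,
            // accessOrder=false (insertion order) evicts the oldest insertion.
            return Collections.synchronizedMap(
                    new LinkedHashMap<K, V>(16, 0.75f, accessOrder) {
                        private static final long serialVersionUID = 1L;

                        @Override
                        protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
                            return size() > maxSize;
                        }
                    });
        }
    }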

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/TextExtractorJob.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/TextExtractorJob.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/TextExtractorJob.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/TextExtractorJob.java Tue Sep  8 16:09:28 2009
@@ -16,47 +16,34 @@
  */
 package org.apache.jackrabbit.core.query.lucene;
 
-import EDU.oswego.cs.dl.util.concurrent.FutureResult;
-import EDU.oswego.cs.dl.util.concurrent.Callable;
-
 import org.apache.commons.io.IOUtils;
 import org.apache.jackrabbit.extractor.TextExtractor;
-import org.apache.jackrabbit.util.LazyFileInputStream;
 import org.slf4j.LoggerFactory;
 import org.slf4j.Logger;
 
 import java.io.InputStream;
 import java.io.Reader;
-import java.io.IOException;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.io.BufferedWriter;
-import java.io.InputStreamReader;
-import java.io.StringReader;
-import java.lang.reflect.InvocationTargetException;
 
 /**
  * <code>TextExtractorJob</code> implements a future result and is runnable
  * in a background thread.
  */
-public class TextExtractorJob extends FutureResult implements Runnable {
+public class TextExtractorJob implements Runnable {
 
     /**
-     * UTF-8 encoding.
+     * The logger instance for this class.
      */
-    private static final String ENCODING_UTF8 = "UTF-8";
+    private static final Logger log = LoggerFactory.getLogger(TextExtractorJob.class);
 
     /**
-     * The logger instance for this class.
+     * The text extractor.
      */
-    private static final Logger log = LoggerFactory.getLogger(TextExtractorJob.class);
+    private final TextExtractor extractor;
 
     /**
-     * The command of the future result.
+     * The binary stream.
      */
-    private final Runnable cmd;
+    private final InputStream stream;
 
     /**
      * The mime type of the resource to extract text from.
@@ -64,14 +51,14 @@
     private final String type;
 
     /**
-     * Set to <code>true</code> if this job timed out.
+     * The encoding of the binary content, or <code>null</code>.
      */
-    private transient boolean timedOut = false;
+    private final String encoding;
 
     /**
-     * <code>true</code> if this extractor job has been flaged as discarded.
+     * The extracted text. Set when the text extraction task completes.
      */
-    private transient boolean discarded = false;
+    private volatile String text = null;
 
     /**
      * Creates a new <code>TextExtractorJob</code> with the given
@@ -83,76 +70,40 @@
      * @param encoding  the encoding of the binary content. May be
      *                  <code>null</code>.
      */
-    public TextExtractorJob(final TextExtractor extractor,
-                            final InputStream stream,
-                            final String type,
-                            final String encoding) {
+    public TextExtractorJob(
+            TextExtractor extractor,
+            InputStream stream, String type, String encoding) {
+        this.extractor = extractor;
+        this.stream = stream;
         this.type = type;
-        this.cmd = setter(new Callable() {
-            public Object call() throws Exception {
-                Reader r = extractor.extractText(stream, type, encoding);
-                if (r != null) {
-                    if (discarded) {
-                        r.close();
-                        r = null;
-                    } else if (timedOut) {
-                        // spool a temp file to save memory
-                        r = getSwappedOutReader(r);
-                    }
-                }
-                return r;
-            }
-        });
+        this.encoding = encoding;
+    }
+
+    public boolean hasExtractedText() {
+        return text != null;
     }
 
     /**
      * Returns the reader with the extracted text from the input stream passed
-     * to the constructor of this <code>TextExtractorJob</code>. The caller of
-     * this method is responsible for closing the returned reader. Returns
+     * to the constructor of this <code>TextExtractorJob</code>. Returns
      * <code>null</code> if a <code>timeout</code>occurs while waiting for the
      * text extractor to get the reader.
      *
-     * @return the Reader with the extracted text. Returns <code>null</code> if
-     *         a timeout or an exception occured extracting the text.
+     * @return the extracted text, or <code>null</code> if a timeout or
+     *         an exception occurred while extracting the text
      */
-    public Reader getReader(long timeout) {
-        Reader reader = null;
-        try {
-            reader = (Reader) timedGet(timeout);
-        } catch (InterruptedException e) {
-            // also covers TimeoutException
-            // text not extracted within timeout or interrupted
-            if (timeout > 0) {
-                log.debug("Text extraction for {} timed out (>{}ms).",
-                        type, new Long(timeout));
-                timedOut = true;
-            }
-        } catch (InvocationTargetException e) {
-            // extraction failed
-            log.warn("Exception while indexing binary property: " + e.getCause());
-            log.debug("Dump: ", e.getCause());
-        }
-        return reader;
-    }
-
-    /**
-     * Discards this extractor job. If the reader within this job is ready at
-     * the time of this call, it is closed. If the reader is not yet ready this
-     * job will be flaged as discarded and any later call to
-     * {@link #getReader(long)} will return <code>null</code>. The reader that
-     * is about to be constructed by a background thread will be closed
-     * automatically as soon as it becomes ready.
-     */
-    void discard() {
-        discarded = true;
-        Reader r = (Reader) peek();
-        if (r != null) {
+    public synchronized String getExtractedText(long timeout) {
+        if (text == null) {
             try {
-                r.close();
-            } catch (IOException e) {
-                log.warn("Exception when trying to discard extractor job: " + e);
+                wait(timeout);
+            } catch (InterruptedException e) {
+                if (text == null) {
+                    log.debug("Text extraction for {} timed out (> {}ms)",
+                            type, timeout);
+                }
             }
         }
+        return text;
     }
 
     /**
@@ -168,69 +119,20 @@
      * Runs the actual text extraction.
      */
     public void run() {
-        // forward to command
-        cmd.run();
-    }
-
-    //----------------------------< internal >----------------------------------
-
-    /**
-     * Returns a <code>Reader</code> for <code>r</code> using a temp file.
-     *
-     * @param r the reader to swap out into a temp file.
-     * @return a reader to the temp file.
-     */
-    private Reader getSwappedOutReader(Reader r) {
-        final File temp;
-        try {
-            temp = File.createTempFile("extractor", null);
-        } catch (IOException e) {
-            // unable to create temp file
-            // return reader as is
-            return r;
-        }
-        Writer out;
-        try {
-            out = new BufferedWriter(new OutputStreamWriter(
-                            new FileOutputStream(temp), ENCODING_UTF8));
-        } catch (IOException e) {
-            // should never happend actually
-            if (!temp.delete()) {
-                temp.deleteOnExit();
-            }
-            return r;
-        }
-
-        // spool into temp file
-        InputStream in = null;
         try {
             try {
-                IOUtils.copy(r, out);
-                out.close();
+                Reader reader = extractor.extractText(stream, type, encoding);
+                this.text = IOUtils.toString(reader);
             } finally {
-                r.close();
-            }
-            in = new LazyFileInputStream(temp);
-
-            return new InputStreamReader(in, ENCODING_UTF8) {
-                public void close() throws IOException {
-                    super.close();
-                    // delete file
-                    if (!temp.delete()) {
-                        temp.deleteOnExit();
-                    }
-                }
-            };
-        } catch (IOException e) {
-            // do some clean up
-            IOUtils.closeQuietly(out);
-            IOUtils.closeQuietly(in);
-
-            if (!temp.delete()) {
-                temp.deleteOnExit();
+                stream.close();
             }
-            // use empty string reader as fallback
-            return new StringReader("");
+        } catch (Throwable e) {
+            log.warn("Text extraction failed for type " + type, e);
+            this.text = "";
+        }
+        synchronized (this) {
+            notifyAll();
         }
     }
+
 }
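
The rewritten TextExtractorJob above replaces the executor/Future plumbing with plain monitor synchronization: run() stores the extracted text and calls notifyAll(), while getExtractedText(timeout) waits on the same monitor and simply returns null when nothing arrived in time. A minimal, self-contained sketch of that hand-off pattern (the OneShotText class and doExtraction() placeholder are illustrative names, not part of this commit):

    public class OneShotText implements Runnable {

        private String text;    // guarded by the monitor of "this"

        /** Runs in a background thread: compute the result, then wake any waiter. */
        public void run() {
            String result;
            try {
                result = doExtraction();
            } catch (Exception e) {
                result = "";                      // fall back to empty text on failure
            }
            synchronized (this) {
                text = result;
                notifyAll();                      // wake up threads blocked in getText()
            }
        }

        /** Waits at most timeout ms; returns null if the text is not ready yet. */
        public synchronized String getText(long timeout) {
            if (text == null) {
                try {
                    wait(timeout);                // releases the monitor while waiting
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();   // preserve the interrupt status
                }
            }
            return text;                          // may still be null after a timeout
        }

        private String doExtraction() {
            return "extracted text";              // placeholder for the real work
        }
    }

A caller would typically hand such a job to a thread pool and invoke getText(timeout) from the indexing thread, treating a null return as "not extracted yet".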

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/TextExtractorReader.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/TextExtractorReader.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/TextExtractorReader.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/TextExtractorReader.java Tue Sep  8 16:09:28 2009
@@ -20,9 +20,6 @@
 import java.io.IOException;
 import java.io.StringReader;
 
-import EDU.oswego.cs.dl.util.concurrent.Executor;
-import EDU.oswego.cs.dl.util.concurrent.DirectExecutor;
-
 /**
  * <code>TextExtractorReader</code> implements a specialized reader that runs
  * the text extractor in a background thread.
@@ -30,11 +27,6 @@
 class TextExtractorReader extends Reader {
 
     /**
-     * A direct executor in case text extraction is requested for immediate use.
-     */
-    private static final Executor DIRECT_EXECUTOR = new DirectExecutor();
-
-    /**
      * Reference to the extracted text. This reference is initially
      * <code>null</code> and later set to a valid reader when the text extractor
      * finished its work.
@@ -47,33 +39,20 @@
     private TextExtractorJob job;
 
     /**
-     * The pooled executor.
-     */
-    private final Executor executor;
-
-    /**
      * The timeout in milliseconds to wait at most for the text extractor
      * when {@link #isExtractorFinished()} is called.
      */
     private final long timeout;
 
     /**
-     * Set to <code>true</code> when the text extractor job has been started
-     * and is running.
-     */
-    private boolean jobStarted = false;
-
-    /**
      * Creates a new <code>TextExtractorReader</code> with the given
      * <code>job</code>.
      *
      * @param job      the extractor job.
-     * @param executor the executor to use when text extraction is requested.
      * @param timeout  the timeout to wait at most for the text extractor.
      */
-    TextExtractorReader(TextExtractorJob job, Executor executor, long timeout) {
+    TextExtractorReader(TextExtractorJob job, long timeout) {
         this.job = job;
-        this.executor = executor;
         this.timeout = timeout;
     }
 
@@ -86,9 +65,6 @@
         if (extractedText != null) {
             extractedText.close();
         }
-        if (jobStarted) {
-            job.discard();
-        }
     }
 
     /**
@@ -96,26 +72,11 @@
      */
     public int read(char[] cbuf, int off, int len) throws IOException {
         if (extractedText == null) {
-            // no reader present
-            // check if job is started already
-            if (jobStarted) {
-                // wait until available
-                extractedText = job.getReader(Long.MAX_VALUE);
-            } else {
-                // execute with current thread
-                try {
-                    DIRECT_EXECUTOR.execute(job);
-                } catch (InterruptedException e) {
-                    // current thread is in interrupted state
-                    // -> ignore (job will not return a reader, which is fine)
-                }
-                extractedText = job.getReader(0);
-            }
-
-            if (extractedText == null) {
-                // exception occurred
-                extractedText = new StringReader("");
+            String text = job.getExtractedText(timeout);
+            if (text == null) {
+                text = "";
             }
+            extractedText = new StringReader(text);
         }
         return extractedText.read(cbuf, off, len);
     }
@@ -125,25 +86,6 @@
      *         finished its work and this reader will return extracted text.
      */
     public boolean isExtractorFinished() {
-        if (!jobStarted) {
-            try {
-                executor.execute(job);
-                jobStarted = true;
-            } catch (InterruptedException e) {
-                // this thread is in interrupted state
-                return false;
-            }
-            extractedText = job.getReader(timeout);
-        } else {
-            // job is already running, check for immediate result
-            extractedText = job.getReader(0);
-        }
-
-        if (extractedText == null && job.getException() != null) {
-            // exception occurred
-            extractedText = new StringReader("");
-        }
-
-        return extractedText != null;
+        return job.hasExtractedText();
     }
 }
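
The simplified read() above lazily wraps whatever text the job delivers in a StringReader, substituting the empty string when the job returns null. The same lazy-delegate idiom in isolation (LazyTextReader and TextSource are illustrative names, not the Jackrabbit classes) could look roughly like this:

    import java.io.IOException;
    import java.io.Reader;
    import java.io.StringReader;

    public class LazyTextReader extends Reader {

        /** Hypothetical supplier of the extracted text; may return null. */
        public interface TextSource {
            String getText();
        }

        private final TextSource source;
        private Reader delegate;

        public LazyTextReader(TextSource source) {
            this.source = source;
        }

        public int read(char[] cbuf, int off, int len) throws IOException {
            if (delegate == null) {
                String text = source.getText();   // blocks (or times out) in the real code
                delegate = new StringReader(text != null ? text : "");
            }
            return delegate.read(cbuf, off, len);
        }

        public void close() throws IOException {
            if (delegate != null) {
                delegate.close();
            }
        }
    }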

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/WeightedHighlighter.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/WeightedHighlighter.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/WeightedHighlighter.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/WeightedHighlighter.java Tue Sep  8 16:09:28 2009
@@ -18,6 +18,7 @@
 
 import org.apache.lucene.index.TermPositionVector;
 import org.apache.lucene.index.TermVectorOffsetInfo;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.util.PriorityQueue;
 
 import java.util.Set;
@@ -76,7 +77,7 @@
      *         highlighted
      */
     public static String highlight(TermPositionVector tvec,
-                                   Set queryTerms,
+                                   Set<Term> queryTerms,
                                    String text,
                                    String excerptStart,
                                    String excerptEnd,
@@ -102,7 +103,7 @@
      *         highlighted
      */
     public static String highlight(TermPositionVector tvec,
-                                   Set queryTerms,
+                                   Set<Term> queryTerms,
                                    String text,
                                    int maxFragments,
                                    int surround) throws IOException {
@@ -150,21 +151,21 @@
 
         // retrieve fragment infos from queue and fill into list, least
         // fragment comes out first
-        List infos = new LinkedList();
+        List<FragmentInfo> infos = new LinkedList<FragmentInfo>();
         while (bestFragments.size() > 0) {
             FragmentInfo fi = (FragmentInfo) bestFragments.pop();
             infos.add(0, fi);
         }
 
-        Map offsetInfos = new IdentityHashMap();
+        Map<TermVectorOffsetInfo, Object> offsetInfos = new IdentityHashMap<TermVectorOffsetInfo, Object>();
         // remove overlapping fragment infos
-        Iterator it = infos.iterator();
+        Iterator<FragmentInfo> it = infos.iterator();
         while (it.hasNext()) {
-            FragmentInfo fi = (FragmentInfo) it.next();
+            FragmentInfo fi = it.next();
             boolean overlap = false;
-            Iterator fit = fi.iterator();
+            Iterator<TermVectorOffsetInfo> fit = fi.iterator();
             while (fit.hasNext() && !overlap) {
-                TermVectorOffsetInfo oi = (TermVectorOffsetInfo) fit.next();
+                TermVectorOffsetInfo oi = fit.next();
                 if (offsetInfos.containsKey(oi)) {
                     overlap = true;
                 }
@@ -172,7 +173,7 @@
             if (overlap) {
                 it.remove();
             } else {
-                Iterator oit = fi.iterator();
+                Iterator<TermVectorOffsetInfo> oit = fi.iterator();
                 while (oit.hasNext()) {
                     offsetInfos.put(oit.next(), null);
                 }
@@ -183,7 +184,7 @@
         StringBuffer sb = new StringBuffer(excerptStart);
         it = infos.iterator();
         while (it.hasNext()) {
-            FragmentInfo fi = (FragmentInfo) it.next();
+            FragmentInfo fi = it.next();
             sb.append(fragmentStart);
             int limit = Math.max(0, fi.getStartOffset() / 2 + fi.getEndOffset() / 2 - surround);
             int len = startFragment(sb, text, fi.getStartOffset(), limit);
@@ -271,14 +272,14 @@
     }
 
     private static class FragmentInfo {
-        ArrayList offsetInfosList;
+        List<TermVectorOffsetInfo> offsetInfosList;
         int startOffset;
         int endOffset;
         int maxFragmentSize;
         int quality;
 
         public FragmentInfo(TermVectorOffsetInfo offsetinfo, int maxFragmentSize) {
-            offsetInfosList = new ArrayList();
+            offsetInfosList = new ArrayList<TermVectorOffsetInfo>();
             offsetInfosList.add(offsetinfo);
             startOffset = offsetinfo.getStartOffset();
             endOffset = offsetinfo.getEndOffset();
@@ -313,7 +314,7 @@
             return true;
         }
 
-        public Iterator iterator() {
+        public Iterator<TermVectorOffsetInfo> iterator() {
             return offsetInfosList.iterator();
         }
 

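The overlap check above uses an IdentityHashMap purely as an identity set: a fragment is dropped as soon as one of its TermVectorOffsetInfo instances has already been claimed by a better fragment, where "already claimed" means reference identity rather than equals(). A small standalone illustration of the same idiom (the generic names are hypothetical):

    import java.util.ArrayList;
    import java.util.IdentityHashMap;
    import java.util.List;
    import java.util.Map;

    public class IdentityFilter {

        /**
         * Returns the groups whose elements have not been seen before,
         * comparing elements by reference identity (==), not equals().
         */
        public static <T> List<List<T>> dropOverlapping(List<List<T>> groups) {
            Map<T, Object> seen = new IdentityHashMap<T, Object>();
            List<List<T>> result = new ArrayList<List<T>>();
            for (List<T> group : groups) {
                boolean overlap = false;
                for (T element : group) {
                    if (seen.containsKey(element)) {
                        overlap = true;           // this element was already claimed
                        break;
                    }
                }
                if (!overlap) {
                    result.add(group);
                    for (T element : group) {
                        seen.put(element, null);  // claim the elements for this group
                    }
                }
            }
            return result;
        }
    }

This mirrors how the highlighter keeps the best-scoring fragments and discards any later fragment that reuses one of their offset objects.
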
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/WildcardQuery.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/WildcardQuery.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/WildcardQuery.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/WildcardQuery.java Tue Sep  8 16:09:28 2009
@@ -265,7 +265,7 @@
         /**
          * The map to store the results.
          */
-        private final Map resultMap;
+        private final Map<String, BitSet> resultMap;
 
         /**
          * Creates a new WildcardQueryScorer.
@@ -273,20 +273,21 @@
          * @param similarity the similarity implementation.
          * @param reader     the index reader to use.
          */
+        @SuppressWarnings({"unchecked"})
         WildcardQueryScorer(Similarity similarity, IndexReader reader) {
             super(similarity);
             this.reader = reader;
             this.cacheKey = field + '\uFFFF' + propName + '\uFFFF' + transform + '\uFFFF' + pattern;
             // check cache
             PerQueryCache cache = PerQueryCache.getInstance();
-            Map m = (Map) cache.get(WildcardQueryScorer.class, reader);
+            Map<String, BitSet> m = (Map<String, BitSet>) cache.get(WildcardQueryScorer.class, reader);
             if (m == null) {
-                m = new HashMap();
+                m = new HashMap<String, BitSet>();
                 cache.put(WildcardQueryScorer.class, reader, m);
             }
             resultMap = m;
 
-            BitSet result = (BitSet) resultMap.get(cacheKey);
+            BitSet result = resultMap.get(cacheKey);
             if (result == null) {
                 result = new BitSet(reader.maxDoc());
             } else {

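The WildcardQueryScorer change shows the usual price of generifying code that talks to an untyped cache: the map comes back as a plain Object, so the constructor narrows it with an unchecked cast and suppresses the warning. A minimal sketch of that idiom with a hypothetical UntypedCache (the real PerQueryCache keys entries by class and index reader; this is not its API):

    import java.util.BitSet;
    import java.util.HashMap;
    import java.util.Map;

    public class CachedBitSets {

        /** Hypothetical untyped cache, standing in for the per-query cache. */
        public interface UntypedCache {
            Object get(Object key);
            void put(Object key, Object value);
        }

        @SuppressWarnings({"unchecked"})
        public static BitSet getOrCompute(UntypedCache cache, Object readerKey,
                                          String cacheKey, int maxDoc) {
            // The cache stores untyped values, so narrowing to the generic map
            // is an unchecked cast that has to be suppressed explicitly.
            Map<String, BitSet> resultMap = (Map<String, BitSet>) cache.get(readerKey);
            if (resultMap == null) {
                resultMap = new HashMap<String, BitSet>();
                cache.put(readerKey, resultMap);
            }
            BitSet result = resultMap.get(cacheKey);
            if (result == null) {
                result = new BitSet(maxDoc);      // not yet computed for this pattern
                resultMap.put(cacheKey, result);
            }
            return result;
        }
    }
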
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/WildcardTermEnum.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/WildcardTermEnum.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/WildcardTermEnum.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/WildcardTermEnum.java Tue Sep  8 16:09:28 2009
@@ -163,12 +163,12 @@
         /**
          * The matching terms
          */
-        private final Map orderedTerms = new LinkedHashMap();
+        private final Map<Term, Integer> orderedTerms = new LinkedHashMap<Term, Integer>();
 
         /**
          * Iterator over all matching terms
          */
-        private final Iterator it;
+        private final Iterator<Term> it;
 
         public LowerUpperCaseTermEnum(IndexReader reader,
                                       String field,
@@ -191,7 +191,7 @@
 
             if (!neverMatches) {
                 // create range scans
-                List rangeScans = new ArrayList(2);
+                List<RangeScan> rangeScans = new ArrayList<RangeScan>(2);
                 try {
                     int idx = 0;
                     while (idx < pattern.length()
@@ -228,14 +228,13 @@
                     }
 
                     // do range scans with pattern matcher
-                    for (Iterator it = rangeScans.iterator(); it.hasNext(); ) {
-                        RangeScan scan = (RangeScan) it.next();
+                    for (RangeScan scan : rangeScans) {
                         do {
                             Term t = scan.term();
                             if (t != null) {
                                 input.setBase(t.text());
                                 if (WildcardTermEnum.this.pattern.reset().matches()) {
-                                    orderedTerms.put(t, new Integer(scan.docFreq()));
+                                    orderedTerms.put(t, scan.docFreq());
                                 }
                             }
                         } while (scan.next());
@@ -243,9 +242,7 @@
 
                 } finally {
                     // close range scans
-                    Iterator it = rangeScans.iterator();
-                    while (it.hasNext()) {
-                        RangeScan scan = (RangeScan) it.next();
+                    for (RangeScan scan : rangeScans) {
                         try {
                             scan.close();
                         } catch (IOException e) {
@@ -283,8 +280,8 @@
          * {@inheritDoc}
          */
         public int docFreq() {
-            Integer docFreq = (Integer) orderedTerms.get(current);
-            return docFreq != null ? docFreq.intValue() : 0;
+            Integer docFreq = orderedTerms.get(current);
+            return docFreq != null ? docFreq : 0;
         }
 
         /**
@@ -299,7 +296,7 @@
          * <code>null</code> if there is no next.
          */
         private void getNext() {
-            current = it.hasNext() ? (Term) it.next() : null;
+            current = it.hasNext() ? it.next() : null;
         }
     }
 }
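
LowerUpperCaseTermEnum relies on a LinkedHashMap<Term, Integer> for two things at once: iteration returns the matching terms in the order the range scans produced them, and docFreq() becomes a plain map lookup with autoboxing. The behaviour in isolation, using plain strings instead of Lucene Terms:

    import java.util.Iterator;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class OrderedTermFrequencies {

        public static void main(String[] args) {
            Map<String, Integer> orderedTerms = new LinkedHashMap<String, Integer>();
            orderedTerms.put("foo", 3);     // autoboxed int -> Integer
            orderedTerms.put("Foo", 1);
            orderedTerms.put("FOO", 2);

            // Iteration order is insertion order, unlike a plain HashMap.
            Iterator<String> it = orderedTerms.keySet().iterator();
            while (it.hasNext()) {
                String term = it.next();
                Integer docFreq = orderedTerms.get(term);
                // Unboxing with a null guard, as in docFreq() above.
                System.out.println(term + " -> " + (docFreq != null ? docFreq : 0));
            }
        }
    }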

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/constraint/ConstraintBuilder.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/constraint/ConstraintBuilder.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/constraint/ConstraintBuilder.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/constraint/ConstraintBuilder.java Tue Sep  8 16:09:28 2009
@@ -188,7 +188,7 @@
                 staticValue.getLong();
             } catch (ValueFormatException e) {
                 throw new InvalidQueryException("Static value " +
-                        staticValue.toString() + " cannot be converted to a Long");
+                        staticValue.getString() + " cannot be converted to a Long");
             }
             PropertyValueImpl propValue = (PropertyValueImpl) node.getPropertyValue();
             return new LengthOperand((PropertyValueOperand) propValue.accept(this, null));

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/constraint/PropertyValueOperand.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/constraint/PropertyValueOperand.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/constraint/PropertyValueOperand.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/constraint/PropertyValueOperand.java Tue Sep  8 16:09:28 2009
@@ -104,7 +104,7 @@
         if (prop == null) {
             return EMPTY;
         } else {
-            if (prop.getDefinition().isMultiple()) {
+            if (prop.isMultiple()) {
                 return prop.getValues();
             } else {
                 return new Value[]{prop.getValue()};

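The one-line change above switches from asking the property definition to the JCR 2.0 convenience method Property.isMultiple(). The usual pattern for reading a property uniformly as an array of values, sketched against the plain javax.jcr API (the helper class name is illustrative):

    import javax.jcr.Property;
    import javax.jcr.RepositoryException;
    import javax.jcr.Value;

    public class PropertyValues {

        /** Returns the values of a property, whether it is single or multi-valued. */
        public static Value[] getValues(Property prop) throws RepositoryException {
            if (prop.isMultiple()) {
                return prop.getValues();               // multi-valued: already an array
            } else {
                return new Value[]{prop.getValue()};   // single-valued: wrap in an array
            }
        }
    }
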
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/directory/FSDirectoryManager.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/directory/FSDirectoryManager.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/directory/FSDirectoryManager.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/directory/FSDirectoryManager.java Tue Sep  8 16:09:28 2009
@@ -19,6 +19,10 @@
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.NativeFSLockFactory;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.Lock;
+import org.apache.lucene.store.LockFactory;
 import org.apache.jackrabbit.core.query.lucene.SearchIndex;
 
 import java.io.IOException;
@@ -61,7 +65,7 @@
         } else {
             dir = new File(baseDir, name);
         }
-        return FSDirectory.getDirectory(dir, new NativeFSLockFactory(dir));
+        return new FSDir(dir);
     }
 
     /**
@@ -73,11 +77,15 @@
                 return pathname.isDirectory();
             }
         });
-        String[] names = new String[dirs.length];
-        for (int i = 0; i < dirs.length; i++) {
-            names[i] = dirs[i].getName();
+        if (dirs != null) {
+            String[] names = new String[dirs.length];
+            for (int i = 0; i < dirs.length; i++) {
+                names[i] = dirs[i].getName();
+            }
+            return names;
+        } else {
+            throw new IOException("listFiles for " + baseDir.getPath() + " returned null");
         }
-        return names;
     }
 
     /**
@@ -90,10 +98,15 @@
             return true;
         }
         // delete files first
-        for (File file : directory.listFiles()) {
-            if (!file.delete()) {
-                return false;
+        File[] files = directory.listFiles();
+        if (files != null) {
+            for (File file : files) {
+                if (!file.delete()) {
+                    return false;
+                }
             }
+        } else {
+            return false;
         }
         // now delete directory itself
         return directory.delete();
@@ -113,4 +126,99 @@
      */
     public void dispose() {
     }
+
+    //-----------------------< internal >---------------------------------------
+
+    private static final class FSDir extends Directory {
+
+        private static final FileFilter FILTER = new FileFilter() {
+            public boolean accept(File pathname) {
+                return pathname.isFile();
+            }
+        };
+
+        private final FSDirectory directory;
+
+        public FSDir(File dir) throws IOException {
+            directory = FSDirectory.getDirectory(dir,
+                    new NativeFSLockFactory(dir));
+        }
+
+        public String[] list() throws IOException {
+            File[] files = directory.getFile().listFiles(FILTER);
+            if (files == null) {
+                return null;
+            }
+            String[] names = new String[files.length];
+            for (int i = 0; i < names.length; i++) {
+                names[i] = files[i].getName();
+            }
+            return names;
+        }
+
+        public boolean fileExists(String name) throws IOException {
+            return directory.fileExists(name);
+        }
+
+        public long fileModified(String name) throws IOException {
+            return directory.fileModified(name);
+        }
+
+        public void touchFile(String name) throws IOException {
+            directory.touchFile(name);
+        }
+
+        public void deleteFile(String name) throws IOException {
+            directory.deleteFile(name);
+        }
+
+        public void renameFile(String from, String to) throws IOException {
+            directory.renameFile(from, to);
+        }
+
+        public long fileLength(String name) throws IOException {
+            return directory.fileLength(name);
+        }
+
+        public IndexOutput createOutput(String name) throws IOException {
+            return directory.createOutput(name);
+        }
+
+        public IndexInput openInput(String name) throws IOException {
+            return directory.openInput(name);
+        }
+
+        public void close() throws IOException {
+            directory.close();
+        }
+
+        public IndexInput openInput(String name, int bufferSize)
+                throws IOException {
+            return directory.openInput(name, bufferSize);
+        }
+
+        public Lock makeLock(String name) {
+            return directory.makeLock(name);
+        }
+
+        public void clearLock(String name) throws IOException {
+            directory.clearLock(name);
+        }
+
+        public void setLockFactory(LockFactory lockFactory) {
+            directory.setLockFactory(lockFactory);
+        }
+
+        public LockFactory getLockFactory() {
+            return directory.getLockFactory();
+        }
+
+        public String getLockID() {
+            return directory.getLockID();
+        }
+
+        public String toString() {
+            return this.getClass().getName() + "@" + directory;
+        }
+    }
 }
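
Both getDirectoryNames() and the FSDir.list() override above now guard against File.listFiles() returning null, which happens when the path does not exist, is not a directory, or an I/O error occurs; the unpatched code would have failed with a NullPointerException instead. The defensive pattern on its own:

    import java.io.File;
    import java.io.FileFilter;
    import java.io.IOException;

    public class SafeListing {

        /** Lists the names of sub-directories, failing with a clear message instead of an NPE. */
        public static String[] listDirectoryNames(File baseDir) throws IOException {
            File[] dirs = baseDir.listFiles(new FileFilter() {
                public boolean accept(File pathname) {
                    return pathname.isDirectory();
                }
            });
            if (dirs == null) {
                // listFiles() returns null if baseDir does not exist, is not a
                // directory, or an I/O error occurred while reading it.
                throw new IOException("listFiles for " + baseDir.getPath() + " returned null");
            }
            String[] names = new String[dirs.length];
            for (int i = 0; i < dirs.length; i++) {
                names[i] = dirs[i].getName();
            }
            return names;
        }
    }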

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/AncestorNodeJoin.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/AncestorNodeJoin.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/AncestorNodeJoin.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/AncestorNodeJoin.java Tue Sep  8 16:09:28 2009
@@ -56,7 +56,7 @@
     /**
      * Reusable list of ancestor document numbers.
      */
-    private final List ancestors = new ArrayList();
+    private final List<Integer> ancestors = new ArrayList<Integer>();
 
     /**
      * Creates a new ancestor node join condition.
@@ -77,11 +77,10 @@
         int idx = getIndex(context, contextSelectorName);
         ScoreNode[] nodes;
         while ((nodes = context.nextScoreNodes()) != null) {
-            Integer docNum = new Integer(nodes[idx].getDoc(reader));
+            Integer docNum = nodes[idx].getDoc(reader);
             ancestors.clear();
-            collectAncestors(docNum.intValue());
-            for (int i = 0; i < ancestors.size(); i++) {
-                Integer doc = (Integer) ancestors.get(i);
+            collectAncestors(docNum);
+            for (Integer doc : ancestors) {
                 contextIndex.addScoreNodes(doc, nodes);
             }
         }
@@ -94,7 +93,7 @@
      */
     public ScoreNode[][] getMatchingScoreNodes(ScoreNode ancestor)
             throws IOException {
-        Integer doc = new Integer(ancestor.getDoc(reader));
+        Integer doc = ancestor.getDoc(reader);
         return contextIndex.getScoreNodes(doc);
     }
 
@@ -108,14 +107,13 @@
     private void collectAncestors(int doc) throws IOException {
         docNums = resolver.getParents(doc, docNums);
         if (docNums.length == 1) {
-            ancestors.add(new Integer(docNums[0]));
+            ancestors.add(docNums[0]);
             collectAncestors(docNums[0]);
         } else if (docNums.length > 1) {
             // clone because recursion uses docNums again
-            int[] tmp = (int[]) docNums.clone();
-            for (int i = 0; i < tmp.length; i++) {
-                ancestors.add(new Integer(tmp[i]));
-                collectAncestors(tmp[i]);
+            for (int docNum : docNums.clone()) {
+                ancestors.add(docNum);
+                collectAncestors(docNum);
             }
         }
     }
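
collectAncestors() keeps reusing the single docNums buffer across recursive calls, so when a node has more than one parent the method must iterate over a private copy; otherwise a recursive resolver call that refills the same array could clobber the values the loop is still walking. A stripped-down sketch of that buffer-reuse pattern (ParentResolver is a hypothetical stand-in, not the Jackrabbit HierarchyResolver API):

    import java.util.ArrayList;
    import java.util.List;

    public class AncestorCollector {

        /** Hypothetical resolver that may refill and return the caller's buffer. */
        public interface ParentResolver {
            int[] getParents(int doc, int[] reusableBuffer);
        }

        private final ParentResolver resolver;
        private int[] docNums = new int[0];       // shared buffer, replaced or refilled on every call

        public AncestorCollector(ParentResolver resolver) {
            this.resolver = resolver;
        }

        public void collectAncestors(int doc, List<Integer> ancestors) {
            docNums = resolver.getParents(doc, docNums);
            if (docNums.length == 1) {
                ancestors.add(docNums[0]);        // autoboxed
                collectAncestors(docNums[0], ancestors);
            } else if (docNums.length > 1) {
                // Clone before recursing: the recursive call may refill docNums
                // in place and would then overwrite the array being iterated.
                for (int docNum : docNums.clone()) {
                    ancestors.add(docNum);
                    collectAncestors(docNum, ancestors);
                }
            }
        }

        public static void main(String[] args) {
            // Tiny hierarchy: 1 and 2 are parents of 3; 0 is the parent of both.
            ParentResolver r = new ParentResolver() {
                public int[] getParents(int doc, int[] buffer) {
                    int[] parents;
                    if (doc == 3) {
                        parents = new int[]{1, 2};
                    } else if (doc == 1 || doc == 2) {
                        parents = new int[]{0};
                    } else {
                        parents = new int[0];
                    }
                    if (buffer.length == parents.length) {
                        // Mimic the buffer-reuse contract: overwrite the caller's array.
                        System.arraycopy(parents, 0, buffer, 0, parents.length);
                        return buffer;
                    }
                    return parents;
                }
            };
            List<Integer> ancestors = new ArrayList<Integer>();
            new AncestorCollector(r).collectAncestors(3, ancestors);
            System.out.println(ancestors);        // prints [1, 0, 2, 0]
        }
    }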

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/ChildNodeJoin.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/ChildNodeJoin.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/ChildNodeJoin.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/ChildNodeJoin.java Tue Sep  8 16:09:28 2009
@@ -55,7 +55,7 @@
     /**
      * Reusable list of score nodes.
      */
-    private List tmpScoreNodes = new ArrayList();
+    private List<ScoreNode[]> tmpScoreNodes = new ArrayList<ScoreNode[]>();
 
     /**
      * Creates a new child node join condition.
@@ -77,7 +77,7 @@
         int idx = getIndex(parent, condition.getParentSelectorQName());
         ScoreNode[] nodes;
         while ((nodes = parent.nextScoreNodes()) != null) {
-            Integer docNum = new Integer(nodes[idx].getDoc(reader));
+            Integer docNum = nodes[idx].getDoc(reader);
             parentIndex.addScoreNodes(docNum, nodes);
         }
     }
@@ -90,18 +90,18 @@
     public ScoreNode[][] getMatchingScoreNodes(ScoreNode child) throws IOException {
         docNums = resolver.getParents(child.getDoc(reader), docNums);
         tmpScoreNodes.clear();
-        for (int i = 0; i < docNums.length; i++) {
-            ScoreNode[][] sn = parentIndex.getScoreNodes(new Integer(docNums[i]));
+        for (int docNum : docNums) {
+            ScoreNode[][] sn = parentIndex.getScoreNodes(docNum);
             if (sn != null) {
-                for (int j = 0; j < sn.length; j++) {
-                    tmpScoreNodes.add(sn[j]);
+                for (ScoreNode[] aSn : sn) {
+                    tmpScoreNodes.add(aSn);
                 }
             }
         }
         if (tmpScoreNodes.isEmpty()) {
             return null;
         } else {
-            return (ScoreNode[][]) tmpScoreNodes.toArray(new ScoreNode[tmpScoreNodes.size()][]);
+            return tmpScoreNodes.toArray(new ScoreNode[tmpScoreNodes.size()][]);
         }
     }
 }

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/DescendantNodeJoin.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/DescendantNodeJoin.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/DescendantNodeJoin.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/DescendantNodeJoin.java Tue Sep  8 16:09:28 2009
@@ -55,12 +55,12 @@
     /**
      * Reusable list of document numbers.
      */
-    private final List ancestors = new ArrayList();
+    private final List<Integer> ancestors = new ArrayList<Integer>();
 
     /**
      * Reusable list of score nodes.
      */
-    private final List scoreNodes = new ArrayList();
+    private final List<ScoreNode[]> scoreNodes = new ArrayList<ScoreNode[]>();
 
     /**
      * Creates a new descendant node join condition.
@@ -81,7 +81,7 @@
         int idx = getIndex(context, contextSelectorName);
         ScoreNode[] nodes;
         while ((nodes = context.nextScoreNodes()) != null) {
-            Integer docNum = new Integer(nodes[idx].getDoc(reader));
+            Integer docNum = nodes[idx].getDoc(reader);
             contextIndex.addScoreNodes(docNum, nodes);
         }
     }
@@ -96,19 +96,18 @@
         ancestors.clear();
         collectAncestors(descendant.getDoc(reader));
         scoreNodes.clear();
-        for (int i = 0; i < ancestors.size(); i++) {
-            Integer ancestor = (Integer) ancestors.get(i);
+        for (Integer ancestor : ancestors) {
             ScoreNode[][] sn = contextIndex.getScoreNodes(ancestor);
             if (sn != null) {
-                for (int j = 0; j < sn.length; j++) {
-                    scoreNodes.add(sn[j]);
+                for (ScoreNode[] aSn : sn) {
+                    scoreNodes.add(aSn);
                 }
             }
         }
         if (scoreNodes.isEmpty()) {
             return null;
         } else {
-            return (ScoreNode[][]) scoreNodes.toArray(new ScoreNode[scoreNodes.size()][]);
+            return scoreNodes.toArray(new ScoreNode[scoreNodes.size()][]);
         }
     }
 
@@ -122,14 +121,13 @@
     private void collectAncestors(int doc) throws IOException {
         docNums = resolver.getParents(doc, docNums);
         if (docNums.length == 1) {
-            ancestors.add(new Integer(docNums[0]));
+            ancestors.add(docNums[0]);
             collectAncestors(docNums[0]);
         } else if (docNums.length > 1) {
             // clone because recursion uses docNums again
-            int[] tmp = (int[]) docNums.clone();
-            for (int i = 0; i < tmp.length; i++) {
-                ancestors.add(new Integer(tmp[i]));
-                collectAncestors(tmp[i]);
+            for (int docNum : docNums.clone()) {
+                ancestors.add(docNum);
+                collectAncestors(docNum);
             }
         }
     }

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/EquiJoin.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/EquiJoin.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/EquiJoin.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/join/EquiJoin.java Tue Sep  8 16:09:28 2009
@@ -77,8 +77,7 @@
         ScoreNode[] nodes;
         // create lookup map
         while ((nodes = inner.nextScoreNodes()) != null) {
-            Integer doc = new Integer(nodes[innerScoreNodeIndex].getDoc(reader));
-            sDoc.doc = doc.intValue();
+            sDoc.doc = nodes[innerScoreNodeIndex].getDoc(reader);
             Comparable value = comparator.sortValue(sDoc);
             if (value != null) {
                 innerScoreNodes.addScoreNodes(value, nodes);