Posted to commits@jackrabbit.apache.org by ju...@apache.org on 2009/09/08 18:09:45 UTC

svn commit: r812570 [9/24] - in /jackrabbit/sandbox/JCR-1456: ./ jackrabbit-api/ jackrabbit-api/src/main/appended-resources/ jackrabbit-api/src/main/appended-resources/META-INF/ jackrabbit-api/src/main/java/org/apache/jackrabbit/api/security/ jackrabbi...

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexInfos.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexInfos.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexInfos.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexInfos.java Tue Sep  8 16:09:28 2009
@@ -23,17 +23,36 @@
 import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Set;
-import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import java.util.Map;
 
 import org.apache.lucene.store.Directory;
 import org.apache.jackrabbit.core.query.lucene.directory.IndexInputStream;
 import org.apache.jackrabbit.core.query.lucene.directory.IndexOutputStream;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
- * Stores a sequence of index names.
+ * Stores a sequence of index names and their current generation.
  */
-class IndexInfos {
+class IndexInfos implements Cloneable {
+
+    /**
+     * Logger instance for this class
+     */
+    private static final Logger log = LoggerFactory.getLogger(IndexInfos.class);
+
+    /**
+     * IndexInfos version for Jackrabbit 1.0 to 1.5.x
+     */
+    private static final int NAMES_ONLY = 0;
+
+    /**
+     * IndexInfos version for Jackrabbit 2.0
+     */
+    private static final int WITH_GENERATION = 1;
 
     /**
      * For new segment names.
@@ -41,116 +60,134 @@
     private int counter = 0;
 
     /**
-     * Flag that indicates if index infos needs to be written to disk.
+     * Map of {@link IndexInfo}s. Key=name
      */
-    private boolean dirty = false;
+    private LinkedHashMap<String, IndexInfo> indexes = new LinkedHashMap<String, IndexInfo>();
 
     /**
-     * List of index names
+     * The directory where the index infos are stored.
      */
-    private List indexes = new ArrayList();
+    private final Directory directory;
 
     /**
-     * Set of names for quick lookup.
+     * Base name of the file where the infos are stored.
      */
-    private Set names = new HashSet();
+    private final String name;
 
     /**
-     * Name of the file where the infos are stored.
+     * The generation for this index infos.
      */
-    private final String name;
+    private long generation = 0;
 
     /**
-     * Creates a new IndexInfos using <code>fileName</code>.
-     *
-     * @param fileName the name of the file where infos are stored.
+     * When this index infos were last modified.
      */
-    IndexInfos(String fileName) {
-        this.name = fileName;
-    }
+    private long lastModified;
 
     /**
-     * Returns <code>true</code> if this index infos exists in
-     * <code>dir</code>.
+     * Creates a new IndexInfos using <code>baseName</code> and reads the
+     * current generation.
      *
-     * @param dir the directory where to look for the index infos.
-     * @return <code>true</code> if it exists; <code>false</code> otherwise.
-     * @throws IOException if an error occurs while reading from the directory.
-     */
-    boolean exists(Directory dir) throws IOException {
-        return dir.fileExists(name);
+     * @param dir the directory where the index infos are stored.
+     * @param baseName the name of the file where infos are stored.
+     * @throws IOException if an error occurs while reading the index infos
+     * file.
+     */
+    IndexInfos(Directory dir, String baseName) throws IOException {
+        this.directory = dir;
+        this.name = baseName;
+        long gen = getCurrentGeneration(getFileNames(dir, baseName), baseName);
+        if (gen == -1) {
+            // write initial infos
+            write();
+        } else {
+            this.generation = gen;
+            read();
+        }
     }
 
     /**
-     * Returns the name of the file where infos are stored.
+     * Creates a new IndexInfos using <code>baseName</code> and reads the given
+     * <code>generation</code> of the index infos.
      *
-     * @return the name of the file where infos are stored.
-     */
-    String getFileName() {
-        return name;
+     * @param dir the directory where the index infos are stored.
+     * @param baseName the name of the file where infos are stored.
+     * @param generation the generation to read.
+     * @throws IOException if an error occurs while reading the index infos
+     * file.
+     */
+    IndexInfos(Directory dir, String baseName, long generation) throws IOException {
+        if (generation < 0) {
+            throw new IllegalArgumentException();
+        }
+        this.directory = dir;
+        this.name = baseName;
+        this.generation = generation;
+        read();
     }
 
     /**
-     * Reads the index infos.
+     * Returns the name of the file with the most current version where infos
+     * are stored.
      *
-     * @param dir the directory from where to read the index infos.
-     * @throws IOException if an error occurs.
+     * @return the name of the file where infos are stored.
      */
-    void read(Directory dir) throws IOException {
-        InputStream in = new IndexInputStream(dir.openInput(name));
-        try {
-            DataInputStream di = new DataInputStream(in);
-            counter = di.readInt();
-            for (int i = di.readInt(); i > 0; i--) {
-                String indexName = di.readUTF();
-                indexes.add(indexName);
-                names.add(indexName);
-            }
-        } finally {
-            in.close();
-        }
+    String getFileName() {
+        return getFileName(generation);
     }
 
     /**
-     * Writes the index infos to disk if they are dirty.
+     * Writes the index infos to disk.
      *
-     * @param dir the directory where to write the index infos.
      * @throws IOException if an error occurs.
      */
-    void write(Directory dir) throws IOException {
-        // do not write if not dirty
-        if (!dirty) {
-            return;
-        }
-
-        OutputStream out = new IndexOutputStream(dir.createOutput(name + ".new"));
+    void write() throws IOException {
+        // increment generation
+        generation++;
+        String newName = getFileName();
+        boolean success = false;
         try {
-            DataOutputStream dataOut = new DataOutputStream(out);
-            dataOut.writeInt(counter);
-            dataOut.writeInt(indexes.size());
-            for (int i = 0; i < indexes.size(); i++) {
-                dataOut.writeUTF(getName(i));
+            OutputStream out = new IndexOutputStream(
+                    directory.createOutput(newName));
+            try {
+                log.debug("Writing IndexInfos {}", newName);
+                DataOutputStream dataOut = new DataOutputStream(out);
+                dataOut.writeInt(WITH_GENERATION);
+                dataOut.writeInt(counter);
+                dataOut.writeInt(indexes.size());
+                for (Iterator it = iterator(); it.hasNext(); ) {
+                    IndexInfo info = (IndexInfo) it.next();
+                    dataOut.writeUTF(info.getName());
+                    dataOut.writeLong(info.getGeneration());
+                    log.debug("  + {}:{}", info.getName(), info.getGeneration());
+                }
+            } finally {
+                out.close();
             }
+            lastModified = System.currentTimeMillis();
+            success = true;
         } finally {
-            out.close();
-        }
-        // delete old
-        if (dir.fileExists(name)) {
-            dir.deleteFile(name);
+            if (!success) {
+                // try to delete the file and decrement generation
+                try {
+                    directory.deleteFile(newName);
+                } catch (IOException e) {
+                    log.warn("Unable to delete file: " + directory + "/" + newName);
+                }
+                generation--;
+            }
         }
-        dir.renameFile(name + ".new", name);
-        dirty = false;
     }
 
     /**
-     * Returns the index name at position <code>i</code>.
-     * @param i the position.
-     * @return the index name.
+     * @return an iterator over the {@link IndexInfo}s contained in this index
+     *          infos.
      */
-    String getName(int i) {
-        return (String) indexes.get(i);
+    Iterator iterator() {
+        return indexes.values().iterator();
     }
 
+
     /**
      * Returns the number of index names.
      * @return the number of index names.
@@ -160,16 +197,33 @@
     }
 
     /**
+     * @return the time when this index infos were last modified.
+     */
+    long getLastModified() {
+        return lastModified;
+    }
+
+    /**
      * Adds a name to the index infos.
+     *
      * @param name the name to add.
+     * @param generation the current generation of the index.
      */
-    void addName(String name) {
-        if (names.contains(name)) {
+    void addName(String name, long generation) {
+        if (indexes.containsKey(name)) {
             throw new IllegalArgumentException("already contains: " + name);
         }
-        indexes.add(name);
-        names.add(name);
-        dirty = true;
+        indexes.put(name, new IndexInfo(name, generation));
+    }
+
+    void updateGeneration(String name, long generation) {
+        IndexInfo info = indexes.get(name);
+        if (info == null) {
+            throw new NoSuchElementException(name);
+        }
+        if (info.getGeneration() != generation) {
+            info.setGeneration(generation);
+        }
     }
 
     /**
@@ -178,18 +232,6 @@
      */
     void removeName(String name) {
         indexes.remove(name);
-        names.remove(name);
-        dirty = true;
-    }
-
-    /**
-     * Removes the name from the index infos.
-     * @param i the position.
-     */
-    void removeName(int i) {
-        Object name = indexes.remove(i);
-        names.remove(name);
-        dirty = true;
     }
 
     /**
@@ -200,7 +242,14 @@
      * @return <code>true</code> if it exists in this <code>IndexInfos</code>.
      */
     boolean contains(String name) {
-        return names.contains(name);
+        return indexes.containsKey(name);
+    }
+
+    /**
+     * @return the generation of this index infos.
+     */
+    long getGeneration() {
+        return generation;
     }
 
     /**
@@ -208,7 +257,136 @@
      * @return a new unique name for an index folder.
      */
     String newName() {
-        dirty = true;
         return "_" + Integer.toString(counter++, Character.MAX_RADIX);
     }
+
+    /**
+     * Clones this index infos.
+     *
+     * @return a clone of this index infos.
+     */
+    public IndexInfos clone() {
+        try {
+            IndexInfos clone = (IndexInfos) super.clone();
+            clone.indexes = (LinkedHashMap) indexes.clone();
+            for (Map.Entry<String, IndexInfo> entry : clone.indexes.entrySet()) {
+                entry.setValue(entry.getValue().clone());
+            }
+            return clone;
+        } catch (CloneNotSupportedException e) {
+            // never happens, this class is cloneable
+            throw new RuntimeException();
+        }
+    }
+
+    //----------------------------------< internal >----------------------------
+
+    /**
+     * Reads the index infos with the currently set {@link #generation}.
+     *
+     * @throws IOException if an error occurs.
+     */
+    private void read() throws IOException {
+        String fileName = getFileName(generation);
+        InputStream in = new IndexInputStream(directory.openInput(fileName));
+        try {
+            LinkedHashMap<String, IndexInfo> indexes = new LinkedHashMap<String, IndexInfo>();
+            DataInputStream di = new DataInputStream(in);
+            int version;
+            if (generation == 0) {
+                version = NAMES_ONLY;
+            } else {
+                version = di.readInt();
+            }
+            int counter = di.readInt();
+            for (int i = di.readInt(); i > 0; i--) {
+                String indexName = di.readUTF();
+                long gen = 0;
+                if (version >= WITH_GENERATION) {
+                    gen = di.readLong();
+                }
+                indexes.put(indexName, new IndexInfo(indexName, gen));
+            }
+            // when successfully read set values
+            this.lastModified = directory.fileModified(fileName);
+            this.indexes = indexes;
+            this.counter = counter;
+        } finally {
+            in.close();
+        }
+    }
+
+    /**
+     * Returns the name of the file with the given generation where infos
+     * are stored.
+     *
+     * @param gen the generation of the file.
+     * @return the name of the file where infos are stored.
+     */
+    private String getFileName(long gen) {
+        if (gen == 0) {
+            return name;
+        } else {
+            return name + "_" + Long.toString(gen, Character.MAX_RADIX);
+        }
+    }
+
+    /**
+     * Returns all generations of this index infos.
+     *
+     * @param directory the directory where the index infos are stored.
+     * @param base the base name for the index infos.
+     * @return names of all generation files of this index infos.
+     */
+    private static String[] getFileNames(Directory directory, final String base) {
+        String[] names = new String[0];
+        try {
+            names = directory.list();
+        } catch (IOException e) {
+            // TODO: log warning? or throw?
+        }
+        List<String> nameList = new ArrayList<String>(names.length);
+        for (String n : names) {
+            if (n.startsWith(base)) {
+                nameList.add(n);
+            }
+        }
+        return nameList.toArray(new String[nameList.size()]);
+    }
+
+    /**
+     * Parse the generation off the file name and return it.
+     *
+     * @param fileName the generation file that contains index infos.
+     * @param base the base name.
+     * @return the generation of the given file.
+     */
+    private static long generationFromFileName(String fileName, String base) {
+        if (fileName.equals(base)) {
+            return 0;
+        } else {
+            return Long.parseLong(fileName.substring(base.length() + 1),
+                    Character.MAX_RADIX);
+        }
+    }
+
+    /**
+     * Returns the most current generation of the given files.
+     *
+     * @param fileNames the file names from where to obtain the generation.
+     * @param base the base name.
+     * @return the most current generation.
+     */
+    private static long getCurrentGeneration(String[] fileNames, String base) {
+        long max = -1;
+        int i = 0;
+        while (i < fileNames.length) {
+            long gen = generationFromFileName(fileNames[i], base);
+            if (gen > max) {
+                max = gen;
+            }
+            i++;
+        }
+        return max;
+    }
 }
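
The rewritten IndexInfos above no longer overwrites a single "indexes" file; every write() bumps a generation counter and creates a new file whose name carries the generation in base 36. A minimal, self-contained sketch of just that naming scheme (not the Jackrabbit class itself; the base name "indexes" is the one MultiIndex uses):

    public class GenerationFileNames {

        /** Mirrors IndexInfos#getFileName(long): generation 0 keeps the legacy bare name. */
        static String fileName(String base, long generation) {
            if (generation == 0) {
                return base;                                                    // e.g. "indexes"
            }
            return base + "_" + Long.toString(generation, Character.MAX_RADIX); // e.g. "indexes_1", "indexes_a"
        }

        /** Mirrors IndexInfos#generationFromFileName: parses the base-36 suffix back. */
        static long generation(String fileName, String base) {
            if (fileName.equals(base)) {
                return 0;
            }
            return Long.parseLong(fileName.substring(base.length() + 1), Character.MAX_RADIX);
        }

        public static void main(String[] args) {
            for (long gen : new long[] {0, 1, 9, 10, 35, 36}) {
                String name = fileName("indexes", gen);
                System.out.println(gen + " -> " + name + " -> " + generation(name, "indexes"));
            }
        }
    }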

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexMigration.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexMigration.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexMigration.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexMigration.java Tue Sep  8 16:09:28 2009
@@ -141,11 +141,10 @@
             Fieldable[] fields = doc.getFieldables(FieldNames.PROPERTIES);
             if (fields != null) {
                 doc.removeFields(FieldNames.PROPERTIES);
-                for (int i = 0; i < fields.length; i++) {
-                    String value = fields[i].stringValue();
+                for (Fieldable field : fields) {
+                    String value = field.stringValue();
                     value = value.replace('\uFFFF', '[');
-                    doc.add(new Field(FieldNames.PROPERTIES, value,
-                            Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
+                    doc.add(new Field(FieldNames.PROPERTIES, value, Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
                 }
             }
             return doc;

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingConfigurationEntityResolver.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingConfigurationEntityResolver.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingConfigurationEntityResolver.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingConfigurationEntityResolver.java Tue Sep  8 16:09:28 2009
@@ -35,10 +35,10 @@
     /**
      * Maps system ids to DTD resource names.
      */
-    private static final Map SYSTEM_IDS;
+    private static final Map<String, String> SYSTEM_IDS;
 
     static {
-        Map systemIds = new HashMap();
+        Map<String, String> systemIds = new HashMap<String, String>();
         systemIds.put(
                 "http://jackrabbit.apache.org/dtd/indexing-configuration-1.0.dtd",
                 "indexing-configuration-1.0.dtd");
@@ -56,7 +56,7 @@
      */
     public InputSource resolveEntity(String publicId, String systemId)
             throws SAXException, IOException {
-        String resourceName = (String) SYSTEM_IDS.get(systemId);
+        String resourceName = SYSTEM_IDS.get(systemId);
         if (resourceName != null) {
             InputStream in = getClass().getResourceAsStream(resourceName);
             if (in != null) {

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingConfigurationImpl.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingConfigurationImpl.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingConfigurationImpl.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingConfigurationImpl.java Tue Sep  8 16:09:28 2009
@@ -96,7 +96,7 @@
     /**
      * The {@link IndexingRule}s inside this configuration.
      */
-    private Map configElements = new HashMap();
+    private Map<Name, List<IndexingRule>> configElements = new HashMap<Name, List<IndexingRule>>();
 
     /**
      * The indexing aggregates inside this configuration.
@@ -106,7 +106,7 @@
     /**
      * The configured analyzers for indexing properties.
      */
-    private Map analyzers = new HashMap();
+    private Map<String, Analyzer> analyzers = new HashMap<String, Analyzer>();
 
     /**
      * {@inheritDoc}
@@ -120,7 +120,7 @@
 
         NodeTypeRegistry ntReg = context.getNodeTypeRegistry();
         Name[] ntNames = ntReg.getRegisteredNodeTypes();
-        List idxAggregates = new ArrayList();
+        List<AggregateRule> idxAggregates = new ArrayList<AggregateRule>();
         NodeList indexingConfigs = config.getChildNodes();
         for (int i = 0; i < indexingConfigs.getLength(); i++) {
             Node configNode = indexingConfigs.item(i);
@@ -128,15 +128,15 @@
                 IndexingRule element = new IndexingRule(configNode);
                 // register under node type and all its sub types
                 log.debug("Found rule '{}' for NodeType '{}'", element, element.getNodeTypeName());
-                for (int n = 0; n < ntNames.length; n++) {
-                    if (ntReg.getEffectiveNodeType(ntNames[n]).includesNodeType(element.getNodeTypeName())) {
-                        List perNtConfig = (List) configElements.get(ntNames[n]);
+                for (Name ntName : ntNames) {
+                    if (ntReg.getEffectiveNodeType(ntName).includesNodeType(element.getNodeTypeName())) {
+                        List<IndexingRule> perNtConfig = configElements.get(ntName);
                         if (perNtConfig == null) {
-                            perNtConfig = new ArrayList();
-                            configElements.put(ntNames[n], perNtConfig);
+                            perNtConfig = new ArrayList<IndexingRule>();
+                            configElements.put(ntName, perNtConfig);
                         }
-                        log.debug("Registering it for name '{}'", ntNames[n]);
-                        perNtConfig.add(new IndexingRule(element, ntNames[n]));
+                        log.debug("Registering it for name '{}'", ntName);
+                        perNtConfig.add(new IndexingRule(element, ntName));
                     }
                 }
             } else if (configNode.getNodeName().equals("aggregate")) {
@@ -187,8 +187,7 @@
             }
 
         }
-        aggregateRules = (AggregateRule[]) idxAggregates.toArray(
-                new AggregateRule[idxAggregates.size()]);
+        aggregateRules = idxAggregates.toArray(new AggregateRule[idxAggregates.size()]);
     }
 
     /**
@@ -303,7 +302,7 @@
      */
     public Analyzer getPropertyAnalyzer(String fieldName) {
         if (analyzers.containsKey(fieldName)) {
-            return (Analyzer) analyzers.get(fieldName);
+            return analyzers.get(fieldName);
         }
         return null;
     }
@@ -317,30 +316,27 @@
      * @return the indexing rule or <code>null</code> if none applies.
      */
     private IndexingRule getApplicableIndexingRule(NodeState state) {
-        List rules = null;
-        List r = (List) configElements.get(state.getNodeTypeName());
+        List<IndexingRule> rules = null;
+        List<IndexingRule> r = configElements.get(state.getNodeTypeName());
         if (r != null) {
-            rules = new ArrayList();
+            rules = new ArrayList<IndexingRule>();
             rules.addAll(r);
         }
 
-        Iterator it = state.getMixinTypeNames().iterator();
-        while (it.hasNext()) {
-            r = (List) configElements.get(it.next());
+        for (Name name : state.getMixinTypeNames()) {
+            r = configElements.get(name);
             if (r != null) {
                 if (rules == null) {
-                    rules = new ArrayList();
+                    rules = new ArrayList<IndexingRule>();
                 }
                 rules.addAll(r);
             }
         }
 
         if (rules != null) {
-            it = rules.iterator();
-            while (it.hasNext()) {
-                IndexingRule ir = (IndexingRule) it.next();
-                if (ir.appliesTo(state)) {
-                    return ir;
+            for (IndexingRule rule : rules) {
+                if (rule.appliesTo(state)) {
+                    return rule;
                 }
             }
         }
@@ -381,8 +377,8 @@
      *                                prefix.
      */
     private void createPropertyConfigs(Node config,
-                                       Map propConfigs,
-                                       List namePatterns)
+                                       Map<Name, PropertyConfig> propConfigs,
+                                       List<NamePattern> namePatterns)
             throws IllegalNameException, NamespaceException {
         NodeList childNodes = config.getChildNodes();
         for (int i = 0; i < childNodes.getLength(); i++) {
@@ -404,24 +400,21 @@
                 boolean nodeScopeIndex = true;
                 Node nsIndex = attributes.getNamedItem("nodeScopeIndex");
                 if (nsIndex != null) {
-                    nodeScopeIndex = Boolean.valueOf(
-                            nsIndex.getNodeValue()).booleanValue();
+                    nodeScopeIndex = Boolean.valueOf(nsIndex.getNodeValue());
                 }
 
                 // get isRegexp flag
                 boolean isRegexp = false;
                 Node regexp = attributes.getNamedItem("isRegexp");
                 if (regexp != null) {
-                    isRegexp = Boolean.valueOf(
-                            regexp.getNodeValue()).booleanValue();
+                    isRegexp = Boolean.valueOf(regexp.getNodeValue());
                 }
 
                 // get useInExcerpt flag
                 boolean useInExcerpt = true;
                 Node excerpt = attributes.getNamedItem("useInExcerpt");
                 if (excerpt != null) {
-                    useInExcerpt = Boolean.valueOf(
-                            excerpt.getNodeValue()).booleanValue();
+                    useInExcerpt = Boolean.valueOf(excerpt.getNodeValue());
                 }
 
                 PropertyConfig pc = new PropertyConfig(
@@ -607,12 +600,12 @@
         /**
          * Map of {@link PropertyConfig}. Key=Name of property.
          */
-        private final Map propConfigs;
+        private final Map<Name, PropertyConfig> propConfigs;
 
         /**
          * List of {@link NamePattern}s.
          */
-        private final List namePatterns;
+        private final List<NamePattern> namePatterns;
 
         /**
          * An expression based on a relative path.
@@ -651,8 +644,8 @@
             this.nodeTypeName = getNodeTypeName(config);
             this.condition = getCondition(config);
             this.boost = getNodeBoost(config);
-            this.propConfigs = new HashMap();
-            this.namePatterns = new ArrayList();
+            this.propConfigs = new HashMap<Name, PropertyConfig>();
+            this.namePatterns = new ArrayList<NamePattern>();
             createPropertyConfigs(config, propConfigs, namePatterns);
         }
 
@@ -712,11 +705,7 @@
          */
         public boolean isIncludedInNodeScopeIndex(Name propertyName) {
             PropertyConfig config = getConfig(propertyName);
-            if (config != null) {
-                return config.nodeScopeIndex;
-            } else {
-                return false;
-            }
+            return config != null && config.nodeScopeIndex;
         }
 
         /**
@@ -730,11 +719,7 @@
          */
         public boolean useInExcerpt(Name propertyName) {
             PropertyConfig config = getConfig(propertyName);
-            if (config != null) {
-                return config.useInExcerpt;
-            } else {
-                return true;
-            }
+            return config == null || config.useInExcerpt;
         }
 
         /**
@@ -749,11 +734,7 @@
             if (!nodeTypeName.equals(state.getNodeTypeName())) {
                 return false;
             }
-            if (condition == null) {
-                return true;
-            } else {
-                return condition.evaluate(state);
-            }
+            return condition == null || condition.evaluate(state);
         }
 
         //-------------------------< internal >---------------------------------
@@ -765,14 +746,13 @@
          *         property.
          */
         private PropertyConfig getConfig(Name propertyName) {
-            PropertyConfig config = (PropertyConfig) propConfigs.get(propertyName);
+            PropertyConfig config = propConfigs.get(propertyName);
             if (config != null) {
                 return config;
             } else if (namePatterns.size() > 0) {
                 Path path = PATH_FACTORY.create(propertyName);
                 // check patterns
-                for (Iterator it = namePatterns.iterator(); it.hasNext(); ) {
-                    NamePattern np = (NamePattern) it.next();
+                for (NamePattern np : namePatterns) {
                     if (np.matches(path)) {
                         return np.getConfig();
                     }
@@ -974,8 +954,8 @@
                     PropertyState propState =
                             (PropertyState) ism.getItemState(propId);
                     InternalValue[] values = propState.getValues();
-                    for (int i = 0; i < values.length; i++) {
-                        if (values[i].toString().equals(propertyValue)) {
+                    for (InternalValue value : values) {
+                        if (value.toString().equals(propertyValue)) {
                             return true;
                         }
                     }
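
A detail worth noting in the IndexingRule changes above: the two simplified boolean methods encode different defaults when no property configuration matches. Node-scope indexing falls back to false, while use-in-excerpt falls back to true. A small illustrative sketch of the same pattern with a placeholder config type (the names here are stand-ins, not the Jackrabbit classes):

    public class BooleanDefaults {

        /** Stand-in for the nested PropertyConfig and its two flags. */
        static class Config {
            final boolean nodeScopeIndex;
            final boolean useInExcerpt;

            Config(boolean nodeScopeIndex, boolean useInExcerpt) {
                this.nodeScopeIndex = nodeScopeIndex;
                this.useInExcerpt = useInExcerpt;
            }
        }

        /** No matching config: the property is NOT included in the node scope index. */
        static boolean isIncludedInNodeScopeIndex(Config config) {
            return config != null && config.nodeScopeIndex;
        }

        /** No matching config: the property IS still used in the excerpt. */
        static boolean useInExcerpt(Config config) {
            return config == null || config.useInExcerpt;
        }

        public static void main(String[] args) {
            System.out.println(isIncludedInNodeScopeIndex(null)); // false
            System.out.println(useInExcerpt(null));               // true
        }
    }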

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingQueue.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingQueue.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingQueue.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingQueue.java Tue Sep  8 16:09:28 2009
@@ -24,7 +24,6 @@
 import java.util.ArrayList;
 
 import org.apache.jackrabbit.core.id.NodeId;
-import org.apache.jackrabbit.uuid.UUID;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermDocs;
@@ -52,7 +51,7 @@
     /**
      * Maps UUID {@link String}s to {@link Document}s.
      */
-    private final Map pendingDocuments = new HashMap();
+    private final Map<String, Document> pendingDocuments = new HashMap<String, Document>();
 
     /**
      * Flag that indicates whether this indexing queue had been
@@ -96,18 +95,18 @@
             reader.release();
         }
         String[] uuids = queueStore.getPending();
-        for (int i = 0; i < uuids.length; i++) {
+        for (String uuid : uuids) {
             try {
-                Document doc = index.createDocument(new NodeId(uuids[i]));
-                pendingDocuments.put(uuids[i], doc);
+                Document doc = index.createDocument(new NodeId(uuid));
+                pendingDocuments.put(uuid, doc);
                 log.debug("added node {}. New size of indexing queue: {}",
-                        uuids[i], new Integer(pendingDocuments.size()));
+                        uuid, pendingDocuments.size());
             } catch (IllegalArgumentException e) {
-                log.warn("Invalid UUID in indexing queue store: " + uuids[i]);
+                log.warn("Invalid UUID in indexing queue store: " + uuid);
             } catch (RepositoryException e) {
                 // node does not exist anymore
-                log.debug("Node with uuid {} does not exist anymore", uuids[i]);
-                queueStore.removeUUID(uuids[i]);
+                log.debug("Node with uuid {} does not exist anymore", uuid);
+                queueStore.removeUUID(uuid);
             }
         }
         initialized = true;
@@ -120,7 +119,7 @@
      */
     public Document[] getFinishedDocuments() {
         checkInitialized();
-        List finished = new ArrayList();
+        List<Document> finished = new ArrayList<Document>();
         synchronized (this) {
             finished.addAll(pendingDocuments.values());
         }
@@ -132,7 +131,7 @@
                 it.remove();
             }
         }
-        return (Document[]) finished.toArray(new Document[finished.size()]);
+        return finished.toArray(new Document[finished.size()]);
     }
 
     /**
@@ -146,11 +145,12 @@
      */
     public synchronized Document removeDocument(String uuid) {
         checkInitialized();
-        Document doc = (Document) pendingDocuments.remove(uuid);
+        Document doc = pendingDocuments.remove(uuid);
         if (doc != null) {
             queueStore.removeUUID(uuid);
             log.debug("removed node {}. New size of indexing queue: {}",
-                    uuid, new Integer(pendingDocuments.size()));
+                    uuid, pendingDocuments.size());
+            notifyIfEmpty();
         }
         return doc;
     }
@@ -166,9 +166,9 @@
     public synchronized Document addDocument(Document doc) {
         checkInitialized();
         String uuid = doc.get(FieldNames.UUID);
-        Document existing = (Document) pendingDocuments.put(uuid, doc);
+        Document existing = pendingDocuments.put(uuid, doc);
         log.debug("added node {}. New size of indexing queue: {}",
-                uuid, new Integer(pendingDocuments.size()));
+                uuid, pendingDocuments.size());
         if (existing == null) {
             // document wasn't present, add it to the queue store
             queueStore.addUUID(uuid);
@@ -183,13 +183,14 @@
     public synchronized void close() {
         checkInitialized();
         // go through pending documents and close readers
-        Iterator it = pendingDocuments.values().iterator();
+        Iterator<Document> it = pendingDocuments.values().iterator();
         while (it.hasNext()) {
-            Document doc = (Document) it.next();
+            Document doc = it.next();
             Util.disposeDocument(doc);
             it.remove();
         }
         queueStore.close();
+        notifyIfEmpty();
     }
 
     /**
@@ -202,14 +203,36 @@
         }
     }
 
-    //----------------------------< testing only >------------------------------
+    /**
+     * Notifies all threads waiting for this queue to become empty.
+     * The notification is only sent if this queue actually is empty.
+     */
+    private synchronized void notifyIfEmpty() {
+        if (pendingDocuments.isEmpty()) {
+            notifyAll();
+        }
+    }
 
     /**
-     * <b>This method is for testing only!</b>
+     * Waits until this queue is empty.
+     */
+    synchronized void waitUntilEmpty() {
+        while (!pendingDocuments.isEmpty()) {
+            try {
+                wait();
+            } catch (InterruptedException e) {
+                // Interrupted, check again if we're empty
+            }
+        }
+    }
+
+    /**
+     * Returns the number of pending documents.
      *
      * @return the number of the currently pending documents.
      */
     synchronized int getNumPendingDocuments() {
         return pendingDocuments.size();
     }
+
 }
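
The new notifyIfEmpty()/waitUntilEmpty() pair in IndexingQueue is the standard guarded-wait idiom: removals that leave the queue empty call notifyAll(), and waiters re-check the condition in a loop. A minimal self-contained sketch of the same idiom (a generic pending-work monitor, not the IndexingQueue class; unlike the code above it propagates InterruptedException instead of swallowing it):

    import java.util.HashMap;
    import java.util.Map;

    public class PendingMonitor {

        private final Map<String, Object> pending = new HashMap<String, Object>();

        synchronized void add(String id, Object work) {
            pending.put(id, work);
        }

        synchronized Object remove(String id) {
            Object work = pending.remove(id);
            if (work != null && pending.isEmpty()) {
                notifyAll(); // wake up any thread blocked in waitUntilEmpty()
            }
            return work;
        }

        /** Blocks until every pending entry has been removed. */
        synchronized void waitUntilEmpty() throws InterruptedException {
            while (!pending.isEmpty()) {
                wait(); // loop: re-check after every wakeup, including spurious ones
            }
        }
    }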

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingQueueStore.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingQueueStore.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingQueueStore.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/IndexingQueueStore.java Tue Sep  8 16:09:28 2009
@@ -65,7 +65,7 @@
     /**
      * The UUID Strings of the pending documents.
      */
-    private final Set pending = new HashSet();
+    private final Set<String> pending = new HashSet<String>();
 
     /**
      * The directory from where to read pending document UUIDs.
@@ -87,7 +87,7 @@
      * @return the UUIDs of the pending text extraction jobs.
      */
     public String[] getPending() {
-        return (String[]) pending.toArray(new String[pending.size()]);
+        return pending.toArray(new String[pending.size()]);
     }
 
     /**

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/JackrabbitQueryParser.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/JackrabbitQueryParser.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/JackrabbitQueryParser.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/JackrabbitQueryParser.java Tue Sep  8 16:09:28 2009
@@ -107,18 +107,16 @@
      */
     protected Query getSynonymQuery(String field, String termStr)
             throws ParseException {
-        List synonyms = new ArrayList();
+        List<BooleanClause> synonyms = new ArrayList<BooleanClause>();
         synonyms.add(new BooleanClause(getFieldQuery(field, termStr),
                 BooleanClause.Occur.SHOULD));
         if (synonymProvider != null) {
-            String[] terms = synonymProvider.getSynonyms(termStr);
-            for (int i = 0; i < terms.length; i++) {
-                synonyms.add(new BooleanClause(getFieldQuery(field, terms[i]),
-                        BooleanClause.Occur.SHOULD));
+            for (String term : synonymProvider.getSynonyms(termStr)) {
+                synonyms.add(new BooleanClause(getFieldQuery(field, term), BooleanClause.Occur.SHOULD));
             }
         }
         if (synonyms.size() == 1) {
-            return ((BooleanClause) synonyms.get(0)).getQuery();
+            return synonyms.get(0).getQuery();
         } else {
             return getBooleanQuery(synonyms);
         }
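
The synonym handling above simply ORs the original term with every synonym returned by the provider. A hedged sketch of that expansion against the Lucene 2.x-era API used in this code base (field name and synonym list are hard-coded for illustration; in JackrabbitQueryParser they come from getFieldQuery() and the configured SynonymProvider):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;

    public class SynonymExpansion {

        /** Builds (field:term OR field:syn1 OR field:syn2 ...), all clauses optional (SHOULD). */
        static Query expand(String field, String term, String... synonyms) {
            BooleanQuery query = new BooleanQuery();
            query.add(new BooleanClause(new TermQuery(new Term(field, term)), BooleanClause.Occur.SHOULD));
            for (String synonym : synonyms) {
                query.add(new BooleanClause(new TermQuery(new Term(field, synonym)), BooleanClause.Occur.SHOULD));
            }
            return query;
        }

        public static void main(String[] args) {
            System.out.println(expand("text", "quick", "fast", "rapid"));
        }
    }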

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/LuceneQueryBuilder.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/LuceneQueryBuilder.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/LuceneQueryBuilder.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/LuceneQueryBuilder.java Tue Sep  8 16:09:28 2009
@@ -19,7 +19,6 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Calendar;
-import java.util.Iterator;
 import java.util.List;
 import java.math.BigDecimal;
 
@@ -152,7 +151,7 @@
     /**
      * Exceptions thrown during tree translation
      */
-    private final List exceptions = new ArrayList();
+    private final List<Exception> exceptions = new ArrayList<Exception>();
 
     /**
      * Creates a new <code>LuceneQueryBuilder</code> instance.
@@ -228,8 +227,8 @@
         Query q = builder.createLuceneQuery();
         if (builder.exceptions.size() > 0) {
             StringBuffer msg = new StringBuffer();
-            for (Iterator it = builder.exceptions.iterator(); it.hasNext();) {
-                msg.append(it.next().toString()).append('\n');
+            for (Exception exception : builder.exceptions) {
+                msg.append(exception.toString()).append('\n');
             }
             throw new RepositoryException("Exception building query: " + msg.toString());
         }
@@ -241,7 +240,8 @@
      * {@link org.apache.lucene.search.Query}.
      *
      * @return the lucene <code>Query</code>.
-     * @throws RepositoryException
+     * @throws RepositoryException if an error occurs while building the lucene
+     *                             query.
      */
     private Query createLuceneQuery() throws RepositoryException {
         return (Query) root.accept(this, null);
@@ -263,8 +263,8 @@
     public Object visit(OrQueryNode node, Object data) throws RepositoryException {
         BooleanQuery orQuery = new BooleanQuery();
         Object[] result = node.acceptOperands(this, null);
-        for (int i = 0; i < result.length; i++) {
-            Query operand = (Query) result[i];
+        for (Object aResult : result) {
+            Query operand = (Query) aResult;
             orQuery.add(operand, Occur.SHOULD);
         }
         return orQuery;
@@ -276,8 +276,8 @@
             return null;
         }
         BooleanQuery andQuery = new BooleanQuery();
-        for (int i = 0; i < result.length; i++) {
-            Query operand = (Query) result[i];
+        for (Object aResult : result) {
+            Query operand = (Query) aResult;
             andQuery.add(operand, Occur.MUST);
         }
         return andQuery;
@@ -290,8 +290,8 @@
         }
         // join the results
         BooleanQuery b = new BooleanQuery();
-        for (int i = 0; i < result.length; i++) {
-            b.add((Query) result[i], Occur.SHOULD);
+        for (Object aResult : result) {
+            b.add((Query) aResult, Occur.SHOULD);
         }
         // negate
         return new NotQuery(b);
@@ -311,7 +311,7 @@
 
     public Object visit(NodeTypeQueryNode node, Object data) {
 
-        List terms = new ArrayList();
+        List<Term> terms = new ArrayList<Term>();
         try {
             String mixinTypesField = resolver.getJCRName(NameConstants.JCR_MIXINTYPES);
             String primaryTypeField = resolver.getJCRName(NameConstants.JCR_PRIMARYTYPE);
@@ -363,11 +363,11 @@
             // exception occurred
             return new BooleanQuery();
         } else if (terms.size() == 1) {
-            return new JackrabbitTermQuery((Term) terms.get(0));
+            return new JackrabbitTermQuery(terms.get(0));
         } else {
             BooleanQuery b = new BooleanQuery();
-            for (Iterator it = terms.iterator(); it.hasNext();) {
-                b.add(new JackrabbitTermQuery((Term) it.next()), Occur.SHOULD);
+            for (Term term : terms) {
+                b.add(new JackrabbitTermQuery(term), Occur.SHOULD);
             }
             return b;
         }
@@ -464,8 +464,8 @@
             exceptions.add(new InvalidQueryException("Number of location steps must be > 0"));
         }
         // loop over steps
-        for (int i = 0; i < steps.length; i++) {
-            context = (Query) steps[i].accept(this, context);
+        for (LocationStepQueryNode step : steps) {
+            context = (Query) step.accept(this, context);
         }
         if (data instanceof BooleanQuery) {
             BooleanQuery constraint = (BooleanQuery) data;
@@ -487,15 +487,15 @@
 
         // predicate on step?
         Object[] predicates = node.acceptOperands(this, data);
-        for (int i = 0; i < predicates.length; i++) {
-            andQuery.add((Query) predicates[i], Occur.MUST);
+        for (Object predicate : predicates) {
+            andQuery.add((Query) predicate, Occur.MUST);
         }
 
         // check for position predicate
         QueryNode[] pred = node.getPredicates();
-        for (int i = 0; i < pred.length; i++) {
-            if (pred[i].getType() == QueryNode.TYPE_RELATION) {
-                RelationQueryNode pos = (RelationQueryNode) pred[i];
+        for (QueryNode aPred : pred) {
+            if (aPred.getType() == QueryNode.TYPE_RELATION) {
+                RelationQueryNode pos = (RelationQueryNode) aPred;
                 if (pos.getValueType() == QueryConstants.TYPE_POSITION) {
                     node.setIndex(pos.getPositionValue());
                 }
@@ -585,8 +585,8 @@
             Object[] predicates = node.acceptOperands(this, data);
             if (predicates.length > 0) {
                 BooleanQuery andQuery = new BooleanQuery();
-                for (int i = 0; i < predicates.length; i++) {
-                    andQuery.add((Query) predicates[i], Occur.MUST);
+                for (Object predicate : predicates) {
+                    andQuery.add((Query) predicate, Occur.MUST);
                 }
                 andQuery.add(context, Occur.MUST);
                 context = andQuery;
@@ -709,13 +709,14 @@
                 case QueryConstants.OPERATION_EQ_VALUE:      // =
                 case QueryConstants.OPERATION_EQ_GENERAL:
                     BooleanQuery or = new BooleanQuery();
-                    for (int i = 0; i < stringValues.length; i++) {
+                    for (String value : stringValues) {
                         Term t = new Term(FieldNames.PROPERTIES,
-                                    FieldNames.createNamedValue(field, stringValues[i]));
+                                FieldNames.createNamedValue(field, value));
                         Query q;
                         if (transform[0] == TransformConstants.TRANSFORM_UPPER_CASE) {
                             q = new CaseTermQuery.Upper(t);
-                        } else if (transform[0] == TransformConstants.TRANSFORM_LOWER_CASE) {
+                        } else
+                        if (transform[0] == TransformConstants.TRANSFORM_LOWER_CASE) {
                             q = new CaseTermQuery.Lower(t);
                         } else {
                             q = new JackrabbitTermQuery(t);
@@ -730,8 +731,8 @@
                 case QueryConstants.OPERATION_GE_VALUE:      // >=
                 case QueryConstants.OPERATION_GE_GENERAL:
                     or = new BooleanQuery();
-                    for (int i = 0; i < stringValues.length; i++) {
-                        Term lower = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, stringValues[i]));
+                    for (String value : stringValues) {
+                        Term lower = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, value));
                         Term upper = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, "\uFFFF"));
                         or.add(new RangeQuery(lower, upper, true, transform[0]), Occur.SHOULD);
                     }
@@ -743,8 +744,8 @@
                 case QueryConstants.OPERATION_GT_VALUE:      // >
                 case QueryConstants.OPERATION_GT_GENERAL:
                     or = new BooleanQuery();
-                    for (int i = 0; i < stringValues.length; i++) {
-                        Term lower = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, stringValues[i]));
+                    for (String value : stringValues) {
+                        Term lower = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, value));
                         Term upper = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, "\uFFFF"));
                         or.add(new RangeQuery(lower, upper, false, transform[0]), Occur.SHOULD);
                     }
@@ -756,9 +757,9 @@
                 case QueryConstants.OPERATION_LE_VALUE:      // <=
                 case QueryConstants.OPERATION_LE_GENERAL:      // <=
                     or = new BooleanQuery();
-                    for (int i = 0; i < stringValues.length; i++) {
+                    for (String value : stringValues) {
                         Term lower = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, ""));
-                        Term upper = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, stringValues[i]));
+                        Term upper = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, value));
                         or.add(new RangeQuery(lower, upper, true, transform[0]), Occur.SHOULD);
                     }
                     query = or;
@@ -778,9 +779,9 @@
                 case QueryConstants.OPERATION_LT_VALUE:      // <
                 case QueryConstants.OPERATION_LT_GENERAL:
                     or = new BooleanQuery();
-                    for (int i = 0; i < stringValues.length; i++) {
+                    for (String value : stringValues) {
                         Term lower = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, ""));
-                        Term upper = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, stringValues[i]));
+                        Term upper = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, value));
                         or.add(new RangeQuery(lower, upper, false, transform[0]), Occur.SHOULD);
                     }
                     query = or;
@@ -793,12 +794,13 @@
                     BooleanQuery notQuery = new BooleanQuery();
                     notQuery.add(Util.createMatchAllQuery(field, indexFormatVersion), Occur.SHOULD);
                     // exclude all nodes where 'field' has the term in question
-                    for (int i = 0; i < stringValues.length; i++) {
-                        Term t = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, stringValues[i]));
+                    for (String value : stringValues) {
+                        Term t = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, value));
                         Query q;
                         if (transform[0] == TransformConstants.TRANSFORM_UPPER_CASE) {
                             q = new CaseTermQuery.Upper(t);
-                        } else if (transform[0] == TransformConstants.TRANSFORM_LOWER_CASE) {
+                        } else
+                        if (transform[0] == TransformConstants.TRANSFORM_LOWER_CASE) {
                             q = new CaseTermQuery.Lower(t);
                         } else {
                             q = new JackrabbitTermQuery(t);
@@ -818,15 +820,16 @@
                     //    all values are equal to term in question
                     notQuery = new BooleanQuery();
                     notQuery.add(Util.createMatchAllQuery(field, indexFormatVersion), Occur.SHOULD);
-                    for (int i = 0; i < stringValues.length; i++) {
+                    for (String value : stringValues) {
                         // exclude the nodes that have the term and are single valued
-                        Term t = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, stringValues[i]));
+                        Term t = new Term(FieldNames.PROPERTIES, FieldNames.createNamedValue(field, value));
                         Query svp = new NotQuery(new JackrabbitTermQuery(new Term(FieldNames.MVP, field)));
                         BooleanQuery and = new BooleanQuery();
                         Query q;
                         if (transform[0] == TransformConstants.TRANSFORM_UPPER_CASE) {
                             q = new CaseTermQuery.Upper(t);
-                        } else if (transform[0] == TransformConstants.TRANSFORM_LOWER_CASE) {
+                        } else
+                        if (transform[0] == TransformConstants.TRANSFORM_LOWER_CASE) {
                             q = new CaseTermQuery.Lower(t);
                         } else {
                             q = new JackrabbitTermQuery(t);
@@ -958,8 +961,8 @@
             Object[] predicates = node.acceptOperands(this, data);
             if (predicates.length > 0) {
                 BooleanQuery andQuery = new BooleanQuery();
-                for (int i = 0; i < predicates.length; i++) {
-                    andQuery.add((Query) predicates[i], Occur.MUST);
+                for (Object predicate : predicates) {
+                    andQuery.add((Query) predicate, Occur.MUST);
                 }
                 andQuery.add(context, Occur.MUST);
                 context = andQuery;
@@ -1021,9 +1024,9 @@
      */
     private String[] getStringValues(Name propertyName, String literal) {
         PropertyTypeRegistry.TypeMapping[] types = propRegistry.getPropertyTypes(propertyName);
-        List values = new ArrayList();
-        for (int i = 0; i < types.length; i++) {
-            switch (types[i].type) {
+        List<String> values = new ArrayList<String>();
+        for (PropertyTypeRegistry.TypeMapping type : types) {
+            switch (type.type) {
                 case PropertyType.NAME:
                     // try to translate name
                     try {
@@ -1149,6 +1152,6 @@
             values.add(literal);
             log.debug("Using literal " + literal + " as is.");
         }
-        return (String[]) values.toArray(new String[values.size()]);
+        return values.toArray(new String[values.size()]);
     }
 }
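
All of the relational operators above follow one pattern: the property name and value are encoded into a single PROPERTIES term, and the range bounds are chosen around it, with "" acting as the lowest possible value and "\uFFFF" as a value above any real one. A small sketch of just that bound selection; the namedValue() helper here is a deliberately simplified stand-in for FieldNames.createNamedValue, whose real encoding is Jackrabbit-internal:

    public class RangeBounds {

        // Simplified stand-in for FieldNames.createNamedValue; ':' is used only for illustration.
        static String namedValue(String propertyName, String value) {
            return propertyName + ':' + value;
        }

        public static void main(String[] args) {
            String field = "title";
            String value = "foo";

            // >= value: range [namedValue(field, value) .. namedValue(field, "\uFFFF")], bound inclusive.
            // '\uFFFF' compares above every other UTF-16 code unit, so it works as an upper limit.
            String geLower = namedValue(field, value);
            String geUpper = namedValue(field, "\uFFFF");
            System.out.println(geLower.compareTo(geUpper) < 0); // true

            // <= value: range [namedValue(field, "") .. namedValue(field, value)], bound inclusive.
            String leLower = namedValue(field, "");
            String leUpper = namedValue(field, value);
            System.out.println(leLower.compareTo(leUpper) < 0); // true

            // > and < use the same bounds with the inclusive flag set to false in the diff above.
        }
    }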

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/LuceneQueryFactoryImpl.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/LuceneQueryFactoryImpl.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/LuceneQueryFactoryImpl.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/LuceneQueryFactoryImpl.java Tue Sep  8 16:09:28 2009
@@ -19,7 +19,6 @@
 import java.util.List;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Iterator;
 
 import javax.jcr.nodetype.NodeTypeManager;
 import javax.jcr.nodetype.NodeType;
@@ -126,7 +125,7 @@
      * {@inheritDoc}
      */
     public Query create(SelectorImpl selector) throws RepositoryException {
-        List terms = new ArrayList();
+        List<Term> terms = new ArrayList<Term>();
         String mixinTypesField = npResolver.getJCRName(NameConstants.JCR_MIXINTYPES);
         String primaryTypeField = npResolver.getJCRName(NameConstants.JCR_PRIMARYTYPE);
 
@@ -177,11 +176,11 @@
         }
         Query q;
         if (terms.size() == 1) {
-            q = new JackrabbitTermQuery((Term) terms.get(0));
+            q = new JackrabbitTermQuery(terms.get(0));
         } else {
             BooleanQuery b = new BooleanQuery();
-            for (Iterator it = terms.iterator(); it.hasNext();) {
-                b.add(new JackrabbitTermQuery((Term) it.next()), BooleanClause.Occur.SHOULD);
+            for (Term term : terms) {
+                b.add(new JackrabbitTermQuery(term), BooleanClause.Occur.SHOULD);
             }
             q = b;
         }

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/LuceneQueryHits.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/LuceneQueryHits.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/LuceneQueryHits.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/LuceneQueryHits.java Tue Sep  8 16:09:28 2009
@@ -23,7 +23,6 @@
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.index.IndexReader;
 import org.apache.jackrabbit.core.id.NodeId;
-import org.apache.jackrabbit.uuid.UUID;
 
 /**
  * Wraps a lucene query result and adds a close method that allows to release

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/MatchAllScorer.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/MatchAllScorer.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/MatchAllScorer.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/MatchAllScorer.java Tue Sep  8 16:09:28 2009
@@ -135,15 +135,16 @@
      * @throws IOException if an error occurs while reading from
      *                     the search index.
      */
+    @SuppressWarnings({"unchecked"})
     private void calculateDocFilter() throws IOException {
         PerQueryCache cache = PerQueryCache.getInstance();
-        Map readerCache = (Map) cache.get(MatchAllScorer.class, reader);
+        Map<String, BitSet> readerCache = (Map<String, BitSet>) cache.get(MatchAllScorer.class, reader);
         if (readerCache == null) {
-            readerCache = new HashMap();
+            readerCache = new HashMap<String, BitSet>();
             cache.put(MatchAllScorer.class, reader, readerCache);
         }
         // get BitSet for field
-        docFilter = (BitSet) readerCache.get(field);
+        docFilter = readerCache.get(field);
 
         if (docFilter != null) {
             // use cached BitSet;

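Editor's note: the MatchAllScorer hunk only adds type parameters to the existing per-reader cache of BitSet doc filters. As a rough standalone illustration of that caching idea (a hypothetical stand-in, not the actual PerQueryCache API used above):

    import java.util.BitSet;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.lucene.index.IndexReader;

    /**
     * Hypothetical stand-in for the per-query cache: one BitSet of matching
     * documents per (IndexReader, field name) pair, created lazily.
     */
    class FieldBitSetCache {

        private final Map<IndexReader, Map<String, BitSet>> cache =
                new HashMap<IndexReader, Map<String, BitSet>>();

        BitSet get(IndexReader reader, String field) {
            Map<String, BitSet> readerCache = cache.get(reader);
            if (readerCache == null) {
                readerCache = new HashMap<String, BitSet>();
                cache.put(reader, readerCache);
            }
            // null means the filter has not been calculated yet
            return readerCache.get(field);
        }

        void put(IndexReader reader, String field, BitSet docFilter) {
            Map<String, BitSet> readerCache = cache.get(reader);
            if (readerCache == null) {
                readerCache = new HashMap<String, BitSet>();
                cache.put(reader, readerCache);
            }
            readerCache.put(field, docFilter);
        }
    }
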
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/MultiIndex.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/MultiIndex.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/MultiIndex.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/MultiIndex.java Tue Sep  8 16:09:28 2009
@@ -23,7 +23,6 @@
 import org.apache.jackrabbit.core.state.NoSuchItemStateException;
 import org.apache.jackrabbit.core.state.NodeState;
 import org.apache.jackrabbit.core.state.ChildNodeEntry;
-import org.apache.jackrabbit.uuid.Constants;
 import org.apache.jackrabbit.util.Timer;
 import org.apache.jackrabbit.spi.Path;
 import org.apache.jackrabbit.spi.PathFactory;
@@ -49,6 +48,8 @@
 import java.util.Map;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Calendar;
+import java.text.DateFormat;
 
 /**
  * A <code>MultiIndex</code> consists of a {@link VolatileIndex} and multiple
@@ -91,12 +92,18 @@
     /**
      * Names of active persistent index directories.
      */
-    private final IndexInfos indexNames = new IndexInfos("indexes");
+    private final IndexInfos indexNames;
+
+    /**
+     * The history of the multi index.
+     */
+    private final IndexHistory indexHistory;
 
     /**
      * Names of index directories that can be deleted.
+     * Key = index name (String), Value = time when last in use (Long)
      */
-    private final Set<String> deletable = new HashSet<String>();
+    private final Map<String, Long> deletable = new HashMap<String, Long>();
 
     /**
      * List of open persistent indexes. This list may also contain an open
@@ -118,6 +125,11 @@
     private final DirectoryManager directoryManager;
 
     /**
+     * The redo log factory
+     */
+    private final RedoLogFactory redoLogFactory;
+
+    /**
      * The base directory to store the index.
      */
     private final Directory indexDir;
@@ -183,7 +195,7 @@
     /**
      * The RedoLog of this <code>MultiIndex</code>.
      */
-    private final RedoLog redoLog;
+    private RedoLog redoLog;
 
     /**
      * The indexing queue with pending text extraction jobs.
@@ -225,20 +237,23 @@
      */
     MultiIndex(SearchIndex handler, Set<NodeId> excludedIDs) throws IOException {
         this.directoryManager = handler.getDirectoryManager();
+        this.redoLogFactory = handler.getRedoLogFactory();
         this.indexDir = directoryManager.getDirectory(".");
         this.handler = handler;
         this.cache = new DocNumberCache(handler.getCacheSize());
-        this.redoLog = new RedoLog(indexDir);
         this.excludedIDs = new HashSet<NodeId>(excludedIDs);
         this.nsMappings = handler.getNamespaceMappings();
 
-        if (indexNames.exists(indexDir)) {
-            indexNames.read(indexDir);
-        }
+        indexNames = new IndexInfos(indexDir, "indexes");
+
+        this.indexHistory = new IndexHistory(indexDir,
+                handler.getMaxHistoryAge() * 1000);
 
         // as of 1.5 deletable file is not used anymore
         removeDeletable();
 
+        this.redoLog = redoLogFactory.createRedoLog(this);
+
         // initialize IndexMerger
         merger = new IndexMerger(this, handler.getIndexMergerPoolSize());
         merger.setMaxMergeDocs(handler.getMaxMergeDocs());
@@ -251,8 +266,9 @@
         this.indexingQueue = new IndexingQueue(store);
 
         // open persistent indexes
-        for (int i = 0; i < indexNames.size(); i++) {
-            String name = indexNames.getName(i);
+        for (Iterator it = indexNames.iterator(); it.hasNext(); ) {
+            IndexInfo info = (IndexInfo) it.next();
+            String name = info.getName();
             // only open if it still exists
             // it is possible that indexNames still contains a name for
             // an index that has been deleted, but indexNames has not been
@@ -264,7 +280,8 @@
             }
             PersistentIndex index = new PersistentIndex(name,
                     handler.getTextAnalyzer(), handler.getSimilarity(),
-                    cache, indexingQueue, directoryManager);
+                    cache, indexingQueue, directoryManager,
+                    handler.getMaxHistoryAge());
             index.setMaxFieldLength(handler.getMaxFieldLength());
             index.setUseCompoundFile(handler.getUseCompoundFile());
             index.setTermInfosIndexDivisor(handler.getTermInfosIndexDivisor());
@@ -373,7 +390,8 @@
                 NodeState rootState = (NodeState) stateMgr.getItemState(rootId);
                 count = createIndex(rootState, rootPath, stateMgr, count);
                 executeAndLog(new Commit(getTransactionId()));
-                log.info("Created initial index for {} nodes", new Long(count));
+                log.info("Created initial index for {} nodes", count);
+                releaseMultiReader();
                 scheduleFlushTask();
             } catch (Exception e) {
                 String msg = "Error indexing workspace";
@@ -404,7 +422,12 @@
             throws IOException {
         // make sure a reader is available during long updates
         if (add.size() > handler.getBufferSize()) {
-            getIndexReader().release();
+            try {
+                getIndexReader().release();
+            } catch (IOException e) {
+                // do not fail if an exception is thrown here
+                log.warn("unable to prepare index reader for queries during update", e);
+            }
         }
 
         synchronized (updateMonitor) {
@@ -414,7 +437,6 @@
             long transactionId = nextTransactionId++;
             executeAndLog(new Start(transactionId));
 
-            boolean flush = false;
 
             for (NodeId id : remove) {
                 executeAndLog(new DeleteNode(transactionId, id));
@@ -424,15 +446,10 @@
                 if (document != null) {
                     executeAndLog(new AddNode(transactionId, document));
                     // commit volatile index if needed
-                    flush |= checkVolatileCommit();
+                    checkVolatileCommit();
                 }
             }
             executeAndLog(new Commit(transactionId));
-
-            // flush whole index when volatile index has been commited.
-            if (flush) {
-                flush();
-            }
         } finally {
             synchronized (updateMonitor) {
                 updateInProgress = false;
@@ -484,8 +501,7 @@
             if (num > 0) {
                 redoLog.append(new DeleteNode(getTransactionId(), id));
             }
-            for (int i = 0; i < indexes.size(); i++) {
-                PersistentIndex index = (PersistentIndex) indexes.get(i);
+            for (PersistentIndex index : indexes) {
                 // only remove documents from registered indexes
                 if (indexNames.contains(index.getName())) {
                     int removed = index.removeDocument(idTerm);
@@ -574,9 +590,19 @@
                 indexName = indexNames.newName();
             } while (directoryManager.hasDirectory(indexName));
         }
-        PersistentIndex index = new PersistentIndex(indexName,
-                handler.getTextAnalyzer(), handler.getSimilarity(),
-                cache, indexingQueue, directoryManager);
+        PersistentIndex index;
+        try {
+            index = new PersistentIndex(indexName,
+                    handler.getTextAnalyzer(), handler.getSimilarity(),
+                    cache, indexingQueue, directoryManager,
+                    handler.getMaxHistoryAge());
+        } catch (IOException e) {
+            // do some clean up
+            if (!directoryManager.delete(indexName)) {
+                deletable.put(indexName, Long.MIN_VALUE);
+            }
+            throw e;
+        }
         index.setMaxFieldLength(handler.getMaxFieldLength());
         index.setUseCompoundFile(handler.getUseCompoundFile());
         index.setTermInfosIndexDivisor(handler.getTermInfosIndexDivisor());
@@ -629,7 +655,7 @@
             long time = System.currentTimeMillis();
             index.getReadOnlyIndexReader(true).release();
             time = System.currentTimeMillis() - time;
-            log.debug("hierarchy cache initialized in {} ms", new Long(time));
+            log.debug("hierarchy cache initialized in {} ms", time);
         }
 
         synchronized (this) {
@@ -701,7 +727,7 @@
      * @return an <code>IndexReader</code>.
      * @throws IOException if an error occurs constructing the <code>IndexReader</code>.
      */
-    public CachingMultiIndexReader getIndexReader(boolean initCache) throws IOException {
+    public synchronized CachingMultiIndexReader getIndexReader(boolean initCache) throws IOException {
         synchronized (updateMonitor) {
             if (multiReader != null) {
                 multiReader.acquire();
@@ -721,8 +747,7 @@
             if (multiReader == null) {
                 List<ReadOnlyIndexReader> readerList =
                     new ArrayList<ReadOnlyIndexReader>();
-                for (int i = 0; i < indexes.size(); i++) {
-                    PersistentIndex pIdx = (PersistentIndex) indexes.get(i);
+                for (PersistentIndex pIdx : indexes) {
                     if (indexNames.contains(pIdx.getName())) {
                         readerList.add(pIdx.getReadOnlyIndexReader(initCache));
                     }
@@ -747,6 +772,17 @@
     }
 
     /**
+     * Runs a consistency check on this multi index.
+     *
+     * @return the consistency check.
+     * @throws IOException if an error occurs while running the check.
+     */
+    ConsistencyCheck runConsistencyCheck() throws IOException {
+        return ConsistencyCheck.run(this,
+                handler.getContext().getItemStateManager());
+    }
+
+    /**
      * Closes this <code>MultiIndex</code>.
      */
     void close() {
@@ -772,8 +808,8 @@
                 log.error("Exception while closing search index.", e);
             }
             volatileIndex.close();
-            for (int i = 0; i < indexes.size(); i++) {
-                ((PersistentIndex) indexes.get(i)).close();
+            for (PersistentIndex index : indexes) {
+                index.close();
             }
 
             // close indexing queue
@@ -805,6 +841,20 @@
     }
 
     /**
+     * @return the base directory of the index.
+     */
+    Directory getDirectory() {
+        return indexDir;
+    }
+
+    /**
+     * @return the current generation of the index names.
+     */
+    long getIndexGeneration() {
+        return indexNames.getGeneration();
+    }
+
+    /**
      * Returns a lucene Document for the <code>node</code>.
      *
      * @param node the node to index.
@@ -846,9 +896,9 @@
     }
 
     /**
-     * Removes the <code>index</code> from the list of active sub indexes. The
-     * Index is not acutally deleted right away, but postponed to the transaction
-     * commit.
+     * Removes the <code>index</code> from the list of active sub indexes.
+     * Depending on {@link SearchIndex#getMaxHistoryAge()}, the
+     * index is not deleted right away.
      * <p/>
      * This method does not close the index, but rather expects that the index
      * has already been closed.
@@ -861,7 +911,7 @@
         indexNames.removeName(index.getName());
         synchronized (deletable) {
             log.debug("Moved " + index.getName() + " to deletable");
-            deletable.add(index.getName());
+            deletable.put(index.getName(), System.currentTimeMillis());
         }
     }
 
@@ -873,34 +923,64 @@
      */
     void flush() throws IOException {
         synchronized (this) {
-            // commit volatile index
-            executeAndLog(new Start(Action.INTERNAL_TRANSACTION));
-            commitVolatileIndex();
 
+            // only start transaction when there is something to commit
+            boolean transactionStarted = false;
+
+            if (volatileIndex.getNumDocuments() > 0) {
+                // commit volatile index
+                executeAndLog(new Start(Action.INTERNAL_TRANSACTION));
+                transactionStarted = true;
+                commitVolatileIndex();
+            }
+
+            boolean indexesModified = false;
             // commit persistent indexes
             for (int i = indexes.size() - 1; i >= 0; i--) {
-                PersistentIndex index = (PersistentIndex) indexes.get(i);
+                PersistentIndex index = indexes.get(i);
                 // only commit indexes we own
                 // index merger also places PersistentIndex instances in indexes,
                 // but does not make them public by registering the name in indexNames
                 if (indexNames.contains(index.getName())) {
+                    long gen = index.getCurrentGeneration();
                     index.commit();
+                    if (gen != index.getCurrentGeneration()) {
+                        indexesModified = true;
+                        log.debug("Committed revision {} of index {}",
+                                Long.toString(index.getCurrentGeneration(), Character.MAX_RADIX),
+                                index.getName());
+                    }
                     // check if index still contains documents
                     if (index.getNumDocuments() == 0) {
+                        if (!transactionStarted) {
+                            executeAndLog(new Start(Action.INTERNAL_TRANSACTION));
+                            transactionStarted = true;
+                        }
                         executeAndLog(new DeleteIndex(getTransactionId(), index.getName()));
                     }
                 }
             }
-            executeAndLog(new Commit(getTransactionId()));
 
-            indexNames.write(indexDir);
+            if (transactionStarted) {
+                executeAndLog(new Commit(getTransactionId()));
+            }
+
+            if (transactionStarted || indexesModified || redoLog.hasEntries()) {
+                indexNames.write();
+
+                indexHistory.addIndexInfos(indexNames);
 
-            // reset redo log
-            redoLog.clear();
+                // close redo.log and create a new one based
+                // on the new indexNames generation
+                redoLog.close();
+                redoLog = redoLogFactory.createRedoLog(this);
+            }
 
             lastFlushTime = System.currentTimeMillis();
         }
 
+        indexHistory.pruneOutdated();
+
         // delete obsolete indexes
         attemptDelete();
     }
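
Editor's note: the new debug statement in flush() renders the committed index generation with Long.toString(gen, Character.MAX_RADIX), i.e. in base 36. A minimal round-trip sketch with a made-up generation value:

    public class GenerationFormatExample {

        public static void main(String[] args) {
            long generation = 42L; // example value only
            // base-36 form, as used in the debug message above
            String encoded = Long.toString(generation, Character.MAX_RADIX);
            long decoded = Long.parseLong(encoded, Character.MAX_RADIX);
            System.out.println(encoded + " -> " + decoded); // prints: 16 -> 42
        }
    }
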
@@ -938,12 +1018,29 @@
      */
     private void enqueueUnusedSegments() throws IOException {
         // walk through index segments
-        String[] dirNames = directoryManager.getDirectoryNames();
-        for (int i = 0; i < dirNames.length; i++) {
-            if (dirNames[i].startsWith("_") && !indexNames.contains(dirNames[i])) {
-                deletable.add(dirNames[i]);
+        for (String name : directoryManager.getDirectoryNames()) {
+            if (!name.startsWith("_")) {
+                continue;
+            }
+            long lastUse = indexHistory.getLastUseOf(name);
+            if (lastUse != Long.MAX_VALUE) {
+                if (log.isDebugEnabled()) {
+                    String msg = "Segment " + name + " not in use anymore. ";
+                    if (lastUse != Long.MIN_VALUE) {
+                        Calendar cal = Calendar.getInstance();
+                        DateFormat df = DateFormat.getInstance();
+                        cal.setTimeInMillis(lastUse);
+                        msg += "Unused since: " + df.format(cal.getTime());
+                    } else {
+                        msg += "(orphaned)";
+                    }
+                    log.debug(msg);
+                }
+                deletable.put(name, lastUse);
             }
         }
+        // now prune outdated index infos
+        indexHistory.pruneOutdated();
     }
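
Editor's note: judging by the branches in enqueueUnusedSegments() above, Long.MAX_VALUE marks a segment that is still referenced and Long.MIN_VALUE marks an orphaned one; any other value is an epoch timestamp, which the debug message formats the same way as this standalone sketch:

    import java.text.DateFormat;
    import java.util.Calendar;

    public class LastUseFormatExample {

        public static void main(String[] args) {
            // pretend the segment has been unused for one hour
            long lastUse = System.currentTimeMillis() - 60 * 60 * 1000L;
            Calendar cal = Calendar.getInstance();
            DateFormat df = DateFormat.getInstance();
            cal.setTimeInMillis(lastUse);
            System.out.println("Unused since: " + df.format(cal.getTime()));
        }
    }
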
 
     private void scheduleFlushTask() {
@@ -953,6 +1050,8 @@
 
     /**
      * Resets the volatile index to a new instance.
+     *
+     * @throws IOException if the volatile index cannot be reset.
      */
     private void resetVolatileIndex() throws IOException {
         volatileIndex = new VolatileIndex(handler.getTextAnalyzer(),
@@ -1049,7 +1148,7 @@
      * <code>node</code>.
      *
      * @param node     the current NodeState.
-     * @param path     the path of the current node.
+     * @param path     the path of the current <code>node</code> state.
      * @param stateMgr the shared item state manager.
      * @param count    the number of nodes already indexed.
      * @return the number of nodes indexed so far.
@@ -1071,7 +1170,7 @@
         if (++count % 100 == 0) {
             PathResolver resolver = new DefaultNamePathResolver(
                     handler.getContext().getNamespaceRegistry());
-            log.info("indexing... {} ({})", resolver.getJCRPath(path), new Long(count));
+            log.info("indexing... {} ({})", resolver.getJCRPath(path), count);
         }
         if (count % 10 == 0) {
             checkIndexingQueue(true);
@@ -1095,16 +1194,21 @@
     }
 
     /**
-     * Attempts to delete all files recorded in {@link #deletable}.
+     * Attempts to delete all index directories that have been unused for
+     * longer than {@link SearchIndex#getMaxHistoryAge()}.
      */
     private void attemptDelete() {
         synchronized (deletable) {
-            for (Iterator<String> it = deletable.iterator(); it.hasNext(); ) {
-                String indexName = it.next();
-                if (directoryManager.delete(indexName)) {
-                    it.remove();
-                } else {
-                    log.info("Unable to delete obsolete index: " + indexName);
+            for (Iterator<Map.Entry<String, Long>> it = deletable.entrySet().iterator(); it.hasNext(); ) {
+                Map.Entry<String, Long> entry = it.next();
+                String indexName = entry.getKey();
+                long lastUse = entry.getValue();
+                if (System.currentTimeMillis() - handler.getMaxHistoryAge() * 1000 > lastUse) {
+                    if (directoryManager.delete(indexName)) {
+                        it.remove();
+                    } else {
+                        log.info("Unable to delete obsolete index: " + indexName);
+                    }
                 }
             }
         }
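
Editor's note: attemptDelete() now removes an index directory only once it has been unused for longer than the configured history age. The same cut-off loop, reduced to plain collections (maxHistoryAge in seconds and the map contents are example values; the actual directory deletion is stubbed out):

    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.Map;

    public class AgeBasedCleanupExample {

        public static void main(String[] args) {
            long maxHistoryAge = 300; // seconds, example value

            // index name -> time when it was last in use
            Map<String, Long> deletable = new HashMap<String, Long>();
            deletable.put("_0", System.currentTimeMillis() - 60 * 60 * 1000L); // unused for an hour
            deletable.put("_1", System.currentTimeMillis());                   // just released

            long cutOff = System.currentTimeMillis() - maxHistoryAge * 1000;
            for (Iterator<Map.Entry<String, Long>> it = deletable.entrySet().iterator(); it.hasNext(); ) {
                Map.Entry<String, Long> entry = it.next();
                if (entry.getValue() < cutOff) {
                    // a real implementation would delete the index directory here
                    // and only drop the entry if the deletion succeeded
                    it.remove();
                }
            }
            System.out.println(deletable.keySet()); // prints: [_1]
        }
    }
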
@@ -1190,7 +1294,7 @@
         // now update index with the remaining ones if there are any
         if (!finished.isEmpty()) {
             log.info("updating index with {} nodes from indexing queue.",
-                    new Long(finished.size()));
+                    finished.size());
 
             // remove documents from the queue
             for (NodeId id : finished.keySet()) {
@@ -1477,7 +1581,7 @@
         public void execute(MultiIndex index) throws IOException {
             PersistentIndex idx = index.getOrCreateIndex(indexName);
             if (!index.indexNames.contains(indexName)) {
-                index.indexNames.addName(indexName);
+                index.indexNames.addName(indexName, idx.getCurrentGeneration());
                 // now that the index is in the active list let the merger know about it
                 index.merger.indexAdded(indexName, idx.getNumDocuments());
             }
@@ -1505,10 +1609,9 @@
         /**
          * The maximum length of a AddNode String.
          */
-        private static final int ENTRY_LENGTH = Long.toString(Long.MAX_VALUE).length()
-                + Action.ADD_NODE.length()
-                + Constants.UUID_FORMATTED_LENGTH
-                + 2;
+        private static final int ENTRY_LENGTH =
+            Long.toString(Long.MAX_VALUE).length() + Action.ADD_NODE.length()
+            + new NodeId().toString().length() + 2;
 
         /**
          * The id of the node to add.
@@ -1554,10 +1657,6 @@
          */
         static AddNode fromString(long transactionId, String arguments)
                 throws IllegalArgumentException {
-            // simple length check
-            if (arguments.length() != Constants.UUID_FORMATTED_LENGTH) {
-                throw new IllegalArgumentException("arguments is not a uuid");
-            }
             return new AddNode(transactionId, new NodeId(arguments));
         }
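
Editor's note: the new ENTRY_LENGTH derives the id portion from a formatted NodeId instead of the removed Constants.UUID_FORMATTED_LENGTH. Assuming node ids render in the standard 36-character UUID form, the arithmetic behind the constant can be checked with plain java.util.UUID (the action name below is a made-up stand-in for Action.ADD_NODE):

    import java.util.UUID;

    public class EntryLengthExample {

        public static void main(String[] args) {
            String actionName = "ADD"; // stand-in for Action.ADD_NODE
            int entryLength = Long.toString(Long.MAX_VALUE).length() // 19 digits
                    + actionName.length()                            // 3
                    + UUID.randomUUID().toString().length()          // 36 characters
                    + 2;                                             // separators
            System.out.println(entryLength); // prints: 60
        }
    }
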
 
@@ -1793,10 +1892,9 @@
         /**
          * The maximum length of a DeleteNode String.
          */
-        private static final int ENTRY_LENGTH = Long.toString(Long.MAX_VALUE).length()
-                + Action.DELETE_NODE.length()
-                + Constants.UUID_FORMATTED_LENGTH
-                + 2;
+        private static final int ENTRY_LENGTH =
+            Long.toString(Long.MAX_VALUE).length() + Action.DELETE_NODE.length()
+            + new NodeId().toString().length() + 2;
 
         /**
          * The id of the node to remove.
@@ -1825,10 +1923,6 @@
          *                                  UUID.
          */
         static DeleteNode fromString(long transactionId, String arguments) {
-            // simple length check
-            if (arguments.length() != Constants.UUID_FORMATTED_LENGTH) {
-                throw new IllegalArgumentException("arguments is not a uuid");
-            }
             return new DeleteNode(transactionId, new NodeId(arguments));
         }
 
@@ -1852,7 +1946,7 @@
             if (num == 0) {
                 for (int i = index.indexes.size() - 1; i >= 0; i--) {
                     // only look in registered indexes
-                    PersistentIndex idx = (PersistentIndex) index.indexes.get(i);
+                    PersistentIndex idx = index.indexes.get(i);
                     if (index.indexNames.contains(idx.getName())) {
                         num = idx.removeDocument(idTerm);
                         if (num > 0) {
@@ -1934,6 +2028,8 @@
          * Creates a new VolatileCommit action.
          *
          * @param transactionId the id of the transaction that executes this action.
+         * @param targetIndex   the name of the index where the volatile index
+         *                      will be committed.
          */
         VolatileCommit(long transactionId, String targetIndex) {
             super(transactionId, Action.TYPE_VOLATILE_COMMIT);

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/NamePathResolverImpl.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/NamePathResolverImpl.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/NamePathResolverImpl.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/NamePathResolverImpl.java Tue Sep  8 16:09:28 2009
@@ -16,31 +16,27 @@
  */
 package org.apache.jackrabbit.core.query.lucene;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import javax.jcr.NamespaceException;
+
 import org.apache.jackrabbit.spi.Name;
 import org.apache.jackrabbit.spi.NameFactory;
 import org.apache.jackrabbit.spi.PathFactory;
-import org.apache.jackrabbit.spi.commons.conversion.NameResolver;
+import org.apache.jackrabbit.spi.commons.conversion.DefaultNamePathResolver;
 import org.apache.jackrabbit.spi.commons.conversion.IllegalNameException;
 import org.apache.jackrabbit.spi.commons.conversion.NameParser;
-import org.apache.jackrabbit.spi.commons.conversion.DefaultNamePathResolver;
-import org.apache.jackrabbit.spi.commons.conversion.ParsingPathResolver;
 import org.apache.jackrabbit.spi.commons.conversion.NamePathResolver;
+import org.apache.jackrabbit.spi.commons.conversion.NameResolver;
+import org.apache.jackrabbit.spi.commons.conversion.ParsingPathResolver;
 import org.apache.jackrabbit.spi.commons.conversion.PathResolver;
-import org.apache.jackrabbit.spi.commons.namespace.NamespaceResolver;
 import org.apache.jackrabbit.spi.commons.name.NameFactoryImpl;
 import org.apache.jackrabbit.spi.commons.name.PathFactoryImpl;
-
-import javax.jcr.NamespaceException;
+import org.apache.jackrabbit.spi.commons.namespace.NamespaceResolver;
 
 /**
  * <code>NamePathResolverImpl</code>...
  */
 public class NamePathResolverImpl extends DefaultNamePathResolver {
 
-    private static Logger log = LoggerFactory.getLogger(NamePathResolverImpl.class);
-
     private static final NameFactory NAME_FACTORY = NameFactoryImpl.getInstance();
     private static final PathFactory PATH_FACTORY = PathFactoryImpl.getInstance();
 

Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/NodeIndexer.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/NodeIndexer.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/NodeIndexer.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/NodeIndexer.java Tue Sep  8 16:09:28 2009
@@ -175,7 +175,7 @@
      * @throws RepositoryException if an error occurs while reading property
      *                             values from the <code>ItemStateProvider</code>.
      */
-    protected Document createDoc() throws RepositoryException {
+    public Document createDoc() throws RepositoryException {
         doNotUseInExcerpt.clear();
         Document doc = new Document();
 
@@ -253,7 +253,7 @@
      *
      * @param e the base exception.
      */
-    private void throwRepositoryException(Exception e)
+    protected void throwRepositoryException(Exception e)
             throws RepositoryException {
         String msg = "Error while indexing node: " + node.getNodeId() + " of "
             + "type: " + node.getNodeTypeName();
@@ -267,7 +267,7 @@
      * @param doc  the lucene document.
      * @param name the name of the multi-value property.
      */
-    private void addMVPName(Document doc, Name name) {
+    protected void addMVPName(Document doc, Name name) {
         try {
             String propName = resolver.getJCRName(name);
             doc.add(new Field(FieldNames.MVP, propName, Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS, Field.TermVector.NO));
@@ -283,7 +283,7 @@
      * @param value the internal jackrabbit value.
      * @param name  the name of the property.
      */
-    private void addValue(Document doc, InternalValue value, Name name) throws RepositoryException {
+    protected void addValue(Document doc, InternalValue value, Name name) throws RepositoryException {
         String fieldName = name.getLocalName();
         try {
             fieldName = resolver.getJCRName(name);
@@ -381,7 +381,7 @@
      * @param doc  the document.
      * @param name the name of the property.
      */
-    private void addPropertyName(Document doc, Name name) {
+    protected void addPropertyName(Document doc, Name name) {
         String fieldName = name.getLocalName();
         try {
             fieldName = resolver.getJCRName(name);

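Editor's note: the NodeIndexer hunks only widen visibility, making createDoc() public and the add* helpers protected, so that subclasses can customize how a node is turned into a Lucene Document. A toy illustration of that hook pattern (the classes below are invented for illustration and are not the Jackrabbit API; NodeIndexer's constructor is not shown in this diff, so no attempt is made to extend it here):

    // Toy classes, invented for illustration only.
    class BaseIndexer {

        /** Template method: assembles the "document" from hook methods. */
        public StringBuilder createDoc() {
            StringBuilder doc = new StringBuilder();
            addValue(doc, "title", "Hello");
            addValue(doc, "secret", "hidden");
            return doc;
        }

        /** Protected hook that a subclass may override. */
        protected void addValue(StringBuilder doc, String name, String value) {
            doc.append(name).append('=').append(value).append('\n');
        }
    }

    class FilteringIndexer extends BaseIndexer {

        @Override
        protected void addValue(StringBuilder doc, String name, String value) {
            if (!"secret".equals(name)) { // the subclass decides what gets indexed
                super.addValue(doc, name, value);
            }
        }
    }

    public class HookExample {

        public static void main(String[] args) {
            System.out.print(new FilteringIndexer().createDoc()); // prints: title=Hello
        }
    }
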
Modified: jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/NodeTraversingQueryHits.java
URL: http://svn.apache.org/viewvc/jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/NodeTraversingQueryHits.java?rev=812570&r1=812569&r2=812570&view=diff
==============================================================================
--- jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/NodeTraversingQueryHits.java (original)
+++ jackrabbit/sandbox/JCR-1456/jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/NodeTraversingQueryHits.java Tue Sep  8 16:09:28 2009
@@ -83,7 +83,7 @@
      * Implements a node iterator that traverses a node tree in document
      * order.
      */
-    private class TraversingNodeIterator implements Iterator {
+    private class TraversingNodeIterator implements Iterator<Node> {
 
         /**
          * The current <code>Node</code>, which acts as the starting point for
@@ -100,7 +100,7 @@
          * The chain of iterators which includes the iterators of the children
          * of the current node.
          */
-        private Iterator selfAndChildren;
+        private Iterator<Node> selfAndChildren;
 
         /**
          * Creates a <code>TraversingNodeIterator</code>.
@@ -134,19 +134,19 @@
         /**
          * @inheritDoc
          */
-        public Object next() {
+        public Node next() {
             init();
-            NodeImpl n = (NodeImpl) selfAndChildren.next();
-            return n;
+            return selfAndChildren.next();
         }
 
         /**
          * Initializes the iterator chain once.
          */
+        @SuppressWarnings({"unchecked"})
         private void init() {
             if (selfAndChildren == null) {
-                List allIterators = new ArrayList();
-                Iterator current = Collections.singletonList(currentNode).iterator();
+                List<Iterator<Node>> allIterators = new ArrayList<Iterator<Node>>();
+                Iterator<Node> current = Collections.singletonList(currentNode).iterator();
                 allIterators.add(current);
                 if (maxDepth == 0) {
                     // only current node