Posted to commits@jackrabbit.apache.org by mr...@apache.org on 2005/07/13 13:40:12 UTC

svn commit: r216142 - in /incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene: MultiIndex.java NodeIndexer.java PersistentIndex.java

Author: mreutegg
Date: Wed Jul 13 04:40:11 2005
New Revision: 216142

URL: http://svn.apache.org/viewcvs?rev=216142&view=rev
Log:
JCR-160: Query index not in sync with workspace
- A leftover commit.lock is now also removed on startup; previously only a write.lock was removed.
- An integrity check is run if the search index detects a commit or write lock on startup. This check removes nodes from the index that are no longer available through the ItemStateManager (sketched below).
- Applying the redo log on startup is now more fail-safe: nodes that can no longer be read from the ItemStateManager are skipped with a warning instead of aborting index recovery.
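
In essence, the check walks all live documents of a persistent index, looks up each
document's UUID in the ItemStateManager, and drops entries whose nodes no longer exist
in the workspace. A simplified sketch of that mechanism (the complete version, with
deferred deletes, logging and error handling, is in the PersistentIndex.java diff below):

    // Simplified illustration of the integrity check described above; reader is a
    // Lucene IndexReader on the persistent index, mgr the workspace ItemStateManager.
    void integrityCheck(ItemStateManager mgr, IndexReader reader) throws IOException {
        for (int i = 0; i < reader.maxDoc(); i++) {
            if (reader.isDeleted(i)) {
                continue;
            }
            Document d = reader.document(i);
            NodeId id = new NodeId(d.get(FieldNames.UUID));
            if (!mgr.hasItemState(id)) {
                // node is gone from the workspace -> remove its index entry
                reader.delete(i);
            }
        }
    }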

Modified:
    incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/MultiIndex.java
    incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/NodeIndexer.java
    incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/PersistentIndex.java

Modified: incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/MultiIndex.java
URL: http://svn.apache.org/viewcvs/incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/MultiIndex.java?rev=216142&r1=216141&r2=216142&view=diff
==============================================================================
--- incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/MultiIndex.java (original)
+++ incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/MultiIndex.java Wed Jul 13 04:40:11 2005
@@ -214,9 +214,20 @@
                         deleteNodePersistent(entry.uuid);
                     }
                 }
+                maybeMergeIndexes();
                 log.warn("Redo changes applied.");
                 redoLog.clear();
             }
+
+            // run an integrity check on persistent indexes that had a stale lock on startup
+            for (Iterator it = indexes.iterator(); it.hasNext(); ) {
+                PersistentIndex index = (PersistentIndex) it.next();
+                if (index.getLockEncountered()) {
+                    log.info("Running integrity check on index: " + index.getName());
+                    index.integrityCheck(stateMgr);
+                }
+            }
+
             volatileIndex = new VolatileIndex(handler.getAnalyzer(), redoLog);
             volatileIndex.setUseCompoundFile(false);
 
@@ -445,15 +456,23 @@
     /**
      * Adds a node to the persistent index. This method will <b>not</b> acquire a
      * write lock while writing!
+     * <p/>
+     * If an error occurs while reading from the ItemStateManager, a warning is
+     * logged and the node is ignored.
      *
      * @param node the node to add.
      * @throws IOException         if an error occurs while writing to the
      *                             index.
-     * @throws RepositoryException if any other error occurs
      */
     private void addNodePersistent(NodeState node)
-            throws IOException, RepositoryException {
-        Document doc = handler.createDocument(node, nsMappings);
+            throws IOException {
+        Document doc;
+        try {
+            doc = handler.createDocument(node, nsMappings);
+        } catch (RepositoryException e) {
+            log.warn("RepositoryException: " + e.getMessage());
+            return;
+        }
         // make sure at least one persistent index exists
         if (indexes.size() == 0) {
             try {
@@ -475,7 +494,6 @@
         // add node to last index
         PersistentIndex last = (PersistentIndex) indexes.get(indexes.size() - 1);
         last.addDocument(doc);
-        maybeMergeIndexes();
     }
 
     /**

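The practical effect of the MultiIndex changes above, sketched with hypothetical names
(replayRedoLog, RedoLogEntry.isAdded and getNodeState are illustrations, not part of the
commit): a node recorded in the redo log but no longer readable from the ItemStateManager
is skipped with a warning rather than aborting recovery, and indexes are merged once
after the whole redo log has been applied instead of after every added node.

    // Hypothetical sketch of redo-log replay after this change.
    private void replayRedoLog(Iterator entries) throws IOException {
        while (entries.hasNext()) {
            RedoLogEntry entry = (RedoLogEntry) entries.next();
            if (entry.isAdded()) {
                // addNodePersistent() now logs a warning and returns if the node
                // cannot be read from the ItemStateManager
                addNodePersistent(getNodeState(entry.uuid));
            } else {
                deleteNodePersistent(entry.uuid);
            }
        }
        // merging is deferred until the complete redo log has been applied
        maybeMergeIndexes();
    }
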
Modified: incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/NodeIndexer.java
URL: http://svn.apache.org/viewcvs/incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/NodeIndexer.java?rev=216142&r1=216141&r2=216142&view=diff
==============================================================================
--- incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/NodeIndexer.java (original)
+++ incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/NodeIndexer.java Wed Jul 13 04:40:11 2005
@@ -143,9 +143,9 @@
                 doc.add(new Field(FieldNames.LABEL, name, false, true, false));
             }
         } catch (NoSuchItemStateException e) {
-            throw new RepositoryException("Error while indexing node: " + node.getUUID(), e);
+            throwRepositoryException(e);
         } catch (ItemStateException e) {
-            throw new RepositoryException("Error while indexing node: " + node.getUUID(), e);
+            throwRepositoryException(e);
         } catch (NoPrefixDeclaredException e) {
             // will never happen, because this.mappings will dynamically add
             // unknown uri<->prefix mappings
@@ -166,12 +166,25 @@
                     addMVPName(doc, propState.getName());
                 }
             } catch (NoSuchItemStateException e) {
-                throw new RepositoryException("Error while indexing node: " + node.getUUID(), e);
+                throwRepositoryException(e);
             } catch (ItemStateException e) {
-                throw new RepositoryException("Error while indexing node: " + node.getUUID(), e);
+                throwRepositoryException(e);
             }
         }
         return doc;
+    }
+
+    /**
+     * Wraps the exception <code>e</code> into a <code>RepositoryException</code>
+     * and throws the created exception.
+     *
+     * @param e the base exception.
+     */
+    private void throwRepositoryException(Exception e)
+            throws RepositoryException {
+        String msg = "Error while indexing node: " + node.getUUID() + " of " +
+                "type: " + node.getNodeTypeName();
+        throw new RepositoryException(msg, e);
     }
 
     /**

Modified: incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/PersistentIndex.java
URL: http://svn.apache.org/viewcvs/incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/PersistentIndex.java?rev=216142&r1=216141&r2=216142&view=diff
==============================================================================
--- incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/PersistentIndex.java (original)
+++ incubator/jackrabbit/trunk/core/src/java/org/apache/jackrabbit/core/query/lucene/PersistentIndex.java Wed Jul 13 04:40:11 2005
@@ -18,12 +18,19 @@
 
 import org.apache.jackrabbit.core.fs.FileSystem;
 import org.apache.jackrabbit.core.fs.FileSystemException;
+import org.apache.jackrabbit.core.state.ItemStateManager;
+import org.apache.jackrabbit.core.NodeId;
 import org.apache.log4j.Logger;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.document.Document;
 
 import java.io.IOException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Iterator;
 
 /**
  * Implements a lucene index which is based on a
@@ -35,7 +42,10 @@
     private static final Logger log = Logger.getLogger(PersistentIndex.class);
 
     /** Name of the write lock file */
-    private static final String WRITE_LOCK = "write.lock";
+    private static final String WRITE_LOCK = IndexWriter.WRITE_LOCK_NAME;
+
+    /** Name of the commit lock file */
+    private static final String COMMIT_LOCK = IndexWriter.COMMIT_LOCK_NAME;
 
     /** The underlying filesystem to store the index */
     private final FileSystem fs;
@@ -43,6 +53,9 @@
     /** The name of this persistent index */
     private final String name;
 
+    /** Set to <code>true</code> if this index encountered locks on startup */
+    private boolean lockEncountered = false;
+
     /**
      * Creates a new <code>PersistentIndex</code> based on the file system
      * <code>fs</code>.
@@ -64,6 +77,7 @@
         // check if index is locked, probably from an unclean repository
         // shutdown
         if (fs.exists(WRITE_LOCK)) {
+            lockEncountered = true;
             log.warn("Removing write lock on search index.");
             try {
                 fs.deleteFile(WRITE_LOCK);
@@ -71,6 +85,26 @@
                 log.error("Unable to remove write lock on search index.");
             }
         }
+        if (fs.exists(COMMIT_LOCK)) {
+            lockEncountered = true;
+            log.warn("Removing commit lock on search index.");
+            try {
+                fs.deleteFile(COMMIT_LOCK);
+            } catch (FileSystemException e) {
+                log.error("Unable to remove commit lock on search index.");
+            }
+        }
+    }
+
+    /**
+     * Returns <code>true</code> if this index encountered a lock on the file
+     * system during startup. This indicates an unclean shutdown.
+     *
+     * @return <code>true</code> if this index encountered a lock on startup;
+     *         <code>false</code> otherwise.
+     */
+    boolean getLockEncountered() {
+        return lockEncountered;
     }
 
     /**
@@ -114,7 +148,7 @@
         for (int i = 0; i < reader.maxDoc(); i++) {
             if (!reader.isDeleted(i)) {
                 return true;
             }
         }
         return false;
     }
@@ -125,6 +159,54 @@
      */
     String getName() {
         return name;
+    }
+
+    /**
+     * Checks if the nodes in this index still exist in the ItemStateManager
+     * <code>mgr</code>. Nodes that do not exist in <code>mgr</code> will
+     * be deleted from the index.
+     *
+     * @param mgr the ItemStateManager.
+     */
+    void integrityCheck(ItemStateManager mgr) {
+        // List<Integer> of document numbers to delete
+        List deleted = new ArrayList();
+        IndexReader reader;
+        try {
+            reader = getIndexReader();
+            int maxDoc = reader.maxDoc();
+            for (int i = 0; i < maxDoc; i++) {
+                if (!reader.isDeleted(i)) {
+                    Document d = reader.document(i);
+                    NodeId id = new NodeId(d.get(FieldNames.UUID));
+                    if (!mgr.hasItemState(id)) {
+                        // not known to ItemStateManager
+                        deleted.add(new Integer(i));
+                        log.warn("Node " + id.getUUID() + " does not exist anymore. Will be removed from index.");
+                    }
+                }
+            }
+        } catch (IOException e) {
+            log.error("Unable to read from index: " + e);
+            return;
+        }
+
+        // now delete them
+        for (Iterator it = deleted.iterator(); it.hasNext(); ) {
+            int docNum = ((Integer) it.next()).intValue();
+            try {
+                reader.delete(docNum);
+            } catch (IOException e) {
+                log.error("Unable to delete stale node from index: " + e);
+            }
+        }
+
+        // commit changes on reader
+        try {
+            commit();
+        } catch (IOException e) {
+            log.error("Unable to commit index: " + e);
+        }
     }
 
     /**